| field | value | date |
|---|---|---|
| author | shadchin <shadchin@yandex-team.ru> | 2022-02-10 16:44:39 +0300 |
| committer | Daniil Cherednik <dcherednik@yandex-team.ru> | 2022-02-10 16:44:39 +0300 |
| commit | e9656aae26e0358d5378e5b63dcac5c8dbe0e4d0 (patch) | |
| tree | 64175d5cadab313b3e7039ebaa06c5bc3295e274 /contrib/python/ipython/py3 | |
| parent | 2598ef1d0aee359b4b6d5fdd1758916d5907d04f (diff) | |
| download | ydb-e9656aae26e0358d5378e5b63dcac5c8dbe0e4d0.tar.gz | |
Restoring authorship annotation for <shadchin@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'contrib/python/ipython/py3')
227 files changed, 53084 insertions, 53084 deletions
diff --git a/contrib/python/ipython/py3/.dist-info/METADATA b/contrib/python/ipython/py3/.dist-info/METADATA index 7235c4f5fa4..7073677dcad 100644 --- a/contrib/python/ipython/py3/.dist-info/METADATA +++ b/contrib/python/ipython/py3/.dist-info/METADATA @@ -1,117 +1,117 @@ -Metadata-Version: 2.1 -Name: ipython -Version: 7.31.1 -Summary: IPython: Productive Interactive Computing -Home-page: https://ipython.org -Author: The IPython Development Team -Author-email: ipython-dev@python.org -License: BSD -Project-URL: Documentation, https://ipython.readthedocs.io/ -Project-URL: Funding, https://numfocus.org/ -Project-URL: Source, https://github.com/ipython/ipython -Project-URL: Tracker, https://github.com/ipython/ipython/issues -Keywords: Interactive,Interpreter,Shell,Embedding -Platform: Linux -Platform: Mac OSX -Platform: Windows -Classifier: Framework :: IPython -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: Science/Research -Classifier: License :: OSI Approved :: BSD License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Topic :: System :: Shells -Requires-Python: >=3.7 -License-File: LICENSE -Requires-Dist: setuptools (>=18.5) -Requires-Dist: jedi (>=0.13) -Requires-Dist: decorator -Requires-Dist: pickleshare -Requires-Dist: traitlets (>=4.2) -Requires-Dist: prompt-toolkit (!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0) -Requires-Dist: pygments -Requires-Dist: backcall -Requires-Dist: matplotlib-inline -Requires-Dist: pexpect (>4.3) ; sys_platform != "win32" -Requires-Dist: appnope ; sys_platform == "darwin" -Requires-Dist: colorama ; sys_platform == "win32" -Provides-Extra: all -Requires-Dist: Sphinx (>=1.3) ; extra == 'all' -Requires-Dist: ipykernel ; extra == 'all' -Requires-Dist: ipyparallel ; extra == 'all' -Requires-Dist: ipywidgets ; extra == 'all' -Requires-Dist: nbconvert ; extra == 'all' -Requires-Dist: nbformat ; extra == 'all' -Requires-Dist: nose (>=0.10.1) ; extra == 'all' -Requires-Dist: notebook ; extra == 'all' -Requires-Dist: numpy (>=1.17) ; extra == 'all' -Requires-Dist: pygments ; extra == 'all' -Requires-Dist: qtconsole ; extra == 'all' -Requires-Dist: requests ; extra == 'all' -Requires-Dist: testpath ; extra == 'all' -Provides-Extra: doc -Requires-Dist: Sphinx (>=1.3) ; extra == 'doc' -Provides-Extra: kernel -Requires-Dist: ipykernel ; extra == 'kernel' -Provides-Extra: nbconvert -Requires-Dist: nbconvert ; extra == 'nbconvert' -Provides-Extra: nbformat -Requires-Dist: nbformat ; extra == 'nbformat' -Provides-Extra: notebook -Requires-Dist: notebook ; extra == 'notebook' -Requires-Dist: ipywidgets ; extra == 'notebook' -Provides-Extra: parallel -Requires-Dist: ipyparallel ; extra == 'parallel' -Provides-Extra: qtconsole -Requires-Dist: qtconsole ; extra == 'qtconsole' -Provides-Extra: terminal -Provides-Extra: test -Requires-Dist: nose (>=0.10.1) ; extra == 'test' -Requires-Dist: requests ; extra == 'test' -Requires-Dist: testpath ; extra == 'test' -Requires-Dist: pygments ; extra == 'test' -Requires-Dist: nbformat ; extra == 'test' -Requires-Dist: ipykernel ; extra == 'test' -Requires-Dist: numpy (>=1.17) ; extra == 'test' - - -IPython provides a rich toolkit to help you make the most out of using Python -interactively. Its main components are: - -* A powerful interactive Python shell -* A `Jupyter <https://jupyter.org/>`_ kernel to work with Python code in Jupyter - notebooks and other interactive frontends. 
- -The enhanced interactive Python shells have the following main features: - -* Comprehensive object introspection. - -* Input history, persistent across sessions. - -* Caching of output results during a session with automatically generated - references. - -* Extensible tab completion, with support by default for completion of python - variables and keywords, filenames and function keywords. - -* Extensible system of 'magic' commands for controlling the environment and - performing many tasks related either to IPython or the operating system. - -* A rich configuration system with easy switching between different setups - (simpler than changing $PYTHONSTARTUP environment variables every time). - -* Session logging and reloading. - -* Extensible syntax processing for special purpose situations. - -* Access to the system shell with user-extensible alias system. - -* Easily embeddable in other Python programs and GUIs. - -* Integrated access to the pdb debugger and the Python profiler. - -The latest development version is always available from IPython's `GitHub -site <http://github.com/ipython>`_. - - +Metadata-Version: 2.1 +Name: ipython +Version: 7.31.1 +Summary: IPython: Productive Interactive Computing +Home-page: https://ipython.org +Author: The IPython Development Team +Author-email: ipython-dev@python.org +License: BSD +Project-URL: Documentation, https://ipython.readthedocs.io/ +Project-URL: Funding, https://numfocus.org/ +Project-URL: Source, https://github.com/ipython/ipython +Project-URL: Tracker, https://github.com/ipython/ipython/issues +Keywords: Interactive,Interpreter,Shell,Embedding +Platform: Linux +Platform: Mac OSX +Platform: Windows +Classifier: Framework :: IPython +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Topic :: System :: Shells +Requires-Python: >=3.7 +License-File: LICENSE +Requires-Dist: setuptools (>=18.5) +Requires-Dist: jedi (>=0.13) +Requires-Dist: decorator +Requires-Dist: pickleshare +Requires-Dist: traitlets (>=4.2) +Requires-Dist: prompt-toolkit (!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0) +Requires-Dist: pygments +Requires-Dist: backcall +Requires-Dist: matplotlib-inline +Requires-Dist: pexpect (>4.3) ; sys_platform != "win32" +Requires-Dist: appnope ; sys_platform == "darwin" +Requires-Dist: colorama ; sys_platform == "win32" +Provides-Extra: all +Requires-Dist: Sphinx (>=1.3) ; extra == 'all' +Requires-Dist: ipykernel ; extra == 'all' +Requires-Dist: ipyparallel ; extra == 'all' +Requires-Dist: ipywidgets ; extra == 'all' +Requires-Dist: nbconvert ; extra == 'all' +Requires-Dist: nbformat ; extra == 'all' +Requires-Dist: nose (>=0.10.1) ; extra == 'all' +Requires-Dist: notebook ; extra == 'all' +Requires-Dist: numpy (>=1.17) ; extra == 'all' +Requires-Dist: pygments ; extra == 'all' +Requires-Dist: qtconsole ; extra == 'all' +Requires-Dist: requests ; extra == 'all' +Requires-Dist: testpath ; extra == 'all' +Provides-Extra: doc +Requires-Dist: Sphinx (>=1.3) ; extra == 'doc' +Provides-Extra: kernel +Requires-Dist: ipykernel ; extra == 'kernel' +Provides-Extra: nbconvert +Requires-Dist: nbconvert ; extra == 'nbconvert' +Provides-Extra: nbformat +Requires-Dist: nbformat ; extra == 'nbformat' +Provides-Extra: notebook +Requires-Dist: notebook ; extra == 'notebook' +Requires-Dist: ipywidgets ; extra == 
'notebook' +Provides-Extra: parallel +Requires-Dist: ipyparallel ; extra == 'parallel' +Provides-Extra: qtconsole +Requires-Dist: qtconsole ; extra == 'qtconsole' +Provides-Extra: terminal +Provides-Extra: test +Requires-Dist: nose (>=0.10.1) ; extra == 'test' +Requires-Dist: requests ; extra == 'test' +Requires-Dist: testpath ; extra == 'test' +Requires-Dist: pygments ; extra == 'test' +Requires-Dist: nbformat ; extra == 'test' +Requires-Dist: ipykernel ; extra == 'test' +Requires-Dist: numpy (>=1.17) ; extra == 'test' + + +IPython provides a rich toolkit to help you make the most out of using Python +interactively. Its main components are: + +* A powerful interactive Python shell +* A `Jupyter <https://jupyter.org/>`_ kernel to work with Python code in Jupyter + notebooks and other interactive frontends. + +The enhanced interactive Python shells have the following main features: + +* Comprehensive object introspection. + +* Input history, persistent across sessions. + +* Caching of output results during a session with automatically generated + references. + +* Extensible tab completion, with support by default for completion of python + variables and keywords, filenames and function keywords. + +* Extensible system of 'magic' commands for controlling the environment and + performing many tasks related either to IPython or the operating system. + +* A rich configuration system with easy switching between different setups + (simpler than changing $PYTHONSTARTUP environment variables every time). + +* Session logging and reloading. + +* Extensible syntax processing for special purpose situations. + +* Access to the system shell with user-extensible alias system. + +* Easily embeddable in other Python programs and GUIs. + +* Integrated access to the pdb debugger and the Python profiler. + +The latest development version is always available from IPython's `GitHub +site <http://github.com/ipython>`_. 
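The package description above highlights the interactive shell and easy embedding; both entry points, `IPython.start_ipython()` and `IPython.embed()`, appear in the `IPython/__init__.py` diff further down. A minimal usage sketch, not part of the vendored sources:

```python
# Minimal sketch of the two documented entry points (not from the diff itself).
import IPython

def debug_here():
    x = 42
    # Drop an interactive shell into this scope; `x` is visible, exit to resume.
    IPython.embed()

if __name__ == "__main__":
    # Full IPython startup; argv=[] keeps it from parsing sys.argv.
    IPython.start_ipython(argv=[])
```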
+ + diff --git a/contrib/python/ipython/py3/.dist-info/entry_points.txt b/contrib/python/ipython/py3/.dist-info/entry_points.txt index 5adb0a05b1a..7795cb412ab 100644 --- a/contrib/python/ipython/py3/.dist-info/entry_points.txt +++ b/contrib/python/ipython/py3/.dist-info/entry_points.txt @@ -1,11 +1,11 @@ -[console_scripts] -iptest = IPython.testing.iptestcontroller:main -iptest3 = IPython.testing.iptestcontroller:main -ipython = IPython:start_ipython -ipython3 = IPython:start_ipython - -[pygments.lexers] -ipython = IPython.lib.lexers:IPythonLexer -ipython3 = IPython.lib.lexers:IPython3Lexer -ipythonconsole = IPython.lib.lexers:IPythonConsoleLexer - +[console_scripts] +iptest = IPython.testing.iptestcontroller:main +iptest3 = IPython.testing.iptestcontroller:main +ipython = IPython:start_ipython +ipython3 = IPython:start_ipython + +[pygments.lexers] +ipython = IPython.lib.lexers:IPythonLexer +ipython3 = IPython.lib.lexers:IPython3Lexer +ipythonconsole = IPython.lib.lexers:IPythonConsoleLexer + diff --git a/contrib/python/ipython/py3/.dist-info/top_level.txt b/contrib/python/ipython/py3/.dist-info/top_level.txt index c1dcdea3bbe..7fed997b4c4 100644 --- a/contrib/python/ipython/py3/.dist-info/top_level.txt +++ b/contrib/python/ipython/py3/.dist-info/top_level.txt @@ -1 +1 @@ -IPython +IPython diff --git a/contrib/python/ipython/py3/COPYING.rst b/contrib/python/ipython/py3/COPYING.rst index 5838623854e..e5c79ef38f0 100644 --- a/contrib/python/ipython/py3/COPYING.rst +++ b/contrib/python/ipython/py3/COPYING.rst @@ -1,41 +1,41 @@ -============================= - The IPython licensing terms -============================= - -IPython is licensed under the terms of the Modified BSD License (also known as -New or Revised or 3-Clause BSD). See the LICENSE file. - - -About the IPython Development Team ----------------------------------- - -Fernando Perez began IPython in 2001 based on code from Janko Hauser -<jhauser@zscout.de> and Nathaniel Gray <n8gray@caltech.edu>. Fernando is still -the project lead. - -The IPython Development Team is the set of all contributors to the IPython -project. This includes all of the IPython subprojects. - -The core team that coordinates development on GitHub can be found here: -https://github.com/ipython/. - -Our Copyright Policy --------------------- - -IPython uses a shared copyright model. Each contributor maintains copyright -over their contributions to IPython. But, it is important to note that these -contributions are typically only changes to the repositories. Thus, the IPython -source code, in its entirety is not the copyright of any single person or -institution. Instead, it is the collective copyright of the entire IPython -Development Team. If individual contributors want to maintain a record of what -changes/contributions they have specific copyright on, they should indicate -their copyright in the commit message of the change, when they commit the -change to one of the IPython repositories. - -With this in mind, the following banner should be used in any source code file -to indicate the copyright and license terms: - -:: - - # Copyright (c) IPython Development Team. - # Distributed under the terms of the Modified BSD License. +============================= + The IPython licensing terms +============================= + +IPython is licensed under the terms of the Modified BSD License (also known as +New or Revised or 3-Clause BSD). See the LICENSE file. 
+ + +About the IPython Development Team +---------------------------------- + +Fernando Perez began IPython in 2001 based on code from Janko Hauser +<jhauser@zscout.de> and Nathaniel Gray <n8gray@caltech.edu>. Fernando is still +the project lead. + +The IPython Development Team is the set of all contributors to the IPython +project. This includes all of the IPython subprojects. + +The core team that coordinates development on GitHub can be found here: +https://github.com/ipython/. + +Our Copyright Policy +-------------------- + +IPython uses a shared copyright model. Each contributor maintains copyright +over their contributions to IPython. But, it is important to note that these +contributions are typically only changes to the repositories. Thus, the IPython +source code, in its entirety is not the copyright of any single person or +institution. Instead, it is the collective copyright of the entire IPython +Development Team. If individual contributors want to maintain a record of what +changes/contributions they have specific copyright on, they should indicate +their copyright in the commit message of the change, when they commit the +change to one of the IPython repositories. + +With this in mind, the following banner should be used in any source code file +to indicate the copyright and license terms: + +:: + + # Copyright (c) IPython Development Team. + # Distributed under the terms of the Modified BSD License. diff --git a/contrib/python/ipython/py3/IPython/__init__.py b/contrib/python/ipython/py3/IPython/__init__.py index 9fe4a853d18..c17ec76a602 100644 --- a/contrib/python/ipython/py3/IPython/__init__.py +++ b/contrib/python/ipython/py3/IPython/__init__.py @@ -1,156 +1,156 @@ -# encoding: utf-8 -""" -IPython: tools for interactive and parallel computing in Python. - -https://ipython.org -""" -#----------------------------------------------------------------------------- -# Copyright (c) 2008-2011, IPython Development Team. -# Copyright (c) 2001-2007, Fernando Perez <fernando.perez@colorado.edu> -# Copyright (c) 2001, Janko Hauser <jhauser@zscout.de> -# Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu> -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import os -import sys - -#----------------------------------------------------------------------------- -# Setup everything -#----------------------------------------------------------------------------- - -# Don't forget to also update setup.py when this changes! -if sys.version_info < (3, 6): - raise ImportError( -""" -IPython 7.10+ supports Python 3.6 and above. -When using Python 2.7, please install IPython 5.x LTS Long Term Support version. -Python 3.3 and 3.4 were supported up to IPython 6.x. -Python 3.5 was supported with IPython 7.0 to 7.9. - -See IPython `README.rst` file for more information: - - https://github.com/ipython/ipython/blob/master/README.rst - -""") - -# Make it easy to import extensions - they are always directly on pythonpath. -# Therefore, non-IPython modules can be added to extensions directory. -# This should probably be in ipapp.py. 
-sys.path.append(os.path.join(os.path.dirname(__file__), "extensions")) - -#----------------------------------------------------------------------------- -# Setup the top level names -#----------------------------------------------------------------------------- - -from .core.getipython import get_ipython -from .core import release -from .core.application import Application -from .terminal.embed import embed - -from .core.interactiveshell import InteractiveShell -from .testing import test -from .utils.sysinfo import sys_info -from .utils.frame import extract_module_locals - -# Release data -__author__ = '%s <%s>' % (release.author, release.author_email) -__license__ = release.license -__version__ = release.version -version_info = release.version_info -# list of CVEs that should have been patched in this release. -# this is informational and should not be relied upon. -__patched_cves__ = {"CVE-2022-21699"} - - -def embed_kernel(module=None, local_ns=None, **kwargs): - """Embed and start an IPython kernel in a given scope. - - If you don't want the kernel to initialize the namespace - from the scope of the surrounding function, - and/or you want to load full IPython configuration, - you probably want `IPython.start_kernel()` instead. - - Parameters - ---------- - module : types.ModuleType, optional - The module to load into IPython globals (default: caller) - local_ns : dict, optional - The namespace to load into IPython user namespace (default: caller) - - kwargs : various, optional - Further keyword args are relayed to the IPKernelApp constructor, - allowing configuration of the Kernel. Will only have an effect - on the first embed_kernel call for a given process. - """ - - (caller_module, caller_locals) = extract_module_locals(1) - if module is None: - module = caller_module - if local_ns is None: - local_ns = caller_locals - - # Only import .zmq when we really need it - from ipykernel.embed import embed_kernel as real_embed_kernel - real_embed_kernel(module=module, local_ns=local_ns, **kwargs) - -def start_ipython(argv=None, **kwargs): - """Launch a normal IPython instance (as opposed to embedded) - - `IPython.embed()` puts a shell in a particular calling scope, - such as a function or method for debugging purposes, - which is often not desirable. - - `start_ipython()` does full, regular IPython initialization, - including loading startup files, configuration, etc. - much of which is skipped by `embed()`. - - This is a public API method, and will survive implementation changes. - - Parameters - ---------- - - argv : list or None, optional - If unspecified or None, IPython will parse command-line options from sys.argv. - To prevent any command-line parsing, pass an empty list: `argv=[]`. - user_ns : dict, optional - specify this dictionary to initialize the IPython user namespace with particular values. - kwargs : various, optional - Any other kwargs will be passed to the Application constructor, - such as `config`. - """ - from IPython.terminal.ipapp import launch_new_instance - return launch_new_instance(argv=argv, **kwargs) - -def start_kernel(argv=None, **kwargs): - """Launch a normal IPython kernel instance (as opposed to embedded) - - `IPython.embed_kernel()` puts a shell in a particular calling scope, - such as a function or method for debugging purposes, - which is often not desirable. - - `start_kernel()` does full, regular IPython initialization, - including loading startup files, configuration, etc. - much of which is skipped by `embed()`. 
- - Parameters - ---------- - - argv : list or None, optional - If unspecified or None, IPython will parse command-line options from sys.argv. - To prevent any command-line parsing, pass an empty list: `argv=[]`. - user_ns : dict, optional - specify this dictionary to initialize the IPython user namespace with particular values. - kwargs : various, optional - Any other kwargs will be passed to the Application constructor, - such as `config`. - """ - from IPython.kernel.zmq.kernelapp import launch_new_instance - return launch_new_instance(argv=argv, **kwargs) +# encoding: utf-8 +""" +IPython: tools for interactive and parallel computing in Python. + +https://ipython.org +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2008-2011, IPython Development Team. +# Copyright (c) 2001-2007, Fernando Perez <fernando.perez@colorado.edu> +# Copyright (c) 2001, Janko Hauser <jhauser@zscout.de> +# Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu> +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import os +import sys + +#----------------------------------------------------------------------------- +# Setup everything +#----------------------------------------------------------------------------- + +# Don't forget to also update setup.py when this changes! +if sys.version_info < (3, 6): + raise ImportError( +""" +IPython 7.10+ supports Python 3.6 and above. +When using Python 2.7, please install IPython 5.x LTS Long Term Support version. +Python 3.3 and 3.4 were supported up to IPython 6.x. +Python 3.5 was supported with IPython 7.0 to 7.9. + +See IPython `README.rst` file for more information: + + https://github.com/ipython/ipython/blob/master/README.rst + +""") + +# Make it easy to import extensions - they are always directly on pythonpath. +# Therefore, non-IPython modules can be added to extensions directory. +# This should probably be in ipapp.py. +sys.path.append(os.path.join(os.path.dirname(__file__), "extensions")) + +#----------------------------------------------------------------------------- +# Setup the top level names +#----------------------------------------------------------------------------- + +from .core.getipython import get_ipython +from .core import release +from .core.application import Application +from .terminal.embed import embed + +from .core.interactiveshell import InteractiveShell +from .testing import test +from .utils.sysinfo import sys_info +from .utils.frame import extract_module_locals + +# Release data +__author__ = '%s <%s>' % (release.author, release.author_email) +__license__ = release.license +__version__ = release.version +version_info = release.version_info +# list of CVEs that should have been patched in this release. +# this is informational and should not be relied upon. +__patched_cves__ = {"CVE-2022-21699"} + + +def embed_kernel(module=None, local_ns=None, **kwargs): + """Embed and start an IPython kernel in a given scope. + + If you don't want the kernel to initialize the namespace + from the scope of the surrounding function, + and/or you want to load full IPython configuration, + you probably want `IPython.start_kernel()` instead. 
+ + Parameters + ---------- + module : types.ModuleType, optional + The module to load into IPython globals (default: caller) + local_ns : dict, optional + The namespace to load into IPython user namespace (default: caller) + + kwargs : various, optional + Further keyword args are relayed to the IPKernelApp constructor, + allowing configuration of the Kernel. Will only have an effect + on the first embed_kernel call for a given process. + """ + + (caller_module, caller_locals) = extract_module_locals(1) + if module is None: + module = caller_module + if local_ns is None: + local_ns = caller_locals + + # Only import .zmq when we really need it + from ipykernel.embed import embed_kernel as real_embed_kernel + real_embed_kernel(module=module, local_ns=local_ns, **kwargs) + +def start_ipython(argv=None, **kwargs): + """Launch a normal IPython instance (as opposed to embedded) + + `IPython.embed()` puts a shell in a particular calling scope, + such as a function or method for debugging purposes, + which is often not desirable. + + `start_ipython()` does full, regular IPython initialization, + including loading startup files, configuration, etc. + much of which is skipped by `embed()`. + + This is a public API method, and will survive implementation changes. + + Parameters + ---------- + + argv : list or None, optional + If unspecified or None, IPython will parse command-line options from sys.argv. + To prevent any command-line parsing, pass an empty list: `argv=[]`. + user_ns : dict, optional + specify this dictionary to initialize the IPython user namespace with particular values. + kwargs : various, optional + Any other kwargs will be passed to the Application constructor, + such as `config`. + """ + from IPython.terminal.ipapp import launch_new_instance + return launch_new_instance(argv=argv, **kwargs) + +def start_kernel(argv=None, **kwargs): + """Launch a normal IPython kernel instance (as opposed to embedded) + + `IPython.embed_kernel()` puts a shell in a particular calling scope, + such as a function or method for debugging purposes, + which is often not desirable. + + `start_kernel()` does full, regular IPython initialization, + including loading startup files, configuration, etc. + much of which is skipped by `embed()`. + + Parameters + ---------- + + argv : list or None, optional + If unspecified or None, IPython will parse command-line options from sys.argv. + To prevent any command-line parsing, pass an empty list: `argv=[]`. + user_ns : dict, optional + specify this dictionary to initialize the IPython user namespace with particular values. + kwargs : various, optional + Any other kwargs will be passed to the Application constructor, + such as `config`. + """ + from IPython.kernel.zmq.kernelapp import launch_new_instance + return launch_new_instance(argv=argv, **kwargs) diff --git a/contrib/python/ipython/py3/IPython/__main__.py b/contrib/python/ipython/py3/IPython/__main__.py index 2e142249b53..d5123f33a20 100644 --- a/contrib/python/ipython/py3/IPython/__main__.py +++ b/contrib/python/ipython/py3/IPython/__main__.py @@ -1,14 +1,14 @@ -# encoding: utf-8 -"""Terminal-based IPython entry point. -""" -#----------------------------------------------------------------------------- -# Copyright (c) 2012, IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. 
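The `embed_kernel()` docstring above says the kernel picks up the calling module and locals by default and forwards extra kwargs to the IPKernelApp constructor. A hedged sketch of that usage (requires `ipykernel`, which the function imports lazily):

```python
# Sketch only: embed a kernel inside a running function so its locals are
# inspectable from a connected frontend (e.g. `jupyter console --existing`).
import IPython

def solve(data):
    result = sum(data)
    # Defaults to this function's module and locals, per the docstring above.
    IPython.embed_kernel()
    return result

# solve([1, 2, 3])  # blocks until the embedded kernel is shut down
```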
-#----------------------------------------------------------------------------- - -from IPython import start_ipython - -start_ipython() +# encoding: utf-8 +"""Terminal-based IPython entry point. +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012, IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +from IPython import start_ipython + +start_ipython() diff --git a/contrib/python/ipython/py3/IPython/config.py b/contrib/python/ipython/py3/IPython/config.py index ed60fe4b078..964f46f10ac 100644 --- a/contrib/python/ipython/py3/IPython/config.py +++ b/contrib/python/ipython/py3/IPython/config.py @@ -1,19 +1,19 @@ -""" -Shim to maintain backwards compatibility with old IPython.config imports. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -from warnings import warn - -from .utils.shimmodule import ShimModule, ShimWarning - -warn("The `IPython.config` package has been deprecated since IPython 4.0. " - "You should import from traitlets.config instead.", ShimWarning) - - -# Unconditionally insert the shim into sys.modules so that further import calls -# trigger the custom attribute access above - -sys.modules['IPython.config'] = ShimModule(src='IPython.config', mirror='traitlets.config') +""" +Shim to maintain backwards compatibility with old IPython.config imports. +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import sys +from warnings import warn + +from .utils.shimmodule import ShimModule, ShimWarning + +warn("The `IPython.config` package has been deprecated since IPython 4.0. " + "You should import from traitlets.config instead.", ShimWarning) + + +# Unconditionally insert the shim into sys.modules so that further import calls +# trigger the custom attribute access above + +sys.modules['IPython.config'] = ShimModule(src='IPython.config', mirror='traitlets.config') diff --git a/contrib/python/ipython/py3/IPython/consoleapp.py b/contrib/python/ipython/py3/IPython/consoleapp.py index 2034dd81675..c2bbe1888f5 100644 --- a/contrib/python/ipython/py3/IPython/consoleapp.py +++ b/contrib/python/ipython/py3/IPython/consoleapp.py @@ -1,12 +1,12 @@ -""" -Shim to maintain backwards compatibility with old IPython.consoleapp imports. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -from warnings import warn - -warn("The `IPython.consoleapp` package has been deprecated since IPython 4.0." - "You should import from jupyter_client.consoleapp instead.", stacklevel=2) - -from jupyter_client.consoleapp import * +""" +Shim to maintain backwards compatibility with old IPython.consoleapp imports. +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +from warnings import warn + +warn("The `IPython.consoleapp` package has been deprecated since IPython 4.0." 
+ "You should import from jupyter_client.consoleapp instead.", stacklevel=2) + +from jupyter_client.consoleapp import * diff --git a/contrib/python/ipython/py3/IPython/core/alias.py b/contrib/python/ipython/py3/IPython/core/alias.py index c5aa00129d5..2ad990231a0 100644 --- a/contrib/python/ipython/py3/IPython/core/alias.py +++ b/contrib/python/ipython/py3/IPython/core/alias.py @@ -1,258 +1,258 @@ -# encoding: utf-8 -""" -System command aliases. - -Authors: - -* Fernando Perez -* Brian Granger -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import os -import re -import sys - -from traitlets.config.configurable import Configurable -from .error import UsageError - -from traitlets import List, Instance -from logging import error - -#----------------------------------------------------------------------------- -# Utilities -#----------------------------------------------------------------------------- - -# This is used as the pattern for calls to split_user_input. -shell_line_split = re.compile(r'^(\s*)()(\S+)(.*$)') - -def default_aliases(): - """Return list of shell aliases to auto-define. - """ - # Note: the aliases defined here should be safe to use on a kernel - # regardless of what frontend it is attached to. Frontends that use a - # kernel in-process can define additional aliases that will only work in - # their case. For example, things like 'less' or 'clear' that manipulate - # the terminal should NOT be declared here, as they will only work if the - # kernel is running inside a true terminal, and not over the network. - - if os.name == 'posix': - default_aliases = [('mkdir', 'mkdir'), ('rmdir', 'rmdir'), - ('mv', 'mv'), ('rm', 'rm'), ('cp', 'cp'), - ('cat', 'cat'), - ] - # Useful set of ls aliases. The GNU and BSD options are a little - # different, so we make aliases that provide as similar as possible - # behavior in ipython, by passing the right flags for each platform - if sys.platform.startswith('linux'): - ls_aliases = [('ls', 'ls -F --color'), - # long ls - ('ll', 'ls -F -o --color'), - # ls normal files only - ('lf', 'ls -F -o --color %l | grep ^-'), - # ls symbolic links - ('lk', 'ls -F -o --color %l | grep ^l'), - # directories or links to directories, - ('ldir', 'ls -F -o --color %l | grep /$'), - # things which are executable - ('lx', 'ls -F -o --color %l | grep ^-..x'), - ] - elif sys.platform.startswith('openbsd') or sys.platform.startswith('netbsd'): - # OpenBSD, NetBSD. The ls implementation on these platforms do not support - # the -G switch and lack the ability to use colorized output. - ls_aliases = [('ls', 'ls -F'), - # long ls - ('ll', 'ls -F -l'), - # ls normal files only - ('lf', 'ls -F -l %l | grep ^-'), - # ls symbolic links - ('lk', 'ls -F -l %l | grep ^l'), - # directories or links to directories, - ('ldir', 'ls -F -l %l | grep /$'), - # things which are executable - ('lx', 'ls -F -l %l | grep ^-..x'), - ] - else: - # BSD, OSX, etc. 
- ls_aliases = [('ls', 'ls -F -G'), - # long ls - ('ll', 'ls -F -l -G'), - # ls normal files only - ('lf', 'ls -F -l -G %l | grep ^-'), - # ls symbolic links - ('lk', 'ls -F -l -G %l | grep ^l'), - # directories or links to directories, - ('ldir', 'ls -F -G -l %l | grep /$'), - # things which are executable - ('lx', 'ls -F -l -G %l | grep ^-..x'), - ] - default_aliases = default_aliases + ls_aliases - elif os.name in ['nt', 'dos']: - default_aliases = [('ls', 'dir /on'), - ('ddir', 'dir /ad /on'), ('ldir', 'dir /ad /on'), - ('mkdir', 'mkdir'), ('rmdir', 'rmdir'), - ('echo', 'echo'), ('ren', 'ren'), ('copy', 'copy'), - ] - else: - default_aliases = [] - - return default_aliases - - -class AliasError(Exception): - pass - - -class InvalidAliasError(AliasError): - pass - -class Alias(object): - """Callable object storing the details of one alias. - - Instances are registered as magic functions to allow use of aliases. - """ - - # Prepare blacklist - blacklist = {'cd','popd','pushd','dhist','alias','unalias'} - - def __init__(self, shell, name, cmd): - self.shell = shell - self.name = name - self.cmd = cmd - self.__doc__ = "Alias for `!{}`".format(cmd) - self.nargs = self.validate() - - def validate(self): - """Validate the alias, and return the number of arguments.""" - if self.name in self.blacklist: - raise InvalidAliasError("The name %s can't be aliased " - "because it is a keyword or builtin." % self.name) - try: - caller = self.shell.magics_manager.magics['line'][self.name] - except KeyError: - pass - else: - if not isinstance(caller, Alias): - raise InvalidAliasError("The name %s can't be aliased " - "because it is another magic command." % self.name) - - if not (isinstance(self.cmd, str)): - raise InvalidAliasError("An alias command must be a string, " - "got: %r" % self.cmd) - - nargs = self.cmd.count('%s') - self.cmd.count('%%s') - - if (nargs > 0) and (self.cmd.find('%l') >= 0): - raise InvalidAliasError('The %s and %l specifiers are mutually ' - 'exclusive in alias definitions.') - - return nargs - - def __repr__(self): - return "<alias {} for {!r}>".format(self.name, self.cmd) - - def __call__(self, rest=''): - cmd = self.cmd - nargs = self.nargs - # Expand the %l special to be the user's input line - if cmd.find('%l') >= 0: - cmd = cmd.replace('%l', rest) - rest = '' - - if nargs==0: - if cmd.find('%%s') >= 1: - cmd = cmd.replace('%%s', '%s') - # Simple, argument-less aliases - cmd = '%s %s' % (cmd, rest) - else: - # Handle aliases with positional arguments - args = rest.split(None, nargs) - if len(args) < nargs: - raise UsageError('Alias <%s> requires %s arguments, %s given.' 
% - (self.name, nargs, len(args))) - cmd = '%s %s' % (cmd % tuple(args[:nargs]),' '.join(args[nargs:])) - - self.shell.system(cmd) - -#----------------------------------------------------------------------------- -# Main AliasManager class -#----------------------------------------------------------------------------- - -class AliasManager(Configurable): - - default_aliases = List(default_aliases()).tag(config=True) - user_aliases = List(default_value=[]).tag(config=True) - shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) - - def __init__(self, shell=None, **kwargs): - super(AliasManager, self).__init__(shell=shell, **kwargs) - # For convenient access - self.linemagics = self.shell.magics_manager.magics['line'] - self.init_aliases() - - def init_aliases(self): - # Load default & user aliases - for name, cmd in self.default_aliases + self.user_aliases: - if cmd.startswith('ls ') and self.shell.colors == 'NoColor': - cmd = cmd.replace(' --color', '') - self.soft_define_alias(name, cmd) - - @property - def aliases(self): - return [(n, func.cmd) for (n, func) in self.linemagics.items() - if isinstance(func, Alias)] - - def soft_define_alias(self, name, cmd): - """Define an alias, but don't raise on an AliasError.""" - try: - self.define_alias(name, cmd) - except AliasError as e: - error("Invalid alias: %s" % e) - - def define_alias(self, name, cmd): - """Define a new alias after validating it. - - This will raise an :exc:`AliasError` if there are validation - problems. - """ - caller = Alias(shell=self.shell, name=name, cmd=cmd) - self.shell.magics_manager.register_function(caller, magic_kind='line', - magic_name=name) - - def get_alias(self, name): - """Return an alias, or None if no alias by that name exists.""" - aname = self.linemagics.get(name, None) - return aname if isinstance(aname, Alias) else None - - def is_alias(self, name): - """Return whether or not a given name has been defined as an alias""" - return self.get_alias(name) is not None - - def undefine_alias(self, name): - if self.is_alias(name): - del self.linemagics[name] - else: - raise ValueError('%s is not an alias' % name) - - def clear_aliases(self): - for name, cmd in self.aliases: - self.undefine_alias(name) - - def retrieve_alias(self, name): - """Retrieve the command to which an alias expands.""" - caller = self.get_alias(name) - if caller: - return caller.cmd - else: - raise ValueError('%s is not an alias' % name) +# encoding: utf-8 +""" +System command aliases. + +Authors: + +* Fernando Perez +* Brian Granger +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import os +import re +import sys + +from traitlets.config.configurable import Configurable +from .error import UsageError + +from traitlets import List, Instance +from logging import error + +#----------------------------------------------------------------------------- +# Utilities +#----------------------------------------------------------------------------- + +# This is used as the pattern for calls to split_user_input. 
+shell_line_split = re.compile(r'^(\s*)()(\S+)(.*$)') + +def default_aliases(): + """Return list of shell aliases to auto-define. + """ + # Note: the aliases defined here should be safe to use on a kernel + # regardless of what frontend it is attached to. Frontends that use a + # kernel in-process can define additional aliases that will only work in + # their case. For example, things like 'less' or 'clear' that manipulate + # the terminal should NOT be declared here, as they will only work if the + # kernel is running inside a true terminal, and not over the network. + + if os.name == 'posix': + default_aliases = [('mkdir', 'mkdir'), ('rmdir', 'rmdir'), + ('mv', 'mv'), ('rm', 'rm'), ('cp', 'cp'), + ('cat', 'cat'), + ] + # Useful set of ls aliases. The GNU and BSD options are a little + # different, so we make aliases that provide as similar as possible + # behavior in ipython, by passing the right flags for each platform + if sys.platform.startswith('linux'): + ls_aliases = [('ls', 'ls -F --color'), + # long ls + ('ll', 'ls -F -o --color'), + # ls normal files only + ('lf', 'ls -F -o --color %l | grep ^-'), + # ls symbolic links + ('lk', 'ls -F -o --color %l | grep ^l'), + # directories or links to directories, + ('ldir', 'ls -F -o --color %l | grep /$'), + # things which are executable + ('lx', 'ls -F -o --color %l | grep ^-..x'), + ] + elif sys.platform.startswith('openbsd') or sys.platform.startswith('netbsd'): + # OpenBSD, NetBSD. The ls implementation on these platforms do not support + # the -G switch and lack the ability to use colorized output. + ls_aliases = [('ls', 'ls -F'), + # long ls + ('ll', 'ls -F -l'), + # ls normal files only + ('lf', 'ls -F -l %l | grep ^-'), + # ls symbolic links + ('lk', 'ls -F -l %l | grep ^l'), + # directories or links to directories, + ('ldir', 'ls -F -l %l | grep /$'), + # things which are executable + ('lx', 'ls -F -l %l | grep ^-..x'), + ] + else: + # BSD, OSX, etc. + ls_aliases = [('ls', 'ls -F -G'), + # long ls + ('ll', 'ls -F -l -G'), + # ls normal files only + ('lf', 'ls -F -l -G %l | grep ^-'), + # ls symbolic links + ('lk', 'ls -F -l -G %l | grep ^l'), + # directories or links to directories, + ('ldir', 'ls -F -G -l %l | grep /$'), + # things which are executable + ('lx', 'ls -F -l -G %l | grep ^-..x'), + ] + default_aliases = default_aliases + ls_aliases + elif os.name in ['nt', 'dos']: + default_aliases = [('ls', 'dir /on'), + ('ddir', 'dir /ad /on'), ('ldir', 'dir /ad /on'), + ('mkdir', 'mkdir'), ('rmdir', 'rmdir'), + ('echo', 'echo'), ('ren', 'ren'), ('copy', 'copy'), + ] + else: + default_aliases = [] + + return default_aliases + + +class AliasError(Exception): + pass + + +class InvalidAliasError(AliasError): + pass + +class Alias(object): + """Callable object storing the details of one alias. + + Instances are registered as magic functions to allow use of aliases. + """ + + # Prepare blacklist + blacklist = {'cd','popd','pushd','dhist','alias','unalias'} + + def __init__(self, shell, name, cmd): + self.shell = shell + self.name = name + self.cmd = cmd + self.__doc__ = "Alias for `!{}`".format(cmd) + self.nargs = self.validate() + + def validate(self): + """Validate the alias, and return the number of arguments.""" + if self.name in self.blacklist: + raise InvalidAliasError("The name %s can't be aliased " + "because it is a keyword or builtin." 
% self.name) + try: + caller = self.shell.magics_manager.magics['line'][self.name] + except KeyError: + pass + else: + if not isinstance(caller, Alias): + raise InvalidAliasError("The name %s can't be aliased " + "because it is another magic command." % self.name) + + if not (isinstance(self.cmd, str)): + raise InvalidAliasError("An alias command must be a string, " + "got: %r" % self.cmd) + + nargs = self.cmd.count('%s') - self.cmd.count('%%s') + + if (nargs > 0) and (self.cmd.find('%l') >= 0): + raise InvalidAliasError('The %s and %l specifiers are mutually ' + 'exclusive in alias definitions.') + + return nargs + + def __repr__(self): + return "<alias {} for {!r}>".format(self.name, self.cmd) + + def __call__(self, rest=''): + cmd = self.cmd + nargs = self.nargs + # Expand the %l special to be the user's input line + if cmd.find('%l') >= 0: + cmd = cmd.replace('%l', rest) + rest = '' + + if nargs==0: + if cmd.find('%%s') >= 1: + cmd = cmd.replace('%%s', '%s') + # Simple, argument-less aliases + cmd = '%s %s' % (cmd, rest) + else: + # Handle aliases with positional arguments + args = rest.split(None, nargs) + if len(args) < nargs: + raise UsageError('Alias <%s> requires %s arguments, %s given.' % + (self.name, nargs, len(args))) + cmd = '%s %s' % (cmd % tuple(args[:nargs]),' '.join(args[nargs:])) + + self.shell.system(cmd) + +#----------------------------------------------------------------------------- +# Main AliasManager class +#----------------------------------------------------------------------------- + +class AliasManager(Configurable): + + default_aliases = List(default_aliases()).tag(config=True) + user_aliases = List(default_value=[]).tag(config=True) + shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) + + def __init__(self, shell=None, **kwargs): + super(AliasManager, self).__init__(shell=shell, **kwargs) + # For convenient access + self.linemagics = self.shell.magics_manager.magics['line'] + self.init_aliases() + + def init_aliases(self): + # Load default & user aliases + for name, cmd in self.default_aliases + self.user_aliases: + if cmd.startswith('ls ') and self.shell.colors == 'NoColor': + cmd = cmd.replace(' --color', '') + self.soft_define_alias(name, cmd) + + @property + def aliases(self): + return [(n, func.cmd) for (n, func) in self.linemagics.items() + if isinstance(func, Alias)] + + def soft_define_alias(self, name, cmd): + """Define an alias, but don't raise on an AliasError.""" + try: + self.define_alias(name, cmd) + except AliasError as e: + error("Invalid alias: %s" % e) + + def define_alias(self, name, cmd): + """Define a new alias after validating it. + + This will raise an :exc:`AliasError` if there are validation + problems. 
+ """ + caller = Alias(shell=self.shell, name=name, cmd=cmd) + self.shell.magics_manager.register_function(caller, magic_kind='line', + magic_name=name) + + def get_alias(self, name): + """Return an alias, or None if no alias by that name exists.""" + aname = self.linemagics.get(name, None) + return aname if isinstance(aname, Alias) else None + + def is_alias(self, name): + """Return whether or not a given name has been defined as an alias""" + return self.get_alias(name) is not None + + def undefine_alias(self, name): + if self.is_alias(name): + del self.linemagics[name] + else: + raise ValueError('%s is not an alias' % name) + + def clear_aliases(self): + for name, cmd in self.aliases: + self.undefine_alias(name) + + def retrieve_alias(self, name): + """Retrieve the command to which an alias expands.""" + caller = self.get_alias(name) + if caller: + return caller.cmd + else: + raise ValueError('%s is not an alias' % name) diff --git a/contrib/python/ipython/py3/IPython/core/application.py b/contrib/python/ipython/py3/IPython/core/application.py index cad86198035..b319888b59b 100644 --- a/contrib/python/ipython/py3/IPython/core/application.py +++ b/contrib/python/ipython/py3/IPython/core/application.py @@ -1,462 +1,462 @@ -# encoding: utf-8 -""" -An application for IPython. - -All top-level applications should use the classes in this module for -handling configuration and creating configurables. - -The job of an :class:`Application` is to create the master configuration -object and then create the configurable objects, passing the config to them. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import atexit -from copy import deepcopy -import glob -import logging -import os -import shutil -import sys - -from traitlets.config.application import Application, catch_config_error -from traitlets.config.loader import ConfigFileNotFound, PyFileConfigLoader -from IPython.core import release, crashhandler -from IPython.core.profiledir import ProfileDir, ProfileDirError -from IPython.paths import get_ipython_dir, get_ipython_package_dir -from IPython.utils.path import ensure_dir_exists -from traitlets import ( - List, Unicode, Type, Bool, Set, Instance, Undefined, - default, observe, -) - -if os.name == 'nt': - programdata = os.environ.get('PROGRAMDATA', None) - if programdata: - SYSTEM_CONFIG_DIRS = [os.path.join(programdata, 'ipython')] - else: # PROGRAMDATA is not defined by default on XP. 
- SYSTEM_CONFIG_DIRS = [] -else: - SYSTEM_CONFIG_DIRS = [ - "/usr/local/etc/ipython", - "/etc/ipython", - ] - - -ENV_CONFIG_DIRS = [] -_env_config_dir = os.path.join(sys.prefix, 'etc', 'ipython') -if _env_config_dir not in SYSTEM_CONFIG_DIRS: - # only add ENV_CONFIG if sys.prefix is not already included - ENV_CONFIG_DIRS.append(_env_config_dir) - - -_envvar = os.environ.get('IPYTHON_SUPPRESS_CONFIG_ERRORS') -if _envvar in {None, ''}: - IPYTHON_SUPPRESS_CONFIG_ERRORS = None -else: - if _envvar.lower() in {'1','true'}: - IPYTHON_SUPPRESS_CONFIG_ERRORS = True - elif _envvar.lower() in {'0','false'} : - IPYTHON_SUPPRESS_CONFIG_ERRORS = False - else: - sys.exit("Unsupported value for environment variable: 'IPYTHON_SUPPRESS_CONFIG_ERRORS' is set to '%s' which is none of {'0', '1', 'false', 'true', ''}."% _envvar ) - -# aliases and flags - -base_aliases = { - 'profile-dir' : 'ProfileDir.location', - 'profile' : 'BaseIPythonApplication.profile', - 'ipython-dir' : 'BaseIPythonApplication.ipython_dir', - 'log-level' : 'Application.log_level', - 'config' : 'BaseIPythonApplication.extra_config_file', -} - -base_flags = dict( - debug = ({'Application' : {'log_level' : logging.DEBUG}}, - "set log level to logging.DEBUG (maximize logging output)"), - quiet = ({'Application' : {'log_level' : logging.CRITICAL}}, - "set log level to logging.CRITICAL (minimize logging output)"), - init = ({'BaseIPythonApplication' : { - 'copy_config_files' : True, - 'auto_create' : True} - }, """Initialize profile with default config files. This is equivalent - to running `ipython profile create <profile>` prior to startup. - """) -) - -class ProfileAwareConfigLoader(PyFileConfigLoader): - """A Python file config loader that is aware of IPython profiles.""" - def load_subconfig(self, fname, path=None, profile=None): - if profile is not None: - try: - profile_dir = ProfileDir.find_profile_dir_by_name( - get_ipython_dir(), - profile, - ) - except ProfileDirError: - return - path = profile_dir.location - return super(ProfileAwareConfigLoader, self).load_subconfig(fname, path=path) - -class BaseIPythonApplication(Application): - - name = u'ipython' - description = Unicode(u'IPython: an enhanced interactive Python shell.') - version = Unicode(release.version) - - aliases = base_aliases - flags = base_flags - classes = List([ProfileDir]) - - # enable `load_subconfig('cfg.py', profile='name')` - python_config_loader_class = ProfileAwareConfigLoader - - # Track whether the config_file has changed, - # because some logic happens only if we aren't using the default. - config_file_specified = Set() - - config_file_name = Unicode() - @default('config_file_name') - def _config_file_name_default(self): - return self.name.replace('-','_') + u'_config.py' - @observe('config_file_name') - def _config_file_name_changed(self, change): - if change['new'] != change['old']: - self.config_file_specified.add(change['new']) - - # The directory that contains IPython's builtin profiles. - builtin_profile_dir = Unicode( - os.path.join(get_ipython_package_dir(), u'config', u'profile', u'default') - ) - - config_file_paths = List(Unicode()) - @default('config_file_paths') - def _config_file_paths_default(self): - return [] - - extra_config_file = Unicode( - help="""Path to an extra config file to load. - - If specified, load this config file in addition to any other IPython config. 
- """).tag(config=True) - @observe('extra_config_file') - def _extra_config_file_changed(self, change): - old = change['old'] - new = change['new'] - try: - self.config_files.remove(old) - except ValueError: - pass - self.config_file_specified.add(new) - self.config_files.append(new) - - profile = Unicode(u'default', - help="""The IPython profile to use.""" - ).tag(config=True) - - @observe('profile') - def _profile_changed(self, change): - self.builtin_profile_dir = os.path.join( - get_ipython_package_dir(), u'config', u'profile', change['new'] - ) - - ipython_dir = Unicode( - help=""" - The name of the IPython directory. This directory is used for logging - configuration (through profiles), history storage, etc. The default - is usually $HOME/.ipython. This option can also be specified through - the environment variable IPYTHONDIR. - """ - ).tag(config=True) - @default('ipython_dir') - def _ipython_dir_default(self): - d = get_ipython_dir() - self._ipython_dir_changed({ - 'name': 'ipython_dir', - 'old': d, - 'new': d, - }) - return d - - _in_init_profile_dir = False - profile_dir = Instance(ProfileDir, allow_none=True) - @default('profile_dir') - def _profile_dir_default(self): - # avoid recursion - if self._in_init_profile_dir: - return - # profile_dir requested early, force initialization - self.init_profile_dir() - return self.profile_dir - - overwrite = Bool(False, - help="""Whether to overwrite existing config files when copying""" - ).tag(config=True) - auto_create = Bool(False, - help="""Whether to create profile dir if it doesn't exist""" - ).tag(config=True) - - config_files = List(Unicode()) - @default('config_files') - def _config_files_default(self): - return [self.config_file_name] - - copy_config_files = Bool(False, - help="""Whether to install the default config files into the profile dir. - If a new profile is being created, and IPython contains config files for that - profile, then they will be staged into the new directory. Otherwise, - default config files will be automatically generated. - """).tag(config=True) - - verbose_crash = Bool(False, - help="""Create a massive crash report when IPython encounters what may be an - internal error. The default is to append a short message to the - usual traceback""").tag(config=True) - - # The class to use as the crash handler. 
- crash_handler_class = Type(crashhandler.CrashHandler) - - @catch_config_error - def __init__(self, **kwargs): - super(BaseIPythonApplication, self).__init__(**kwargs) - # ensure current working directory exists - try: - os.getcwd() - except: - # exit if cwd doesn't exist - self.log.error("Current working directory doesn't exist.") - self.exit(1) - - #------------------------------------------------------------------------- - # Various stages of Application creation - #------------------------------------------------------------------------- - - deprecated_subcommands = {} - - def initialize_subcommand(self, subc, argv=None): - if subc in self.deprecated_subcommands: - self.log.warning("Subcommand `ipython {sub}` is deprecated and will be removed " - "in future versions.".format(sub=subc)) - self.log.warning("You likely want to use `jupyter {sub}` in the " - "future".format(sub=subc)) - return super(BaseIPythonApplication, self).initialize_subcommand(subc, argv) - - def init_crash_handler(self): - """Create a crash handler, typically setting sys.excepthook to it.""" - self.crash_handler = self.crash_handler_class(self) - sys.excepthook = self.excepthook - def unset_crashhandler(): - sys.excepthook = sys.__excepthook__ - atexit.register(unset_crashhandler) - - def excepthook(self, etype, evalue, tb): - """this is sys.excepthook after init_crashhandler - - set self.verbose_crash=True to use our full crashhandler, instead of - a regular traceback with a short message (crash_handler_lite) - """ - - if self.verbose_crash: - return self.crash_handler(etype, evalue, tb) - else: - return crashhandler.crash_handler_lite(etype, evalue, tb) - - @observe('ipython_dir') - def _ipython_dir_changed(self, change): - old = change['old'] - new = change['new'] - if old is not Undefined: - str_old = os.path.abspath(old) - if str_old in sys.path: - sys.path.remove(str_old) - str_path = os.path.abspath(new) - sys.path.append(str_path) - ensure_dir_exists(new) - readme = os.path.join(new, 'README') - readme_src = os.path.join(get_ipython_package_dir(), u'config', u'profile', 'README') - if not os.path.exists(readme) and os.path.exists(readme_src): - shutil.copy(readme_src, readme) - for d in ('extensions', 'nbextensions'): - path = os.path.join(new, d) - try: - ensure_dir_exists(path) - except OSError as e: - # this will not be EEXIST - self.log.error("couldn't create path %s: %s", path, e) - self.log.debug("IPYTHONDIR set to: %s" % new) - - def load_config_file(self, suppress_errors=IPYTHON_SUPPRESS_CONFIG_ERRORS): - """Load the config file. - - By default, errors in loading config are handled, and a warning - printed on screen. For testing, the suppress_errors option is set - to False, so errors will make tests fail. - - `suppress_errors` default value is to be `None` in which case the - behavior default to the one of `traitlets.Application`. - - The default value can be set : - - to `False` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '0', or 'false' (case insensitive). - - to `True` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '1' or 'true' (case insensitive). - - to `None` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '' (empty string) or leaving it unset. - - Any other value are invalid, and will make IPython exit with a non-zero return code. 
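The docstring above, together with the module-level parsing earlier in `application.py`, gives `IPYTHON_SUPPRESS_CONFIG_ERRORS` three states. A small sketch that mirrors that parsing (the helper name is illustrative, not from the diff):

```python
import os

def suppress_config_errors_from_env():
    """Mirror of the module-level parsing shown above (illustrative helper)."""
    value = os.environ.get('IPYTHON_SUPPRESS_CONFIG_ERRORS')
    if value in {None, ''}:
        return None                  # defer to traitlets.Application behaviour
    if value.lower() in {'1', 'true'}:
        return True                  # suppress config-file errors
    if value.lower() in {'0', 'false'}:
        return False                 # raise config-file errors
    raise SystemExit(
        f"Unsupported value {value!r} for IPYTHON_SUPPRESS_CONFIG_ERRORS"
    )
```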
- """ - - - self.log.debug("Searching path %s for config files", self.config_file_paths) - base_config = 'ipython_config.py' - self.log.debug("Attempting to load config file: %s" % - base_config) - try: - if suppress_errors is not None: - old_value = Application.raise_config_file_errors - Application.raise_config_file_errors = not suppress_errors; - Application.load_config_file( - self, - base_config, - path=self.config_file_paths - ) - except ConfigFileNotFound: - # ignore errors loading parent - self.log.debug("Config file %s not found", base_config) - pass - if suppress_errors is not None: - Application.raise_config_file_errors = old_value - - for config_file_name in self.config_files: - if not config_file_name or config_file_name == base_config: - continue - self.log.debug("Attempting to load config file: %s" % - self.config_file_name) - try: - Application.load_config_file( - self, - config_file_name, - path=self.config_file_paths - ) - except ConfigFileNotFound: - # Only warn if the default config file was NOT being used. - if config_file_name in self.config_file_specified: - msg = self.log.warning - else: - msg = self.log.debug - msg("Config file not found, skipping: %s", config_file_name) - except Exception: - # For testing purposes. - if not suppress_errors: - raise - self.log.warning("Error loading config file: %s" % - self.config_file_name, exc_info=True) - - def init_profile_dir(self): - """initialize the profile dir""" - self._in_init_profile_dir = True - if self.profile_dir is not None: - # already ran - return - if 'ProfileDir.location' not in self.config: - # location not specified, find by profile name - try: - p = ProfileDir.find_profile_dir_by_name(self.ipython_dir, self.profile, self.config) - except ProfileDirError: - # not found, maybe create it (always create default profile) - if self.auto_create or self.profile == 'default': - try: - p = ProfileDir.create_profile_dir_by_name(self.ipython_dir, self.profile, self.config) - except ProfileDirError: - self.log.fatal("Could not create profile: %r"%self.profile) - self.exit(1) - else: - self.log.info("Created profile dir: %r"%p.location) - else: - self.log.fatal("Profile %r not found."%self.profile) - self.exit(1) - else: - self.log.debug(f"Using existing profile dir: {p.location!r}") - else: - location = self.config.ProfileDir.location - # location is fully specified - try: - p = ProfileDir.find_profile_dir(location, self.config) - except ProfileDirError: - # not found, maybe create it - if self.auto_create: - try: - p = ProfileDir.create_profile_dir(location, self.config) - except ProfileDirError: - self.log.fatal("Could not create profile directory: %r"%location) - self.exit(1) - else: - self.log.debug("Creating new profile dir: %r"%location) - else: - self.log.fatal("Profile directory %r not found."%location) - self.exit(1) - else: - self.log.debug(f"Using existing profile dir: {p.location!r}") - # if profile_dir is specified explicitly, set profile name - dir_name = os.path.basename(p.location) - if dir_name.startswith('profile_'): - self.profile = dir_name[8:] - - self.profile_dir = p - self.config_file_paths.append(p.location) - self._in_init_profile_dir = False - - def init_config_files(self): - """[optionally] copy default config files into profile dir.""" - self.config_file_paths.extend(ENV_CONFIG_DIRS) - self.config_file_paths.extend(SYSTEM_CONFIG_DIRS) - # copy config files - path = self.builtin_profile_dir - if self.copy_config_files: - src = self.profile - - cfg = self.config_file_name - if path and 
os.path.exists(os.path.join(path, cfg)): - self.log.warning("Staging %r from %s into %r [overwrite=%s]"%( - cfg, src, self.profile_dir.location, self.overwrite) - ) - self.profile_dir.copy_config_file(cfg, path=path, overwrite=self.overwrite) - else: - self.stage_default_config_file() - else: - # Still stage *bundled* config files, but not generated ones - # This is necessary for `ipython profile=sympy` to load the profile - # on the first go - files = glob.glob(os.path.join(path, '*.py')) - for fullpath in files: - cfg = os.path.basename(fullpath) - if self.profile_dir.copy_config_file(cfg, path=path, overwrite=False): - # file was copied - self.log.warning("Staging bundled %s from %s into %r"%( - cfg, self.profile, self.profile_dir.location) - ) - - - def stage_default_config_file(self): - """auto generate default config file, and stage it into the profile.""" - s = self.generate_config_file() - fname = os.path.join(self.profile_dir.location, self.config_file_name) - if self.overwrite or not os.path.exists(fname): - self.log.warning("Generating default config file: %r"%(fname)) - with open(fname, 'w') as f: - f.write(s) - - @catch_config_error - def initialize(self, argv=None): - # don't hook up crash handler before parsing command-line - self.parse_command_line(argv) - self.init_crash_handler() - if self.subapp is not None: - # stop here if subapp is taking over - return - # save a copy of CLI config to re-load after config files - # so that it has highest priority - cl_config = deepcopy(self.config) - self.init_profile_dir() - self.init_config_files() - self.load_config_file() - # enforce cl-opts override configfile opts: - self.update_config(cl_config) +# encoding: utf-8 +""" +An application for IPython. + +All top-level applications should use the classes in this module for +handling configuration and creating configurables. + +The job of an :class:`Application` is to create the master configuration +object and then create the configurable objects, passing the config to them. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import atexit +from copy import deepcopy +import glob +import logging +import os +import shutil +import sys + +from traitlets.config.application import Application, catch_config_error +from traitlets.config.loader import ConfigFileNotFound, PyFileConfigLoader +from IPython.core import release, crashhandler +from IPython.core.profiledir import ProfileDir, ProfileDirError +from IPython.paths import get_ipython_dir, get_ipython_package_dir +from IPython.utils.path import ensure_dir_exists +from traitlets import ( + List, Unicode, Type, Bool, Set, Instance, Undefined, + default, observe, +) + +if os.name == 'nt': + programdata = os.environ.get('PROGRAMDATA', None) + if programdata: + SYSTEM_CONFIG_DIRS = [os.path.join(programdata, 'ipython')] + else: # PROGRAMDATA is not defined by default on XP. 
+ SYSTEM_CONFIG_DIRS = [] +else: + SYSTEM_CONFIG_DIRS = [ + "/usr/local/etc/ipython", + "/etc/ipython", + ] + + +ENV_CONFIG_DIRS = [] +_env_config_dir = os.path.join(sys.prefix, 'etc', 'ipython') +if _env_config_dir not in SYSTEM_CONFIG_DIRS: + # only add ENV_CONFIG if sys.prefix is not already included + ENV_CONFIG_DIRS.append(_env_config_dir) + + +_envvar = os.environ.get('IPYTHON_SUPPRESS_CONFIG_ERRORS') +if _envvar in {None, ''}: + IPYTHON_SUPPRESS_CONFIG_ERRORS = None +else: + if _envvar.lower() in {'1','true'}: + IPYTHON_SUPPRESS_CONFIG_ERRORS = True + elif _envvar.lower() in {'0','false'} : + IPYTHON_SUPPRESS_CONFIG_ERRORS = False + else: + sys.exit("Unsupported value for environment variable: 'IPYTHON_SUPPRESS_CONFIG_ERRORS' is set to '%s' which is none of {'0', '1', 'false', 'true', ''}."% _envvar ) + +# aliases and flags + +base_aliases = { + 'profile-dir' : 'ProfileDir.location', + 'profile' : 'BaseIPythonApplication.profile', + 'ipython-dir' : 'BaseIPythonApplication.ipython_dir', + 'log-level' : 'Application.log_level', + 'config' : 'BaseIPythonApplication.extra_config_file', +} + +base_flags = dict( + debug = ({'Application' : {'log_level' : logging.DEBUG}}, + "set log level to logging.DEBUG (maximize logging output)"), + quiet = ({'Application' : {'log_level' : logging.CRITICAL}}, + "set log level to logging.CRITICAL (minimize logging output)"), + init = ({'BaseIPythonApplication' : { + 'copy_config_files' : True, + 'auto_create' : True} + }, """Initialize profile with default config files. This is equivalent + to running `ipython profile create <profile>` prior to startup. + """) +) + +class ProfileAwareConfigLoader(PyFileConfigLoader): + """A Python file config loader that is aware of IPython profiles.""" + def load_subconfig(self, fname, path=None, profile=None): + if profile is not None: + try: + profile_dir = ProfileDir.find_profile_dir_by_name( + get_ipython_dir(), + profile, + ) + except ProfileDirError: + return + path = profile_dir.location + return super(ProfileAwareConfigLoader, self).load_subconfig(fname, path=path) + +class BaseIPythonApplication(Application): + + name = u'ipython' + description = Unicode(u'IPython: an enhanced interactive Python shell.') + version = Unicode(release.version) + + aliases = base_aliases + flags = base_flags + classes = List([ProfileDir]) + + # enable `load_subconfig('cfg.py', profile='name')` + python_config_loader_class = ProfileAwareConfigLoader + + # Track whether the config_file has changed, + # because some logic happens only if we aren't using the default. + config_file_specified = Set() + + config_file_name = Unicode() + @default('config_file_name') + def _config_file_name_default(self): + return self.name.replace('-','_') + u'_config.py' + @observe('config_file_name') + def _config_file_name_changed(self, change): + if change['new'] != change['old']: + self.config_file_specified.add(change['new']) + + # The directory that contains IPython's builtin profiles. + builtin_profile_dir = Unicode( + os.path.join(get_ipython_package_dir(), u'config', u'profile', u'default') + ) + + config_file_paths = List(Unicode()) + @default('config_file_paths') + def _config_file_paths_default(self): + return [] + + extra_config_file = Unicode( + help="""Path to an extra config file to load. + + If specified, load this config file in addition to any other IPython config. 
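Editor's note: the IPYTHON_SUPPRESS_CONFIG_ERRORS handling above folds the environment variable into a tri-state value (None / True / False) and aborts on anything else. A standalone sketch of the same mapping (the helper name is ours, for illustration only):

import sys

def parse_suppress_config_errors(raw):
    # unset or empty -> None: defer to the traitlets.Application behaviour
    if raw in (None, ''):
        return None
    if raw.lower() in ('1', 'true'):
        return True
    if raw.lower() in ('0', 'false'):
        return False
    # any other value aborts startup, mirroring the sys.exit() above
    sys.exit("Unsupported value for IPYTHON_SUPPRESS_CONFIG_ERRORS: %r" % raw)

assert parse_suppress_config_errors(None) is None
assert parse_suppress_config_errors('TRUE') is True
assert parse_suppress_config_errors('0') is False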
+ """).tag(config=True) + @observe('extra_config_file') + def _extra_config_file_changed(self, change): + old = change['old'] + new = change['new'] + try: + self.config_files.remove(old) + except ValueError: + pass + self.config_file_specified.add(new) + self.config_files.append(new) + + profile = Unicode(u'default', + help="""The IPython profile to use.""" + ).tag(config=True) + + @observe('profile') + def _profile_changed(self, change): + self.builtin_profile_dir = os.path.join( + get_ipython_package_dir(), u'config', u'profile', change['new'] + ) + + ipython_dir = Unicode( + help=""" + The name of the IPython directory. This directory is used for logging + configuration (through profiles), history storage, etc. The default + is usually $HOME/.ipython. This option can also be specified through + the environment variable IPYTHONDIR. + """ + ).tag(config=True) + @default('ipython_dir') + def _ipython_dir_default(self): + d = get_ipython_dir() + self._ipython_dir_changed({ + 'name': 'ipython_dir', + 'old': d, + 'new': d, + }) + return d + + _in_init_profile_dir = False + profile_dir = Instance(ProfileDir, allow_none=True) + @default('profile_dir') + def _profile_dir_default(self): + # avoid recursion + if self._in_init_profile_dir: + return + # profile_dir requested early, force initialization + self.init_profile_dir() + return self.profile_dir + + overwrite = Bool(False, + help="""Whether to overwrite existing config files when copying""" + ).tag(config=True) + auto_create = Bool(False, + help="""Whether to create profile dir if it doesn't exist""" + ).tag(config=True) + + config_files = List(Unicode()) + @default('config_files') + def _config_files_default(self): + return [self.config_file_name] + + copy_config_files = Bool(False, + help="""Whether to install the default config files into the profile dir. + If a new profile is being created, and IPython contains config files for that + profile, then they will be staged into the new directory. Otherwise, + default config files will be automatically generated. + """).tag(config=True) + + verbose_crash = Bool(False, + help="""Create a massive crash report when IPython encounters what may be an + internal error. The default is to append a short message to the + usual traceback""").tag(config=True) + + # The class to use as the crash handler. 
+ crash_handler_class = Type(crashhandler.CrashHandler) + + @catch_config_error + def __init__(self, **kwargs): + super(BaseIPythonApplication, self).__init__(**kwargs) + # ensure current working directory exists + try: + os.getcwd() + except: + # exit if cwd doesn't exist + self.log.error("Current working directory doesn't exist.") + self.exit(1) + + #------------------------------------------------------------------------- + # Various stages of Application creation + #------------------------------------------------------------------------- + + deprecated_subcommands = {} + + def initialize_subcommand(self, subc, argv=None): + if subc in self.deprecated_subcommands: + self.log.warning("Subcommand `ipython {sub}` is deprecated and will be removed " + "in future versions.".format(sub=subc)) + self.log.warning("You likely want to use `jupyter {sub}` in the " + "future".format(sub=subc)) + return super(BaseIPythonApplication, self).initialize_subcommand(subc, argv) + + def init_crash_handler(self): + """Create a crash handler, typically setting sys.excepthook to it.""" + self.crash_handler = self.crash_handler_class(self) + sys.excepthook = self.excepthook + def unset_crashhandler(): + sys.excepthook = sys.__excepthook__ + atexit.register(unset_crashhandler) + + def excepthook(self, etype, evalue, tb): + """this is sys.excepthook after init_crashhandler + + set self.verbose_crash=True to use our full crashhandler, instead of + a regular traceback with a short message (crash_handler_lite) + """ + + if self.verbose_crash: + return self.crash_handler(etype, evalue, tb) + else: + return crashhandler.crash_handler_lite(etype, evalue, tb) + + @observe('ipython_dir') + def _ipython_dir_changed(self, change): + old = change['old'] + new = change['new'] + if old is not Undefined: + str_old = os.path.abspath(old) + if str_old in sys.path: + sys.path.remove(str_old) + str_path = os.path.abspath(new) + sys.path.append(str_path) + ensure_dir_exists(new) + readme = os.path.join(new, 'README') + readme_src = os.path.join(get_ipython_package_dir(), u'config', u'profile', 'README') + if not os.path.exists(readme) and os.path.exists(readme_src): + shutil.copy(readme_src, readme) + for d in ('extensions', 'nbextensions'): + path = os.path.join(new, d) + try: + ensure_dir_exists(path) + except OSError as e: + # this will not be EEXIST + self.log.error("couldn't create path %s: %s", path, e) + self.log.debug("IPYTHONDIR set to: %s" % new) + + def load_config_file(self, suppress_errors=IPYTHON_SUPPRESS_CONFIG_ERRORS): + """Load the config file. + + By default, errors in loading config are handled, and a warning + printed on screen. For testing, the suppress_errors option is set + to False, so errors will make tests fail. + + `suppress_errors` default value is to be `None` in which case the + behavior default to the one of `traitlets.Application`. + + The default value can be set : + - to `False` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '0', or 'false' (case insensitive). + - to `True` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '1' or 'true' (case insensitive). + - to `None` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '' (empty string) or leaving it unset. + + Any other value are invalid, and will make IPython exit with a non-zero return code. 
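Editor's note: init_profile_dir, shown in the removed hunk earlier and re-added in the next hunk, resolves the profile by name and auto-creates only the default profile. The same lookup can be reproduced with the APIs imported at the top of this file; a minimal sketch:

from IPython.core.profiledir import ProfileDir, ProfileDirError
from IPython.paths import get_ipython_dir

ipython_dir = get_ipython_dir()
try:
    p = ProfileDir.find_profile_dir_by_name(ipython_dir, 'default')
except ProfileDirError:
    # mirror the fallback in init_profile_dir: 'default' is created on demand
    p = ProfileDir.create_profile_dir_by_name(ipython_dir, 'default')
print(p.location)   # e.g. ~/.ipython/profile_default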
+ """ + + + self.log.debug("Searching path %s for config files", self.config_file_paths) + base_config = 'ipython_config.py' + self.log.debug("Attempting to load config file: %s" % + base_config) + try: + if suppress_errors is not None: + old_value = Application.raise_config_file_errors + Application.raise_config_file_errors = not suppress_errors; + Application.load_config_file( + self, + base_config, + path=self.config_file_paths + ) + except ConfigFileNotFound: + # ignore errors loading parent + self.log.debug("Config file %s not found", base_config) + pass + if suppress_errors is not None: + Application.raise_config_file_errors = old_value + + for config_file_name in self.config_files: + if not config_file_name or config_file_name == base_config: + continue + self.log.debug("Attempting to load config file: %s" % + self.config_file_name) + try: + Application.load_config_file( + self, + config_file_name, + path=self.config_file_paths + ) + except ConfigFileNotFound: + # Only warn if the default config file was NOT being used. + if config_file_name in self.config_file_specified: + msg = self.log.warning + else: + msg = self.log.debug + msg("Config file not found, skipping: %s", config_file_name) + except Exception: + # For testing purposes. + if not suppress_errors: + raise + self.log.warning("Error loading config file: %s" % + self.config_file_name, exc_info=True) + + def init_profile_dir(self): + """initialize the profile dir""" + self._in_init_profile_dir = True + if self.profile_dir is not None: + # already ran + return + if 'ProfileDir.location' not in self.config: + # location not specified, find by profile name + try: + p = ProfileDir.find_profile_dir_by_name(self.ipython_dir, self.profile, self.config) + except ProfileDirError: + # not found, maybe create it (always create default profile) + if self.auto_create or self.profile == 'default': + try: + p = ProfileDir.create_profile_dir_by_name(self.ipython_dir, self.profile, self.config) + except ProfileDirError: + self.log.fatal("Could not create profile: %r"%self.profile) + self.exit(1) + else: + self.log.info("Created profile dir: %r"%p.location) + else: + self.log.fatal("Profile %r not found."%self.profile) + self.exit(1) + else: + self.log.debug(f"Using existing profile dir: {p.location!r}") + else: + location = self.config.ProfileDir.location + # location is fully specified + try: + p = ProfileDir.find_profile_dir(location, self.config) + except ProfileDirError: + # not found, maybe create it + if self.auto_create: + try: + p = ProfileDir.create_profile_dir(location, self.config) + except ProfileDirError: + self.log.fatal("Could not create profile directory: %r"%location) + self.exit(1) + else: + self.log.debug("Creating new profile dir: %r"%location) + else: + self.log.fatal("Profile directory %r not found."%location) + self.exit(1) + else: + self.log.debug(f"Using existing profile dir: {p.location!r}") + # if profile_dir is specified explicitly, set profile name + dir_name = os.path.basename(p.location) + if dir_name.startswith('profile_'): + self.profile = dir_name[8:] + + self.profile_dir = p + self.config_file_paths.append(p.location) + self._in_init_profile_dir = False + + def init_config_files(self): + """[optionally] copy default config files into profile dir.""" + self.config_file_paths.extend(ENV_CONFIG_DIRS) + self.config_file_paths.extend(SYSTEM_CONFIG_DIRS) + # copy config files + path = self.builtin_profile_dir + if self.copy_config_files: + src = self.profile + + cfg = self.config_file_name + if path and 
os.path.exists(os.path.join(path, cfg)): + self.log.warning("Staging %r from %s into %r [overwrite=%s]"%( + cfg, src, self.profile_dir.location, self.overwrite) + ) + self.profile_dir.copy_config_file(cfg, path=path, overwrite=self.overwrite) + else: + self.stage_default_config_file() + else: + # Still stage *bundled* config files, but not generated ones + # This is necessary for `ipython profile=sympy` to load the profile + # on the first go + files = glob.glob(os.path.join(path, '*.py')) + for fullpath in files: + cfg = os.path.basename(fullpath) + if self.profile_dir.copy_config_file(cfg, path=path, overwrite=False): + # file was copied + self.log.warning("Staging bundled %s from %s into %r"%( + cfg, self.profile, self.profile_dir.location) + ) + + + def stage_default_config_file(self): + """auto generate default config file, and stage it into the profile.""" + s = self.generate_config_file() + fname = os.path.join(self.profile_dir.location, self.config_file_name) + if self.overwrite or not os.path.exists(fname): + self.log.warning("Generating default config file: %r"%(fname)) + with open(fname, 'w') as f: + f.write(s) + + @catch_config_error + def initialize(self, argv=None): + # don't hook up crash handler before parsing command-line + self.parse_command_line(argv) + self.init_crash_handler() + if self.subapp is not None: + # stop here if subapp is taking over + return + # save a copy of CLI config to re-load after config files + # so that it has highest priority + cl_config = deepcopy(self.config) + self.init_profile_dir() + self.init_config_files() + self.load_config_file() + # enforce cl-opts override configfile opts: + self.update_config(cl_config) diff --git a/contrib/python/ipython/py3/IPython/core/async_helpers.py b/contrib/python/ipython/py3/IPython/core/async_helpers.py index 070468e70ae..fca78def85a 100644 --- a/contrib/python/ipython/py3/IPython/core/async_helpers.py +++ b/contrib/python/ipython/py3/IPython/core/async_helpers.py @@ -1,183 +1,183 @@ -""" -Async helper function that are invalid syntax on Python 3.5 and below. - -This code is best effort, and may have edge cases not behaving as expected. In -particular it contain a number of heuristics to detect whether code is -effectively async and need to run in an event loop or not. - -Some constructs (like top-level `return`, or `yield`) are taken care of -explicitly to actually raise a SyntaxError and stay as close as possible to -Python semantics. -""" - - -import ast -import sys -import asyncio -import inspect -from textwrap import dedent, indent - - -class _AsyncIORunner: - def __init__(self): - self._loop = None - - @property - def loop(self): - """Always returns a non-closed event loop""" - if self._loop is None or self._loop.is_closed(): - policy = asyncio.get_event_loop_policy() - self._loop = policy.new_event_loop() - policy.set_event_loop(self._loop) - return self._loop - - def __call__(self, coro): - """ - Handler for asyncio autoawait - """ - return self.loop.run_until_complete(coro) - - def __str__(self): - return 'asyncio' - -_asyncio_runner = _AsyncIORunner() - - -def _curio_runner(coroutine): - """ - handler for curio autoawait - """ - import curio - - return curio.run(coroutine) - - -def _trio_runner(async_fn): - import trio - - async def loc(coro): - """ - We need the dummy no-op async def to protect from - trio's internal. 
See https://github.com/python-trio/trio/issues/89 - """ - return await coro - - return trio.run(loc, async_fn) - - -def _pseudo_sync_runner(coro): - """ - A runner that does not really allow async execution, and just advance the coroutine. - - See discussion in https://github.com/python-trio/trio/issues/608, - - Credit to Nathaniel Smith - - """ - try: - coro.send(None) - except StopIteration as exc: - return exc.value - else: - # TODO: do not raise but return an execution result with the right info. - raise RuntimeError( - "{coro_name!r} needs a real async loop".format(coro_name=coro.__name__) - ) - - -def _asyncify(code: str) -> str: - """wrap code in async def definition. - - And setup a bit of context to run it later. - """ - res = dedent( - """ - async def __wrapper__(): - try: - {usercode} - finally: - locals() - """ - ).format(usercode=indent(code, " " * 8)) - return res - - -class _AsyncSyntaxErrorVisitor(ast.NodeVisitor): - """ - Find syntax errors that would be an error in an async repl, but because - the implementation involves wrapping the repl in an async function, it - is erroneously allowed (e.g. yield or return at the top level) - """ - def __init__(self): - if sys.version_info >= (3,8): - raise ValueError('DEPRECATED in Python 3.8+') - self.depth = 0 - super().__init__() - - def generic_visit(self, node): - func_types = (ast.FunctionDef, ast.AsyncFunctionDef) - invalid_types_by_depth = { - 0: (ast.Return, ast.Yield, ast.YieldFrom), - 1: (ast.Nonlocal,) - } - - should_traverse = self.depth < max(invalid_types_by_depth.keys()) - if isinstance(node, func_types) and should_traverse: - self.depth += 1 - super().generic_visit(node) - self.depth -= 1 - elif isinstance(node, invalid_types_by_depth[self.depth]): - raise SyntaxError() - else: - super().generic_visit(node) - - -def _async_parse_cell(cell: str) -> ast.AST: - """ - This is a compatibility shim for pre-3.7 when async outside of a function - is a syntax error at the parse stage. - - It will return an abstract syntax tree parsed as if async and await outside - of a function were not a syntax error. - """ - if sys.version_info < (3, 7): - # Prior to 3.7 you need to asyncify before parse - wrapped_parse_tree = ast.parse(_asyncify(cell)) - return wrapped_parse_tree.body[0].body[0] - else: - return ast.parse(cell) - - -def _should_be_async(cell: str) -> bool: - """Detect if a block of code need to be wrapped in an `async def` - - Attempt to parse the block of code, it it compile we're fine. - Otherwise we wrap if and try to compile. - - If it works, assume it should be async. Otherwise Return False. - - Not handled yet: If the block of code has a return statement as the top - level, it will be seen as async. This is a know limitation. - """ - if sys.version_info > (3, 8): - try: - code = compile(cell, "<>", "exec", flags=getattr(ast,'PyCF_ALLOW_TOP_LEVEL_AWAIT', 0x0)) - return inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE - except (SyntaxError, MemoryError): - return False - try: - # we can't limit ourself to ast.parse, as it __accepts__ to parse on - # 3.7+, but just does not _compile_ - code = compile(cell, "<>", "exec") - except (SyntaxError, MemoryError): - try: - parse_tree = _async_parse_cell(cell) - - # Raise a SyntaxError if there are top-level return or yields - v = _AsyncSyntaxErrorVisitor() - v.visit(parse_tree) - - except (SyntaxError, MemoryError): - return False - return True - return False +""" +Async helper function that are invalid syntax on Python 3.5 and below. 
+ +This code is best effort, and may have edge cases not behaving as expected. In +particular it contain a number of heuristics to detect whether code is +effectively async and need to run in an event loop or not. + +Some constructs (like top-level `return`, or `yield`) are taken care of +explicitly to actually raise a SyntaxError and stay as close as possible to +Python semantics. +""" + + +import ast +import sys +import asyncio +import inspect +from textwrap import dedent, indent + + +class _AsyncIORunner: + def __init__(self): + self._loop = None + + @property + def loop(self): + """Always returns a non-closed event loop""" + if self._loop is None or self._loop.is_closed(): + policy = asyncio.get_event_loop_policy() + self._loop = policy.new_event_loop() + policy.set_event_loop(self._loop) + return self._loop + + def __call__(self, coro): + """ + Handler for asyncio autoawait + """ + return self.loop.run_until_complete(coro) + + def __str__(self): + return 'asyncio' + +_asyncio_runner = _AsyncIORunner() + + +def _curio_runner(coroutine): + """ + handler for curio autoawait + """ + import curio + + return curio.run(coroutine) + + +def _trio_runner(async_fn): + import trio + + async def loc(coro): + """ + We need the dummy no-op async def to protect from + trio's internal. See https://github.com/python-trio/trio/issues/89 + """ + return await coro + + return trio.run(loc, async_fn) + + +def _pseudo_sync_runner(coro): + """ + A runner that does not really allow async execution, and just advance the coroutine. + + See discussion in https://github.com/python-trio/trio/issues/608, + + Credit to Nathaniel Smith + + """ + try: + coro.send(None) + except StopIteration as exc: + return exc.value + else: + # TODO: do not raise but return an execution result with the right info. + raise RuntimeError( + "{coro_name!r} needs a real async loop".format(coro_name=coro.__name__) + ) + + +def _asyncify(code: str) -> str: + """wrap code in async def definition. + + And setup a bit of context to run it later. + """ + res = dedent( + """ + async def __wrapper__(): + try: + {usercode} + finally: + locals() + """ + ).format(usercode=indent(code, " " * 8)) + return res + + +class _AsyncSyntaxErrorVisitor(ast.NodeVisitor): + """ + Find syntax errors that would be an error in an async repl, but because + the implementation involves wrapping the repl in an async function, it + is erroneously allowed (e.g. yield or return at the top level) + """ + def __init__(self): + if sys.version_info >= (3,8): + raise ValueError('DEPRECATED in Python 3.8+') + self.depth = 0 + super().__init__() + + def generic_visit(self, node): + func_types = (ast.FunctionDef, ast.AsyncFunctionDef) + invalid_types_by_depth = { + 0: (ast.Return, ast.Yield, ast.YieldFrom), + 1: (ast.Nonlocal,) + } + + should_traverse = self.depth < max(invalid_types_by_depth.keys()) + if isinstance(node, func_types) and should_traverse: + self.depth += 1 + super().generic_visit(node) + self.depth -= 1 + elif isinstance(node, invalid_types_by_depth[self.depth]): + raise SyntaxError() + else: + super().generic_visit(node) + + +def _async_parse_cell(cell: str) -> ast.AST: + """ + This is a compatibility shim for pre-3.7 when async outside of a function + is a syntax error at the parse stage. + + It will return an abstract syntax tree parsed as if async and await outside + of a function were not a syntax error. 
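Editor's note: _should_be_async, shown in the removed hunk above and re-added just below, keys its fast path on the PyCF_ALLOW_TOP_LEVEL_AWAIT compile flag available from Python 3.8. A minimal standalone sketch of that detection (the function name is ours):

import ast
import inspect

def needs_async_wrapper(cell: str) -> bool:
    flags = getattr(ast, 'PyCF_ALLOW_TOP_LEVEL_AWAIT', 0)
    try:
        code = compile(cell, '<cell>', 'exec', flags=flags)
    except (SyntaxError, MemoryError):
        return False
    # the flag marks code objects containing top-level await as coroutines
    return bool(code.co_flags & inspect.CO_COROUTINE)

print(needs_async_wrapper('x = 1'))                                   # False
print(needs_async_wrapper('import asyncio\nawait asyncio.sleep(0)'))  # True on 3.8+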
+ """ + if sys.version_info < (3, 7): + # Prior to 3.7 you need to asyncify before parse + wrapped_parse_tree = ast.parse(_asyncify(cell)) + return wrapped_parse_tree.body[0].body[0] + else: + return ast.parse(cell) + + +def _should_be_async(cell: str) -> bool: + """Detect if a block of code need to be wrapped in an `async def` + + Attempt to parse the block of code, it it compile we're fine. + Otherwise we wrap if and try to compile. + + If it works, assume it should be async. Otherwise Return False. + + Not handled yet: If the block of code has a return statement as the top + level, it will be seen as async. This is a know limitation. + """ + if sys.version_info > (3, 8): + try: + code = compile(cell, "<>", "exec", flags=getattr(ast,'PyCF_ALLOW_TOP_LEVEL_AWAIT', 0x0)) + return inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE + except (SyntaxError, MemoryError): + return False + try: + # we can't limit ourself to ast.parse, as it __accepts__ to parse on + # 3.7+, but just does not _compile_ + code = compile(cell, "<>", "exec") + except (SyntaxError, MemoryError): + try: + parse_tree = _async_parse_cell(cell) + + # Raise a SyntaxError if there are top-level return or yields + v = _AsyncSyntaxErrorVisitor() + v.visit(parse_tree) + + except (SyntaxError, MemoryError): + return False + return True + return False diff --git a/contrib/python/ipython/py3/IPython/core/autocall.py b/contrib/python/ipython/py3/IPython/core/autocall.py index 4ef2bce59c1..bab7f859c96 100644 --- a/contrib/python/ipython/py3/IPython/core/autocall.py +++ b/contrib/python/ipython/py3/IPython/core/autocall.py @@ -1,70 +1,70 @@ -# encoding: utf-8 -""" -Autocall capabilities for IPython.core. - -Authors: - -* Brian Granger -* Fernando Perez -* Thomas Kluyver - -Notes ------ -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - -class IPyAutocall(object): - """ Instances of this class are always autocalled - - This happens regardless of 'autocall' variable state. Use this to - develop macro-like mechanisms. - """ - _ip = None - rewrite = True - def __init__(self, ip=None): - self._ip = ip - - def set_ip(self, ip): - """ Will be used to set _ip point to current ipython instance b/f call - - Override this method if you don't want this to happen. - - """ - self._ip = ip - - -class ExitAutocall(IPyAutocall): - """An autocallable object which will be added to the user namespace so that - exit, exit(), quit or quit() are all valid ways to close the shell.""" - rewrite = False - - def __call__(self): - self._ip.ask_exit() - -class ZMQExitAutocall(ExitAutocall): - """Exit IPython. Autocallable, so it needn't be explicitly called. - - Parameters - ---------- - keep_kernel : bool - If True, leave the kernel alive. Otherwise, tell the kernel to exit too - (default). 
- """ - def __call__(self, keep_kernel=False): - self._ip.keepkernel_on_exit = keep_kernel - self._ip.ask_exit() +# encoding: utf-8 +""" +Autocall capabilities for IPython.core. + +Authors: + +* Brian Granger +* Fernando Perez +* Thomas Kluyver + +Notes +----- +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +class IPyAutocall(object): + """ Instances of this class are always autocalled + + This happens regardless of 'autocall' variable state. Use this to + develop macro-like mechanisms. + """ + _ip = None + rewrite = True + def __init__(self, ip=None): + self._ip = ip + + def set_ip(self, ip): + """ Will be used to set _ip point to current ipython instance b/f call + + Override this method if you don't want this to happen. + + """ + self._ip = ip + + +class ExitAutocall(IPyAutocall): + """An autocallable object which will be added to the user namespace so that + exit, exit(), quit or quit() are all valid ways to close the shell.""" + rewrite = False + + def __call__(self): + self._ip.ask_exit() + +class ZMQExitAutocall(ExitAutocall): + """Exit IPython. Autocallable, so it needn't be explicitly called. + + Parameters + ---------- + keep_kernel : bool + If True, leave the kernel alive. Otherwise, tell the kernel to exit too + (default). + """ + def __call__(self, keep_kernel=False): + self._ip.keepkernel_on_exit = keep_kernel + self._ip.ask_exit() diff --git a/contrib/python/ipython/py3/IPython/core/builtin_trap.py b/contrib/python/ipython/py3/IPython/core/builtin_trap.py index c3f0b1eda81..a8ea4abcd9d 100644 --- a/contrib/python/ipython/py3/IPython/core/builtin_trap.py +++ b/contrib/python/ipython/py3/IPython/core/builtin_trap.py @@ -1,86 +1,86 @@ -""" -A context manager for managing things injected into :mod:`builtins`. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. -import builtins as builtin_mod - -from traitlets.config.configurable import Configurable - -from traitlets import Instance - - -class __BuiltinUndefined(object): pass -BuiltinUndefined = __BuiltinUndefined() - -class __HideBuiltin(object): pass -HideBuiltin = __HideBuiltin() - - -class BuiltinTrap(Configurable): - - shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', - allow_none=True) - - def __init__(self, shell=None): - super(BuiltinTrap, self).__init__(shell=shell, config=None) - self._orig_builtins = {} - # We define this to track if a single BuiltinTrap is nested. - # Only turn off the trap when the outermost call to __exit__ is made. 
- self._nested_level = 0 - self.shell = shell - # builtins we always add - if set to HideBuiltin, they will just - # be removed instead of being replaced by something else - self.auto_builtins = {'exit': HideBuiltin, - 'quit': HideBuiltin, - 'get_ipython': self.shell.get_ipython, - } - - def __enter__(self): - if self._nested_level == 0: - self.activate() - self._nested_level += 1 - # I return self, so callers can use add_builtin in a with clause. - return self - - def __exit__(self, type, value, traceback): - if self._nested_level == 1: - self.deactivate() - self._nested_level -= 1 - # Returning False will cause exceptions to propagate - return False - - def add_builtin(self, key, value): - """Add a builtin and save the original.""" - bdict = builtin_mod.__dict__ - orig = bdict.get(key, BuiltinUndefined) - if value is HideBuiltin: - if orig is not BuiltinUndefined: #same as 'key in bdict' - self._orig_builtins[key] = orig - del bdict[key] - else: - self._orig_builtins[key] = orig - bdict[key] = value - - def remove_builtin(self, key, orig): - """Remove an added builtin and re-set the original.""" - if orig is BuiltinUndefined: - del builtin_mod.__dict__[key] - else: - builtin_mod.__dict__[key] = orig - - def activate(self): - """Store ipython references in the __builtin__ namespace.""" - - add_builtin = self.add_builtin - for name, func in self.auto_builtins.items(): - add_builtin(name, func) - - def deactivate(self): - """Remove any builtins which might have been added by add_builtins, or - restore overwritten ones to their previous values.""" - remove_builtin = self.remove_builtin - for key, val in self._orig_builtins.items(): - remove_builtin(key, val) - self._orig_builtins.clear() - self._builtins_added = False +""" +A context manager for managing things injected into :mod:`builtins`. +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. +import builtins as builtin_mod + +from traitlets.config.configurable import Configurable + +from traitlets import Instance + + +class __BuiltinUndefined(object): pass +BuiltinUndefined = __BuiltinUndefined() + +class __HideBuiltin(object): pass +HideBuiltin = __HideBuiltin() + + +class BuiltinTrap(Configurable): + + shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', + allow_none=True) + + def __init__(self, shell=None): + super(BuiltinTrap, self).__init__(shell=shell, config=None) + self._orig_builtins = {} + # We define this to track if a single BuiltinTrap is nested. + # Only turn off the trap when the outermost call to __exit__ is made. + self._nested_level = 0 + self.shell = shell + # builtins we always add - if set to HideBuiltin, they will just + # be removed instead of being replaced by something else + self.auto_builtins = {'exit': HideBuiltin, + 'quit': HideBuiltin, + 'get_ipython': self.shell.get_ipython, + } + + def __enter__(self): + if self._nested_level == 0: + self.activate() + self._nested_level += 1 + # I return self, so callers can use add_builtin in a with clause. 
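Editor's note: BuiltinTrap keeps a nesting counter so only the outermost with-block installs and removes names, and it remembers each original value so deactivate() can restore it. A small standalone sketch of the same save/restore idea for a single name (class and example are ours):

import builtins

class TemporaryBuiltin:
    """Publish one name in builtins for the duration of a with-block."""
    _missing = object()

    def __init__(self, name, value):
        self.name, self.value = name, value

    def __enter__(self):
        # remember whatever was there before (possibly nothing)
        self.orig = getattr(builtins, self.name, self._missing)
        setattr(builtins, self.name, self.value)
        return self

    def __exit__(self, exc_type, exc_value, tb):
        if self.orig is self._missing:
            delattr(builtins, self.name)
        else:
            setattr(builtins, self.name, self.orig)
        return False  # let exceptions propagate, as BuiltinTrap does

with TemporaryBuiltin('answer', 42):
    print(answer)   # resolves through builtins, no import needed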
+ return self + + def __exit__(self, type, value, traceback): + if self._nested_level == 1: + self.deactivate() + self._nested_level -= 1 + # Returning False will cause exceptions to propagate + return False + + def add_builtin(self, key, value): + """Add a builtin and save the original.""" + bdict = builtin_mod.__dict__ + orig = bdict.get(key, BuiltinUndefined) + if value is HideBuiltin: + if orig is not BuiltinUndefined: #same as 'key in bdict' + self._orig_builtins[key] = orig + del bdict[key] + else: + self._orig_builtins[key] = orig + bdict[key] = value + + def remove_builtin(self, key, orig): + """Remove an added builtin and re-set the original.""" + if orig is BuiltinUndefined: + del builtin_mod.__dict__[key] + else: + builtin_mod.__dict__[key] = orig + + def activate(self): + """Store ipython references in the __builtin__ namespace.""" + + add_builtin = self.add_builtin + for name, func in self.auto_builtins.items(): + add_builtin(name, func) + + def deactivate(self): + """Remove any builtins which might have been added by add_builtins, or + restore overwritten ones to their previous values.""" + remove_builtin = self.remove_builtin + for key, val in self._orig_builtins.items(): + remove_builtin(key, val) + self._orig_builtins.clear() + self._builtins_added = False diff --git a/contrib/python/ipython/py3/IPython/core/compilerop.py b/contrib/python/ipython/py3/IPython/core/compilerop.py index 68b1d3ed140..50672a19541 100644 --- a/contrib/python/ipython/py3/IPython/core/compilerop.py +++ b/contrib/python/ipython/py3/IPython/core/compilerop.py @@ -1,188 +1,188 @@ -"""Compiler tools with improved interactive support. - -Provides compilation machinery similar to codeop, but with caching support so -we can provide interactive tracebacks. - -Authors -------- -* Robert Kern -* Fernando Perez -* Thomas Kluyver -""" - -# Note: though it might be more natural to name this module 'compiler', that -# name is in the stdlib and name collisions with the stdlib tend to produce -# weird problems (often with third-party tools). - -#----------------------------------------------------------------------------- -# Copyright (C) 2010-2011 The IPython Development Team. -# -# Distributed under the terms of the BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -# Stdlib imports -import __future__ -from ast import PyCF_ONLY_AST -import codeop -import functools -import hashlib -import linecache -import operator -import time -from contextlib import contextmanager - -#----------------------------------------------------------------------------- -# Constants -#----------------------------------------------------------------------------- - -# Roughly equal to PyCF_MASK | PyCF_MASK_OBSOLETE as defined in pythonrun.h, -# this is used as a bitmask to extract future-related code flags. -PyCF_MASK = functools.reduce(operator.or_, - (getattr(__future__, fname).compiler_flag - for fname in __future__.all_feature_names)) - -#----------------------------------------------------------------------------- -# Local utilities -#----------------------------------------------------------------------------- - -def code_name(code, number=0): - """ Compute a (probably) unique name for code for caching. 
- - This now expects code to be unicode. - """ - hash_digest = hashlib.sha1(code.encode("utf-8")).hexdigest() - # Include the number and 12 characters of the hash in the name. It's - # pretty much impossible that in a single session we'll have collisions - # even with truncated hashes, and the full one makes tracebacks too long - return '<ipython-input-{0}-{1}>'.format(number, hash_digest[:12]) - -#----------------------------------------------------------------------------- -# Classes and functions -#----------------------------------------------------------------------------- - -class CachingCompiler(codeop.Compile): - """A compiler that caches code compiled from interactive statements. - """ - - def __init__(self): - codeop.Compile.__init__(self) - - # This is ugly, but it must be done this way to allow multiple - # simultaneous ipython instances to coexist. Since Python itself - # directly accesses the data structures in the linecache module, and - # the cache therein is global, we must work with that data structure. - # We must hold a reference to the original checkcache routine and call - # that in our own check_cache() below, but the special IPython cache - # must also be shared by all IPython instances. If we were to hold - # separate caches (one in each CachingCompiler instance), any call made - # by Python itself to linecache.checkcache() would obliterate the - # cached data from the other IPython instances. - if not hasattr(linecache, '_ipython_cache'): - linecache._ipython_cache = {} - if not hasattr(linecache, '_checkcache_ori'): - linecache._checkcache_ori = linecache.checkcache - # Now, we must monkeypatch the linecache directly so that parts of the - # stdlib that call it outside our control go through our codepath - # (otherwise we'd lose our tracebacks). - linecache.checkcache = check_linecache_ipython - - - def ast_parse(self, source, filename='<unknown>', symbol='exec'): - """Parse code to an AST with the current compiler flags active. - - Arguments are exactly the same as ast.parse (in the standard library), - and are passed to the built-in compile function.""" - return compile(source, filename, symbol, self.flags | PyCF_ONLY_AST, 1) - - def reset_compiler_flags(self): - """Reset compiler flags to default state.""" - # This value is copied from codeop.Compile.__init__, so if that ever - # changes, it will need to be updated. - self.flags = codeop.PyCF_DONT_IMPLY_DEDENT - - @property - def compiler_flags(self): - """Flags currently active in the compilation process. - """ - return self.flags - - def get_code_name(self, raw_code, transformed_code, number): - """Compute filename given the code, and the cell number. - - Parameters - ---------- - raw_code : str - The raw cell code. - transformed_code : str - The executable Python source code to cache and compile. - number : int - A number which forms part of the code's name. Used for the execution - counter. - - Returns - ------- - The computed filename. - """ - return code_name(transformed_code, number) - - def cache(self, transformed_code, number=0, raw_code=None): - """Make a name for a block of code, and cache the code. - - Parameters - ---------- - transformed_code : str - The executable Python source code to cache and compile. - number : int - A number which forms part of the code's name. Used for the execution - counter. - raw_code : str - The raw code before transformation, if None, set to `transformed_code`. - - Returns - ------- - The name of the cached code (as a string). 
Pass this as the filename - argument to compilation, so that tracebacks are correctly hooked up. - """ - if raw_code is None: - raw_code = transformed_code - - name = self.get_code_name(raw_code, transformed_code, number) - entry = ( - len(transformed_code), - time.time(), - [line + "\n" for line in transformed_code.splitlines()], - name, - ) - linecache.cache[name] = entry - linecache._ipython_cache[name] = entry - return name - - @contextmanager - def extra_flags(self, flags): - ## bits that we'll set to 1 - turn_on_bits = ~self.flags & flags - - - self.flags = self.flags | flags - try: - yield - finally: - # turn off only the bits we turned on so that something like - # __future__ that set flags stays. - self.flags &= ~turn_on_bits - - -def check_linecache_ipython(*args): - """Call linecache.checkcache() safely protecting our cached values. - """ - # First call the original checkcache as intended - linecache._checkcache_ori(*args) - # Then, update back the cache with our data, so that tracebacks related - # to our compiled codes can be produced. - linecache.cache.update(linecache._ipython_cache) +"""Compiler tools with improved interactive support. + +Provides compilation machinery similar to codeop, but with caching support so +we can provide interactive tracebacks. + +Authors +------- +* Robert Kern +* Fernando Perez +* Thomas Kluyver +""" + +# Note: though it might be more natural to name this module 'compiler', that +# name is in the stdlib and name collisions with the stdlib tend to produce +# weird problems (often with third-party tools). + +#----------------------------------------------------------------------------- +# Copyright (C) 2010-2011 The IPython Development Team. +# +# Distributed under the terms of the BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Stdlib imports +import __future__ +from ast import PyCF_ONLY_AST +import codeop +import functools +import hashlib +import linecache +import operator +import time +from contextlib import contextmanager + +#----------------------------------------------------------------------------- +# Constants +#----------------------------------------------------------------------------- + +# Roughly equal to PyCF_MASK | PyCF_MASK_OBSOLETE as defined in pythonrun.h, +# this is used as a bitmask to extract future-related code flags. +PyCF_MASK = functools.reduce(operator.or_, + (getattr(__future__, fname).compiler_flag + for fname in __future__.all_feature_names)) + +#----------------------------------------------------------------------------- +# Local utilities +#----------------------------------------------------------------------------- + +def code_name(code, number=0): + """ Compute a (probably) unique name for code for caching. + + This now expects code to be unicode. + """ + hash_digest = hashlib.sha1(code.encode("utf-8")).hexdigest() + # Include the number and 12 characters of the hash in the name. 
It's + # pretty much impossible that in a single session we'll have collisions + # even with truncated hashes, and the full one makes tracebacks too long + return '<ipython-input-{0}-{1}>'.format(number, hash_digest[:12]) + +#----------------------------------------------------------------------------- +# Classes and functions +#----------------------------------------------------------------------------- + +class CachingCompiler(codeop.Compile): + """A compiler that caches code compiled from interactive statements. + """ + + def __init__(self): + codeop.Compile.__init__(self) + + # This is ugly, but it must be done this way to allow multiple + # simultaneous ipython instances to coexist. Since Python itself + # directly accesses the data structures in the linecache module, and + # the cache therein is global, we must work with that data structure. + # We must hold a reference to the original checkcache routine and call + # that in our own check_cache() below, but the special IPython cache + # must also be shared by all IPython instances. If we were to hold + # separate caches (one in each CachingCompiler instance), any call made + # by Python itself to linecache.checkcache() would obliterate the + # cached data from the other IPython instances. + if not hasattr(linecache, '_ipython_cache'): + linecache._ipython_cache = {} + if not hasattr(linecache, '_checkcache_ori'): + linecache._checkcache_ori = linecache.checkcache + # Now, we must monkeypatch the linecache directly so that parts of the + # stdlib that call it outside our control go through our codepath + # (otherwise we'd lose our tracebacks). + linecache.checkcache = check_linecache_ipython + + + def ast_parse(self, source, filename='<unknown>', symbol='exec'): + """Parse code to an AST with the current compiler flags active. + + Arguments are exactly the same as ast.parse (in the standard library), + and are passed to the built-in compile function.""" + return compile(source, filename, symbol, self.flags | PyCF_ONLY_AST, 1) + + def reset_compiler_flags(self): + """Reset compiler flags to default state.""" + # This value is copied from codeop.Compile.__init__, so if that ever + # changes, it will need to be updated. + self.flags = codeop.PyCF_DONT_IMPLY_DEDENT + + @property + def compiler_flags(self): + """Flags currently active in the compilation process. + """ + return self.flags + + def get_code_name(self, raw_code, transformed_code, number): + """Compute filename given the code, and the cell number. + + Parameters + ---------- + raw_code : str + The raw cell code. + transformed_code : str + The executable Python source code to cache and compile. + number : int + A number which forms part of the code's name. Used for the execution + counter. + + Returns + ------- + The computed filename. + """ + return code_name(transformed_code, number) + + def cache(self, transformed_code, number=0, raw_code=None): + """Make a name for a block of code, and cache the code. + + Parameters + ---------- + transformed_code : str + The executable Python source code to cache and compile. + number : int + A number which forms part of the code's name. Used for the execution + counter. + raw_code : str + The raw code before transformation, if None, set to `transformed_code`. + + Returns + ------- + The name of the cached code (as a string). Pass this as the filename + argument to compilation, so that tracebacks are correctly hooked up. 
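Editor's note: cache(), whose body follows in the next hunk, stores each cell's source in linecache under a synthetic <ipython-input-N-hash> name so tracebacks can show the offending line; check_linecache_ipython then re-injects those entries whenever the stdlib invalidates the cache. A standalone sketch of the same trick (it sidesteps the checkcache patch by using a None mtime, which the stdlib leaves alone):

import hashlib
import linecache
import traceback

def cache_cell(source, number=0):
    digest = hashlib.sha1(source.encode('utf-8')).hexdigest()
    name = '<cell-%d-%s>' % (number, digest[:12])
    # (size, mtime, lines, fullname); mtime=None keeps checkcache from purging it
    linecache.cache[name] = (
        len(source),
        None,
        [line + '\n' for line in source.splitlines()],
        name,
    )
    return name

src = "def boom():\n    raise ValueError('demo')\n\nboom()\n"
fname = cache_cell(src, 1)
try:
    exec(compile(src, fname, 'exec'))
except ValueError:
    traceback.print_exc()   # the traceback shows the cached source lines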
+ """ + if raw_code is None: + raw_code = transformed_code + + name = self.get_code_name(raw_code, transformed_code, number) + entry = ( + len(transformed_code), + time.time(), + [line + "\n" for line in transformed_code.splitlines()], + name, + ) + linecache.cache[name] = entry + linecache._ipython_cache[name] = entry + return name + + @contextmanager + def extra_flags(self, flags): + ## bits that we'll set to 1 + turn_on_bits = ~self.flags & flags + + + self.flags = self.flags | flags + try: + yield + finally: + # turn off only the bits we turned on so that something like + # __future__ that set flags stays. + self.flags &= ~turn_on_bits + + +def check_linecache_ipython(*args): + """Call linecache.checkcache() safely protecting our cached values. + """ + # First call the original checkcache as intended + linecache._checkcache_ori(*args) + # Then, update back the cache with our data, so that tracebacks related + # to our compiled codes can be produced. + linecache.cache.update(linecache._ipython_cache) diff --git a/contrib/python/ipython/py3/IPython/core/completer.py b/contrib/python/ipython/py3/IPython/core/completer.py index 484d4c1bf51..776edeb52b7 100644 --- a/contrib/python/ipython/py3/IPython/core/completer.py +++ b/contrib/python/ipython/py3/IPython/core/completer.py @@ -1,2118 +1,2118 @@ -"""Completion for IPython. - -This module started as fork of the rlcompleter module in the Python standard -library. The original enhancements made to rlcompleter have been sent -upstream and were accepted as of Python 2.3, - -This module now support a wide variety of completion mechanism both available -for normal classic Python code, as well as completer for IPython specific -Syntax like magics. - -Latex and Unicode completion -============================ - -IPython and compatible frontends not only can complete your code, but can help -you to input a wide range of characters. In particular we allow you to insert -a unicode character using the tab completion mechanism. - -Forward latex/unicode completion --------------------------------- - -Forward completion allows you to easily type a unicode character using its latex -name, or unicode long description. To do so type a backslash follow by the -relevant name and press tab: - - -Using latex completion: - -.. code:: - - \\alpha<tab> - α - -or using unicode completion: - - -.. code:: - - \\greek small letter alpha<tab> - α - - -Only valid Python identifiers will complete. Combining characters (like arrow or -dots) are also available, unlike latex they need to be put after the their -counterpart that is to say, `F\\\\vec<tab>` is correct, not `\\\\vec<tab>F`. - -Some browsers are known to display combining characters incorrectly. - -Backward latex completion -------------------------- - -It is sometime challenging to know how to type a character, if you are using -IPython, or any compatible frontend you can prepend backslash to the character -and press `<tab>` to expand it to its latex form. - -.. code:: - - \\α<tab> - \\alpha - - -Both forward and backward completions can be deactivated by setting the -``Completer.backslash_combining_completions`` option to ``False``. - - -Experimental -============ - -Starting with IPython 6.0, this module can make use of the Jedi library to -generate completions both using static analysis of the code, and dynamically -inspecting multiple namespaces. Jedi is an autocompletion and static analysis -for Python. 
The APIs attached to this new mechanism is unstable and will -raise unless use in an :any:`provisionalcompleter` context manager. - -You will find that the following are experimental: - - - :any:`provisionalcompleter` - - :any:`IPCompleter.completions` - - :any:`Completion` - - :any:`rectify_completions` - -.. note:: - - better name for :any:`rectify_completions` ? - -We welcome any feedback on these new API, and we also encourage you to try this -module in debug mode (start IPython with ``--Completer.debug=True``) in order -to have extra logging information if :any:`jedi` is crashing, or if current -IPython completer pending deprecations are returning results not yet handled -by :any:`jedi` - -Using Jedi for tab completion allow snippets like the following to work without -having to execute any code: - - >>> myvar = ['hello', 42] - ... myvar[1].bi<tab> - -Tab completion will be able to infer that ``myvar[1]`` is a real number without -executing any code unlike the previously available ``IPCompleter.greedy`` -option. - -Be sure to update :any:`jedi` to the latest stable version or to try the -current development version to get better completions. -""" - - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. -# -# Some of this code originated from rlcompleter in the Python standard library -# Copyright (C) 2001 Python Software Foundation, www.python.org - - -import builtins as builtin_mod -import glob -import inspect -import itertools -import keyword -import os -import re -import string -import sys -import time -import unicodedata -import warnings -from contextlib import contextmanager -from importlib import import_module -from types import SimpleNamespace -from typing import Iterable, Iterator, List, Tuple - -from IPython.core.error import TryNext -from IPython.core.inputtransformer2 import ESC_MAGIC -from IPython.core.latex_symbols import latex_symbols, reverse_latex_symbol -from IPython.core.oinspect import InspectColors -from IPython.utils import generics -from IPython.utils.dir2 import dir2, get_real_method -from IPython.utils.process import arg_split -from traitlets import Bool, Enum, Int, observe -from traitlets.config.configurable import Configurable - -import __main__ - -# skip module docstests -skip_doctest = True - -try: - import jedi - jedi.settings.case_insensitive_completion = False - import jedi.api.helpers - import jedi.api.classes - JEDI_INSTALLED = True -except ImportError: - JEDI_INSTALLED = False -#----------------------------------------------------------------------------- -# Globals -#----------------------------------------------------------------------------- - -# Public API -__all__ = ['Completer','IPCompleter'] - -if sys.platform == 'win32': - PROTECTABLES = ' ' -else: - PROTECTABLES = ' ()[]{}?=\\|;:\'#*"^&' - -# Protect against returning an enormous number of completions which the frontend -# may have trouble processing. -MATCHES_LIMIT = 500 - - -class Sentinel: - def __repr__(self): - return "<deprecated sentinel>" - - -_deprecation_readline_sentinel = Sentinel() - - -class ProvisionalCompleterWarning(FutureWarning): - """ - Exception raise by an experimental feature in this module. - - Wrap code in :any:`provisionalcompleter` context manager if you - are certain you want to use an unstable feature. 
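Editor's note: ProvisionalCompleterWarning is promoted to an error just below (warnings.filterwarnings('error', ...)), so the experimental APIs raise unless wrapped in the provisionalcompleter() context manager. The same opt-in pattern in miniature, with illustrative names of our own:

import warnings
from contextlib import contextmanager

class ExperimentalWarning(FutureWarning):
    pass

# experimental APIs raise by default...
warnings.filterwarnings('error', category=ExperimentalWarning)

@contextmanager
def experimental(action='ignore'):
    # ...unless the caller explicitly opts in for the duration of a block
    with warnings.catch_warnings():
        warnings.filterwarnings(action, category=ExperimentalWarning)
        yield

def unstable_api():
    warnings.warn('unstable API', ExperimentalWarning, stacklevel=2)
    return 42

with experimental():
    print(unstable_api())   # 42 — the warning is ignored inside the block
# calling unstable_api() outside the block would raise ExperimentalWarning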
- """ - pass - -warnings.filterwarnings('error', category=ProvisionalCompleterWarning) - -@contextmanager -def provisionalcompleter(action='ignore'): - """ - - - This context manager has to be used in any place where unstable completer - behavior and API may be called. - - >>> with provisionalcompleter(): - ... completer.do_experimental_things() # works - - >>> completer.do_experimental_things() # raises. - - .. note:: - - Unstable - - By using this context manager you agree that the API in use may change - without warning, and that you won't complain if they do so. - - You also understand that, if the API is not to your liking, you should report - a bug to explain your use case upstream. - - We'll be happy to get your feedback, feature requests, and improvements on - any of the unstable APIs! - """ - with warnings.catch_warnings(): - warnings.filterwarnings(action, category=ProvisionalCompleterWarning) - yield - - -def has_open_quotes(s): - """Return whether a string has open quotes. - - This simply counts whether the number of quote characters of either type in - the string is odd. - - Returns - ------- - If there is an open quote, the quote character is returned. Else, return - False. - """ - # We check " first, then ', so complex cases with nested quotes will get - # the " to take precedence. - if s.count('"') % 2: - return '"' - elif s.count("'") % 2: - return "'" - else: - return False - - -def protect_filename(s, protectables=PROTECTABLES): - """Escape a string to protect certain characters.""" - if set(s) & set(protectables): - if sys.platform == "win32": - return '"' + s + '"' - else: - return "".join(("\\" + c if c in protectables else c) for c in s) - else: - return s - - -def expand_user(path:str) -> Tuple[str, bool, str]: - """Expand ``~``-style usernames in strings. - - This is similar to :func:`os.path.expanduser`, but it computes and returns - extra information that will be useful if the input was being used in - computing completions, and you wish to return the completions with the - original '~' instead of its expanded value. - - Parameters - ---------- - path : str - String to be expanded. If no ~ is present, the output is the same as the - input. - - Returns - ------- - newpath : str - Result of ~ expansion in the input path. - tilde_expand : bool - Whether any expansion was performed or not. - tilde_val : str - The value that ~ was replaced with. - """ - # Default values - tilde_expand = False - tilde_val = '' - newpath = path - - if path.startswith('~'): - tilde_expand = True - rest = len(path)-1 - newpath = os.path.expanduser(path) - if rest: - tilde_val = newpath[:-rest] - else: - tilde_val = newpath - - return newpath, tilde_expand, tilde_val - - -def compress_user(path:str, tilde_expand:bool, tilde_val:str) -> str: - """Does the opposite of expand_user, with its outputs. 
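A short usage sketch of the path helpers defined in this hunk; the output shown is illustrative for a POSIX system where ``~`` expands to ``/home/alice``:

    from IPython.core.completer import (expand_user, compress_user,
                                        protect_filename, has_open_quotes)

    newpath, did_expand, tilde_val = expand_user("~/notebooks")
    print(newpath, did_expand, tilde_val)
    # e.g. /home/alice/notebooks True /home/alice

    # compress_user is the inverse, used to present completions with the original '~'
    print(compress_user(newpath, did_expand, tilde_val))   # ~/notebooks

    print(protect_filename("My Documents"))        # My\ Documents on POSIX, quoted on win32
    print(has_open_quotes("print('unterminated"))  # ' (a single quote is still open)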
- """ - if tilde_expand: - return path.replace(tilde_val, '~') - else: - return path - - -def completions_sorting_key(word): - """key for sorting completions - - This does several things: - - - Demote any completions starting with underscores to the end - - Insert any %magic and %%cellmagic completions in the alphabetical order - by their name - """ - prio1, prio2 = 0, 0 - - if word.startswith('__'): - prio1 = 2 - elif word.startswith('_'): - prio1 = 1 - - if word.endswith('='): - prio1 = -1 - - if word.startswith('%%'): - # If there's another % in there, this is something else, so leave it alone - if not "%" in word[2:]: - word = word[2:] - prio2 = 2 - elif word.startswith('%'): - if not "%" in word[1:]: - word = word[1:] - prio2 = 1 - - return prio1, word, prio2 - - -class _FakeJediCompletion: - """ - This is a workaround to communicate to the UI that Jedi has crashed and to - report a bug. Will be used only id :any:`IPCompleter.debug` is set to true. - - Added in IPython 6.0 so should likely be removed for 7.0 - - """ - - def __init__(self, name): - - self.name = name - self.complete = name - self.type = 'crashed' - self.name_with_symbols = name - self.signature = '' - self._origin = 'fake' - - def __repr__(self): - return '<Fake completion object jedi has crashed>' - - -class Completion: - """ - Completion object used and return by IPython completers. - - .. warning:: - - Unstable - - This function is unstable, API may change without warning. - It will also raise unless use in proper context manager. - - This act as a middle ground :any:`Completion` object between the - :any:`jedi.api.classes.Completion` object and the Prompt Toolkit completion - object. While Jedi need a lot of information about evaluator and how the - code should be ran/inspected, PromptToolkit (and other frontend) mostly - need user facing information. - - - Which range should be replaced replaced by what. - - Some metadata (like completion type), or meta information to displayed to - the use user. - - For debugging purpose we can also store the origin of the completion (``jedi``, - ``IPython.python_matches``, ``IPython.magics_matches``...). - """ - - __slots__ = ['start', 'end', 'text', 'type', 'signature', '_origin'] - - def __init__(self, start: int, end: int, text: str, *, type: str=None, _origin='', signature='') -> None: - warnings.warn("``Completion`` is a provisional API (as of IPython 6.0). " - "It may change without warnings. " - "Use in corresponding context manager.", - category=ProvisionalCompleterWarning, stacklevel=2) - - self.start = start - self.end = end - self.text = text - self.type = type - self.signature = signature - self._origin = _origin - - def __repr__(self): - return '<Completion start=%s end=%s text=%r type=%r, signature=%r,>' % \ - (self.start, self.end, self.text, self.type or '?', self.signature or '?') - - def __eq__(self, other)->Bool: - """ - Equality and hash do not hash the type (as some completer may not be - able to infer the type), but are use to (partially) de-duplicate - completion. - - Completely de-duplicating completion is a bit tricker that just - comparing as it depends on surrounding text, which Completions are not - aware of. - """ - return self.start == other.start and \ - self.end == other.end and \ - self.text == other.text - - def __hash__(self): - return hash((self.start, self.end, self.text)) - - -_IC = Iterable[Completion] - - -def _deduplicate_completions(text: str, completions: _IC)-> _IC: - """ - Deduplicate a set of completions. - - .. 
warning:: - - Unstable - - This function is unstable, API may change without warning. - - Parameters - ---------- - text: str - text that should be completed. - completions: Iterator[Completion] - iterator over the completions to deduplicate - - Yields - ------ - `Completions` objects - - - Completions coming from multiple sources, may be different but end up having - the same effect when applied to ``text``. If this is the case, this will - consider completions as equal and only emit the first encountered. - - Not folded in `completions()` yet for debugging purpose, and to detect when - the IPython completer does return things that Jedi does not, but should be - at some point. - """ - completions = list(completions) - if not completions: - return - - new_start = min(c.start for c in completions) - new_end = max(c.end for c in completions) - - seen = set() - for c in completions: - new_text = text[new_start:c.start] + c.text + text[c.end:new_end] - if new_text not in seen: - yield c - seen.add(new_text) - - -def rectify_completions(text: str, completions: _IC, *, _debug=False)->_IC: - """ - Rectify a set of completions to all have the same ``start`` and ``end`` - - .. warning:: - - Unstable - - This function is unstable, API may change without warning. - It will also raise unless use in proper context manager. - - Parameters - ---------- - text: str - text that should be completed. - completions: Iterator[Completion] - iterator over the completions to rectify - - - :any:`jedi.api.classes.Completion` s returned by Jedi may not have the same start and end, though - the Jupyter Protocol requires them to behave like so. This will readjust - the completion to have the same ``start`` and ``end`` by padding both - extremities with surrounding text. - - During stabilisation should support a ``_debug`` option to log which - completion are return by the IPython completer and not found in Jedi in - order to make upstream bug report. - """ - warnings.warn("`rectify_completions` is a provisional API (as of IPython 6.0). " - "It may change without warnings. " - "Use in corresponding context manager.", - category=ProvisionalCompleterWarning, stacklevel=2) - - completions = list(completions) - if not completions: - return - starts = (c.start for c in completions) - ends = (c.end for c in completions) - - new_start = min(starts) - new_end = max(ends) - - seen_jedi = set() - seen_python_matches = set() - for c in completions: - new_text = text[new_start:c.start] + c.text + text[c.end:new_end] - if c._origin == 'jedi': - seen_jedi.add(new_text) - elif c._origin == 'IPCompleter.python_matches': - seen_python_matches.add(new_text) - yield Completion(new_start, new_end, new_text, type=c.type, _origin=c._origin, signature=c.signature) - diff = seen_python_matches.difference(seen_jedi) - if diff and _debug: - print('IPython.python matches have extras:', diff) - - -if sys.platform == 'win32': - DELIMS = ' \t\n`!@#$^&*()=+[{]}|;\'",<>?' -else: - DELIMS = ' \t\n`!@#$^&*()=+[{]}\\|;:\'",<>?' - -GREEDY_DELIMS = ' =\r\n' - - -class CompletionSplitter(object): - """An object to split an input line in a manner similar to readline. - - By having our own implementation, we can expose readline-like completion in - a uniform manner to all frontends. This object only needs to be given the - line of text to be split and the cursor position on said line, and it - returns the 'word' to be completed on at the cursor after splitting the - entire line. 
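``CompletionSplitter`` (its body continues just below) behaves as the docstring describes: given a line and a cursor position it returns only the token under the cursor, split on the platform ``DELIMS`` shown above. A small usage sketch:

    from IPython.core.completer import CompletionSplitter

    splitter = CompletionSplitter()

    # With no cursor position, the end of the line is assumed.
    print(splitter.split_line("print(os.pa"))        # os.pa
    # Only the text up to the cursor is considered.
    print(splitter.split_line("run foo.py bar", 7))  # foo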
- - What characters are used as splitting delimiters can be controlled by - setting the ``delims`` attribute (this is a property that internally - automatically builds the necessary regular expression)""" - - # Private interface - - # A string of delimiter characters. The default value makes sense for - # IPython's most typical usage patterns. - _delims = DELIMS - - # The expression (a normal string) to be compiled into a regular expression - # for actual splitting. We store it as an attribute mostly for ease of - # debugging, since this type of code can be so tricky to debug. - _delim_expr = None - - # The regular expression that does the actual splitting - _delim_re = None - - def __init__(self, delims=None): - delims = CompletionSplitter._delims if delims is None else delims - self.delims = delims - - @property - def delims(self): - """Return the string of delimiter characters.""" - return self._delims - - @delims.setter - def delims(self, delims): - """Set the delimiters for line splitting.""" - expr = '[' + ''.join('\\'+ c for c in delims) + ']' - self._delim_re = re.compile(expr) - self._delims = delims - self._delim_expr = expr - - def split_line(self, line, cursor_pos=None): - """Split a line of text with a cursor at the given position. - """ - l = line if cursor_pos is None else line[:cursor_pos] - return self._delim_re.split(l)[-1] - - - -class Completer(Configurable): - - greedy = Bool(False, - help="""Activate greedy completion - PENDING DEPRECTION. this is now mostly taken care of with Jedi. - - This will enable completion on elements of lists, results of function calls, etc., - but can be unsafe because the code is actually evaluated on TAB. - """ - ).tag(config=True) - - use_jedi = Bool(default_value=JEDI_INSTALLED, - help="Experimental: Use Jedi to generate autocompletions. " - "Default to True if jedi is installed.").tag(config=True) - - jedi_compute_type_timeout = Int(default_value=400, - help="""Experimental: restrict time (in milliseconds) during which Jedi can compute types. - Set to 0 to stop computing types. Non-zero value lower than 100ms may hurt - performance by preventing jedi to build its cache. - """).tag(config=True) - - debug = Bool(default_value=False, - help='Enable debug for the Completer. Mostly print extra ' - 'information for experimental jedi integration.')\ - .tag(config=True) - - backslash_combining_completions = Bool(True, - help="Enable unicode completions, e.g. \\alpha<tab> . " - "Includes completion of latex commands, unicode names, and expanding " - "unicode characters back to latex commands.").tag(config=True) - - - - def __init__(self, namespace=None, global_namespace=None, **kwargs): - """Create a new completer for the command line. - - Completer(namespace=ns, global_namespace=ns2) -> completer instance. - - If unspecified, the default namespace where completions are performed - is __main__ (technically, __main__.__dict__). Namespaces should be - given as dictionaries. - - An optional second namespace can be given. This allows the completer - to handle cases where both the local and global scopes need to be - distinguished. - """ - - # Don't bind to namespace quite yet, but flag whether the user wants a - # specific namespace or to use __main__.__dict__. This will allow us - # to bind to __main__.__dict__ at completion time, not now. 
- if namespace is None: - self.use_main_ns = True - else: - self.use_main_ns = False - self.namespace = namespace - - # The global namespace, if given, can be bound directly - if global_namespace is None: - self.global_namespace = {} - else: - self.global_namespace = global_namespace - - self.custom_matchers = [] - - super(Completer, self).__init__(**kwargs) - - def complete(self, text, state): - """Return the next possible completion for 'text'. - - This is called successively with state == 0, 1, 2, ... until it - returns None. The completion should begin with 'text'. - - """ - if self.use_main_ns: - self.namespace = __main__.__dict__ - - if state == 0: - if "." in text: - self.matches = self.attr_matches(text) - else: - self.matches = self.global_matches(text) - try: - return self.matches[state] - except IndexError: - return None - - def global_matches(self, text): - """Compute matches when text is a simple name. - - Return a list of all keywords, built-in functions and names currently - defined in self.namespace or self.global_namespace that match. - - """ - matches = [] - match_append = matches.append - n = len(text) - for lst in [keyword.kwlist, - builtin_mod.__dict__.keys(), - self.namespace.keys(), - self.global_namespace.keys()]: - for word in lst: - if word[:n] == text and word != "__builtins__": - match_append(word) - - snake_case_re = re.compile(r"[^_]+(_[^_]+)+?\Z") - for lst in [self.namespace.keys(), - self.global_namespace.keys()]: - shortened = {"_".join([sub[0] for sub in word.split('_')]) : word - for word in lst if snake_case_re.match(word)} - for word in shortened.keys(): - if word[:n] == text and word != "__builtins__": - match_append(shortened[word]) - return matches - - def attr_matches(self, text): - """Compute matches when text contains a dot. - - Assuming the text is of the form NAME.NAME....[NAME], and is - evaluatable in self.namespace or self.global_namespace, it will be - evaluated and its attributes (as revealed by dir()) are used as - possible completions. (For class instances, class members are - also considered.) - - WARNING: this can still invoke arbitrary C code, if an object - with a __getattr__ hook is evaluated. - - """ - - # Another option, seems to work great. Catches things like ''.<tab> - m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text) - - if m: - expr, attr = m.group(1, 3) - elif self.greedy: - m2 = re.match(r"(.+)\.(\w*)$", self.line_buffer) - if not m2: - return [] - expr, attr = m2.group(1,2) - else: - return [] - - try: - obj = eval(expr, self.namespace) - except: - try: - obj = eval(expr, self.global_namespace) - except: - return [] - - if self.limit_to__all__ and hasattr(obj, '__all__'): - words = get__all__entries(obj) - else: - words = dir2(obj) - - try: - words = generics.complete_object(obj, words) - except TryNext: - pass - except AssertionError: - raise - except Exception: - # Silence errors from completion function - #raise # dbg - pass - # Build match list to return - n = len(attr) - return [u"%s.%s" % (expr, w) for w in words if w[:n] == attr ] - - -def get__all__entries(obj): - """returns the strings in the __all__ attribute""" - try: - words = getattr(obj, '__all__') - except: - return [] - - return [w for w in words if isinstance(w, str)] - - -def match_dict_keys(keys: List[str], prefix: str, delims: str): - """Used by dict_key_matches, matching the prefix to a list of keys - - Parameters - ========== - keys: - list of keys in dictionary currently being completed. - prefix: - Part of the text already typed by the user. e.g. 
`mydict[b'fo` - delims: - String of delimiters to consider when finding the current key. - - Returns - ======= - - A tuple of three elements: ``quote``, ``token_start``, ``matched``, with - ``quote`` being the quote that need to be used to close current string. - ``token_start`` the position where the replacement should start occurring, - ``matches`` a list of replacement/completion - - """ - if not prefix: - return None, 0, [repr(k) for k in keys - if isinstance(k, (str, bytes))] - quote_match = re.search('["\']', prefix) - quote = quote_match.group() - try: - prefix_str = eval(prefix + quote, {}) - except Exception: - return None, 0, [] - - pattern = '[^' + ''.join('\\' + c for c in delims) + ']*$' - token_match = re.search(pattern, prefix, re.UNICODE) - token_start = token_match.start() - token_prefix = token_match.group() - - matched = [] - for key in keys: - try: - if not key.startswith(prefix_str): - continue - except (AttributeError, TypeError, UnicodeError): - # Python 3+ TypeError on b'a'.startswith('a') or vice-versa - continue - - # reformat remainder of key to begin with prefix - rem = key[len(prefix_str):] - # force repr wrapped in ' - rem_repr = repr(rem + '"') if isinstance(rem, str) else repr(rem + b'"') - if rem_repr.startswith('u') and prefix[0] not in 'uU': - # Found key is unicode, but prefix is Py2 string. - # Therefore attempt to interpret key as string. - try: - rem_repr = repr(rem.encode('ascii') + '"') - except UnicodeEncodeError: - continue - - rem_repr = rem_repr[1 + rem_repr.index("'"):-2] - if quote == '"': - # The entered prefix is quoted with ", - # but the match is quoted with '. - # A contained " hence needs escaping for comparison: - rem_repr = rem_repr.replace('"', '\\"') - - # then reinsert prefix from start of token - matched.append('%s%s' % (token_prefix, rem_repr)) - return quote, token_start, matched - - -def cursor_to_position(text:str, line:int, column:int)->int: - """ - - Convert the (line,column) position of the cursor in text to an offset in a - string. - - Parameters - ---------- - - text : str - The text in which to calculate the cursor offset - line : int - Line of the cursor; 0-indexed - column : int - Column of the cursor 0-indexed - - Return - ------ - Position of the cursor in ``text``, 0-indexed. - - See Also - -------- - position_to_cursor: reciprocal of this function - - """ - lines = text.split('\n') - assert line <= len(lines), '{} <= {}'.format(str(line), str(len(lines))) - - return sum(len(l) + 1 for l in lines[:line]) + column - -def position_to_cursor(text:str, offset:int)->Tuple[int, int]: - """ - Convert the position of the cursor in text (0 indexed) to a line - number(0-indexed) and a column number (0-indexed) pair - - Position should be a valid position in ``text``. - - Parameters - ---------- - - text : str - The text in which to calculate the cursor offset - offset : int - Position of the cursor in ``text``, 0-indexed. - - Return - ------ - (line, column) : (int, int) - Line of the cursor; 0-indexed, column of the cursor 0-indexed - - - See Also - -------- - cursor_to_position : reciprocal of this function - - - """ - - assert 0 <= offset <= len(text) , "0 <= %s <= %s" % (offset , len(text)) - - before = text[:offset] - blines = before.split('\n') # ! 
splitnes trim trailing \n - line = before.count('\n') - col = len(blines[-1]) - return line, col - - -def _safe_isinstance(obj, module, class_name): - """Checks if obj is an instance of module.class_name if loaded - """ - return (module in sys.modules and - isinstance(obj, getattr(import_module(module), class_name))) - - -def back_unicode_name_matches(text): - u"""Match unicode characters back to unicode name - - This does ``☃`` -> ``\\snowman`` - - Note that snowman is not a valid python3 combining character but will be expanded. - Though it will not recombine back to the snowman character by the completion machinery. - - This will not either back-complete standard sequences like \\n, \\b ... - - Used on Python 3 only. - """ - if len(text)<2: - return u'', () - maybe_slash = text[-2] - if maybe_slash != '\\': - return u'', () - - char = text[-1] - # no expand on quote for completion in strings. - # nor backcomplete standard ascii keys - if char in string.ascii_letters or char in ['"',"'"]: - return u'', () - try : - unic = unicodedata.name(char) - return '\\'+char,['\\'+unic] - except KeyError: - pass - return u'', () - -def back_latex_name_matches(text:str): - """Match latex characters back to unicode name - - This does ``\\ℵ`` -> ``\\aleph`` - - Used on Python 3 only. - """ - if len(text)<2: - return u'', () - maybe_slash = text[-2] - if maybe_slash != '\\': - return u'', () - - - char = text[-1] - # no expand on quote for completion in strings. - # nor backcomplete standard ascii keys - if char in string.ascii_letters or char in ['"',"'"]: - return u'', () - try : - latex = reverse_latex_symbol[char] - # '\\' replace the \ as well - return '\\'+char,[latex] - except KeyError: - pass - return u'', () - - -def _formatparamchildren(parameter) -> str: - """ - Get parameter name and value from Jedi Private API - - Jedi does not expose a simple way to get `param=value` from its API. - - Parameter - ========= - - parameter: - Jedi's function `Param` - - Returns - ======= - - A string like 'a', 'b=1', '*args', '**kwargs' - - - """ - description = parameter.description - if not description.startswith('param '): - raise ValueError('Jedi function parameter description have change format.' - 'Expected "param ...", found %r".' % description) - return description[6:] - -def _make_signature(completion)-> str: - """ - Make the signature from a jedi completion - - Parameter - ========= - - completion: jedi.Completion - object does not complete a function type - - Returns - ======= - - a string consisting of the function signature, with the parenthesis but - without the function name. 
example: - `(a, *args, b=1, **kwargs)` - - """ - - return '(%s)'% ', '.join([f for f in (_formatparamchildren(p) for p in completion.params) if f]) - # it looks like this might work on jedi 0.17 - if hasattr(completion, 'get_signatures'): - signatures = completion.get_signatures() - if not signatures: - return '(?)' - - c0 = completion.get_signatures()[0] - return '('+c0.to_string().split('(', maxsplit=1)[1] - - return '(%s)'% ', '.join([f for f in (_formatparamchildren(p) for signature in completion.get_signatures() - for p in signature.defined_names()) if f]) - -class IPCompleter(Completer): - """Extension of the completer class with IPython-specific features""" - - _names = None - - @observe('greedy') - def _greedy_changed(self, change): - """update the splitter and readline delims when greedy is changed""" - if change['new']: - self.splitter.delims = GREEDY_DELIMS - else: - self.splitter.delims = DELIMS - - dict_keys_only = Bool(False, - help="""Whether to show dict key matches only""") - - merge_completions = Bool(True, - help="""Whether to merge completion results into a single list - - If False, only the completion results from the first non-empty - completer will be returned. - """ - ).tag(config=True) - omit__names = Enum((0,1,2), default_value=2, - help="""Instruct the completer to omit private method names - - Specifically, when completing on ``object.<tab>``. - - When 2 [default]: all names that start with '_' will be excluded. - - When 1: all 'magic' names (``__foo__``) will be excluded. - - When 0: nothing will be excluded. - """ - ).tag(config=True) - limit_to__all__ = Bool(False, - help=""" - DEPRECATED as of version 5.0. - - Instruct the completer to use __all__ for the completion - - Specifically, when completing on ``object.<tab>``. - - When True: only those names in obj.__all__ will be included. - - When False [default]: the __all__ attribute is ignored - """, - ).tag(config=True) - - @observe('limit_to__all__') - def _limit_to_all_changed(self, change): - warnings.warn('`IPython.core.IPCompleter.limit_to__all__` configuration ' - 'value has been deprecated since IPython 5.0, will be made to have ' - 'no effects and then removed in future version of IPython.', - UserWarning) - - def __init__(self, shell=None, namespace=None, global_namespace=None, - use_readline=_deprecation_readline_sentinel, config=None, **kwargs): - """IPCompleter() -> completer - - Return a completer object. - - Parameters - ---------- - - shell - a pointer to the ipython shell itself. This is needed - because this completer knows about magic functions, and those can - only be accessed via the ipython instance. - - namespace : dict, optional - an optional dict where completions are performed. - - global_namespace : dict, optional - secondary optional dict for completions, to - handle cases (such as IPython embedded inside functions) where - both Python scopes are visible. 
- - use_readline : bool, optional - DEPRECATED, ignored since IPython 6.0, will have no effects - """ - - self.magic_escape = ESC_MAGIC - self.splitter = CompletionSplitter() - - if use_readline is not _deprecation_readline_sentinel: - warnings.warn('The `use_readline` parameter is deprecated and ignored since IPython 6.0.', - DeprecationWarning, stacklevel=2) - - # _greedy_changed() depends on splitter and readline being defined: - Completer.__init__(self, namespace=namespace, global_namespace=global_namespace, - config=config, **kwargs) - - # List where completion matches will be stored - self.matches = [] - self.shell = shell - # Regexp to split filenames with spaces in them - self.space_name_re = re.compile(r'([^\\] )') - # Hold a local ref. to glob.glob for speed - self.glob = glob.glob - - # Determine if we are running on 'dumb' terminals, like (X)Emacs - # buffers, to avoid completion problems. - term = os.environ.get('TERM','xterm') - self.dumb_terminal = term in ['dumb','emacs'] - - # Special handling of backslashes needed in win32 platforms - if sys.platform == "win32": - self.clean_glob = self._clean_glob_win32 - else: - self.clean_glob = self._clean_glob - - #regexp to parse docstring for function signature - self.docstring_sig_re = re.compile(r'^[\w|\s.]+\(([^)]*)\).*') - self.docstring_kwd_re = re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)') - #use this if positional argument name is also needed - #= re.compile(r'[\s|\[]*(\w+)(?:\s*=?\s*.*)') - - self.magic_arg_matchers = [ - self.magic_config_matches, - self.magic_color_matches, - ] - - # This is set externally by InteractiveShell - self.custom_completers = None - - @property - def matchers(self): - """All active matcher routines for completion""" - if self.dict_keys_only: - return [self.dict_key_matches] - - if self.use_jedi: - return [ - *self.custom_matchers, - self.dict_key_matches, - self.file_matches, - self.magic_matches, - ] - else: - return [ - *self.custom_matchers, - self.dict_key_matches, - self.python_matches, - self.file_matches, - self.magic_matches, - self.python_func_kw_matches, - ] - - def all_completions(self, text) -> List[str]: - """ - Wrapper around the completion methods for the benefit of emacs. - """ - prefix = text.rpartition('.')[0] - with provisionalcompleter(): - return ['.'.join([prefix, c.text]) if prefix and self.use_jedi else c.text - for c in self.completions(text, len(text))] - - return self.complete(text)[1] - - def _clean_glob(self, text): - return self.glob("%s*" % text) - - def _clean_glob_win32(self,text): - return [f.replace("\\","/") - for f in self.glob("%s*" % text)] - - def file_matches(self, text): - """Match filenames, expanding ~USER type strings. - - Most of the seemingly convoluted logic in this completer is an - attempt to handle filenames with spaces in them. And yet it's not - quite perfect, because Python's readline doesn't expose all of the - GNU readline details needed for this to be done correctly. - - For a filename with a space in it, the printed completions will be - only the parts after what's already been typed (instead of the - full completions, as is normally done). I don't think with the - current (as of Python 2.3) Python readline it's possible to do - better.""" - - # chars that require escaping with backslash - i.e. chars - # that readline treats incorrectly as delimiters, but we - # don't want to treat as delimiters in filename matching - # when escaped with backslash - if text.startswith('!'): - text = text[1:] - text_prefix = u'!' 
- else: - text_prefix = u'' - - text_until_cursor = self.text_until_cursor - # track strings with open quotes - open_quotes = has_open_quotes(text_until_cursor) - - if '(' in text_until_cursor or '[' in text_until_cursor: - lsplit = text - else: - try: - # arg_split ~ shlex.split, but with unicode bugs fixed by us - lsplit = arg_split(text_until_cursor)[-1] - except ValueError: - # typically an unmatched ", or backslash without escaped char. - if open_quotes: - lsplit = text_until_cursor.split(open_quotes)[-1] - else: - return [] - except IndexError: - # tab pressed on empty line - lsplit = "" - - if not open_quotes and lsplit != protect_filename(lsplit): - # if protectables are found, do matching on the whole escaped name - has_protectables = True - text0,text = text,lsplit - else: - has_protectables = False - text = os.path.expanduser(text) - - if text == "": - return [text_prefix + protect_filename(f) for f in self.glob("*")] - - # Compute the matches from the filesystem - if sys.platform == 'win32': - m0 = self.clean_glob(text) - else: - m0 = self.clean_glob(text.replace('\\', '')) - - if has_protectables: - # If we had protectables, we need to revert our changes to the - # beginning of filename so that we don't double-write the part - # of the filename we have so far - len_lsplit = len(lsplit) - matches = [text_prefix + text0 + - protect_filename(f[len_lsplit:]) for f in m0] - else: - if open_quotes: - # if we have a string with an open quote, we don't need to - # protect the names beyond the quote (and we _shouldn't_, as - # it would cause bugs when the filesystem call is made). - matches = m0 if sys.platform == "win32" else\ - [protect_filename(f, open_quotes) for f in m0] - else: - matches = [text_prefix + - protect_filename(f) for f in m0] - - # Mark directories in input list by appending '/' to their names. - return [x+'/' if os.path.isdir(x) else x for x in matches] - - def magic_matches(self, text): - """Match magics""" - # Get all shell magics now rather than statically, so magics loaded at - # runtime show up too. - lsm = self.shell.magics_manager.lsmagic() - line_magics = lsm['line'] - cell_magics = lsm['cell'] - pre = self.magic_escape - pre2 = pre+pre - - explicit_magic = text.startswith(pre) - - # Completion logic: - # - user gives %%: only do cell magics - # - user gives %: do both line and cell magics - # - no prefix: do both - # In other words, line magics are skipped if the user gives %% explicitly - # - # We also exclude magics that match any currently visible names: - # https://github.com/ipython/ipython/issues/4877, unless the user has - # typed a %: - # https://github.com/ipython/ipython/issues/10754 - bare_text = text.lstrip(pre) - global_matches = self.global_matches(bare_text) - if not explicit_magic: - def matches(magic): - """ - Filter magics, in particular remove magics that match - a name present in global namespace. 
- """ - return ( magic.startswith(bare_text) and - magic not in global_matches ) - else: - def matches(magic): - return magic.startswith(bare_text) - - comp = [ pre2+m for m in cell_magics if matches(m)] - if not text.startswith(pre2): - comp += [ pre+m for m in line_magics if matches(m)] - - return comp - - def magic_config_matches(self, text:str) -> List[str]: - """ Match class names and attributes for %config magic """ - texts = text.strip().split() - - if len(texts) > 0 and (texts[0] == 'config' or texts[0] == '%config'): - # get all configuration classes - classes = sorted(set([ c for c in self.shell.configurables - if c.__class__.class_traits(config=True) - ]), key=lambda x: x.__class__.__name__) - classnames = [ c.__class__.__name__ for c in classes ] - - # return all classnames if config or %config is given - if len(texts) == 1: - return classnames - - # match classname - classname_texts = texts[1].split('.') - classname = classname_texts[0] - classname_matches = [ c for c in classnames - if c.startswith(classname) ] - - # return matched classes or the matched class with attributes - if texts[1].find('.') < 0: - return classname_matches - elif len(classname_matches) == 1 and \ - classname_matches[0] == classname: - cls = classes[classnames.index(classname)].__class__ - help = cls.class_get_help() - # strip leading '--' from cl-args: - help = re.sub(re.compile(r'^--', re.MULTILINE), '', help) - return [ attr.split('=')[0] - for attr in help.strip().splitlines() - if attr.startswith(texts[1]) ] - return [] - - def magic_color_matches(self, text:str) -> List[str] : - """ Match color schemes for %colors magic""" - texts = text.split() - if text.endswith(' '): - # .split() strips off the trailing whitespace. Add '' back - # so that: '%colors ' -> ['%colors', ''] - texts.append('') - - if len(texts) == 2 and (texts[0] == 'colors' or texts[0] == '%colors'): - prefix = texts[1] - return [ color for color in InspectColors.keys() - if color.startswith(prefix) ] - return [] - - def _jedi_matches(self, cursor_column:int, cursor_line:int, text:str): - """ - - Return a list of :any:`jedi.api.Completions` object from a ``text`` and - cursor position. - - Parameters - ---------- - cursor_column : int - column position of the cursor in ``text``, 0-indexed. - cursor_line : int - line position of the cursor in ``text``, 0-indexed - text : str - text to complete - - Debugging - --------- - - If ``IPCompleter.debug`` is ``True`` may return a :any:`_FakeJediCompletion` - object containing a string with the Jedi debug information attached. 
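Before ``_jedi_matches`` (whose body follows) is invoked, the completer translates between flat string offsets and the 0-indexed (line, column) pairs jedi expects, using the ``cursor_to_position`` / ``position_to_cursor`` helpers defined earlier in this file. A small round-trip sketch:

    from IPython.core.completer import cursor_to_position, position_to_cursor

    text = "import os\nos.path.jo"
    offset = len(text)                       # cursor at the very end of the text
    line, col = position_to_cursor(text, offset)
    print(line, col)                         # 1 10  (both 0-indexed)
    assert cursor_to_position(text, line, col) == offset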
- """ - namespaces = [self.namespace] - if self.global_namespace is not None: - namespaces.append(self.global_namespace) - - completion_filter = lambda x:x - offset = cursor_to_position(text, cursor_line, cursor_column) - # filter output if we are completing for object members - if offset: - pre = text[offset-1] - if pre == '.': - if self.omit__names == 2: - completion_filter = lambda c:not c.name.startswith('_') - elif self.omit__names == 1: - completion_filter = lambda c:not (c.name.startswith('__') and c.name.endswith('__')) - elif self.omit__names == 0: - completion_filter = lambda x:x - else: - raise ValueError("Don't understand self.omit__names == {}".format(self.omit__names)) - - interpreter = jedi.Interpreter(text[:offset], namespaces, column=cursor_column, line=cursor_line + 1) - try_jedi = True - - try: - # find the first token in the current tree -- if it is a ' or " then we are in a string - completing_string = False - try: - first_child = next(c for c in interpreter._get_module().tree_node.children if hasattr(c, 'value')) - except StopIteration: - pass - else: - # note the value may be ', ", or it may also be ''' or """, or - # in some cases, """what/you/typed..., but all of these are - # strings. - completing_string = len(first_child.value) > 0 and first_child.value[0] in {"'", '"'} - - # if we are in a string jedi is likely not the right candidate for - # now. Skip it. - try_jedi = not completing_string - except Exception as e: - # many of things can go wrong, we are using private API just don't crash. - if self.debug: - print("Error detecting if completing a non-finished string :", e, '|') - - if not try_jedi: - return [] - try: - return filter(completion_filter, interpreter.completions()) - except Exception as e: - if self.debug: - return [_FakeJediCompletion('Oops Jedi has crashed, please report a bug with the following:\n"""\n%s\ns"""' % (e))] - else: - return [] - - def python_matches(self, text): - """Match attributes or global python names""" - if "." in text: - try: - matches = self.attr_matches(text) - if text.endswith('.') and self.omit__names: - if self.omit__names == 1: - # true if txt is _not_ a __ name, false otherwise: - no__name = (lambda txt: - re.match(r'.*\.__.*?__',txt) is None) - else: - # true if txt is _not_ a _ name, false otherwise: - no__name = (lambda txt: - re.match(r'\._.*?',txt[txt.rindex('.'):]) is None) - matches = filter(no__name, matches) - except NameError: - # catches <undefined attributes>.<tab> - matches = [] - else: - matches = self.global_matches(text) - return matches - - def _default_arguments_from_docstring(self, doc): - """Parse the first line of docstring for call signature. - - Docstring should be of the form 'min(iterable[, key=func])\n'. - It can also parse cython docstring of the form - 'Minuit.migrad(self, int ncall=10000, resume=True, int nsplit=1)'. 
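The docstring-based signature fallback just described can be exercised directly. A hedged sketch; these are private helpers, and ``IPCompleter(shell=None)`` is only sufficient because the parsing methods never touch the shell:

    from IPython.core.completer import IPCompleter

    comp = IPCompleter(shell=None)

    print(comp._default_arguments_from_docstring('min(iterable[, key=func])\n'))
    # ['key']
    print(comp._default_arguments_from_docstring(
        'Minuit.migrad(self, int ncall=10000, resume=True, int nsplit=1)'))
    # ['ncall', 'resume', 'nsplit']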
- """ - if doc is None: - return [] - - #care only the firstline - line = doc.lstrip().splitlines()[0] - - #p = re.compile(r'^[\w|\s.]+\(([^)]*)\).*') - #'min(iterable[, key=func])\n' -> 'iterable[, key=func]' - sig = self.docstring_sig_re.search(line) - if sig is None: - return [] - # iterable[, key=func]' -> ['iterable[' ,' key=func]'] - sig = sig.groups()[0].split(',') - ret = [] - for s in sig: - #re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)') - ret += self.docstring_kwd_re.findall(s) - return ret - - def _default_arguments(self, obj): - """Return the list of default arguments of obj if it is callable, - or empty list otherwise.""" - call_obj = obj - ret = [] - if inspect.isbuiltin(obj): - pass - elif not (inspect.isfunction(obj) or inspect.ismethod(obj)): - if inspect.isclass(obj): - #for cython embedsignature=True the constructor docstring - #belongs to the object itself not __init__ - ret += self._default_arguments_from_docstring( - getattr(obj, '__doc__', '')) - # for classes, check for __init__,__new__ - call_obj = (getattr(obj, '__init__', None) or - getattr(obj, '__new__', None)) - # for all others, check if they are __call__able - elif hasattr(obj, '__call__'): - call_obj = obj.__call__ - ret += self._default_arguments_from_docstring( - getattr(call_obj, '__doc__', '')) - - _keeps = (inspect.Parameter.KEYWORD_ONLY, - inspect.Parameter.POSITIONAL_OR_KEYWORD) - - try: - sig = inspect.signature(obj) - ret.extend(k for k, v in sig.parameters.items() if - v.kind in _keeps) - except ValueError: - pass - - return list(set(ret)) - - def python_func_kw_matches(self,text): - """Match named parameters (kwargs) of the last open function""" - - if "." in text: # a parameter cannot be dotted - return [] - try: regexp = self.__funcParamsRegex - except AttributeError: - regexp = self.__funcParamsRegex = re.compile(r''' - '.*?(?<!\\)' | # single quoted strings or - ".*?(?<!\\)" | # double quoted strings or - \w+ | # identifier - \S # other characters - ''', re.VERBOSE | re.DOTALL) - # 1. find the nearest identifier that comes before an unclosed - # parenthesis before the cursor - # e.g. for "foo (1+bar(x), pa<cursor>,a=1)", the candidate is "foo" - tokens = regexp.findall(self.text_until_cursor) - iterTokens = reversed(tokens); openPar = 0 - - for token in iterTokens: - if token == ')': - openPar -= 1 - elif token == '(': - openPar += 1 - if openPar > 0: - # found the last unclosed parenthesis - break - else: - return [] - # 2. Concatenate dotted names ("foo.bar" for "foo.bar(x, pa" ) - ids = [] - isId = re.compile(r'\w+$').match - - while True: - try: - ids.append(next(iterTokens)) - if not isId(ids[-1]): - ids.pop(); break - if not next(iterTokens) == '.': - break - except StopIteration: - break - - # Find all named arguments already assigned to, as to avoid suggesting - # them again - usedNamedArgs = set() - par_level = -1 - for token, next_token in zip(tokens, tokens[1:]): - if token == '(': - par_level += 1 - elif token == ')': - par_level -= 1 - - if par_level != 0: - continue - - if next_token != '=': - continue - - usedNamedArgs.add(token) - - argMatches = [] - try: - callableObj = '.'.join(ids[::-1]) - namedArgs = self._default_arguments(eval(callableObj, - self.namespace)) - - # Remove used named arguments from the list, no need to show twice - for namedArg in set(namedArgs) - usedNamedArgs: - if namedArg.startswith(text): - argMatches.append(u"%s=" %namedArg) - except: - pass - - return argMatches - - def dict_key_matches(self, text): - "Match string keys in a dictionary, after e.g. 
'foo[' " - def get_keys(obj): - # Objects can define their own completions by defining an - # _ipy_key_completions_() method. - method = get_real_method(obj, '_ipython_key_completions_') - if method is not None: - return method() - - # Special case some common in-memory dict-like types - if isinstance(obj, dict) or\ - _safe_isinstance(obj, 'pandas', 'DataFrame'): - try: - return list(obj.keys()) - except Exception: - return [] - elif _safe_isinstance(obj, 'numpy', 'ndarray') or\ - _safe_isinstance(obj, 'numpy', 'void'): - return obj.dtype.names or [] - return [] - - try: - regexps = self.__dict_key_regexps - except AttributeError: - dict_key_re_fmt = r'''(?x) - ( # match dict-referring expression wrt greedy setting - %s - ) - \[ # open bracket - \s* # and optional whitespace - ([uUbB]? # string prefix (r not handled) - (?: # unclosed string - '(?:[^']|(?<!\\)\\')* - | - "(?:[^"]|(?<!\\)\\")* - ) - )? - $ - ''' - regexps = self.__dict_key_regexps = { - False: re.compile(dict_key_re_fmt % r''' - # identifiers separated by . - (?!\d)\w+ - (?:\.(?!\d)\w+)* - '''), - True: re.compile(dict_key_re_fmt % ''' - .+ - ''') - } - - match = regexps[self.greedy].search(self.text_until_cursor) - if match is None: - return [] - - expr, prefix = match.groups() - try: - obj = eval(expr, self.namespace) - except Exception: - try: - obj = eval(expr, self.global_namespace) - except Exception: - return [] - - keys = get_keys(obj) - if not keys: - return keys - closing_quote, token_offset, matches = match_dict_keys(keys, prefix, self.splitter.delims) - if not matches: - return matches - - # get the cursor position of - # - the text being completed - # - the start of the key text - # - the start of the completion - text_start = len(self.text_until_cursor) - len(text) - if prefix: - key_start = match.start(2) - completion_start = key_start + token_offset - else: - key_start = completion_start = match.end() - - # grab the leading prefix, to make sure all completions start with `text` - if text_start > key_start: - leading = '' - else: - leading = text[text_start:completion_start] - - # the index of the `[` character - bracket_idx = match.end(1) - - # append closing quote and bracket as appropriate - # this is *not* appropriate if the opening quote or bracket is outside - # the text given to this method - suf = '' - continuation = self.line_buffer[len(self.text_until_cursor):] - if key_start > text_start and closing_quote: - # quotes were opened inside text, maybe close them - if continuation.startswith(closing_quote): - continuation = continuation[len(closing_quote):] - else: - suf += closing_quote - if bracket_idx > text_start: - # brackets were opened inside text, maybe close them - if not continuation.startswith(']'): - suf += ']' - - return [leading + k + suf for k in matches] - - def unicode_name_matches(self, text): - u"""Match Latex-like syntax for unicode characters base - on the name of the character. - - This does ``\\GREEK SMALL LETTER ETA`` -> ``η`` - - Works only on valid python 3 identifier, or on combining characters that - will combine to form a valid identifier. - - Used on Python 3 only. - """ - slashpos = text.rfind('\\') - if slashpos > -1: - s = text[slashpos+1:] - try : - unic = unicodedata.lookup(s) - # allow combining chars - if ('a'+unic).isidentifier(): - return '\\'+s,[unic] - except KeyError: - pass - return u'', [] - - - def latex_matches(self, text): - u"""Match Latex syntax for unicode characters. 
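``get_keys`` in the ``dict_key_matches`` hunk above first looks for an ``_ipython_key_completions_`` method, so arbitrary objects can advertise their own bracket completions. A minimal sketch of that hook; ``Config`` is an invented example class:

    class Config:
        def __init__(self, data):
            self._data = data

        def __getitem__(self, key):
            return self._data[key]

        def _ipython_key_completions_(self):
            # Whatever is returned here is offered when completing  cfg["<tab>
            return list(self._data)

    cfg = Config({"host": "localhost", "port": 8080})
    print(cfg._ipython_key_completions_())   # ['host', 'port']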
- - This does both ``\\alp`` -> ``\\alpha`` and ``\\alpha`` -> ``α`` - """ - slashpos = text.rfind('\\') - if slashpos > -1: - s = text[slashpos:] - if s in latex_symbols: - # Try to complete a full latex symbol to unicode - # \\alpha -> α - return s, [latex_symbols[s]] - else: - # If a user has partially typed a latex symbol, give them - # a full list of options \al -> [\aleph, \alpha] - matches = [k for k in latex_symbols if k.startswith(s)] - if matches: - return s, matches - return u'', [] - - def dispatch_custom_completer(self, text): - if not self.custom_completers: - return - - line = self.line_buffer - if not line.strip(): - return None - - # Create a little structure to pass all the relevant information about - # the current completion to any custom completer. - event = SimpleNamespace() - event.line = line - event.symbol = text - cmd = line.split(None,1)[0] - event.command = cmd - event.text_until_cursor = self.text_until_cursor - - # for foo etc, try also to find completer for %foo - if not cmd.startswith(self.magic_escape): - try_magic = self.custom_completers.s_matches( - self.magic_escape + cmd) - else: - try_magic = [] - - for c in itertools.chain(self.custom_completers.s_matches(cmd), - try_magic, - self.custom_completers.flat_matches(self.text_until_cursor)): - try: - res = c(event) - if res: - # first, try case sensitive match - withcase = [r for r in res if r.startswith(text)] - if withcase: - return withcase - # if none, then case insensitive ones are ok too - text_low = text.lower() - return [r for r in res if r.lower().startswith(text_low)] - except TryNext: - pass - except KeyboardInterrupt: - """ - If custom completer take too long, - let keyboard interrupt abort and return nothing. - """ - break - - return None - - def completions(self, text: str, offset: int)->Iterator[Completion]: - """ - Returns an iterator over the possible completions - - .. warning:: - - Unstable - - This function is unstable, API may change without warning. - It will also raise unless use in proper context manager. - - Parameters - ---------- - - text:str - Full text of the current input, multi line string. - offset:int - Integer representing the position of the cursor in ``text``. Offset - is 0-based indexed. - - Yields - ------ - :any:`Completion` object - - - The cursor on a text can either be seen as being "in between" - characters or "On" a character depending on the interface visible to - the user. For consistency the cursor being on "in between" characters X - and Y is equivalent to the cursor being "on" character Y, that is to say - the character the cursor is on is considered as being after the cursor. - - Combining characters may span more that one position in the - text. - - - .. note:: - - If ``IPCompleter.debug`` is :any:`True` will yield a ``--jedi/ipython--`` - fake Completion token to distinguish completion returned by Jedi - and usual IPython completion. - - .. note:: - - Completions are not completely deduplicated yet. If identical - completions are coming from different sources this function does not - ensure that each completion object will only be present once. - """ - warnings.warn("_complete is a provisional API (as of IPython 6.0). " - "It may change without warnings. 
" - "Use in corresponding context manager.", - category=ProvisionalCompleterWarning, stacklevel=2) - - seen = set() - try: - for c in self._completions(text, offset, _timeout=self.jedi_compute_type_timeout/1000): - if c and (c in seen): - continue - yield c - seen.add(c) - except KeyboardInterrupt: - """if completions take too long and users send keyboard interrupt, - do not crash and return ASAP. """ - pass - - def _completions(self, full_text: str, offset: int, *, _timeout)->Iterator[Completion]: - """ - Core completion module.Same signature as :any:`completions`, with the - extra `timeout` parameter (in seconds). - - - Computing jedi's completion ``.type`` can be quite expensive (it is a - lazy property) and can require some warm-up, more warm up than just - computing the ``name`` of a completion. The warm-up can be : - - - Long warm-up the first time a module is encountered after - install/update: actually build parse/inference tree. - - - first time the module is encountered in a session: load tree from - disk. - - We don't want to block completions for tens of seconds so we give the - completer a "budget" of ``_timeout`` seconds per invocation to compute - completions types, the completions that have not yet been computed will - be marked as "unknown" an will have a chance to be computed next round - are things get cached. - - Keep in mind that Jedi is not the only thing treating the completion so - keep the timeout short-ish as if we take more than 0.3 second we still - have lots of processing to do. - - """ - deadline = time.monotonic() + _timeout - - - before = full_text[:offset] - cursor_line, cursor_column = position_to_cursor(full_text, offset) - - matched_text, matches, matches_origin, jedi_matches = self._complete( - full_text=full_text, cursor_line=cursor_line, cursor_pos=cursor_column) - - iter_jm = iter(jedi_matches) - if _timeout: - for jm in iter_jm: - try: - type_ = jm.type - except Exception: - if self.debug: - print("Error in Jedi getting type of ", jm) - type_ = None - delta = len(jm.name_with_symbols) - len(jm.complete) - if type_ == 'function': - signature = _make_signature(jm) - else: - signature = '' - yield Completion(start=offset - delta, - end=offset, - text=jm.name_with_symbols, - type=type_, - signature=signature, - _origin='jedi') - - if time.monotonic() > deadline: - break - - for jm in iter_jm: - delta = len(jm.name_with_symbols) - len(jm.complete) - yield Completion(start=offset - delta, - end=offset, - text=jm.name_with_symbols, - type='<unknown>', # don't compute type for speed - _origin='jedi', - signature='') - - - start_offset = before.rfind(matched_text) - - # TODO: - # Suppress this, right now just for debug. - if jedi_matches and matches and self.debug: - yield Completion(start=start_offset, end=offset, text='--jedi/ipython--', - _origin='debug', type='none', signature='') - - # I'm unsure if this is always true, so let's assert and see if it - # crash - assert before.endswith(matched_text) - for m, t in zip(matches, matches_origin): - yield Completion(start=start_offset, end=offset, text=m, _origin=t, signature='', type='<unknown>') - - - def complete(self, text=None, line_buffer=None, cursor_pos=None): - """Find completions for the given text and line context. - - Note that both the text and the line_buffer are optional, but at least - one of them must be given. - - Parameters - ---------- - text : string, optional - Text to perform the completion on. If not given, the line buffer - is split using the instance's CompletionSplitter object. 
- - line_buffer : string, optional - If not given, the completer attempts to obtain the current line - buffer via readline. This keyword allows clients which are - requesting for text completions in non-readline contexts to inform - the completer of the entire text. - - cursor_pos : int, optional - Index of the cursor in the full line buffer. Should be provided by - remote frontends where kernel has no access to frontend state. - - Returns - ------- - text : str - Text that was actually used in the completion. - - matches : list - A list of completion matches. - - - .. note:: - - This API is likely to be deprecated and replaced by - :any:`IPCompleter.completions` in the future. - - - """ - warnings.warn('`Completer.complete` is pending deprecation since ' - 'IPython 6.0 and will be replaced by `Completer.completions`.', - PendingDeprecationWarning) - # potential todo, FOLD the 3rd throw away argument of _complete - # into the first 2 one. - return self._complete(line_buffer=line_buffer, cursor_pos=cursor_pos, text=text, cursor_line=0)[:2] - - def _complete(self, *, cursor_line, cursor_pos, line_buffer=None, text=None, - full_text=None) -> Tuple[str, List[str], List[str], Iterable[_FakeJediCompletion]]: - """ - - Like complete but can also returns raw jedi completions as well as the - origin of the completion text. This could (and should) be made much - cleaner but that will be simpler once we drop the old (and stateful) - :any:`complete` API. - - - With current provisional API, cursor_pos act both (depending on the - caller) as the offset in the ``text`` or ``line_buffer``, or as the - ``column`` when passing multiline strings this could/should be renamed - but would add extra noise. - """ - - # if the cursor position isn't given, the only sane assumption we can - # make is that it's at the end of the line (the common case) - if cursor_pos is None: - cursor_pos = len(line_buffer) if text is None else len(text) - - if self.use_main_ns: - self.namespace = __main__.__dict__ - - # if text is either None or an empty string, rely on the line buffer - if (not line_buffer) and full_text: - line_buffer = full_text.split('\n')[cursor_line] - if not text: # issue #11508: check line_buffer before calling split_line - text = self.splitter.split_line(line_buffer, cursor_pos) if line_buffer else '' - - if self.backslash_combining_completions: - # allow deactivation of these on windows. 
- base_text = text if not line_buffer else line_buffer[:cursor_pos] - latex_text, latex_matches = self.latex_matches(base_text) - if latex_matches: - return latex_text, latex_matches, ['latex_matches']*len(latex_matches), () - name_text = '' - name_matches = [] - # need to add self.fwd_unicode_match() function here when done - for meth in (self.unicode_name_matches, back_latex_name_matches, back_unicode_name_matches, self.fwd_unicode_match): - name_text, name_matches = meth(base_text) - if name_text: - return name_text, name_matches[:MATCHES_LIMIT], \ - [meth.__qualname__]*min(len(name_matches), MATCHES_LIMIT), () - - - # If no line buffer is given, assume the input text is all there was - if line_buffer is None: - line_buffer = text - - self.line_buffer = line_buffer - self.text_until_cursor = self.line_buffer[:cursor_pos] - - # Do magic arg matches - for matcher in self.magic_arg_matchers: - matches = list(matcher(line_buffer))[:MATCHES_LIMIT] - if matches: - origins = [matcher.__qualname__] * len(matches) - return text, matches, origins, () - - # Start with a clean slate of completions - matches = [] - - # FIXME: we should extend our api to return a dict with completions for - # different types of objects. The rlcomplete() method could then - # simply collapse the dict into a list for readline, but we'd have - # richer completion semantics in other environments. - completions = () - if self.use_jedi: - if not full_text: - full_text = line_buffer - completions = self._jedi_matches( - cursor_pos, cursor_line, full_text) - - if self.merge_completions: - matches = [] - for matcher in self.matchers: - try: - matches.extend([(m, matcher.__qualname__) - for m in matcher(text)]) - except: - # Show the ugly traceback if the matcher causes an - # exception, but do NOT crash the kernel! - sys.excepthook(*sys.exc_info()) - else: - for matcher in self.matchers: - matches = [(m, matcher.__qualname__) - for m in matcher(text)] - if matches: - break - - seen = set() - filtered_matches = set() - for m in matches: - t, c = m - if t not in seen: - filtered_matches.add(m) - seen.add(t) - - _filtered_matches = sorted(filtered_matches, key=lambda x: completions_sorting_key(x[0])) - - custom_res = [(m, 'custom') for m in self.dispatch_custom_completer(text) or []] - - _filtered_matches = custom_res or _filtered_matches - - _filtered_matches = _filtered_matches[:MATCHES_LIMIT] - _matches = [m[0] for m in _filtered_matches] - origins = [m[1] for m in _filtered_matches] - - self.matches = _matches - - return text, _matches, origins, completions - - def fwd_unicode_match(self, text:str) -> Tuple[str, list]: - if self._names is None: - self._names = [] - for c in range(0,0x10FFFF + 1): - try: - self._names.append(unicodedata.name(chr(c))) - except ValueError: - pass - - slashpos = text.rfind('\\') - # if text starts with slash - if slashpos > -1: - s = text[slashpos+1:] - candidates = [x for x in self._names if x.startswith(s)] - if candidates: - return s, candidates - else: - return '', () - - # if text does not start with slash - else: - return u'', () +"""Completion for IPython. + +This module started as fork of the rlcompleter module in the Python standard +library. The original enhancements made to rlcompleter have been sent +upstream and were accepted as of Python 2.3, + +This module now support a wide variety of completion mechanism both available +for normal classic Python code, as well as completer for IPython specific +Syntax like magics. 
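The forward latex/unicode matchers shown above in this hunk (``latex_matches``, ``unicode_name_matches``, ``fwd_unicode_match``) can be driven directly for a quick check; ``IPCompleter(shell=None)`` suffices since these matchers only look at the text. Note that the first ``fwd_unicode_match`` call builds the full unicode name table and can take a few seconds:

    from IPython.core.completer import IPCompleter

    comp = IPCompleter(shell=None)

    print(comp.latex_matches(r"\alpha"))   # ('\\alpha', ['α'])
    print(comp.latex_matches(r"\alp"))     # ('\\alp', ['\\alpha', ...])
    print(comp.unicode_name_matches(r"\GREEK SMALL LETTER ALPHA"))
    # ('\\GREEK SMALL LETTER ALPHA', ['α'])
    print(comp.fwd_unicode_match(r"\GREEK SMALL LETTER ALP")[1][:3])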
+ +Latex and Unicode completion +============================ + +IPython and compatible frontends not only can complete your code, but can help +you to input a wide range of characters. In particular we allow you to insert +a unicode character using the tab completion mechanism. + +Forward latex/unicode completion +-------------------------------- + +Forward completion allows you to easily type a unicode character using its latex +name, or unicode long description. To do so type a backslash follow by the +relevant name and press tab: + + +Using latex completion: + +.. code:: + + \\alpha<tab> + α + +or using unicode completion: + + +.. code:: + + \\greek small letter alpha<tab> + α + + +Only valid Python identifiers will complete. Combining characters (like arrow or +dots) are also available, unlike latex they need to be put after the their +counterpart that is to say, `F\\\\vec<tab>` is correct, not `\\\\vec<tab>F`. + +Some browsers are known to display combining characters incorrectly. + +Backward latex completion +------------------------- + +It is sometime challenging to know how to type a character, if you are using +IPython, or any compatible frontend you can prepend backslash to the character +and press `<tab>` to expand it to its latex form. + +.. code:: + + \\α<tab> + \\alpha + + +Both forward and backward completions can be deactivated by setting the +``Completer.backslash_combining_completions`` option to ``False``. + + +Experimental +============ + +Starting with IPython 6.0, this module can make use of the Jedi library to +generate completions both using static analysis of the code, and dynamically +inspecting multiple namespaces. Jedi is an autocompletion and static analysis +for Python. The APIs attached to this new mechanism is unstable and will +raise unless use in an :any:`provisionalcompleter` context manager. + +You will find that the following are experimental: + + - :any:`provisionalcompleter` + - :any:`IPCompleter.completions` + - :any:`Completion` + - :any:`rectify_completions` + +.. note:: + + better name for :any:`rectify_completions` ? + +We welcome any feedback on these new API, and we also encourage you to try this +module in debug mode (start IPython with ``--Completer.debug=True``) in order +to have extra logging information if :any:`jedi` is crashing, or if current +IPython completer pending deprecations are returning results not yet handled +by :any:`jedi` + +Using Jedi for tab completion allow snippets like the following to work without +having to execute any code: + + >>> myvar = ['hello', 42] + ... myvar[1].bi<tab> + +Tab completion will be able to infer that ``myvar[1]`` is a real number without +executing any code unlike the previously available ``IPCompleter.greedy`` +option. + +Be sure to update :any:`jedi` to the latest stable version or to try the +current development version to get better completions. +""" + + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
+# +# Some of this code originated from rlcompleter in the Python standard library +# Copyright (C) 2001 Python Software Foundation, www.python.org + + +import builtins as builtin_mod +import glob +import inspect +import itertools +import keyword +import os +import re +import string +import sys +import time +import unicodedata +import warnings +from contextlib import contextmanager +from importlib import import_module +from types import SimpleNamespace +from typing import Iterable, Iterator, List, Tuple + +from IPython.core.error import TryNext +from IPython.core.inputtransformer2 import ESC_MAGIC +from IPython.core.latex_symbols import latex_symbols, reverse_latex_symbol +from IPython.core.oinspect import InspectColors +from IPython.utils import generics +from IPython.utils.dir2 import dir2, get_real_method +from IPython.utils.process import arg_split +from traitlets import Bool, Enum, Int, observe +from traitlets.config.configurable import Configurable + +import __main__ + +# skip module docstests +skip_doctest = True + +try: + import jedi + jedi.settings.case_insensitive_completion = False + import jedi.api.helpers + import jedi.api.classes + JEDI_INSTALLED = True +except ImportError: + JEDI_INSTALLED = False +#----------------------------------------------------------------------------- +# Globals +#----------------------------------------------------------------------------- + +# Public API +__all__ = ['Completer','IPCompleter'] + +if sys.platform == 'win32': + PROTECTABLES = ' ' +else: + PROTECTABLES = ' ()[]{}?=\\|;:\'#*"^&' + +# Protect against returning an enormous number of completions which the frontend +# may have trouble processing. +MATCHES_LIMIT = 500 + + +class Sentinel: + def __repr__(self): + return "<deprecated sentinel>" + + +_deprecation_readline_sentinel = Sentinel() + + +class ProvisionalCompleterWarning(FutureWarning): + """ + Exception raise by an experimental feature in this module. + + Wrap code in :any:`provisionalcompleter` context manager if you + are certain you want to use an unstable feature. + """ + pass + +warnings.filterwarnings('error', category=ProvisionalCompleterWarning) + +@contextmanager +def provisionalcompleter(action='ignore'): + """ + + + This context manager has to be used in any place where unstable completer + behavior and API may be called. + + >>> with provisionalcompleter(): + ... completer.do_experimental_things() # works + + >>> completer.do_experimental_things() # raises. + + .. note:: + + Unstable + + By using this context manager you agree that the API in use may change + without warning, and that you won't complain if they do so. + + You also understand that, if the API is not to your liking, you should report + a bug to explain your use case upstream. + + We'll be happy to get your feedback, feature requests, and improvements on + any of the unstable APIs! + """ + with warnings.catch_warnings(): + warnings.filterwarnings(action, category=ProvisionalCompleterWarning) + yield + + +def has_open_quotes(s): + """Return whether a string has open quotes. + + This simply counts whether the number of quote characters of either type in + the string is odd. + + Returns + ------- + If there is an open quote, the quote character is returned. Else, return + False. + """ + # We check " first, then ', so complex cases with nested quotes will get + # the " to take precedence. 
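    # For example (illustrative): has_open_quotes('print("he') returns '"',
    # while has_open_quotes('print("hello")') returns False.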
+ if s.count('"') % 2: + return '"' + elif s.count("'") % 2: + return "'" + else: + return False + + +def protect_filename(s, protectables=PROTECTABLES): + """Escape a string to protect certain characters.""" + if set(s) & set(protectables): + if sys.platform == "win32": + return '"' + s + '"' + else: + return "".join(("\\" + c if c in protectables else c) for c in s) + else: + return s + + +def expand_user(path:str) -> Tuple[str, bool, str]: + """Expand ``~``-style usernames in strings. + + This is similar to :func:`os.path.expanduser`, but it computes and returns + extra information that will be useful if the input was being used in + computing completions, and you wish to return the completions with the + original '~' instead of its expanded value. + + Parameters + ---------- + path : str + String to be expanded. If no ~ is present, the output is the same as the + input. + + Returns + ------- + newpath : str + Result of ~ expansion in the input path. + tilde_expand : bool + Whether any expansion was performed or not. + tilde_val : str + The value that ~ was replaced with. + """ + # Default values + tilde_expand = False + tilde_val = '' + newpath = path + + if path.startswith('~'): + tilde_expand = True + rest = len(path)-1 + newpath = os.path.expanduser(path) + if rest: + tilde_val = newpath[:-rest] + else: + tilde_val = newpath + + return newpath, tilde_expand, tilde_val + + +def compress_user(path:str, tilde_expand:bool, tilde_val:str) -> str: + """Does the opposite of expand_user, with its outputs. + """ + if tilde_expand: + return path.replace(tilde_val, '~') + else: + return path + + +def completions_sorting_key(word): + """key for sorting completions + + This does several things: + + - Demote any completions starting with underscores to the end + - Insert any %magic and %%cellmagic completions in the alphabetical order + by their name + """ + prio1, prio2 = 0, 0 + + if word.startswith('__'): + prio1 = 2 + elif word.startswith('_'): + prio1 = 1 + + if word.endswith('='): + prio1 = -1 + + if word.startswith('%%'): + # If there's another % in there, this is something else, so leave it alone + if not "%" in word[2:]: + word = word[2:] + prio2 = 2 + elif word.startswith('%'): + if not "%" in word[1:]: + word = word[1:] + prio2 = 1 + + return prio1, word, prio2 + + +class _FakeJediCompletion: + """ + This is a workaround to communicate to the UI that Jedi has crashed and to + report a bug. Will be used only id :any:`IPCompleter.debug` is set to true. + + Added in IPython 6.0 so should likely be removed for 7.0 + + """ + + def __init__(self, name): + + self.name = name + self.complete = name + self.type = 'crashed' + self.name_with_symbols = name + self.signature = '' + self._origin = 'fake' + + def __repr__(self): + return '<Fake completion object jedi has crashed>' + + +class Completion: + """ + Completion object used and return by IPython completers. + + .. warning:: + + Unstable + + This function is unstable, API may change without warning. + It will also raise unless use in proper context manager. + + This act as a middle ground :any:`Completion` object between the + :any:`jedi.api.classes.Completion` object and the Prompt Toolkit completion + object. While Jedi need a lot of information about evaluator and how the + code should be ran/inspected, PromptToolkit (and other frontend) mostly + need user facing information. + + - Which range should be replaced replaced by what. + - Some metadata (like completion type), or meta information to displayed to + the use user. 
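
    As an illustrative sketch (the values are made up), replacing the three
    characters before the cursor with ``print`` could be represented as:

    .. code::

        with provisionalcompleter():
            c = Completion(start=0, end=3, text='print', type='function')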
+ + For debugging purpose we can also store the origin of the completion (``jedi``, + ``IPython.python_matches``, ``IPython.magics_matches``...). + """ + + __slots__ = ['start', 'end', 'text', 'type', 'signature', '_origin'] + + def __init__(self, start: int, end: int, text: str, *, type: str=None, _origin='', signature='') -> None: + warnings.warn("``Completion`` is a provisional API (as of IPython 6.0). " + "It may change without warnings. " + "Use in corresponding context manager.", + category=ProvisionalCompleterWarning, stacklevel=2) + + self.start = start + self.end = end + self.text = text + self.type = type + self.signature = signature + self._origin = _origin + + def __repr__(self): + return '<Completion start=%s end=%s text=%r type=%r, signature=%r,>' % \ + (self.start, self.end, self.text, self.type or '?', self.signature or '?') + + def __eq__(self, other)->Bool: + """ + Equality and hash do not hash the type (as some completer may not be + able to infer the type), but are use to (partially) de-duplicate + completion. + + Completely de-duplicating completion is a bit tricker that just + comparing as it depends on surrounding text, which Completions are not + aware of. + """ + return self.start == other.start and \ + self.end == other.end and \ + self.text == other.text + + def __hash__(self): + return hash((self.start, self.end, self.text)) + + +_IC = Iterable[Completion] + + +def _deduplicate_completions(text: str, completions: _IC)-> _IC: + """ + Deduplicate a set of completions. + + .. warning:: + + Unstable + + This function is unstable, API may change without warning. + + Parameters + ---------- + text: str + text that should be completed. + completions: Iterator[Completion] + iterator over the completions to deduplicate + + Yields + ------ + `Completions` objects + + + Completions coming from multiple sources, may be different but end up having + the same effect when applied to ``text``. If this is the case, this will + consider completions as equal and only emit the first encountered. + + Not folded in `completions()` yet for debugging purpose, and to detect when + the IPython completer does return things that Jedi does not, but should be + at some point. + """ + completions = list(completions) + if not completions: + return + + new_start = min(c.start for c in completions) + new_end = max(c.end for c in completions) + + seen = set() + for c in completions: + new_text = text[new_start:c.start] + c.text + text[c.end:new_end] + if new_text not in seen: + yield c + seen.add(new_text) + + +def rectify_completions(text: str, completions: _IC, *, _debug=False)->_IC: + """ + Rectify a set of completions to all have the same ``start`` and ``end`` + + .. warning:: + + Unstable + + This function is unstable, API may change without warning. + It will also raise unless use in proper context manager. + + Parameters + ---------- + text: str + text that should be completed. + completions: Iterator[Completion] + iterator over the completions to rectify + + + :any:`jedi.api.classes.Completion` s returned by Jedi may not have the same start and end, though + the Jupyter Protocol requires them to behave like so. This will readjust + the completion to have the same ``start`` and ``end`` by padding both + extremities with surrounding text. + + During stabilisation should support a ``_debug`` option to log which + completion are return by the IPython completer and not found in Jedi in + order to make upstream bug report. 
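
    A minimal usage sketch (``text`` and ``completions`` being the inputs named
    above, obtained through the provisional API):

    .. code::

        with provisionalcompleter():
            rectified = list(rectify_completions(text, completions))
        # every completion in ``rectified`` now shares the same start and end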
+ """ + warnings.warn("`rectify_completions` is a provisional API (as of IPython 6.0). " + "It may change without warnings. " + "Use in corresponding context manager.", + category=ProvisionalCompleterWarning, stacklevel=2) + + completions = list(completions) + if not completions: + return + starts = (c.start for c in completions) + ends = (c.end for c in completions) + + new_start = min(starts) + new_end = max(ends) + + seen_jedi = set() + seen_python_matches = set() + for c in completions: + new_text = text[new_start:c.start] + c.text + text[c.end:new_end] + if c._origin == 'jedi': + seen_jedi.add(new_text) + elif c._origin == 'IPCompleter.python_matches': + seen_python_matches.add(new_text) + yield Completion(new_start, new_end, new_text, type=c.type, _origin=c._origin, signature=c.signature) + diff = seen_python_matches.difference(seen_jedi) + if diff and _debug: + print('IPython.python matches have extras:', diff) + + +if sys.platform == 'win32': + DELIMS = ' \t\n`!@#$^&*()=+[{]}|;\'",<>?' +else: + DELIMS = ' \t\n`!@#$^&*()=+[{]}\\|;:\'",<>?' + +GREEDY_DELIMS = ' =\r\n' + + +class CompletionSplitter(object): + """An object to split an input line in a manner similar to readline. + + By having our own implementation, we can expose readline-like completion in + a uniform manner to all frontends. This object only needs to be given the + line of text to be split and the cursor position on said line, and it + returns the 'word' to be completed on at the cursor after splitting the + entire line. + + What characters are used as splitting delimiters can be controlled by + setting the ``delims`` attribute (this is a property that internally + automatically builds the necessary regular expression)""" + + # Private interface + + # A string of delimiter characters. The default value makes sense for + # IPython's most typical usage patterns. + _delims = DELIMS + + # The expression (a normal string) to be compiled into a regular expression + # for actual splitting. We store it as an attribute mostly for ease of + # debugging, since this type of code can be so tricky to debug. + _delim_expr = None + + # The regular expression that does the actual splitting + _delim_re = None + + def __init__(self, delims=None): + delims = CompletionSplitter._delims if delims is None else delims + self.delims = delims + + @property + def delims(self): + """Return the string of delimiter characters.""" + return self._delims + + @delims.setter + def delims(self, delims): + """Set the delimiters for line splitting.""" + expr = '[' + ''.join('\\'+ c for c in delims) + ']' + self._delim_re = re.compile(expr) + self._delims = delims + self._delim_expr = expr + + def split_line(self, line, cursor_pos=None): + """Split a line of text with a cursor at the given position. + """ + l = line if cursor_pos is None else line[:cursor_pos] + return self._delim_re.split(l)[-1] + + + +class Completer(Configurable): + + greedy = Bool(False, + help="""Activate greedy completion + PENDING DEPRECTION. this is now mostly taken care of with Jedi. + + This will enable completion on elements of lists, results of function calls, etc., + but can be unsafe because the code is actually evaluated on TAB. + """ + ).tag(config=True) + + use_jedi = Bool(default_value=JEDI_INSTALLED, + help="Experimental: Use Jedi to generate autocompletions. " + "Default to True if jedi is installed.").tag(config=True) + + jedi_compute_type_timeout = Int(default_value=400, + help="""Experimental: restrict time (in milliseconds) during which Jedi can compute types. 
+ Set to 0 to stop computing types. Non-zero value lower than 100ms may hurt + performance by preventing jedi to build its cache. + """).tag(config=True) + + debug = Bool(default_value=False, + help='Enable debug for the Completer. Mostly print extra ' + 'information for experimental jedi integration.')\ + .tag(config=True) + + backslash_combining_completions = Bool(True, + help="Enable unicode completions, e.g. \\alpha<tab> . " + "Includes completion of latex commands, unicode names, and expanding " + "unicode characters back to latex commands.").tag(config=True) + + + + def __init__(self, namespace=None, global_namespace=None, **kwargs): + """Create a new completer for the command line. + + Completer(namespace=ns, global_namespace=ns2) -> completer instance. + + If unspecified, the default namespace where completions are performed + is __main__ (technically, __main__.__dict__). Namespaces should be + given as dictionaries. + + An optional second namespace can be given. This allows the completer + to handle cases where both the local and global scopes need to be + distinguished. + """ + + # Don't bind to namespace quite yet, but flag whether the user wants a + # specific namespace or to use __main__.__dict__. This will allow us + # to bind to __main__.__dict__ at completion time, not now. + if namespace is None: + self.use_main_ns = True + else: + self.use_main_ns = False + self.namespace = namespace + + # The global namespace, if given, can be bound directly + if global_namespace is None: + self.global_namespace = {} + else: + self.global_namespace = global_namespace + + self.custom_matchers = [] + + super(Completer, self).__init__(**kwargs) + + def complete(self, text, state): + """Return the next possible completion for 'text'. + + This is called successively with state == 0, 1, 2, ... until it + returns None. The completion should begin with 'text'. + + """ + if self.use_main_ns: + self.namespace = __main__.__dict__ + + if state == 0: + if "." in text: + self.matches = self.attr_matches(text) + else: + self.matches = self.global_matches(text) + try: + return self.matches[state] + except IndexError: + return None + + def global_matches(self, text): + """Compute matches when text is a simple name. + + Return a list of all keywords, built-in functions and names currently + defined in self.namespace or self.global_namespace that match. + + """ + matches = [] + match_append = matches.append + n = len(text) + for lst in [keyword.kwlist, + builtin_mod.__dict__.keys(), + self.namespace.keys(), + self.global_namespace.keys()]: + for word in lst: + if word[:n] == text and word != "__builtins__": + match_append(word) + + snake_case_re = re.compile(r"[^_]+(_[^_]+)+?\Z") + for lst in [self.namespace.keys(), + self.global_namespace.keys()]: + shortened = {"_".join([sub[0] for sub in word.split('_')]) : word + for word in lst if snake_case_re.match(word)} + for word in shortened.keys(): + if word[:n] == text and word != "__builtins__": + match_append(shortened[word]) + return matches + + def attr_matches(self, text): + """Compute matches when text contains a dot. + + Assuming the text is of the form NAME.NAME....[NAME], and is + evaluatable in self.namespace or self.global_namespace, it will be + evaluated and its attributes (as revealed by dir()) are used as + possible completions. (For class instances, class members are + also considered.) + + WARNING: this can still invoke arbitrary C code, if an object + with a __getattr__ hook is evaluated. + + """ + + # Another option, seems to work great. 
Catches things like ''.<tab> + m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text) + + if m: + expr, attr = m.group(1, 3) + elif self.greedy: + m2 = re.match(r"(.+)\.(\w*)$", self.line_buffer) + if not m2: + return [] + expr, attr = m2.group(1,2) + else: + return [] + + try: + obj = eval(expr, self.namespace) + except: + try: + obj = eval(expr, self.global_namespace) + except: + return [] + + if self.limit_to__all__ and hasattr(obj, '__all__'): + words = get__all__entries(obj) + else: + words = dir2(obj) + + try: + words = generics.complete_object(obj, words) + except TryNext: + pass + except AssertionError: + raise + except Exception: + # Silence errors from completion function + #raise # dbg + pass + # Build match list to return + n = len(attr) + return [u"%s.%s" % (expr, w) for w in words if w[:n] == attr ] + + +def get__all__entries(obj): + """returns the strings in the __all__ attribute""" + try: + words = getattr(obj, '__all__') + except: + return [] + + return [w for w in words if isinstance(w, str)] + + +def match_dict_keys(keys: List[str], prefix: str, delims: str): + """Used by dict_key_matches, matching the prefix to a list of keys + + Parameters + ========== + keys: + list of keys in dictionary currently being completed. + prefix: + Part of the text already typed by the user. e.g. `mydict[b'fo` + delims: + String of delimiters to consider when finding the current key. + + Returns + ======= + + A tuple of three elements: ``quote``, ``token_start``, ``matched``, with + ``quote`` being the quote that need to be used to close current string. + ``token_start`` the position where the replacement should start occurring, + ``matches`` a list of replacement/completion + + """ + if not prefix: + return None, 0, [repr(k) for k in keys + if isinstance(k, (str, bytes))] + quote_match = re.search('["\']', prefix) + quote = quote_match.group() + try: + prefix_str = eval(prefix + quote, {}) + except Exception: + return None, 0, [] + + pattern = '[^' + ''.join('\\' + c for c in delims) + ']*$' + token_match = re.search(pattern, prefix, re.UNICODE) + token_start = token_match.start() + token_prefix = token_match.group() + + matched = [] + for key in keys: + try: + if not key.startswith(prefix_str): + continue + except (AttributeError, TypeError, UnicodeError): + # Python 3+ TypeError on b'a'.startswith('a') or vice-versa + continue + + # reformat remainder of key to begin with prefix + rem = key[len(prefix_str):] + # force repr wrapped in ' + rem_repr = repr(rem + '"') if isinstance(rem, str) else repr(rem + b'"') + if rem_repr.startswith('u') and prefix[0] not in 'uU': + # Found key is unicode, but prefix is Py2 string. + # Therefore attempt to interpret key as string. + try: + rem_repr = repr(rem.encode('ascii') + '"') + except UnicodeEncodeError: + continue + + rem_repr = rem_repr[1 + rem_repr.index("'"):-2] + if quote == '"': + # The entered prefix is quoted with ", + # but the match is quoted with '. + # A contained " hence needs escaping for comparison: + rem_repr = rem_repr.replace('"', '\\"') + + # then reinsert prefix from start of token + matched.append('%s%s' % (token_prefix, rem_repr)) + return quote, token_start, matched + + +def cursor_to_position(text:str, line:int, column:int)->int: + """ + + Convert the (line,column) position of the cursor in text to an offset in a + string. 
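
    For example (illustrative), in ``text = 'ab\ncd'`` the cursor at line 1,
    column 1 corresponds to offset 4:

    .. code::

        cursor_to_position('ab\ncd', line=1, column=1)   # -> 4, the offset of 'd'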
+ + Parameters + ---------- + + text : str + The text in which to calculate the cursor offset + line : int + Line of the cursor; 0-indexed + column : int + Column of the cursor 0-indexed + + Return + ------ + Position of the cursor in ``text``, 0-indexed. + + See Also + -------- + position_to_cursor: reciprocal of this function + + """ + lines = text.split('\n') + assert line <= len(lines), '{} <= {}'.format(str(line), str(len(lines))) + + return sum(len(l) + 1 for l in lines[:line]) + column + +def position_to_cursor(text:str, offset:int)->Tuple[int, int]: + """ + Convert the position of the cursor in text (0 indexed) to a line + number(0-indexed) and a column number (0-indexed) pair + + Position should be a valid position in ``text``. + + Parameters + ---------- + + text : str + The text in which to calculate the cursor offset + offset : int + Position of the cursor in ``text``, 0-indexed. + + Return + ------ + (line, column) : (int, int) + Line of the cursor; 0-indexed, column of the cursor 0-indexed + + + See Also + -------- + cursor_to_position : reciprocal of this function + + + """ + + assert 0 <= offset <= len(text) , "0 <= %s <= %s" % (offset , len(text)) + + before = text[:offset] + blines = before.split('\n') # ! splitnes trim trailing \n + line = before.count('\n') + col = len(blines[-1]) + return line, col + + +def _safe_isinstance(obj, module, class_name): + """Checks if obj is an instance of module.class_name if loaded + """ + return (module in sys.modules and + isinstance(obj, getattr(import_module(module), class_name))) + + +def back_unicode_name_matches(text): + u"""Match unicode characters back to unicode name + + This does ``☃`` -> ``\\snowman`` + + Note that snowman is not a valid python3 combining character but will be expanded. + Though it will not recombine back to the snowman character by the completion machinery. + + This will not either back-complete standard sequences like \\n, \\b ... + + Used on Python 3 only. + """ + if len(text)<2: + return u'', () + maybe_slash = text[-2] + if maybe_slash != '\\': + return u'', () + + char = text[-1] + # no expand on quote for completion in strings. + # nor backcomplete standard ascii keys + if char in string.ascii_letters or char in ['"',"'"]: + return u'', () + try : + unic = unicodedata.name(char) + return '\\'+char,['\\'+unic] + except KeyError: + pass + return u'', () + +def back_latex_name_matches(text:str): + """Match latex characters back to unicode name + + This does ``\\ℵ`` -> ``\\aleph`` + + Used on Python 3 only. + """ + if len(text)<2: + return u'', () + maybe_slash = text[-2] + if maybe_slash != '\\': + return u'', () + + + char = text[-1] + # no expand on quote for completion in strings. + # nor backcomplete standard ascii keys + if char in string.ascii_letters or char in ['"',"'"]: + return u'', () + try : + latex = reverse_latex_symbol[char] + # '\\' replace the \ as well + return '\\'+char,[latex] + except KeyError: + pass + return u'', () + + +def _formatparamchildren(parameter) -> str: + """ + Get parameter name and value from Jedi Private API + + Jedi does not expose a simple way to get `param=value` from its API. + + Parameter + ========= + + parameter: + Jedi's function `Param` + + Returns + ======= + + A string like 'a', 'b=1', '*args', '**kwargs' + + + """ + description = parameter.description + if not description.startswith('param '): + raise ValueError('Jedi function parameter description have change format.' + 'Expected "param ...", found %r".' 
% description) + return description[6:] + +def _make_signature(completion)-> str: + """ + Make the signature from a jedi completion + + Parameter + ========= + + completion: jedi.Completion + object does not complete a function type + + Returns + ======= + + a string consisting of the function signature, with the parenthesis but + without the function name. example: + `(a, *args, b=1, **kwargs)` + + """ + + return '(%s)'% ', '.join([f for f in (_formatparamchildren(p) for p in completion.params) if f]) + # it looks like this might work on jedi 0.17 + if hasattr(completion, 'get_signatures'): + signatures = completion.get_signatures() + if not signatures: + return '(?)' + + c0 = completion.get_signatures()[0] + return '('+c0.to_string().split('(', maxsplit=1)[1] + + return '(%s)'% ', '.join([f for f in (_formatparamchildren(p) for signature in completion.get_signatures() + for p in signature.defined_names()) if f]) + +class IPCompleter(Completer): + """Extension of the completer class with IPython-specific features""" + + _names = None + + @observe('greedy') + def _greedy_changed(self, change): + """update the splitter and readline delims when greedy is changed""" + if change['new']: + self.splitter.delims = GREEDY_DELIMS + else: + self.splitter.delims = DELIMS + + dict_keys_only = Bool(False, + help="""Whether to show dict key matches only""") + + merge_completions = Bool(True, + help="""Whether to merge completion results into a single list + + If False, only the completion results from the first non-empty + completer will be returned. + """ + ).tag(config=True) + omit__names = Enum((0,1,2), default_value=2, + help="""Instruct the completer to omit private method names + + Specifically, when completing on ``object.<tab>``. + + When 2 [default]: all names that start with '_' will be excluded. + + When 1: all 'magic' names (``__foo__``) will be excluded. + + When 0: nothing will be excluded. + """ + ).tag(config=True) + limit_to__all__ = Bool(False, + help=""" + DEPRECATED as of version 5.0. + + Instruct the completer to use __all__ for the completion + + Specifically, when completing on ``object.<tab>``. + + When True: only those names in obj.__all__ will be included. + + When False [default]: the __all__ attribute is ignored + """, + ).tag(config=True) + + @observe('limit_to__all__') + def _limit_to_all_changed(self, change): + warnings.warn('`IPython.core.IPCompleter.limit_to__all__` configuration ' + 'value has been deprecated since IPython 5.0, will be made to have ' + 'no effects and then removed in future version of IPython.', + UserWarning) + + def __init__(self, shell=None, namespace=None, global_namespace=None, + use_readline=_deprecation_readline_sentinel, config=None, **kwargs): + """IPCompleter() -> completer + + Return a completer object. + + Parameters + ---------- + + shell + a pointer to the ipython shell itself. This is needed + because this completer knows about magic functions, and those can + only be accessed via the ipython instance. + + namespace : dict, optional + an optional dict where completions are performed. + + global_namespace : dict, optional + secondary optional dict for completions, to + handle cases (such as IPython embedded inside functions) where + both Python scopes are visible. 
+ + use_readline : bool, optional + DEPRECATED, ignored since IPython 6.0, will have no effects + """ + + self.magic_escape = ESC_MAGIC + self.splitter = CompletionSplitter() + + if use_readline is not _deprecation_readline_sentinel: + warnings.warn('The `use_readline` parameter is deprecated and ignored since IPython 6.0.', + DeprecationWarning, stacklevel=2) + + # _greedy_changed() depends on splitter and readline being defined: + Completer.__init__(self, namespace=namespace, global_namespace=global_namespace, + config=config, **kwargs) + + # List where completion matches will be stored + self.matches = [] + self.shell = shell + # Regexp to split filenames with spaces in them + self.space_name_re = re.compile(r'([^\\] )') + # Hold a local ref. to glob.glob for speed + self.glob = glob.glob + + # Determine if we are running on 'dumb' terminals, like (X)Emacs + # buffers, to avoid completion problems. + term = os.environ.get('TERM','xterm') + self.dumb_terminal = term in ['dumb','emacs'] + + # Special handling of backslashes needed in win32 platforms + if sys.platform == "win32": + self.clean_glob = self._clean_glob_win32 + else: + self.clean_glob = self._clean_glob + + #regexp to parse docstring for function signature + self.docstring_sig_re = re.compile(r'^[\w|\s.]+\(([^)]*)\).*') + self.docstring_kwd_re = re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)') + #use this if positional argument name is also needed + #= re.compile(r'[\s|\[]*(\w+)(?:\s*=?\s*.*)') + + self.magic_arg_matchers = [ + self.magic_config_matches, + self.magic_color_matches, + ] + + # This is set externally by InteractiveShell + self.custom_completers = None + + @property + def matchers(self): + """All active matcher routines for completion""" + if self.dict_keys_only: + return [self.dict_key_matches] + + if self.use_jedi: + return [ + *self.custom_matchers, + self.dict_key_matches, + self.file_matches, + self.magic_matches, + ] + else: + return [ + *self.custom_matchers, + self.dict_key_matches, + self.python_matches, + self.file_matches, + self.magic_matches, + self.python_func_kw_matches, + ] + + def all_completions(self, text) -> List[str]: + """ + Wrapper around the completion methods for the benefit of emacs. + """ + prefix = text.rpartition('.')[0] + with provisionalcompleter(): + return ['.'.join([prefix, c.text]) if prefix and self.use_jedi else c.text + for c in self.completions(text, len(text))] + + return self.complete(text)[1] + + def _clean_glob(self, text): + return self.glob("%s*" % text) + + def _clean_glob_win32(self,text): + return [f.replace("\\","/") + for f in self.glob("%s*" % text)] + + def file_matches(self, text): + """Match filenames, expanding ~USER type strings. + + Most of the seemingly convoluted logic in this completer is an + attempt to handle filenames with spaces in them. And yet it's not + quite perfect, because Python's readline doesn't expose all of the + GNU readline details needed for this to be done correctly. + + For a filename with a space in it, the printed completions will be + only the parts after what's already been typed (instead of the + full completions, as is normally done). I don't think with the + current (as of Python 2.3) Python readline it's possible to do + better.""" + + # chars that require escaping with backslash - i.e. chars + # that readline treats incorrectly as delimiters, but we + # don't want to treat as delimiters in filename matching + # when escaped with backslash + if text.startswith('!'): + text = text[1:] + text_prefix = u'!' 
+ else: + text_prefix = u'' + + text_until_cursor = self.text_until_cursor + # track strings with open quotes + open_quotes = has_open_quotes(text_until_cursor) + + if '(' in text_until_cursor or '[' in text_until_cursor: + lsplit = text + else: + try: + # arg_split ~ shlex.split, but with unicode bugs fixed by us + lsplit = arg_split(text_until_cursor)[-1] + except ValueError: + # typically an unmatched ", or backslash without escaped char. + if open_quotes: + lsplit = text_until_cursor.split(open_quotes)[-1] + else: + return [] + except IndexError: + # tab pressed on empty line + lsplit = "" + + if not open_quotes and lsplit != protect_filename(lsplit): + # if protectables are found, do matching on the whole escaped name + has_protectables = True + text0,text = text,lsplit + else: + has_protectables = False + text = os.path.expanduser(text) + + if text == "": + return [text_prefix + protect_filename(f) for f in self.glob("*")] + + # Compute the matches from the filesystem + if sys.platform == 'win32': + m0 = self.clean_glob(text) + else: + m0 = self.clean_glob(text.replace('\\', '')) + + if has_protectables: + # If we had protectables, we need to revert our changes to the + # beginning of filename so that we don't double-write the part + # of the filename we have so far + len_lsplit = len(lsplit) + matches = [text_prefix + text0 + + protect_filename(f[len_lsplit:]) for f in m0] + else: + if open_quotes: + # if we have a string with an open quote, we don't need to + # protect the names beyond the quote (and we _shouldn't_, as + # it would cause bugs when the filesystem call is made). + matches = m0 if sys.platform == "win32" else\ + [protect_filename(f, open_quotes) for f in m0] + else: + matches = [text_prefix + + protect_filename(f) for f in m0] + + # Mark directories in input list by appending '/' to their names. + return [x+'/' if os.path.isdir(x) else x for x in matches] + + def magic_matches(self, text): + """Match magics""" + # Get all shell magics now rather than statically, so magics loaded at + # runtime show up too. + lsm = self.shell.magics_manager.lsmagic() + line_magics = lsm['line'] + cell_magics = lsm['cell'] + pre = self.magic_escape + pre2 = pre+pre + + explicit_magic = text.startswith(pre) + + # Completion logic: + # - user gives %%: only do cell magics + # - user gives %: do both line and cell magics + # - no prefix: do both + # In other words, line magics are skipped if the user gives %% explicitly + # + # We also exclude magics that match any currently visible names: + # https://github.com/ipython/ipython/issues/4877, unless the user has + # typed a %: + # https://github.com/ipython/ipython/issues/10754 + bare_text = text.lstrip(pre) + global_matches = self.global_matches(bare_text) + if not explicit_magic: + def matches(magic): + """ + Filter magics, in particular remove magics that match + a name present in global namespace. 
+ """ + return ( magic.startswith(bare_text) and + magic not in global_matches ) + else: + def matches(magic): + return magic.startswith(bare_text) + + comp = [ pre2+m for m in cell_magics if matches(m)] + if not text.startswith(pre2): + comp += [ pre+m for m in line_magics if matches(m)] + + return comp + + def magic_config_matches(self, text:str) -> List[str]: + """ Match class names and attributes for %config magic """ + texts = text.strip().split() + + if len(texts) > 0 and (texts[0] == 'config' or texts[0] == '%config'): + # get all configuration classes + classes = sorted(set([ c for c in self.shell.configurables + if c.__class__.class_traits(config=True) + ]), key=lambda x: x.__class__.__name__) + classnames = [ c.__class__.__name__ for c in classes ] + + # return all classnames if config or %config is given + if len(texts) == 1: + return classnames + + # match classname + classname_texts = texts[1].split('.') + classname = classname_texts[0] + classname_matches = [ c for c in classnames + if c.startswith(classname) ] + + # return matched classes or the matched class with attributes + if texts[1].find('.') < 0: + return classname_matches + elif len(classname_matches) == 1 and \ + classname_matches[0] == classname: + cls = classes[classnames.index(classname)].__class__ + help = cls.class_get_help() + # strip leading '--' from cl-args: + help = re.sub(re.compile(r'^--', re.MULTILINE), '', help) + return [ attr.split('=')[0] + for attr in help.strip().splitlines() + if attr.startswith(texts[1]) ] + return [] + + def magic_color_matches(self, text:str) -> List[str] : + """ Match color schemes for %colors magic""" + texts = text.split() + if text.endswith(' '): + # .split() strips off the trailing whitespace. Add '' back + # so that: '%colors ' -> ['%colors', ''] + texts.append('') + + if len(texts) == 2 and (texts[0] == 'colors' or texts[0] == '%colors'): + prefix = texts[1] + return [ color for color in InspectColors.keys() + if color.startswith(prefix) ] + return [] + + def _jedi_matches(self, cursor_column:int, cursor_line:int, text:str): + """ + + Return a list of :any:`jedi.api.Completions` object from a ``text`` and + cursor position. + + Parameters + ---------- + cursor_column : int + column position of the cursor in ``text``, 0-indexed. + cursor_line : int + line position of the cursor in ``text``, 0-indexed + text : str + text to complete + + Debugging + --------- + + If ``IPCompleter.debug`` is ``True`` may return a :any:`_FakeJediCompletion` + object containing a string with the Jedi debug information attached. 
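
        A sketch of how the 0-indexed positions line up (``completer`` stands
        for an :any:`IPCompleter` instance):

        .. code::

            completer._jedi_matches(cursor_column=4, cursor_line=1, text='x = 1\nx.re')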
+ """ + namespaces = [self.namespace] + if self.global_namespace is not None: + namespaces.append(self.global_namespace) + + completion_filter = lambda x:x + offset = cursor_to_position(text, cursor_line, cursor_column) + # filter output if we are completing for object members + if offset: + pre = text[offset-1] + if pre == '.': + if self.omit__names == 2: + completion_filter = lambda c:not c.name.startswith('_') + elif self.omit__names == 1: + completion_filter = lambda c:not (c.name.startswith('__') and c.name.endswith('__')) + elif self.omit__names == 0: + completion_filter = lambda x:x + else: + raise ValueError("Don't understand self.omit__names == {}".format(self.omit__names)) + + interpreter = jedi.Interpreter(text[:offset], namespaces, column=cursor_column, line=cursor_line + 1) + try_jedi = True + + try: + # find the first token in the current tree -- if it is a ' or " then we are in a string + completing_string = False + try: + first_child = next(c for c in interpreter._get_module().tree_node.children if hasattr(c, 'value')) + except StopIteration: + pass + else: + # note the value may be ', ", or it may also be ''' or """, or + # in some cases, """what/you/typed..., but all of these are + # strings. + completing_string = len(first_child.value) > 0 and first_child.value[0] in {"'", '"'} + + # if we are in a string jedi is likely not the right candidate for + # now. Skip it. + try_jedi = not completing_string + except Exception as e: + # many of things can go wrong, we are using private API just don't crash. + if self.debug: + print("Error detecting if completing a non-finished string :", e, '|') + + if not try_jedi: + return [] + try: + return filter(completion_filter, interpreter.completions()) + except Exception as e: + if self.debug: + return [_FakeJediCompletion('Oops Jedi has crashed, please report a bug with the following:\n"""\n%s\ns"""' % (e))] + else: + return [] + + def python_matches(self, text): + """Match attributes or global python names""" + if "." in text: + try: + matches = self.attr_matches(text) + if text.endswith('.') and self.omit__names: + if self.omit__names == 1: + # true if txt is _not_ a __ name, false otherwise: + no__name = (lambda txt: + re.match(r'.*\.__.*?__',txt) is None) + else: + # true if txt is _not_ a _ name, false otherwise: + no__name = (lambda txt: + re.match(r'\._.*?',txt[txt.rindex('.'):]) is None) + matches = filter(no__name, matches) + except NameError: + # catches <undefined attributes>.<tab> + matches = [] + else: + matches = self.global_matches(text) + return matches + + def _default_arguments_from_docstring(self, doc): + """Parse the first line of docstring for call signature. + + Docstring should be of the form 'min(iterable[, key=func])\n'. + It can also parse cython docstring of the form + 'Minuit.migrad(self, int ncall=10000, resume=True, int nsplit=1)'. 
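
        For example (illustrative; ``completer`` stands for an :any:`IPCompleter`
        instance):

        .. code::

            completer._default_arguments_from_docstring('min(iterable[, key=func])\n')
            # -> ['key']; only 'key=func' looks like a keyword argument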
+ """ + if doc is None: + return [] + + #care only the firstline + line = doc.lstrip().splitlines()[0] + + #p = re.compile(r'^[\w|\s.]+\(([^)]*)\).*') + #'min(iterable[, key=func])\n' -> 'iterable[, key=func]' + sig = self.docstring_sig_re.search(line) + if sig is None: + return [] + # iterable[, key=func]' -> ['iterable[' ,' key=func]'] + sig = sig.groups()[0].split(',') + ret = [] + for s in sig: + #re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)') + ret += self.docstring_kwd_re.findall(s) + return ret + + def _default_arguments(self, obj): + """Return the list of default arguments of obj if it is callable, + or empty list otherwise.""" + call_obj = obj + ret = [] + if inspect.isbuiltin(obj): + pass + elif not (inspect.isfunction(obj) or inspect.ismethod(obj)): + if inspect.isclass(obj): + #for cython embedsignature=True the constructor docstring + #belongs to the object itself not __init__ + ret += self._default_arguments_from_docstring( + getattr(obj, '__doc__', '')) + # for classes, check for __init__,__new__ + call_obj = (getattr(obj, '__init__', None) or + getattr(obj, '__new__', None)) + # for all others, check if they are __call__able + elif hasattr(obj, '__call__'): + call_obj = obj.__call__ + ret += self._default_arguments_from_docstring( + getattr(call_obj, '__doc__', '')) + + _keeps = (inspect.Parameter.KEYWORD_ONLY, + inspect.Parameter.POSITIONAL_OR_KEYWORD) + + try: + sig = inspect.signature(obj) + ret.extend(k for k, v in sig.parameters.items() if + v.kind in _keeps) + except ValueError: + pass + + return list(set(ret)) + + def python_func_kw_matches(self,text): + """Match named parameters (kwargs) of the last open function""" + + if "." in text: # a parameter cannot be dotted + return [] + try: regexp = self.__funcParamsRegex + except AttributeError: + regexp = self.__funcParamsRegex = re.compile(r''' + '.*?(?<!\\)' | # single quoted strings or + ".*?(?<!\\)" | # double quoted strings or + \w+ | # identifier + \S # other characters + ''', re.VERBOSE | re.DOTALL) + # 1. find the nearest identifier that comes before an unclosed + # parenthesis before the cursor + # e.g. for "foo (1+bar(x), pa<cursor>,a=1)", the candidate is "foo" + tokens = regexp.findall(self.text_until_cursor) + iterTokens = reversed(tokens); openPar = 0 + + for token in iterTokens: + if token == ')': + openPar -= 1 + elif token == '(': + openPar += 1 + if openPar > 0: + # found the last unclosed parenthesis + break + else: + return [] + # 2. Concatenate dotted names ("foo.bar" for "foo.bar(x, pa" ) + ids = [] + isId = re.compile(r'\w+$').match + + while True: + try: + ids.append(next(iterTokens)) + if not isId(ids[-1]): + ids.pop(); break + if not next(iterTokens) == '.': + break + except StopIteration: + break + + # Find all named arguments already assigned to, as to avoid suggesting + # them again + usedNamedArgs = set() + par_level = -1 + for token, next_token in zip(tokens, tokens[1:]): + if token == '(': + par_level += 1 + elif token == ')': + par_level -= 1 + + if par_level != 0: + continue + + if next_token != '=': + continue + + usedNamedArgs.add(token) + + argMatches = [] + try: + callableObj = '.'.join(ids[::-1]) + namedArgs = self._default_arguments(eval(callableObj, + self.namespace)) + + # Remove used named arguments from the list, no need to show twice + for namedArg in set(namedArgs) - usedNamedArgs: + if namedArg.startswith(text): + argMatches.append(u"%s=" %namedArg) + except: + pass + + return argMatches + + def dict_key_matches(self, text): + "Match string keys in a dictionary, after e.g. 
'foo[' " + def get_keys(obj): + # Objects can define their own completions by defining an + # _ipy_key_completions_() method. + method = get_real_method(obj, '_ipython_key_completions_') + if method is not None: + return method() + + # Special case some common in-memory dict-like types + if isinstance(obj, dict) or\ + _safe_isinstance(obj, 'pandas', 'DataFrame'): + try: + return list(obj.keys()) + except Exception: + return [] + elif _safe_isinstance(obj, 'numpy', 'ndarray') or\ + _safe_isinstance(obj, 'numpy', 'void'): + return obj.dtype.names or [] + return [] + + try: + regexps = self.__dict_key_regexps + except AttributeError: + dict_key_re_fmt = r'''(?x) + ( # match dict-referring expression wrt greedy setting + %s + ) + \[ # open bracket + \s* # and optional whitespace + ([uUbB]? # string prefix (r not handled) + (?: # unclosed string + '(?:[^']|(?<!\\)\\')* + | + "(?:[^"]|(?<!\\)\\")* + ) + )? + $ + ''' + regexps = self.__dict_key_regexps = { + False: re.compile(dict_key_re_fmt % r''' + # identifiers separated by . + (?!\d)\w+ + (?:\.(?!\d)\w+)* + '''), + True: re.compile(dict_key_re_fmt % ''' + .+ + ''') + } + + match = regexps[self.greedy].search(self.text_until_cursor) + if match is None: + return [] + + expr, prefix = match.groups() + try: + obj = eval(expr, self.namespace) + except Exception: + try: + obj = eval(expr, self.global_namespace) + except Exception: + return [] + + keys = get_keys(obj) + if not keys: + return keys + closing_quote, token_offset, matches = match_dict_keys(keys, prefix, self.splitter.delims) + if not matches: + return matches + + # get the cursor position of + # - the text being completed + # - the start of the key text + # - the start of the completion + text_start = len(self.text_until_cursor) - len(text) + if prefix: + key_start = match.start(2) + completion_start = key_start + token_offset + else: + key_start = completion_start = match.end() + + # grab the leading prefix, to make sure all completions start with `text` + if text_start > key_start: + leading = '' + else: + leading = text[text_start:completion_start] + + # the index of the `[` character + bracket_idx = match.end(1) + + # append closing quote and bracket as appropriate + # this is *not* appropriate if the opening quote or bracket is outside + # the text given to this method + suf = '' + continuation = self.line_buffer[len(self.text_until_cursor):] + if key_start > text_start and closing_quote: + # quotes were opened inside text, maybe close them + if continuation.startswith(closing_quote): + continuation = continuation[len(closing_quote):] + else: + suf += closing_quote + if bracket_idx > text_start: + # brackets were opened inside text, maybe close them + if not continuation.startswith(']'): + suf += ']' + + return [leading + k + suf for k in matches] + + def unicode_name_matches(self, text): + u"""Match Latex-like syntax for unicode characters base + on the name of the character. + + This does ``\\GREEK SMALL LETTER ETA`` -> ``η`` + + Works only on valid python 3 identifier, or on combining characters that + will combine to form a valid identifier. + + Used on Python 3 only. + """ + slashpos = text.rfind('\\') + if slashpos > -1: + s = text[slashpos+1:] + try : + unic = unicodedata.lookup(s) + # allow combining chars + if ('a'+unic).isidentifier(): + return '\\'+s,[unic] + except KeyError: + pass + return u'', [] + + + def latex_matches(self, text): + u"""Match Latex syntax for unicode characters. 
+ + This does both ``\\alp`` -> ``\\alpha`` and ``\\alpha`` -> ``α`` + """ + slashpos = text.rfind('\\') + if slashpos > -1: + s = text[slashpos:] + if s in latex_symbols: + # Try to complete a full latex symbol to unicode + # \\alpha -> α + return s, [latex_symbols[s]] + else: + # If a user has partially typed a latex symbol, give them + # a full list of options \al -> [\aleph, \alpha] + matches = [k for k in latex_symbols if k.startswith(s)] + if matches: + return s, matches + return u'', [] + + def dispatch_custom_completer(self, text): + if not self.custom_completers: + return + + line = self.line_buffer + if not line.strip(): + return None + + # Create a little structure to pass all the relevant information about + # the current completion to any custom completer. + event = SimpleNamespace() + event.line = line + event.symbol = text + cmd = line.split(None,1)[0] + event.command = cmd + event.text_until_cursor = self.text_until_cursor + + # for foo etc, try also to find completer for %foo + if not cmd.startswith(self.magic_escape): + try_magic = self.custom_completers.s_matches( + self.magic_escape + cmd) + else: + try_magic = [] + + for c in itertools.chain(self.custom_completers.s_matches(cmd), + try_magic, + self.custom_completers.flat_matches(self.text_until_cursor)): + try: + res = c(event) + if res: + # first, try case sensitive match + withcase = [r for r in res if r.startswith(text)] + if withcase: + return withcase + # if none, then case insensitive ones are ok too + text_low = text.lower() + return [r for r in res if r.lower().startswith(text_low)] + except TryNext: + pass + except KeyboardInterrupt: + """ + If custom completer take too long, + let keyboard interrupt abort and return nothing. + """ + break + + return None + + def completions(self, text: str, offset: int)->Iterator[Completion]: + """ + Returns an iterator over the possible completions + + .. warning:: + + Unstable + + This function is unstable, API may change without warning. + It will also raise unless use in proper context manager. + + Parameters + ---------- + + text:str + Full text of the current input, multi line string. + offset:int + Integer representing the position of the cursor in ``text``. Offset + is 0-based indexed. + + Yields + ------ + :any:`Completion` object + + + The cursor on a text can either be seen as being "in between" + characters or "On" a character depending on the interface visible to + the user. For consistency the cursor being on "in between" characters X + and Y is equivalent to the cursor being "on" character Y, that is to say + the character the cursor is on is considered as being after the cursor. + + Combining characters may span more that one position in the + text. + + + .. note:: + + If ``IPCompleter.debug`` is :any:`True` will yield a ``--jedi/ipython--`` + fake Completion token to distinguish completion returned by Jedi + and usual IPython completion. + + .. note:: + + Completions are not completely deduplicated yet. If identical + completions are coming from different sources this function does not + ensure that each completion object will only be present once. + """ + warnings.warn("_complete is a provisional API (as of IPython 6.0). " + "It may change without warnings. 
" + "Use in corresponding context manager.", + category=ProvisionalCompleterWarning, stacklevel=2) + + seen = set() + try: + for c in self._completions(text, offset, _timeout=self.jedi_compute_type_timeout/1000): + if c and (c in seen): + continue + yield c + seen.add(c) + except KeyboardInterrupt: + """if completions take too long and users send keyboard interrupt, + do not crash and return ASAP. """ + pass + + def _completions(self, full_text: str, offset: int, *, _timeout)->Iterator[Completion]: + """ + Core completion module.Same signature as :any:`completions`, with the + extra `timeout` parameter (in seconds). + + + Computing jedi's completion ``.type`` can be quite expensive (it is a + lazy property) and can require some warm-up, more warm up than just + computing the ``name`` of a completion. The warm-up can be : + + - Long warm-up the first time a module is encountered after + install/update: actually build parse/inference tree. + + - first time the module is encountered in a session: load tree from + disk. + + We don't want to block completions for tens of seconds so we give the + completer a "budget" of ``_timeout`` seconds per invocation to compute + completions types, the completions that have not yet been computed will + be marked as "unknown" an will have a chance to be computed next round + are things get cached. + + Keep in mind that Jedi is not the only thing treating the completion so + keep the timeout short-ish as if we take more than 0.3 second we still + have lots of processing to do. + + """ + deadline = time.monotonic() + _timeout + + + before = full_text[:offset] + cursor_line, cursor_column = position_to_cursor(full_text, offset) + + matched_text, matches, matches_origin, jedi_matches = self._complete( + full_text=full_text, cursor_line=cursor_line, cursor_pos=cursor_column) + + iter_jm = iter(jedi_matches) + if _timeout: + for jm in iter_jm: + try: + type_ = jm.type + except Exception: + if self.debug: + print("Error in Jedi getting type of ", jm) + type_ = None + delta = len(jm.name_with_symbols) - len(jm.complete) + if type_ == 'function': + signature = _make_signature(jm) + else: + signature = '' + yield Completion(start=offset - delta, + end=offset, + text=jm.name_with_symbols, + type=type_, + signature=signature, + _origin='jedi') + + if time.monotonic() > deadline: + break + + for jm in iter_jm: + delta = len(jm.name_with_symbols) - len(jm.complete) + yield Completion(start=offset - delta, + end=offset, + text=jm.name_with_symbols, + type='<unknown>', # don't compute type for speed + _origin='jedi', + signature='') + + + start_offset = before.rfind(matched_text) + + # TODO: + # Suppress this, right now just for debug. + if jedi_matches and matches and self.debug: + yield Completion(start=start_offset, end=offset, text='--jedi/ipython--', + _origin='debug', type='none', signature='') + + # I'm unsure if this is always true, so let's assert and see if it + # crash + assert before.endswith(matched_text) + for m, t in zip(matches, matches_origin): + yield Completion(start=start_offset, end=offset, text=m, _origin=t, signature='', type='<unknown>') + + + def complete(self, text=None, line_buffer=None, cursor_pos=None): + """Find completions for the given text and line context. + + Note that both the text and the line_buffer are optional, but at least + one of them must be given. + + Parameters + ---------- + text : string, optional + Text to perform the completion on. If not given, the line buffer + is split using the instance's CompletionSplitter object. 
+ + line_buffer : string, optional + If not given, the completer attempts to obtain the current line + buffer via readline. This keyword allows clients which are + requesting for text completions in non-readline contexts to inform + the completer of the entire text. + + cursor_pos : int, optional + Index of the cursor in the full line buffer. Should be provided by + remote frontends where kernel has no access to frontend state. + + Returns + ------- + text : str + Text that was actually used in the completion. + + matches : list + A list of completion matches. + + + .. note:: + + This API is likely to be deprecated and replaced by + :any:`IPCompleter.completions` in the future. + + + """ + warnings.warn('`Completer.complete` is pending deprecation since ' + 'IPython 6.0 and will be replaced by `Completer.completions`.', + PendingDeprecationWarning) + # potential todo, FOLD the 3rd throw away argument of _complete + # into the first 2 one. + return self._complete(line_buffer=line_buffer, cursor_pos=cursor_pos, text=text, cursor_line=0)[:2] + + def _complete(self, *, cursor_line, cursor_pos, line_buffer=None, text=None, + full_text=None) -> Tuple[str, List[str], List[str], Iterable[_FakeJediCompletion]]: + """ + + Like complete but can also returns raw jedi completions as well as the + origin of the completion text. This could (and should) be made much + cleaner but that will be simpler once we drop the old (and stateful) + :any:`complete` API. + + + With current provisional API, cursor_pos act both (depending on the + caller) as the offset in the ``text`` or ``line_buffer``, or as the + ``column`` when passing multiline strings this could/should be renamed + but would add extra noise. + """ + + # if the cursor position isn't given, the only sane assumption we can + # make is that it's at the end of the line (the common case) + if cursor_pos is None: + cursor_pos = len(line_buffer) if text is None else len(text) + + if self.use_main_ns: + self.namespace = __main__.__dict__ + + # if text is either None or an empty string, rely on the line buffer + if (not line_buffer) and full_text: + line_buffer = full_text.split('\n')[cursor_line] + if not text: # issue #11508: check line_buffer before calling split_line + text = self.splitter.split_line(line_buffer, cursor_pos) if line_buffer else '' + + if self.backslash_combining_completions: + # allow deactivation of these on windows. 
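            # For example (illustrative): if line_buffer is r'x = \alpha' with the
            # cursor at its end, latex_matches() below returns ('\\alpha', ['α'])
            # and those matches are returned immediately.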
+ base_text = text if not line_buffer else line_buffer[:cursor_pos] + latex_text, latex_matches = self.latex_matches(base_text) + if latex_matches: + return latex_text, latex_matches, ['latex_matches']*len(latex_matches), () + name_text = '' + name_matches = [] + # need to add self.fwd_unicode_match() function here when done + for meth in (self.unicode_name_matches, back_latex_name_matches, back_unicode_name_matches, self.fwd_unicode_match): + name_text, name_matches = meth(base_text) + if name_text: + return name_text, name_matches[:MATCHES_LIMIT], \ + [meth.__qualname__]*min(len(name_matches), MATCHES_LIMIT), () + + + # If no line buffer is given, assume the input text is all there was + if line_buffer is None: + line_buffer = text + + self.line_buffer = line_buffer + self.text_until_cursor = self.line_buffer[:cursor_pos] + + # Do magic arg matches + for matcher in self.magic_arg_matchers: + matches = list(matcher(line_buffer))[:MATCHES_LIMIT] + if matches: + origins = [matcher.__qualname__] * len(matches) + return text, matches, origins, () + + # Start with a clean slate of completions + matches = [] + + # FIXME: we should extend our api to return a dict with completions for + # different types of objects. The rlcomplete() method could then + # simply collapse the dict into a list for readline, but we'd have + # richer completion semantics in other environments. + completions = () + if self.use_jedi: + if not full_text: + full_text = line_buffer + completions = self._jedi_matches( + cursor_pos, cursor_line, full_text) + + if self.merge_completions: + matches = [] + for matcher in self.matchers: + try: + matches.extend([(m, matcher.__qualname__) + for m in matcher(text)]) + except: + # Show the ugly traceback if the matcher causes an + # exception, but do NOT crash the kernel! + sys.excepthook(*sys.exc_info()) + else: + for matcher in self.matchers: + matches = [(m, matcher.__qualname__) + for m in matcher(text)] + if matches: + break + + seen = set() + filtered_matches = set() + for m in matches: + t, c = m + if t not in seen: + filtered_matches.add(m) + seen.add(t) + + _filtered_matches = sorted(filtered_matches, key=lambda x: completions_sorting_key(x[0])) + + custom_res = [(m, 'custom') for m in self.dispatch_custom_completer(text) or []] + + _filtered_matches = custom_res or _filtered_matches + + _filtered_matches = _filtered_matches[:MATCHES_LIMIT] + _matches = [m[0] for m in _filtered_matches] + origins = [m[1] for m in _filtered_matches] + + self.matches = _matches + + return text, _matches, origins, completions + + def fwd_unicode_match(self, text:str) -> Tuple[str, list]: + if self._names is None: + self._names = [] + for c in range(0,0x10FFFF + 1): + try: + self._names.append(unicodedata.name(chr(c))) + except ValueError: + pass + + slashpos = text.rfind('\\') + # if text starts with slash + if slashpos > -1: + s = text[slashpos+1:] + candidates = [x for x in self._names if x.startswith(s)] + if candidates: + return s, candidates + else: + return '', () + + # if text does not start with slash + else: + return u'', () diff --git a/contrib/python/ipython/py3/IPython/core/completerlib.py b/contrib/python/ipython/py3/IPython/core/completerlib.py index 6f9ea485136..bda665d8a2b 100644 --- a/contrib/python/ipython/py3/IPython/core/completerlib.py +++ b/contrib/python/ipython/py3/IPython/core/completerlib.py @@ -1,402 +1,402 @@ -# encoding: utf-8 -"""Implementations for various useful completers. - -These are all loaded by default by IPython. 
-""" -#----------------------------------------------------------------------------- -# Copyright (C) 2010-2011 The IPython Development Team. -# -# Distributed under the terms of the BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -# Stdlib imports -import glob -import inspect -import itertools -import os -import re -import sys -from importlib import import_module -from importlib.machinery import all_suffixes - - -# Third-party imports -from time import time -from zipimport import zipimporter - -# Our own imports -from .completer import expand_user, compress_user -from .error import TryNext -from ..utils._process_common import arg_split - -# FIXME: this should be pulled in with the right call via the component system -from IPython import get_ipython - -from typing import List - -from __res import importer - -#----------------------------------------------------------------------------- -# Globals and constants -#----------------------------------------------------------------------------- -_suffixes = all_suffixes() - -# Time in seconds after which the rootmodules will be stored permanently in the -# ipython ip.db database (kept in the user's .ipython dir). -TIMEOUT_STORAGE = 2 - -# Time in seconds after which we give up -TIMEOUT_GIVEUP = 20 - -# Regular expression for the python import statement -import_re = re.compile(r'(?P<name>[^\W\d]\w*?)' - r'(?P<package>[/\\]__init__)?' - r'(?P<suffix>%s)$' % - r'|'.join(re.escape(s) for s in _suffixes)) - -# RE for the ipython %run command (python + ipython scripts) -magic_run_re = re.compile(r'.*(\.ipy|\.ipynb|\.py[w]?)$') - -#----------------------------------------------------------------------------- -# Local utilities -#----------------------------------------------------------------------------- - -arcadia_rootmodules_cache = None -arcadia_modules_cache = None - - -def arcadia_init_cache(): - global arcadia_rootmodules_cache, arcadia_modules_cache - arcadia_rootmodules_cache = set() - arcadia_modules_cache = {} - - all_modules = itertools.chain( - sys.builtin_module_names, - importer.memory - ) - - for name in all_modules: - path = name.split('.') - arcadia_rootmodules_cache.add(path[0]) - - prefix = path[0] - for element in path[1:]: - if element == '__init__': - continue - - arcadia_modules_cache.setdefault(prefix, set()).add(element) - prefix += '.' + element - - arcadia_rootmodules_cache = sorted(arcadia_rootmodules_cache) - arcadia_modules_cache = {k: sorted(v) for k, v in arcadia_modules_cache.items()} - - -def arcadia_module_list(mod): - if arcadia_modules_cache is None: - arcadia_init_cache() - - return arcadia_modules_cache.get(mod, ()) - - -def arcadia_get_root_modules(): - if arcadia_rootmodules_cache is None: - arcadia_init_cache() - - return arcadia_rootmodules_cache - - -def module_list(path): - """ - Return the list containing the names of the modules available in the given - folder. - """ - # sys.path has the cwd as an empty string, but isdir/listdir need it as '.' - if path == '': - path = '.' - - # A few local constants to be used in loops below - pjoin = os.path.join - - if os.path.isdir(path): - # Build a list of all files in the directory and all files - # in its subdirectories. 
For performance reasons, do not - # recurse more than one level into subdirectories. - files = [] - for root, dirs, nondirs in os.walk(path, followlinks=True): - subdir = root[len(path)+1:] - if subdir: - files.extend(pjoin(subdir, f) for f in nondirs) - dirs[:] = [] # Do not recurse into additional subdirectories. - else: - files.extend(nondirs) - - else: - try: - files = list(zipimporter(path)._files.keys()) - except: - files = [] - - # Build a list of modules which match the import_re regex. - modules = [] - for f in files: - m = import_re.match(f) - if m: - modules.append(m.group('name')) - return list(set(modules)) - - -def get_root_modules(): - """ - Returns a list containing the names of all the modules available in the - folders of the pythonpath. - - ip.db['rootmodules_cache'] maps sys.path entries to list of modules. - """ - ip = get_ipython() - if ip is None: - # No global shell instance to store cached list of modules. - # Don't try to scan for modules every time. - return list(sys.builtin_module_names) - - rootmodules_cache = ip.db.get('rootmodules_cache', {}) - rootmodules = list(sys.builtin_module_names) - start_time = time() - store = False - for path in sys.path: - try: - modules = rootmodules_cache[path] - except KeyError: - modules = module_list(path) - try: - modules.remove('__init__') - except ValueError: - pass - if path not in ('', '.'): # cwd modules should not be cached - rootmodules_cache[path] = modules - if time() - start_time > TIMEOUT_STORAGE and not store: - store = True - print("\nCaching the list of root modules, please wait!") - print("(This will only be done once - type '%rehashx' to " - "reset cache!)\n") - sys.stdout.flush() - if time() - start_time > TIMEOUT_GIVEUP: - print("This is taking too long, we give up.\n") - return [] - rootmodules.extend(modules) - if store: - ip.db['rootmodules_cache'] = rootmodules_cache - rootmodules = list(set(rootmodules)) - return rootmodules - - -def is_importable(module, attr, only_modules): - if only_modules: - return inspect.ismodule(getattr(module, attr)) - else: - return not(attr[:2] == '__' and attr[-2:] == '__') - - -def try_import(mod: str, only_modules=False) -> List[str]: - """ - Try to import given module and return list of potential completions. - """ - mod = mod.rstrip('.') - try: - m = import_module(mod) - except: - return [] - - filename = getattr(m, '__file__', '') - m_is_init = '__init__' in (filename or '') or filename == mod - - completions = [] - if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init: - completions.extend( [attr for attr in dir(m) if - is_importable(m, attr, only_modules)]) - - completions.extend(getattr(m, '__all__', [])) - if m_is_init: - completions.extend(arcadia_module_list(mod)) - completions_set = {c for c in completions if isinstance(c, str)} - completions_set.discard('__init__') - return sorted(completions_set) - - -#----------------------------------------------------------------------------- -# Completion-related functions. -#----------------------------------------------------------------------------- - -def quick_completer(cmd, completions): - r""" Easily create a trivial completer for a command. - - Takes either a list of completions, or all completions in string (that will - be split on whitespace). 
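# --- Editor's note (sketch, not part of the patch): `get_root_modules` above
# uses two thresholds: after TIMEOUT_STORAGE seconds of scanning it decides the
# cache is worth persisting to ip.db, and after TIMEOUT_GIVEUP seconds it
# abandons the scan. The standalone sketch below assumes a generic
# `scan_one(path)` callable and a plain dict standing in for ip.db.
from time import time

def _scan_with_timeouts(paths, scan_one, cache,
                        timeout_storage=2, timeout_giveup=20):
    """Return (module_names, store) with the same timing behaviour as above."""
    start, store, found = time(), False, []
    for path in paths:
        names = cache.get(path)
        if names is None:
            names = scan_one(path)
            cache[path] = names
            if time() - start > timeout_storage:
                store = True                 # scanning is slow: keep the cache
        if time() - start > timeout_giveup:
            return [], store                 # too slow: give up, like above
        found.extend(names)
    return sorted(set(found)), store
# --- end editor's note ---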
- - Example:: - - [d:\ipython]|1> import ipy_completers - [d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz']) - [d:\ipython]|3> foo b<TAB> - bar baz - [d:\ipython]|3> foo ba - """ - - if isinstance(completions, str): - completions = completions.split() - - def do_complete(self, event): - return completions - - get_ipython().set_hook('complete_command',do_complete, str_key = cmd) - -def module_completion(line): - """ - Returns a list containing the completion possibilities for an import line. - - The line looks like this : - 'import xml.d' - 'from xml.dom import' - """ - - words = line.split(' ') - nwords = len(words) - - # from whatever <tab> -> 'import ' - if nwords == 3 and words[0] == 'from': - return ['import '] - - # 'from xy<tab>' or 'import xy<tab>' - if nwords < 3 and (words[0] in {'%aimport', 'import', 'from'}) : - if nwords == 1: - return arcadia_get_root_modules() - mod = words[1].split('.') - if len(mod) < 2: - return arcadia_get_root_modules() - completion_list = try_import('.'.join(mod[:-1]), True) - return ['.'.join(mod[:-1] + [el]) for el in completion_list] - - # 'from xyz import abc<tab>' - if nwords >= 3 and words[0] == 'from': - mod = words[1] - return try_import(mod) - -#----------------------------------------------------------------------------- -# Completers -#----------------------------------------------------------------------------- -# These all have the func(self, event) signature to be used as custom -# completers - -def module_completer(self,event): - """Give completions after user has typed 'import ...' or 'from ...'""" - - # This works in all versions of python. While 2.5 has - # pkgutil.walk_packages(), that particular routine is fairly dangerous, - # since it imports *EVERYTHING* on sys.path. That is: a) very slow b) full - # of possibly problematic side effects. - # This search the folders in the sys.path for available modules. - - return module_completion(event.line) - -# FIXME: there's a lot of logic common to the run, cd and builtin file -# completers, that is currently reimplemented in each. - -def magic_run_completer(self, event): - """Complete files that end in .py or .ipy or .ipynb for the %run command. - """ - comps = arg_split(event.line, strict=False) - # relpath should be the current token that we need to complete. - if (len(comps) > 1) and (not event.line.endswith(' ')): - relpath = comps[-1].strip("'\"") - else: - relpath = '' - - #print("\nev=", event) # dbg - #print("rp=", relpath) # dbg - #print('comps=', comps) # dbg - - lglob = glob.glob - isdir = os.path.isdir - relpath, tilde_expand, tilde_val = expand_user(relpath) - - # Find if the user has already typed the first filename, after which we - # should complete on all files, since after the first one other files may - # be arguments to the input script. 
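# --- Editor's note (worked examples, not part of the patch): the branches of
# `module_completion` above are easiest to follow with concrete inputs. Actual
# return values depend on which modules are importable at runtime.
#
#   module_completion('from xml.dom ')         # nwords == 3, 'from' -> ['import ']
#   module_completion('import xml.d')          # nwords == 2 -> ['xml.dom', 'xml.sax', ...]
#                                              # (prefix filtering on 'd' happens later)
#   module_completion('from xml.dom import ')  # nwords >= 3, 'from' -> try_import('xml.dom')
# --- end editor's note ---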
- - if any(magic_run_re.match(c) for c in comps): - matches = [f.replace('\\','/') + ('/' if isdir(f) else '') - for f in lglob(relpath+'*')] - else: - dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*') if isdir(f)] - pys = [f.replace('\\','/') - for f in lglob(relpath+'*.py') + lglob(relpath+'*.ipy') + - lglob(relpath+'*.ipynb') + lglob(relpath + '*.pyw')] - - matches = dirs + pys - - #print('run comp:', dirs+pys) # dbg - return [compress_user(p, tilde_expand, tilde_val) for p in matches] - - -def cd_completer(self, event): - """Completer function for cd, which only returns directories.""" - ip = get_ipython() - relpath = event.symbol - - #print(event) # dbg - if event.line.endswith('-b') or ' -b ' in event.line: - # return only bookmark completions - bkms = self.db.get('bookmarks', None) - if bkms: - return bkms.keys() - else: - return [] - - if event.symbol == '-': - width_dh = str(len(str(len(ip.user_ns['_dh']) + 1))) - # jump in directory history by number - fmt = '-%0' + width_dh +'d [%s]' - ents = [ fmt % (i,s) for i,s in enumerate(ip.user_ns['_dh'])] - if len(ents) > 1: - return ents - return [] - - if event.symbol.startswith('--'): - return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']] - - # Expand ~ in path and normalize directory separators. - relpath, tilde_expand, tilde_val = expand_user(relpath) - relpath = relpath.replace('\\','/') - - found = [] - for d in [f.replace('\\','/') + '/' for f in glob.glob(relpath+'*') - if os.path.isdir(f)]: - if ' ' in d: - # we don't want to deal with any of that, complex code - # for this is elsewhere - raise TryNext - - found.append(d) - - if not found: - if os.path.isdir(relpath): - return [compress_user(relpath, tilde_expand, tilde_val)] - - # if no completions so far, try bookmarks - bks = self.db.get('bookmarks',{}) - bkmatches = [s for s in bks if s.startswith(event.symbol)] - if bkmatches: - return bkmatches - - raise TryNext - - return [compress_user(p, tilde_expand, tilde_val) for p in found] - -def reset_completer(self, event): - "A completer for %reset magic" - return '-f -s in out array dhist'.split() +# encoding: utf-8 +"""Implementations for various useful completers. + +These are all loaded by default by IPython. +""" +#----------------------------------------------------------------------------- +# Copyright (C) 2010-2011 The IPython Development Team. +# +# Distributed under the terms of the BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. 
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Stdlib imports +import glob +import inspect +import itertools +import os +import re +import sys +from importlib import import_module +from importlib.machinery import all_suffixes + + +# Third-party imports +from time import time +from zipimport import zipimporter + +# Our own imports +from .completer import expand_user, compress_user +from .error import TryNext +from ..utils._process_common import arg_split + +# FIXME: this should be pulled in with the right call via the component system +from IPython import get_ipython + +from typing import List + +from __res import importer + +#----------------------------------------------------------------------------- +# Globals and constants +#----------------------------------------------------------------------------- +_suffixes = all_suffixes() + +# Time in seconds after which the rootmodules will be stored permanently in the +# ipython ip.db database (kept in the user's .ipython dir). +TIMEOUT_STORAGE = 2 + +# Time in seconds after which we give up +TIMEOUT_GIVEUP = 20 + +# Regular expression for the python import statement +import_re = re.compile(r'(?P<name>[^\W\d]\w*?)' + r'(?P<package>[/\\]__init__)?' + r'(?P<suffix>%s)$' % + r'|'.join(re.escape(s) for s in _suffixes)) + +# RE for the ipython %run command (python + ipython scripts) +magic_run_re = re.compile(r'.*(\.ipy|\.ipynb|\.py[w]?)$') + +#----------------------------------------------------------------------------- +# Local utilities +#----------------------------------------------------------------------------- + +arcadia_rootmodules_cache = None +arcadia_modules_cache = None + + +def arcadia_init_cache(): + global arcadia_rootmodules_cache, arcadia_modules_cache + arcadia_rootmodules_cache = set() + arcadia_modules_cache = {} + + all_modules = itertools.chain( + sys.builtin_module_names, + importer.memory + ) + + for name in all_modules: + path = name.split('.') + arcadia_rootmodules_cache.add(path[0]) + + prefix = path[0] + for element in path[1:]: + if element == '__init__': + continue + + arcadia_modules_cache.setdefault(prefix, set()).add(element) + prefix += '.' + element + + arcadia_rootmodules_cache = sorted(arcadia_rootmodules_cache) + arcadia_modules_cache = {k: sorted(v) for k, v in arcadia_modules_cache.items()} + + +def arcadia_module_list(mod): + if arcadia_modules_cache is None: + arcadia_init_cache() + + return arcadia_modules_cache.get(mod, ()) + + +def arcadia_get_root_modules(): + if arcadia_rootmodules_cache is None: + arcadia_init_cache() + + return arcadia_rootmodules_cache + + +def module_list(path): + """ + Return the list containing the names of the modules available in the given + folder. + """ + # sys.path has the cwd as an empty string, but isdir/listdir need it as '.' + if path == '': + path = '.' + + # A few local constants to be used in loops below + pjoin = os.path.join + + if os.path.isdir(path): + # Build a list of all files in the directory and all files + # in its subdirectories. For performance reasons, do not + # recurse more than one level into subdirectories. + files = [] + for root, dirs, nondirs in os.walk(path, followlinks=True): + subdir = root[len(path)+1:] + if subdir: + files.extend(pjoin(subdir, f) for f in nondirs) + dirs[:] = [] # Do not recurse into additional subdirectories. 
+ else: + files.extend(nondirs) + + else: + try: + files = list(zipimporter(path)._files.keys()) + except: + files = [] + + # Build a list of modules which match the import_re regex. + modules = [] + for f in files: + m = import_re.match(f) + if m: + modules.append(m.group('name')) + return list(set(modules)) + + +def get_root_modules(): + """ + Returns a list containing the names of all the modules available in the + folders of the pythonpath. + + ip.db['rootmodules_cache'] maps sys.path entries to list of modules. + """ + ip = get_ipython() + if ip is None: + # No global shell instance to store cached list of modules. + # Don't try to scan for modules every time. + return list(sys.builtin_module_names) + + rootmodules_cache = ip.db.get('rootmodules_cache', {}) + rootmodules = list(sys.builtin_module_names) + start_time = time() + store = False + for path in sys.path: + try: + modules = rootmodules_cache[path] + except KeyError: + modules = module_list(path) + try: + modules.remove('__init__') + except ValueError: + pass + if path not in ('', '.'): # cwd modules should not be cached + rootmodules_cache[path] = modules + if time() - start_time > TIMEOUT_STORAGE and not store: + store = True + print("\nCaching the list of root modules, please wait!") + print("(This will only be done once - type '%rehashx' to " + "reset cache!)\n") + sys.stdout.flush() + if time() - start_time > TIMEOUT_GIVEUP: + print("This is taking too long, we give up.\n") + return [] + rootmodules.extend(modules) + if store: + ip.db['rootmodules_cache'] = rootmodules_cache + rootmodules = list(set(rootmodules)) + return rootmodules + + +def is_importable(module, attr, only_modules): + if only_modules: + return inspect.ismodule(getattr(module, attr)) + else: + return not(attr[:2] == '__' and attr[-2:] == '__') + + +def try_import(mod: str, only_modules=False) -> List[str]: + """ + Try to import given module and return list of potential completions. + """ + mod = mod.rstrip('.') + try: + m = import_module(mod) + except: + return [] + + filename = getattr(m, '__file__', '') + m_is_init = '__init__' in (filename or '') or filename == mod + + completions = [] + if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init: + completions.extend( [attr for attr in dir(m) if + is_importable(m, attr, only_modules)]) + + completions.extend(getattr(m, '__all__', [])) + if m_is_init: + completions.extend(arcadia_module_list(mod)) + completions_set = {c for c in completions if isinstance(c, str)} + completions_set.discard('__init__') + return sorted(completions_set) + + +#----------------------------------------------------------------------------- +# Completion-related functions. +#----------------------------------------------------------------------------- + +def quick_completer(cmd, completions): + r""" Easily create a trivial completer for a command. + + Takes either a list of completions, or all completions in string (that will + be split on whitespace). + + Example:: + + [d:\ipython]|1> import ipy_completers + [d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz']) + [d:\ipython]|3> foo b<TAB> + bar baz + [d:\ipython]|3> foo ba + """ + + if isinstance(completions, str): + completions = completions.split() + + def do_complete(self, event): + return completions + + get_ipython().set_hook('complete_command',do_complete, str_key = cmd) + +def module_completion(line): + """ + Returns a list containing the completion possibilities for an import line. 
+ + The line looks like this : + 'import xml.d' + 'from xml.dom import' + """ + + words = line.split(' ') + nwords = len(words) + + # from whatever <tab> -> 'import ' + if nwords == 3 and words[0] == 'from': + return ['import '] + + # 'from xy<tab>' or 'import xy<tab>' + if nwords < 3 and (words[0] in {'%aimport', 'import', 'from'}) : + if nwords == 1: + return arcadia_get_root_modules() + mod = words[1].split('.') + if len(mod) < 2: + return arcadia_get_root_modules() + completion_list = try_import('.'.join(mod[:-1]), True) + return ['.'.join(mod[:-1] + [el]) for el in completion_list] + + # 'from xyz import abc<tab>' + if nwords >= 3 and words[0] == 'from': + mod = words[1] + return try_import(mod) + +#----------------------------------------------------------------------------- +# Completers +#----------------------------------------------------------------------------- +# These all have the func(self, event) signature to be used as custom +# completers + +def module_completer(self,event): + """Give completions after user has typed 'import ...' or 'from ...'""" + + # This works in all versions of python. While 2.5 has + # pkgutil.walk_packages(), that particular routine is fairly dangerous, + # since it imports *EVERYTHING* on sys.path. That is: a) very slow b) full + # of possibly problematic side effects. + # This search the folders in the sys.path for available modules. + + return module_completion(event.line) + +# FIXME: there's a lot of logic common to the run, cd and builtin file +# completers, that is currently reimplemented in each. + +def magic_run_completer(self, event): + """Complete files that end in .py or .ipy or .ipynb for the %run command. + """ + comps = arg_split(event.line, strict=False) + # relpath should be the current token that we need to complete. + if (len(comps) > 1) and (not event.line.endswith(' ')): + relpath = comps[-1].strip("'\"") + else: + relpath = '' + + #print("\nev=", event) # dbg + #print("rp=", relpath) # dbg + #print('comps=', comps) # dbg + + lglob = glob.glob + isdir = os.path.isdir + relpath, tilde_expand, tilde_val = expand_user(relpath) + + # Find if the user has already typed the first filename, after which we + # should complete on all files, since after the first one other files may + # be arguments to the input script. 
+ + if any(magic_run_re.match(c) for c in comps): + matches = [f.replace('\\','/') + ('/' if isdir(f) else '') + for f in lglob(relpath+'*')] + else: + dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*') if isdir(f)] + pys = [f.replace('\\','/') + for f in lglob(relpath+'*.py') + lglob(relpath+'*.ipy') + + lglob(relpath+'*.ipynb') + lglob(relpath + '*.pyw')] + + matches = dirs + pys + + #print('run comp:', dirs+pys) # dbg + return [compress_user(p, tilde_expand, tilde_val) for p in matches] + + +def cd_completer(self, event): + """Completer function for cd, which only returns directories.""" + ip = get_ipython() + relpath = event.symbol + + #print(event) # dbg + if event.line.endswith('-b') or ' -b ' in event.line: + # return only bookmark completions + bkms = self.db.get('bookmarks', None) + if bkms: + return bkms.keys() + else: + return [] + + if event.symbol == '-': + width_dh = str(len(str(len(ip.user_ns['_dh']) + 1))) + # jump in directory history by number + fmt = '-%0' + width_dh +'d [%s]' + ents = [ fmt % (i,s) for i,s in enumerate(ip.user_ns['_dh'])] + if len(ents) > 1: + return ents + return [] + + if event.symbol.startswith('--'): + return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']] + + # Expand ~ in path and normalize directory separators. + relpath, tilde_expand, tilde_val = expand_user(relpath) + relpath = relpath.replace('\\','/') + + found = [] + for d in [f.replace('\\','/') + '/' for f in glob.glob(relpath+'*') + if os.path.isdir(f)]: + if ' ' in d: + # we don't want to deal with any of that, complex code + # for this is elsewhere + raise TryNext + + found.append(d) + + if not found: + if os.path.isdir(relpath): + return [compress_user(relpath, tilde_expand, tilde_val)] + + # if no completions so far, try bookmarks + bks = self.db.get('bookmarks',{}) + bkmatches = [s for s in bks if s.startswith(event.symbol)] + if bkmatches: + return bkmatches + + raise TryNext + + return [compress_user(p, tilde_expand, tilde_val) for p in found] + +def reset_completer(self, event): + "A completer for %reset magic" + return '-f -s in out array dhist'.split() diff --git a/contrib/python/ipython/py3/IPython/core/crashhandler.py b/contrib/python/ipython/py3/IPython/core/crashhandler.py index b39aac4c1f2..1e0b429d09a 100644 --- a/contrib/python/ipython/py3/IPython/core/crashhandler.py +++ b/contrib/python/ipython/py3/IPython/core/crashhandler.py @@ -1,228 +1,228 @@ -# encoding: utf-8 -"""sys.excepthook for IPython itself, leaves a detailed report on disk. - -Authors: - -* Fernando Perez -* Brian E. Granger -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu> -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. 
-#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import os -import sys -import traceback -from pprint import pformat - -from IPython.core import ultratb -from IPython.core.release import author_email -from IPython.utils.sysinfo import sys_info -from IPython.utils.py3compat import input - -from IPython.core.release import __version__ as version - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - -# Template for the user message. -_default_message_template = """\ -Oops, {app_name} crashed. We do our best to make it stable, but... - -A crash report was automatically generated with the following information: - - A verbatim copy of the crash traceback. - - A copy of your input history during this session. - - Data on your current {app_name} configuration. - -It was left in the file named: -\t'{crash_report_fname}' -If you can email this file to the developers, the information in it will help -them in understanding and correcting the problem. - -You can mail it to: {contact_name} at {contact_email} -with the subject '{app_name} Crash Report'. - -If you want to do it now, the following command will work (under Unix): -mail -s '{app_name} Crash Report' {contact_email} < {crash_report_fname} - -In your email, please also include information about: -- The operating system under which the crash happened: Linux, macOS, Windows, - other, and which exact version (for example: Ubuntu 16.04.3, macOS 10.13.2, - Windows 10 Pro), and whether it is 32-bit or 64-bit; -- How {app_name} was installed: using pip or conda, from GitHub, as part of - a Docker container, or other, providing more detail if possible; -- How to reproduce the crash: what exact sequence of instructions can one - input to get the same crash? Ideally, find a minimal yet complete sequence - of instructions that yields the crash. - -To ensure accurate tracking of this issue, please file a report about it at: -{bug_tracker} -""" - -_lite_message_template = """ -If you suspect this is an IPython {version} bug, please report it at: - https://github.com/ipython/ipython/issues -or send an email to the mailing list at {email} - -You can print a more detailed traceback right now with "%tb", or use "%debug" -to interactively debug it. - -Extra-detailed tracebacks for bug-reporting purposes can be enabled via: - {config}Application.verbose_crash=True -""" - - -class CrashHandler(object): - """Customizable crash handlers for IPython applications. - - Instances of this class provide a :meth:`__call__` method which can be - used as a ``sys.excepthook``. The :meth:`__call__` signature is:: - - def __call__(self, etype, evalue, etb) - """ - - message_template = _default_message_template - section_sep = '\n\n'+'*'*75+'\n\n' - - def __init__(self, app, contact_name=None, contact_email=None, - bug_tracker=None, show_crash_traceback=True, call_pdb=False): - """Create a new crash handler - - Parameters - ---------- - app : Application - A running :class:`Application` instance, which will be queried at - crash time for internal information. - - contact_name : str - A string with the name of the person to contact. - - contact_email : str - A string with the email address of the contact. 
- - bug_tracker : str - A string with the URL for your project's bug tracker. - - show_crash_traceback : bool - If false, don't print the crash traceback on stderr, only generate - the on-disk report - - Non-argument instance attributes: - - These instances contain some non-argument attributes which allow for - further customization of the crash handler's behavior. Please see the - source for further details. - """ - self.crash_report_fname = "Crash_report_%s.txt" % app.name - self.app = app - self.call_pdb = call_pdb - #self.call_pdb = True # dbg - self.show_crash_traceback = show_crash_traceback - self.info = dict(app_name = app.name, - contact_name = contact_name, - contact_email = contact_email, - bug_tracker = bug_tracker, - crash_report_fname = self.crash_report_fname) - - - def __call__(self, etype, evalue, etb): - """Handle an exception, call for compatible with sys.excepthook""" - - # do not allow the crash handler to be called twice without reinstalling it - # this prevents unlikely errors in the crash handling from entering an - # infinite loop. - sys.excepthook = sys.__excepthook__ - - # Report tracebacks shouldn't use color in general (safer for users) - color_scheme = 'NoColor' - - # Use this ONLY for developer debugging (keep commented out for release) - #color_scheme = 'Linux' # dbg - try: - rptdir = self.app.ipython_dir - except: - rptdir = os.getcwd() - if rptdir is None or not os.path.isdir(rptdir): - rptdir = os.getcwd() - report_name = os.path.join(rptdir,self.crash_report_fname) - # write the report filename into the instance dict so it can get - # properly expanded out in the user message template - self.crash_report_fname = report_name - self.info['crash_report_fname'] = report_name - TBhandler = ultratb.VerboseTB( - color_scheme=color_scheme, - long_header=1, - call_pdb=self.call_pdb, - ) - if self.call_pdb: - TBhandler(etype,evalue,etb) - return - else: - traceback = TBhandler.text(etype,evalue,etb,context=31) - - # print traceback to screen - if self.show_crash_traceback: - print(traceback, file=sys.stderr) - - # and generate a complete report on disk - try: - report = open(report_name,'w') - except: - print('Could not create crash report on disk.', file=sys.stderr) - return - - with report: - # Inform user on stderr of what happened - print('\n'+'*'*70+'\n', file=sys.stderr) - print(self.message_template.format(**self.info), file=sys.stderr) - - # Construct report on disk - report.write(self.make_report(traceback)) - - input("Hit <Enter> to quit (your terminal may close):") - - def make_report(self,traceback): - """Return a string containing a crash report.""" - - sec_sep = self.section_sep - - report = ['*'*75+'\n\n'+'IPython post-mortem report\n\n'] - rpt_add = report.append - rpt_add(sys_info()) - - try: - config = pformat(self.app.config) - rpt_add(sec_sep) - rpt_add('Application name: %s\n\n' % self.app_name) - rpt_add('Current user configuration structure:\n\n') - rpt_add(config) - except: - pass - rpt_add(sec_sep+'Crash traceback:\n\n' + traceback) - - return ''.join(report) - - -def crash_handler_lite(etype, evalue, tb): - """a light excepthook, adding a small message to the usual traceback""" - traceback.print_exception(etype, evalue, tb) - - from IPython.core.interactiveshell import InteractiveShell - if InteractiveShell.initialized(): - # we are in a Shell environment, give %magic example - config = "%config " - else: - # we are not in a shell, show generic config - config = "c." 
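# --- Editor's note (hypothetical wiring example, not part of the patch):
# `CrashHandler.__call__(etype, evalue, etb)` matches the sys.excepthook
# signature, so an application installs it roughly like this. The app class is
# just one possible Application; the contact details and tracker URL are
# placeholders.
import sys
from IPython.core.crashhandler import CrashHandler
from IPython.terminal.ipapp import TerminalIPythonApp

app = TerminalIPythonApp.instance()
handler = CrashHandler(app,
                       contact_name='Jane Doe',             # placeholder
                       contact_email='jane@example.org',    # placeholder
                       bug_tracker='https://example.org/bugs',
                       show_crash_traceback=True)
sys.excepthook = handler   # uncaught exceptions now produce Crash_report_<app.name>.txt
# --- end editor's note ---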
- print(_lite_message_template.format(email=author_email, config=config, version=version), file=sys.stderr) - +# encoding: utf-8 +"""sys.excepthook for IPython itself, leaves a detailed report on disk. + +Authors: + +* Fernando Perez +* Brian E. Granger +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu> +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import os +import sys +import traceback +from pprint import pformat + +from IPython.core import ultratb +from IPython.core.release import author_email +from IPython.utils.sysinfo import sys_info +from IPython.utils.py3compat import input + +from IPython.core.release import __version__ as version + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +# Template for the user message. +_default_message_template = """\ +Oops, {app_name} crashed. We do our best to make it stable, but... + +A crash report was automatically generated with the following information: + - A verbatim copy of the crash traceback. + - A copy of your input history during this session. + - Data on your current {app_name} configuration. + +It was left in the file named: +\t'{crash_report_fname}' +If you can email this file to the developers, the information in it will help +them in understanding and correcting the problem. + +You can mail it to: {contact_name} at {contact_email} +with the subject '{app_name} Crash Report'. + +If you want to do it now, the following command will work (under Unix): +mail -s '{app_name} Crash Report' {contact_email} < {crash_report_fname} + +In your email, please also include information about: +- The operating system under which the crash happened: Linux, macOS, Windows, + other, and which exact version (for example: Ubuntu 16.04.3, macOS 10.13.2, + Windows 10 Pro), and whether it is 32-bit or 64-bit; +- How {app_name} was installed: using pip or conda, from GitHub, as part of + a Docker container, or other, providing more detail if possible; +- How to reproduce the crash: what exact sequence of instructions can one + input to get the same crash? Ideally, find a minimal yet complete sequence + of instructions that yields the crash. + +To ensure accurate tracking of this issue, please file a report about it at: +{bug_tracker} +""" + +_lite_message_template = """ +If you suspect this is an IPython {version} bug, please report it at: + https://github.com/ipython/ipython/issues +or send an email to the mailing list at {email} + +You can print a more detailed traceback right now with "%tb", or use "%debug" +to interactively debug it. + +Extra-detailed tracebacks for bug-reporting purposes can be enabled via: + {config}Application.verbose_crash=True +""" + + +class CrashHandler(object): + """Customizable crash handlers for IPython applications. + + Instances of this class provide a :meth:`__call__` method which can be + used as a ``sys.excepthook``. 
The :meth:`__call__` signature is:: + + def __call__(self, etype, evalue, etb) + """ + + message_template = _default_message_template + section_sep = '\n\n'+'*'*75+'\n\n' + + def __init__(self, app, contact_name=None, contact_email=None, + bug_tracker=None, show_crash_traceback=True, call_pdb=False): + """Create a new crash handler + + Parameters + ---------- + app : Application + A running :class:`Application` instance, which will be queried at + crash time for internal information. + + contact_name : str + A string with the name of the person to contact. + + contact_email : str + A string with the email address of the contact. + + bug_tracker : str + A string with the URL for your project's bug tracker. + + show_crash_traceback : bool + If false, don't print the crash traceback on stderr, only generate + the on-disk report + + Non-argument instance attributes: + + These instances contain some non-argument attributes which allow for + further customization of the crash handler's behavior. Please see the + source for further details. + """ + self.crash_report_fname = "Crash_report_%s.txt" % app.name + self.app = app + self.call_pdb = call_pdb + #self.call_pdb = True # dbg + self.show_crash_traceback = show_crash_traceback + self.info = dict(app_name = app.name, + contact_name = contact_name, + contact_email = contact_email, + bug_tracker = bug_tracker, + crash_report_fname = self.crash_report_fname) + + + def __call__(self, etype, evalue, etb): + """Handle an exception, call for compatible with sys.excepthook""" + + # do not allow the crash handler to be called twice without reinstalling it + # this prevents unlikely errors in the crash handling from entering an + # infinite loop. + sys.excepthook = sys.__excepthook__ + + # Report tracebacks shouldn't use color in general (safer for users) + color_scheme = 'NoColor' + + # Use this ONLY for developer debugging (keep commented out for release) + #color_scheme = 'Linux' # dbg + try: + rptdir = self.app.ipython_dir + except: + rptdir = os.getcwd() + if rptdir is None or not os.path.isdir(rptdir): + rptdir = os.getcwd() + report_name = os.path.join(rptdir,self.crash_report_fname) + # write the report filename into the instance dict so it can get + # properly expanded out in the user message template + self.crash_report_fname = report_name + self.info['crash_report_fname'] = report_name + TBhandler = ultratb.VerboseTB( + color_scheme=color_scheme, + long_header=1, + call_pdb=self.call_pdb, + ) + if self.call_pdb: + TBhandler(etype,evalue,etb) + return + else: + traceback = TBhandler.text(etype,evalue,etb,context=31) + + # print traceback to screen + if self.show_crash_traceback: + print(traceback, file=sys.stderr) + + # and generate a complete report on disk + try: + report = open(report_name,'w') + except: + print('Could not create crash report on disk.', file=sys.stderr) + return + + with report: + # Inform user on stderr of what happened + print('\n'+'*'*70+'\n', file=sys.stderr) + print(self.message_template.format(**self.info), file=sys.stderr) + + # Construct report on disk + report.write(self.make_report(traceback)) + + input("Hit <Enter> to quit (your terminal may close):") + + def make_report(self,traceback): + """Return a string containing a crash report.""" + + sec_sep = self.section_sep + + report = ['*'*75+'\n\n'+'IPython post-mortem report\n\n'] + rpt_add = report.append + rpt_add(sys_info()) + + try: + config = pformat(self.app.config) + rpt_add(sec_sep) + rpt_add('Application name: %s\n\n' % self.app_name) + rpt_add('Current user 
configuration structure:\n\n') + rpt_add(config) + except: + pass + rpt_add(sec_sep+'Crash traceback:\n\n' + traceback) + + return ''.join(report) + + +def crash_handler_lite(etype, evalue, tb): + """a light excepthook, adding a small message to the usual traceback""" + traceback.print_exception(etype, evalue, tb) + + from IPython.core.interactiveshell import InteractiveShell + if InteractiveShell.initialized(): + # we are in a Shell environment, give %magic example + config = "%config " + else: + # we are not in a shell, show generic config + config = "c." + print(_lite_message_template.format(email=author_email, config=config, version=version), file=sys.stderr) + diff --git a/contrib/python/ipython/py3/IPython/core/debugger.py b/contrib/python/ipython/py3/IPython/core/debugger.py index 0622342239f..1744bdb8a8e 100644 --- a/contrib/python/ipython/py3/IPython/core/debugger.py +++ b/contrib/python/ipython/py3/IPython/core/debugger.py @@ -1,1099 +1,1099 @@ -# -*- coding: utf-8 -*- -""" -Pdb debugger class. - - -This is an extension to PDB which adds a number of new features. -Note that there is also the `IPython.terminal.debugger` class which provides UI -improvements. - -We also strongly recommend to use this via the `ipdb` package, which provides -extra configuration options. - -Among other things, this subclass of PDB: - - supports many IPython magics like pdef/psource - - hide frames in tracebacks based on `__tracebackhide__` - - allows to skip frames based on `__debuggerskip__` - -The skipping and hiding frames are configurable via the `skip_predicates` -command. - -By default, frames from readonly files will be hidden, frames containing -``__tracebackhide__=True`` will be hidden. - -Frames containing ``__debuggerskip__`` will be stepped over, frames who's parent -frames value of ``__debuggerskip__`` is ``True`` will be skipped. - - >>> def helpers_helper(): - ... pass - ... - ... def helper_1(): - ... print("don't step in me") - ... helpers_helpers() # will be stepped over unless breakpoint set. - ... - ... - ... def helper_2(): - ... print("in me neither") - ... - -One can define a decorator that wraps a function between the two helpers: - - >>> def pdb_skipped_decorator(function): - ... - ... - ... def wrapped_fn(*args, **kwargs): - ... __debuggerskip__ = True - ... helper_1() - ... __debuggerskip__ = False - ... result = function(*args, **kwargs) - ... __debuggerskip__ = True - ... helper_2() - ... # setting __debuggerskip__ to False again is not necessary - ... return result - ... - ... return wrapped_fn - -When decorating a function, ipdb will directly step into ``bar()`` by -default: - - >>> @foo_decorator - ... def bar(x, y): - ... return x * y - - -You can toggle the behavior with - - ipdb> skip_predicates debuggerskip false - -or configure it in your ``.pdbrc`` - - - -Licencse --------- - -Modified from the standard pdb.Pdb class to avoid including readline, so that -the command line completion of other programs which include this isn't -damaged. - -In the future, this class will be expanded with improvements over the standard -pdb. - -The original code in this file is mainly lifted out of cmd.py in Python 2.2, -with minor changes. Licensing should therefore be under the standard Python -terms. For details on the PSF (Python Software Foundation) standard license, -see: - -https://docs.python.org/2/license.html - - -All the changes since then are under the same license as IPython. 
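# --- Editor's note (illustrative, not part of the patch): the
# `__tracebackhide__` convention described in the docstring above is set as a
# local variable inside the frame that should be hidden, e.g.:
def _internal_helper():
    __tracebackhide__ = True        # hide this frame from `where`/`up`/`down`
    raise ValueError("boom")

def user_code():
    _internal_helper()              # this frame stays visible in ipdb

# With the debugger active, `where` skips _internal_helper's frame unless the
# user runs `skip_hidden false` (or `skip_predicates tbhide false`).
# --- end editor's note ---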
- -""" - -#***************************************************************************** -# -# This file is licensed under the PSF license. -# -# Copyright (C) 2001 Python Software Foundation, www.python.org -# Copyright (C) 2005-2006 Fernando Perez. <fperez@colorado.edu> -# -# -#***************************************************************************** - -import bdb -import functools -import inspect -import linecache -import sys -import warnings -import re -import os - -from IPython import get_ipython -from IPython.utils import PyColorize -from IPython.utils import coloransi, py3compat -from IPython.core.excolors import exception_colors -from IPython.testing.skipdoctest import skip_doctest - - -prompt = 'ipdb> ' - -#We have to check this directly from sys.argv, config struct not yet available -from pdb import Pdb as OldPdb - -# Allow the set_trace code to operate outside of an ipython instance, even if -# it does so with some limitations. The rest of this support is implemented in -# the Tracer constructor. - -DEBUGGERSKIP = "__debuggerskip__" - - -def make_arrow(pad): - """generate the leading arrow in front of traceback or debugger""" - if pad >= 2: - return '-'*(pad-2) + '> ' - elif pad == 1: - return '>' - return '' - - -def BdbQuit_excepthook(et, ev, tb, excepthook=None): - """Exception hook which handles `BdbQuit` exceptions. - - All other exceptions are processed using the `excepthook` - parameter. - """ - warnings.warn("`BdbQuit_excepthook` is deprecated since version 5.1", - DeprecationWarning, stacklevel=2) - if et==bdb.BdbQuit: - print('Exiting Debugger.') - elif excepthook is not None: - excepthook(et, ev, tb) - else: - # Backwards compatibility. Raise deprecation warning? - BdbQuit_excepthook.excepthook_ori(et,ev,tb) - - -def BdbQuit_IPython_excepthook(self,et,ev,tb,tb_offset=None): - warnings.warn( - "`BdbQuit_IPython_excepthook` is deprecated since version 5.1", - DeprecationWarning, stacklevel=2) - print('Exiting Debugger.') - - -class Tracer(object): - """ - DEPRECATED - - Class for local debugging, similar to pdb.set_trace. - - Instances of this class, when called, behave like pdb.set_trace, but - providing IPython's enhanced capabilities. - - This is implemented as a class which must be initialized in your own code - and not as a standalone function because we need to detect at runtime - whether IPython is already active or not. That detection is done in the - constructor, ensuring that this code plays nicely with a running IPython, - while functioning acceptably (though with limitations) if outside of it. - """ - - @skip_doctest - def __init__(self, colors=None): - """ - DEPRECATED - - Create a local debugger instance. - - Parameters - ---------- - - colors : str, optional - The name of the color scheme to use, it must be one of IPython's - valid color schemes. If not given, the function will default to - the current IPython scheme when running inside IPython, and to - 'NoColor' otherwise. - - Examples - -------- - :: - - from IPython.core.debugger import Tracer; debug_here = Tracer() - - Later in your code:: - - debug_here() # -> will open up the debugger at that point. - - Once the debugger activates, you can use all of its regular commands to - step through code, set breakpoints, etc. See the pdb documentation - from the Python standard library for usage details. 
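# --- Editor's note (worked examples of `make_arrow` above, not part of the
# patch): `pad` is the total width available for the marker.
#
#   make_arrow(7)  -> '-----> '   # (pad - 2) dashes followed by '> '
#   make_arrow(2)  -> '> '
#   make_arrow(1)  -> '>'
#   make_arrow(0)  -> ''
# --- end editor's note ---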
- """ - warnings.warn("`Tracer` is deprecated since version 5.1, directly use " - "`IPython.core.debugger.Pdb.set_trace()`", - DeprecationWarning, stacklevel=2) - - ip = get_ipython() - if ip is None: - # Outside of ipython, we set our own exception hook manually - sys.excepthook = functools.partial(BdbQuit_excepthook, - excepthook=sys.excepthook) - def_colors = 'NoColor' - else: - # In ipython, we use its custom exception handler mechanism - def_colors = ip.colors - ip.set_custom_exc((bdb.BdbQuit,), BdbQuit_IPython_excepthook) - - if colors is None: - colors = def_colors - - # The stdlib debugger internally uses a modified repr from the `repr` - # module, that limits the length of printed strings to a hardcoded - # limit of 30 characters. That much trimming is too aggressive, let's - # at least raise that limit to 80 chars, which should be enough for - # most interactive uses. - try: - from reprlib import aRepr - aRepr.maxstring = 80 - except: - # This is only a user-facing convenience, so any error we encounter - # here can be warned about but can be otherwise ignored. These - # printouts will tell us about problems if this API changes - import traceback - traceback.print_exc() - - self.debugger = Pdb(colors) - - def __call__(self): - """Starts an interactive debugger at the point where called. - - This is similar to the pdb.set_trace() function from the std lib, but - using IPython's enhanced debugger.""" - - self.debugger.set_trace(sys._getframe().f_back) - - -RGX_EXTRA_INDENT = re.compile(r'(?<=\n)\s+') - - -def strip_indentation(multiline_string): - return RGX_EXTRA_INDENT.sub('', multiline_string) - - -def decorate_fn_with_doc(new_fn, old_fn, additional_text=""): - """Make new_fn have old_fn's doc string. This is particularly useful - for the ``do_...`` commands that hook into the help system. - Adapted from from a comp.lang.python posting - by Duncan Booth.""" - def wrapper(*args, **kw): - return new_fn(*args, **kw) - if old_fn.__doc__: - wrapper.__doc__ = strip_indentation(old_fn.__doc__) + additional_text - return wrapper - - -class Pdb(OldPdb): - """Modified Pdb class, does not load readline. - - for a standalone version that uses prompt_toolkit, see - `IPython.terminal.debugger.TerminalPdb` and - `IPython.terminal.debugger.set_trace()` - - - This debugger can hide and skip frames that are tagged according to some predicates. - See the `skip_predicates` commands. - - """ - - default_predicates = { - "tbhide": True, - "readonly": False, - "ipython_internal": True, - "debuggerskip": True, - } - - def __init__(self, color_scheme=None, completekey=None, - stdin=None, stdout=None, context=5, **kwargs): - """Create a new IPython debugger. - - Parameters - ---------- - color_scheme : default None - Deprecated, do not use. - completekey : default None - Passed to pdb.Pdb. - stdin : default None - Passed to pdb.Pdb. - stdout : default None - Passed to pdb.Pdb. - context : int - Number of lines of source code context to show when - displaying stacktrace information. - **kwargs - Passed to pdb.Pdb. - - Notes - ----- - The possibilities are python version dependent, see the python - docs for more info. - """ - - # Parent constructor: - try: - self.context = int(context) - if self.context <= 0: - raise ValueError("Context must be a positive integer") - except (TypeError, ValueError): - raise ValueError("Context must be a positive integer") - - # `kwargs` ensures full compatibility with stdlib's `pdb.Pdb`. - OldPdb.__init__(self, completekey, stdin, stdout, **kwargs) - - # IPython changes... 
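# --- Editor's note (worked example, not part of the patch): RGX_EXTRA_INDENT
# above removes whatever whitespace follows each newline, which is how
# `strip_indentation` flattens indented docstrings for the `do_...` commands.
import re

_RGX = re.compile(r'(?<=\n)\s+')     # same pattern as RGX_EXTRA_INDENT above
doc = "Print lines of code.\n        Shows more lines than 'list' does."
print(_RGX.sub('', doc))
# prints:
#   Print lines of code.
#   Shows more lines than 'list' does.
# --- end editor's note ---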
- self.shell = get_ipython() - - if self.shell is None: - save_main = sys.modules['__main__'] - # No IPython instance running, we must create one - from IPython.terminal.interactiveshell import \ - TerminalInteractiveShell - self.shell = TerminalInteractiveShell.instance() - # needed by any code which calls __import__("__main__") after - # the debugger was entered. See also #9941. - sys.modules['__main__'] = save_main - - if color_scheme is not None: - warnings.warn( - "The `color_scheme` argument is deprecated since version 5.1", - DeprecationWarning, stacklevel=2) - else: - color_scheme = self.shell.colors - - self.aliases = {} - - # Create color table: we copy the default one from the traceback - # module and add a few attributes needed for debugging - self.color_scheme_table = exception_colors() - - # shorthands - C = coloransi.TermColors - cst = self.color_scheme_table - - cst['NoColor'].colors.prompt = C.NoColor - cst['NoColor'].colors.breakpoint_enabled = C.NoColor - cst['NoColor'].colors.breakpoint_disabled = C.NoColor - - cst['Linux'].colors.prompt = C.Green - cst['Linux'].colors.breakpoint_enabled = C.LightRed - cst['Linux'].colors.breakpoint_disabled = C.Red - - cst['LightBG'].colors.prompt = C.Blue - cst['LightBG'].colors.breakpoint_enabled = C.LightRed - cst['LightBG'].colors.breakpoint_disabled = C.Red - - cst['Neutral'].colors.prompt = C.Blue - cst['Neutral'].colors.breakpoint_enabled = C.LightRed - cst['Neutral'].colors.breakpoint_disabled = C.Red - - - # Add a python parser so we can syntax highlight source while - # debugging. - self.parser = PyColorize.Parser(style=color_scheme) - self.set_colors(color_scheme) - - # Set the prompt - the default prompt is '(Pdb)' - self.prompt = prompt - self.skip_hidden = True - self.report_skipped = True - - # list of predicates we use to skip frames - self._predicates = self.default_predicates - - # - def set_colors(self, scheme): - """Shorthand access to the color table scheme selector method.""" - self.color_scheme_table.set_active_scheme(scheme) - self.parser.style = scheme - - def set_trace(self, frame=None): - if frame is None: - frame = sys._getframe().f_back - self.initial_frame = frame - return super().set_trace(frame) - - def _hidden_predicate(self, frame): - """ - Given a frame return whether it it should be hidden or not by IPython. - """ - - if self._predicates["readonly"]: - fname = frame.f_code.co_filename - # we need to check for file existence and interactively define - # function would otherwise appear as RO. - if os.path.isfile(fname) and not os.access(fname, os.W_OK): - return True - - if self._predicates["tbhide"]: - if frame in (self.curframe, getattr(self, "initial_frame", None)): - return False - frame_locals = self._get_frame_locals(frame) - if "__tracebackhide__" not in frame_locals: - return False - return frame_locals["__tracebackhide__"] - return False - - def hidden_frames(self, stack): - """ - Given an index in the stack return wether it should be skipped. - - This is used in up/down and where to skip frames. - """ - # The f_locals dictionary is updated from the actual frame - # locals whenever the .f_locals accessor is called, so we - # avoid calling it here to preserve self.curframe_locals. - # Futhermore, there is no good reason to hide the current frame. 
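# --- Editor's note (sketch, not part of the patch): the "readonly" predicate
# checked in `_hidden_predicate` above reduces to a writability test on the
# frame's source file; in isolation:
import os

def _frame_file_is_readonly(frame) -> bool:
    """True when the frame's source file exists but is not writable."""
    fname = frame.f_code.co_filename
    return os.path.isfile(fname) and not os.access(fname, os.W_OK)
# --- end editor's note ---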
- ip_hide = [self._hidden_predicate(s[0]) for s in stack] - ip_start = [i for i, s in enumerate(ip_hide) if s == "__ipython_bottom__"] - if ip_start and self._predicates["ipython_internal"]: - ip_hide = [h if i > ip_start[0] else True for (i, h) in enumerate(ip_hide)] - return ip_hide - - def interaction(self, frame, traceback): - try: - OldPdb.interaction(self, frame, traceback) - except KeyboardInterrupt: - self.stdout.write("\n" + self.shell.get_exception_only()) - - def new_do_frame(self, arg): - OldPdb.do_frame(self, arg) - - def new_do_quit(self, arg): - - if hasattr(self, 'old_all_completions'): - self.shell.Completer.all_completions=self.old_all_completions - - return OldPdb.do_quit(self, arg) - - do_q = do_quit = decorate_fn_with_doc(new_do_quit, OldPdb.do_quit) - - def new_do_restart(self, arg): - """Restart command. In the context of ipython this is exactly the same - thing as 'quit'.""" - self.msg("Restart doesn't make sense here. Using 'quit' instead.") - return self.do_quit(arg) - - def print_stack_trace(self, context=None): - Colors = self.color_scheme_table.active_colors - ColorsNormal = Colors.Normal - if context is None: - context = self.context - try: - context=int(context) - if context <= 0: - raise ValueError("Context must be a positive integer") - except (TypeError, ValueError): - raise ValueError("Context must be a positive integer") - try: - skipped = 0 - for hidden, frame_lineno in zip(self.hidden_frames(self.stack), self.stack): - if hidden and self.skip_hidden: - skipped += 1 - continue - if skipped: - print( - f"{Colors.excName} [... skipping {skipped} hidden frame(s)]{ColorsNormal}\n" - ) - skipped = 0 - self.print_stack_entry(frame_lineno, context=context) - if skipped: - print( - f"{Colors.excName} [... skipping {skipped} hidden frame(s)]{ColorsNormal}\n" - ) - except KeyboardInterrupt: - pass - - def print_stack_entry(self, frame_lineno, prompt_prefix='\n-> ', - context=None): - if context is None: - context = self.context - try: - context=int(context) - if context <= 0: - raise ValueError("Context must be a positive integer") - except (TypeError, ValueError): - raise ValueError("Context must be a positive integer") - print(self.format_stack_entry(frame_lineno, '', context), file=self.stdout) - - # vds: >> - frame, lineno = frame_lineno - filename = frame.f_code.co_filename - self.shell.hooks.synchronize_with_editor(filename, lineno, 0) - # vds: << - - def _get_frame_locals(self, frame): - """ " - Acessing f_local of current frame reset the namespace, so we want to avoid - that or the following can happend - - ipdb> foo - "old" - ipdb> foo = "new" - ipdb> foo - "new" - ipdb> where - ipdb> foo - "old" - - So if frame is self.current_frame we instead return self.curframe_locals - - """ - if frame is self.curframe: - return self.curframe_locals - else: - return frame.f_locals - - def format_stack_entry(self, frame_lineno, lprefix=': ', context=None): - if context is None: - context = self.context - try: - context=int(context) - if context <= 0: - print("Context must be a positive integer", file=self.stdout) - except (TypeError, ValueError): - print("Context must be a positive integer", file=self.stdout) - try: - import reprlib # Py 3 - except ImportError: - import repr as reprlib # Py 2 - - ret = [] - - Colors = self.color_scheme_table.active_colors - ColorsNormal = Colors.Normal - tpl_link = u'%s%%s%s' % (Colors.filenameEm, ColorsNormal) - tpl_call = u'%s%%s%s%%s%s' % (Colors.vName, Colors.valEm, ColorsNormal) - tpl_line = u'%%s%s%%s %s%%s' % (Colors.lineno, 
ColorsNormal) - tpl_line_em = u'%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line, - ColorsNormal) - - frame, lineno = frame_lineno - - return_value = '' - loc_frame = self._get_frame_locals(frame) - if "__return__" in loc_frame: - rv = loc_frame["__return__"] - # return_value += '->' - return_value += reprlib.repr(rv) + "\n" - ret.append(return_value) - - #s = filename + '(' + `lineno` + ')' - filename = self.canonic(frame.f_code.co_filename) - link = tpl_link % py3compat.cast_unicode(filename) - - if frame.f_code.co_name: - func = frame.f_code.co_name - else: - func = "<lambda>" - - call = "" - if func != "?": - if "__args__" in loc_frame: - args = reprlib.repr(loc_frame["__args__"]) - else: - args = '()' - call = tpl_call % (func, args) - - # The level info should be generated in the same format pdb uses, to - # avoid breaking the pdbtrack functionality of python-mode in *emacs. - if frame is self.curframe: - ret.append('> ') - else: - ret.append(' ') - ret.append(u'%s(%s)%s\n' % (link,lineno,call)) - - start = lineno - 1 - context//2 - lines = linecache.getlines(filename) - start = min(start, len(lines) - context) - start = max(start, 0) - lines = lines[start : start + context] - - for i,line in enumerate(lines): - show_arrow = (start + 1 + i == lineno) - linetpl = (frame is self.curframe or show_arrow) \ - and tpl_line_em \ - or tpl_line - ret.append(self.__format_line(linetpl, filename, - start + 1 + i, line, - arrow = show_arrow) ) - return ''.join(ret) - - def __format_line(self, tpl_line, filename, lineno, line, arrow = False): - bp_mark = "" - bp_mark_color = "" - - new_line, err = self.parser.format2(line, 'str') - if not err: - line = new_line - - bp = None - if lineno in self.get_file_breaks(filename): - bps = self.get_breaks(filename, lineno) - bp = bps[-1] - - if bp: - Colors = self.color_scheme_table.active_colors - bp_mark = str(bp.number) - bp_mark_color = Colors.breakpoint_enabled - if not bp.enabled: - bp_mark_color = Colors.breakpoint_disabled - - numbers_width = 7 - if arrow: - # This is the line with the error - pad = numbers_width - len(str(lineno)) - len(bp_mark) - num = '%s%s' % (make_arrow(pad), str(lineno)) - else: - num = '%*s' % (numbers_width - len(bp_mark), str(lineno)) - - return tpl_line % (bp_mark_color + bp_mark, num, line) - - - def print_list_lines(self, filename, first, last): - """The printing (as opposed to the parsing part of a 'list' - command.""" - try: - Colors = self.color_scheme_table.active_colors - ColorsNormal = Colors.Normal - tpl_line = '%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal) - tpl_line_em = '%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line, ColorsNormal) - src = [] - if filename == "<string>" and hasattr(self, "_exec_filename"): - filename = self._exec_filename - - for lineno in range(first, last+1): - line = linecache.getline(filename, lineno) - if not line: - break - - if lineno == self.curframe.f_lineno: - line = self.__format_line(tpl_line_em, filename, lineno, line, arrow = True) - else: - line = self.__format_line(tpl_line, filename, lineno, line, arrow = False) - - src.append(line) - self.lineno = lineno - - print(''.join(src), file=self.stdout) - - except KeyboardInterrupt: - pass - - def do_skip_predicates(self, args): - """ - Turn on/off individual predicates as to whether a frame should be hidden/skip. - - The global option to skip (or not) hidden frames is set with skip_hidden - - To change the value of a predicate - - skip_predicates key [true|false] - - Call without arguments to see the current values. 
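The predicates listed by this command ultimately key off ordinary frame locals. In particular, any frame whose locals contain a truthy ``__tracebackhide__`` is treated as hidden by ``where``, ``up`` and ``down`` (subject to the ``tbhide`` predicate and ``skip_hidden``). A minimal sketch of tagging a frame this way; the helper names are made up for illustration:

from IPython.core.debugger import set_trace

def _internal_helper():
    __tracebackhide__ = True        # hide this frame from the interactive debugger
    user_code_with_bug()

def user_code_with_bug():
    set_trace()                     # drops into ipdb; 'where' reports the helper as a skipped hidden frame

if __name__ == "__main__":
    _internal_helper()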
- - To permanently change the value of an option add the corresponding - command to your ``~/.pdbrc`` file. If you are programmatically using the - Pdb instance you can also change the ``default_predicates`` class - attribute. - """ - if not args.strip(): - print("current predicates:") - for (p, v) in self._predicates.items(): - print(" ", p, ":", v) - return - type_value = args.strip().split(" ") - if len(type_value) != 2: - print( - f"Usage: skip_predicates <type> <value>, with <type> one of {set(self._predicates.keys())}" - ) - return - - type_, value = type_value - if type_ not in self._predicates: - print(f"{type_!r} not in {set(self._predicates.keys())}") - return - if value.lower() not in ("true", "yes", "1", "no", "false", "0"): - print( - f"{value!r} is invalid - use one of ('true', 'yes', '1', 'no', 'false', '0')" - ) - return - - self._predicates[type_] = value.lower() in ("true", "yes", "1") - if not any(self._predicates.values()): - print( - "Warning, all predicates set to False, skip_hidden may not have any effects." - ) - - def do_skip_hidden(self, arg): - """ - Change whether or not we should skip frames with the - __tracebackhide__ attribute. - """ - if not arg.strip(): - print( - f"skip_hidden = {self.skip_hidden}, use 'yes','no', 'true', or 'false' to change." - ) - elif arg.strip().lower() in ("true", "yes"): - self.skip_hidden = True - elif arg.strip().lower() in ("false", "no"): - self.skip_hidden = False - if not any(self._predicates.values()): - print( - "Warning, all predicates set to False, skip_hidden may not have any effects." - ) - - def do_list(self, arg): - """Print lines of code from the current stack frame - """ - self.lastcmd = 'list' - last = None - if arg: - try: - x = eval(arg, {}, {}) - if type(x) == type(()): - first, last = x - first = int(first) - last = int(last) - if last < first: - # Assume it's a count - last = first + last - else: - first = max(1, int(x) - 5) - except: - print('*** Error in argument:', repr(arg), file=self.stdout) - return - elif self.lineno is None: - first = max(1, self.curframe.f_lineno - 5) - else: - first = self.lineno + 1 - if last is None: - last = first + 10 - self.print_list_lines(self.curframe.f_code.co_filename, first, last) - - # vds: >> - lineno = first - filename = self.curframe.f_code.co_filename - self.shell.hooks.synchronize_with_editor(filename, lineno, 0) - # vds: << - - do_l = do_list - - def getsourcelines(self, obj): - lines, lineno = inspect.findsource(obj) - if inspect.isframe(obj) and obj.f_globals is self._get_frame_locals(obj): - # must be a module frame: do not try to cut a block out of it - return lines, 1 - elif inspect.ismodule(obj): - return lines, 1 - return inspect.getblock(lines[lineno:]), lineno+1 - - def do_longlist(self, arg): - """Print lines of code from the current stack frame. - - Shows more lines than 'list' does. - """ - self.lastcmd = 'longlist' - try: - lines, lineno = self.getsourcelines(self.curframe) - except OSError as err: - self.error(err) - return - last = lineno + len(lines) - self.print_list_lines(self.curframe.f_code.co_filename, lineno, last) - do_ll = do_longlist - - def do_debug(self, arg): - """debug code - Enter a recursive debugger that steps through the code - argument (which is an arbitrary expression or statement to be - executed in the current environment). 
- """ - trace_function = sys.gettrace() - sys.settrace(None) - globals = self.curframe.f_globals - locals = self.curframe_locals - p = self.__class__(completekey=self.completekey, - stdin=self.stdin, stdout=self.stdout) - p.use_rawinput = self.use_rawinput - p.prompt = "(%s) " % self.prompt.strip() - self.message("ENTERING RECURSIVE DEBUGGER") - sys.call_tracing(p.run, (arg, globals, locals)) - self.message("LEAVING RECURSIVE DEBUGGER") - sys.settrace(trace_function) - self.lastcmd = p.lastcmd - - def do_pdef(self, arg): - """Print the call signature for any callable object. - - The debugger interface to %pdef""" - namespaces = [ - ("Locals", self.curframe_locals), - ("Globals", self.curframe.f_globals), - ] - self.shell.find_line_magic("pdef")(arg, namespaces=namespaces) - - def do_pdoc(self, arg): - """Print the docstring for an object. - - The debugger interface to %pdoc.""" - namespaces = [ - ("Locals", self.curframe_locals), - ("Globals", self.curframe.f_globals), - ] - self.shell.find_line_magic("pdoc")(arg, namespaces=namespaces) - - def do_pfile(self, arg): - """Print (or run through pager) the file where an object is defined. - - The debugger interface to %pfile. - """ - namespaces = [ - ("Locals", self.curframe_locals), - ("Globals", self.curframe.f_globals), - ] - self.shell.find_line_magic("pfile")(arg, namespaces=namespaces) - - def do_pinfo(self, arg): - """Provide detailed information about an object. - - The debugger interface to %pinfo, i.e., obj?.""" - namespaces = [ - ("Locals", self.curframe_locals), - ("Globals", self.curframe.f_globals), - ] - self.shell.find_line_magic("pinfo")(arg, namespaces=namespaces) - - def do_pinfo2(self, arg): - """Provide extra detailed information about an object. - - The debugger interface to %pinfo2, i.e., obj??.""" - namespaces = [ - ("Locals", self.curframe_locals), - ("Globals", self.curframe.f_globals), - ] - self.shell.find_line_magic("pinfo2")(arg, namespaces=namespaces) - - def do_psource(self, arg): - """Print (or run through pager) the source code for an object.""" - namespaces = [ - ("Locals", self.curframe_locals), - ("Globals", self.curframe.f_globals), - ] - self.shell.find_line_magic("psource")(arg, namespaces=namespaces) - - def do_where(self, arg): - """w(here) - Print a stack trace, with the most recent frame at the bottom. - An arrow indicates the "current frame", which determines the - context of most commands. 'bt' is an alias for this command. - - Take a number as argument as an (optional) number of context line to - print""" - if arg: - try: - context = int(arg) - except ValueError as err: - self.error(err) - return - self.print_stack_trace(context) - else: - self.print_stack_trace() - - do_w = do_where - - def break_anywhere(self, frame): - """ - - _stop_in_decorator_internals is overly restrictive, as we may still want - to trace function calls, so we need to also update break_anywhere so - that is we don't `stop_here`, because of debugger skip, we may still - stop at any point inside the function - - """ - - sup = super().break_anywhere(frame) - if sup: - return sup - if self._predicates["debuggerskip"]: - if DEBUGGERSKIP in frame.f_code.co_varnames: - return True - if frame.f_back and self._get_frame_locals(frame.f_back).get(DEBUGGERSKIP): - return True - return False - - @skip_doctest - def _is_in_decorator_internal_and_should_skip(self, frame): - """ - Utility to tell us whether we are in a decorator internal and should stop. 
- - - - """ - - # if we are disabled don't skip - if not self._predicates["debuggerskip"]: - return False - - # if frame is tagged, skip by default. - if DEBUGGERSKIP in frame.f_code.co_varnames: - return True - - # if one of the parent frame value set to True skip as well. - - cframe = frame - while getattr(cframe, "f_back", None): - cframe = cframe.f_back - if self._get_frame_locals(cframe).get(DEBUGGERSKIP): - return True - - return False - - def stop_here(self, frame): - """Check if pdb should stop here""" - if not super().stop_here(frame): - return False - - if self._is_in_decorator_internal_and_should_skip(frame) is True: - return False - - hidden = False - if self.skip_hidden: - hidden = self._hidden_predicate(frame) - if hidden: - if self.report_skipped: - Colors = self.color_scheme_table.active_colors - ColorsNormal = Colors.Normal - print(f"{Colors.excName} [... skipped 1 hidden frame]{ColorsNormal}\n") - return False - return True - - def do_up(self, arg): - """u(p) [count] - Move the current frame count (default one) levels up in the - stack trace (to an older frame). - - Will skip hidden frames. - """ - # modified version of upstream that skips - # frames with __tracebackhide__ - if self.curindex == 0: - self.error("Oldest frame") - return - try: - count = int(arg or 1) - except ValueError: - self.error("Invalid frame count (%s)" % arg) - return - skipped = 0 - if count < 0: - _newframe = 0 - else: - _newindex = self.curindex - counter = 0 - hidden_frames = self.hidden_frames(self.stack) - for i in range(self.curindex - 1, -1, -1): - frame = self.stack[i][0] - if hidden_frames[i] and self.skip_hidden: - skipped += 1 - continue - counter += 1 - if counter >= count: - break - else: - # if no break occured. - self.error("all frames above hidden") - return - - Colors = self.color_scheme_table.active_colors - ColorsNormal = Colors.Normal - _newframe = i - self._select_frame(_newframe) - if skipped: - print( - f"{Colors.excName} [... skipped {skipped} hidden frame(s)]{ColorsNormal}\n" - ) - - def do_down(self, arg): - """d(own) [count] - Move the current frame count (default one) levels down in the - stack trace (to a newer frame). - - Will skip hidden frames. - """ - if self.curindex + 1 == len(self.stack): - self.error("Newest frame") - return - try: - count = int(arg or 1) - except ValueError: - self.error("Invalid frame count (%s)" % arg) - return - if count < 0: - _newframe = len(self.stack) - 1 - else: - _newindex = self.curindex - counter = 0 - skipped = 0 - hidden_frames = self.hidden_frames(self.stack) - for i in range(self.curindex + 1, len(self.stack)): - frame = self.stack[i][0] - if hidden_frames[i] and self.skip_hidden: - skipped += 1 - continue - counter += 1 - if counter >= count: - break - else: - self.error("all frames bellow hidden") - return - - Colors = self.color_scheme_table.active_colors - ColorsNormal = Colors.Normal - if skipped: - print( - f"{Colors.excName} [... skipped {skipped} hidden frame(s)]{ColorsNormal}\n" - ) - _newframe = i - - self._select_frame(_newframe) - - do_d = do_down - do_u = do_up - - def do_context(self, context): - """context number_of_lines - Set the number of lines of source code to show when displaying - stacktrace information. 
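The ``context`` value that this command adjusts can also be chosen up front, since the debugger's constructor accepts it directly. A small sketch:

from IPython.core.debugger import Pdb

debugger = Pdb(context=11)   # show 11 lines of source around the current line
debugger.set_trace()         # otherwise identical to the module-level set_trace()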
- """
- try:
- new_context = int(context)
- if new_context <= 0:
- raise ValueError()
- self.context = new_context
- except ValueError:
- self.error("The 'context' command requires a positive integer argument.")
-
-
-class InterruptiblePdb(Pdb):
- """Version of debugger where KeyboardInterrupt exits the debugger altogether."""
-
- def cmdloop(self, intro=None):
- """Wrap cmdloop() such that KeyboardInterrupt stops the debugger."""
- try:
- return OldPdb.cmdloop(self, intro=intro)
- except KeyboardInterrupt:
- self.stop_here = lambda frame: False
- self.do_quit("")
- sys.settrace(None)
- self.quitting = False
- raise
-
- def _cmdloop(self):
- while True:
- try:
- # keyboard interrupts allow for an easy way to cancel
- # the current command, so allow them during interactive input
- self.allow_kbdint = True
- self.cmdloop()
- self.allow_kbdint = False
- break
- except KeyboardInterrupt:
- self.message('--KeyboardInterrupt--')
- raise
-
-
-def set_trace(frame=None):
- """
- Start debugging from `frame`.
-
- If frame is not specified, debugging starts from caller's frame.
- """
- Pdb().set_trace(frame or sys._getframe().f_back)
+# -*- coding: utf-8 -*-
+"""
+Pdb debugger class.
+
+
+This is an extension to PDB which adds a number of new features.
+Note that there is also the `IPython.terminal.debugger` class which provides UI
+improvements.
+
+We also strongly recommend using this via the `ipdb` package, which provides
+extra configuration options.
+
+Among other things, this subclass of PDB:
+ - supports many IPython magics like pdef/psource
+ - hides frames in tracebacks based on `__tracebackhide__`
+ - allows skipping frames based on `__debuggerskip__`
+
+Frame skipping and hiding are configurable via the `skip_predicates`
+command.
+
+By default, frames from read-only files are hidden, and frames containing
+``__tracebackhide__ = True`` are hidden.
+
+Frames containing ``__debuggerskip__`` will be stepped over, and frames whose
+parent frame sets ``__debuggerskip__`` to ``True`` will be skipped.
+
+ >>> def helpers_helper():
+ ... pass
+ ...
+ ... def helper_1():
+ ... print("don't step in me")
+ ... helpers_helper() # will be stepped over unless a breakpoint is set.
+ ...
+ ...
+ ... def helper_2():
+ ... print("in me neither")
+ ...
+
+One can define a decorator that wraps a function between the two helpers:
+
+ >>> def pdb_skipped_decorator(function):
+ ...
+ ...
+ ... def wrapped_fn(*args, **kwargs):
+ ... __debuggerskip__ = True
+ ... helper_1()
+ ... __debuggerskip__ = False
+ ... result = function(*args, **kwargs)
+ ... __debuggerskip__ = True
+ ... helper_2()
+ ... # setting __debuggerskip__ to False again is not necessary
+ ... return result
+ ...
+ ... return wrapped_fn
+
+When decorating a function, ipdb will directly step into ``bar()`` by
+default:
+
+ >>> @pdb_skipped_decorator
+ ... def bar(x, y):
+ ... return x * y
+
+
+You can toggle the behavior with
+
+ ipdb> skip_predicates debuggerskip false
+
+or configure it in your ``.pdbrc``.
+
+
+
+License
+-------
+
+Modified from the standard pdb.Pdb class to avoid including readline, so that
+the command line completion of other programs which include this isn't
+damaged.
+
+In the future, this class will be expanded with improvements over the standard
+pdb.
+
+The original code in this file is mainly lifted out of cmd.py in Python 2.2,
+with minor changes. Licensing should therefore be under the standard Python
+terms.
For details on the PSF (Python Software Foundation) standard license, +see: + +https://docs.python.org/2/license.html + + +All the changes since then are under the same license as IPython. + +""" + +#***************************************************************************** +# +# This file is licensed under the PSF license. +# +# Copyright (C) 2001 Python Software Foundation, www.python.org +# Copyright (C) 2005-2006 Fernando Perez. <fperez@colorado.edu> +# +# +#***************************************************************************** + +import bdb +import functools +import inspect +import linecache +import sys +import warnings +import re +import os + +from IPython import get_ipython +from IPython.utils import PyColorize +from IPython.utils import coloransi, py3compat +from IPython.core.excolors import exception_colors +from IPython.testing.skipdoctest import skip_doctest + + +prompt = 'ipdb> ' + +#We have to check this directly from sys.argv, config struct not yet available +from pdb import Pdb as OldPdb + +# Allow the set_trace code to operate outside of an ipython instance, even if +# it does so with some limitations. The rest of this support is implemented in +# the Tracer constructor. + +DEBUGGERSKIP = "__debuggerskip__" + + +def make_arrow(pad): + """generate the leading arrow in front of traceback or debugger""" + if pad >= 2: + return '-'*(pad-2) + '> ' + elif pad == 1: + return '>' + return '' + + +def BdbQuit_excepthook(et, ev, tb, excepthook=None): + """Exception hook which handles `BdbQuit` exceptions. + + All other exceptions are processed using the `excepthook` + parameter. + """ + warnings.warn("`BdbQuit_excepthook` is deprecated since version 5.1", + DeprecationWarning, stacklevel=2) + if et==bdb.BdbQuit: + print('Exiting Debugger.') + elif excepthook is not None: + excepthook(et, ev, tb) + else: + # Backwards compatibility. Raise deprecation warning? + BdbQuit_excepthook.excepthook_ori(et,ev,tb) + + +def BdbQuit_IPython_excepthook(self,et,ev,tb,tb_offset=None): + warnings.warn( + "`BdbQuit_IPython_excepthook` is deprecated since version 5.1", + DeprecationWarning, stacklevel=2) + print('Exiting Debugger.') + + +class Tracer(object): + """ + DEPRECATED + + Class for local debugging, similar to pdb.set_trace. + + Instances of this class, when called, behave like pdb.set_trace, but + providing IPython's enhanced capabilities. + + This is implemented as a class which must be initialized in your own code + and not as a standalone function because we need to detect at runtime + whether IPython is already active or not. That detection is done in the + constructor, ensuring that this code plays nicely with a running IPython, + while functioning acceptably (though with limitations) if outside of it. + """ + + @skip_doctest + def __init__(self, colors=None): + """ + DEPRECATED + + Create a local debugger instance. + + Parameters + ---------- + + colors : str, optional + The name of the color scheme to use, it must be one of IPython's + valid color schemes. If not given, the function will default to + the current IPython scheme when running inside IPython, and to + 'NoColor' otherwise. + + Examples + -------- + :: + + from IPython.core.debugger import Tracer; debug_here = Tracer() + + Later in your code:: + + debug_here() # -> will open up the debugger at that point. + + Once the debugger activates, you can use all of its regular commands to + step through code, set breakpoints, etc. See the pdb documentation + from the Python standard library for usage details. 
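``Tracer`` is retained only for backwards compatibility and warns on construction; the non-deprecated way to get the same ``debug_here()`` behaviour is the module's own ``set_trace`` (equivalently ``Pdb().set_trace()``). A minimal sketch, with an illustrative function name:

from IPython.core.debugger import set_trace

def compute(x):
    set_trace()      # opens the IPython-flavoured debugger at this line
    return x * 2

compute(3)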
+ """ + warnings.warn("`Tracer` is deprecated since version 5.1, directly use " + "`IPython.core.debugger.Pdb.set_trace()`", + DeprecationWarning, stacklevel=2) + + ip = get_ipython() + if ip is None: + # Outside of ipython, we set our own exception hook manually + sys.excepthook = functools.partial(BdbQuit_excepthook, + excepthook=sys.excepthook) + def_colors = 'NoColor' + else: + # In ipython, we use its custom exception handler mechanism + def_colors = ip.colors + ip.set_custom_exc((bdb.BdbQuit,), BdbQuit_IPython_excepthook) + + if colors is None: + colors = def_colors + + # The stdlib debugger internally uses a modified repr from the `repr` + # module, that limits the length of printed strings to a hardcoded + # limit of 30 characters. That much trimming is too aggressive, let's + # at least raise that limit to 80 chars, which should be enough for + # most interactive uses. + try: + from reprlib import aRepr + aRepr.maxstring = 80 + except: + # This is only a user-facing convenience, so any error we encounter + # here can be warned about but can be otherwise ignored. These + # printouts will tell us about problems if this API changes + import traceback + traceback.print_exc() + + self.debugger = Pdb(colors) + + def __call__(self): + """Starts an interactive debugger at the point where called. + + This is similar to the pdb.set_trace() function from the std lib, but + using IPython's enhanced debugger.""" + + self.debugger.set_trace(sys._getframe().f_back) + + +RGX_EXTRA_INDENT = re.compile(r'(?<=\n)\s+') + + +def strip_indentation(multiline_string): + return RGX_EXTRA_INDENT.sub('', multiline_string) + + +def decorate_fn_with_doc(new_fn, old_fn, additional_text=""): + """Make new_fn have old_fn's doc string. This is particularly useful + for the ``do_...`` commands that hook into the help system. + Adapted from from a comp.lang.python posting + by Duncan Booth.""" + def wrapper(*args, **kw): + return new_fn(*args, **kw) + if old_fn.__doc__: + wrapper.__doc__ = strip_indentation(old_fn.__doc__) + additional_text + return wrapper + + +class Pdb(OldPdb): + """Modified Pdb class, does not load readline. + + for a standalone version that uses prompt_toolkit, see + `IPython.terminal.debugger.TerminalPdb` and + `IPython.terminal.debugger.set_trace()` + + + This debugger can hide and skip frames that are tagged according to some predicates. + See the `skip_predicates` commands. + + """ + + default_predicates = { + "tbhide": True, + "readonly": False, + "ipython_internal": True, + "debuggerskip": True, + } + + def __init__(self, color_scheme=None, completekey=None, + stdin=None, stdout=None, context=5, **kwargs): + """Create a new IPython debugger. + + Parameters + ---------- + color_scheme : default None + Deprecated, do not use. + completekey : default None + Passed to pdb.Pdb. + stdin : default None + Passed to pdb.Pdb. + stdout : default None + Passed to pdb.Pdb. + context : int + Number of lines of source code context to show when + displaying stacktrace information. + **kwargs + Passed to pdb.Pdb. + + Notes + ----- + The possibilities are python version dependent, see the python + docs for more info. + """ + + # Parent constructor: + try: + self.context = int(context) + if self.context <= 0: + raise ValueError("Context must be a positive integer") + except (TypeError, ValueError): + raise ValueError("Context must be a positive integer") + + # `kwargs` ensures full compatibility with stdlib's `pdb.Pdb`. + OldPdb.__init__(self, completekey, stdin, stdout, **kwargs) + + # IPython changes... 
+ self.shell = get_ipython() + + if self.shell is None: + save_main = sys.modules['__main__'] + # No IPython instance running, we must create one + from IPython.terminal.interactiveshell import \ + TerminalInteractiveShell + self.shell = TerminalInteractiveShell.instance() + # needed by any code which calls __import__("__main__") after + # the debugger was entered. See also #9941. + sys.modules['__main__'] = save_main + + if color_scheme is not None: + warnings.warn( + "The `color_scheme` argument is deprecated since version 5.1", + DeprecationWarning, stacklevel=2) + else: + color_scheme = self.shell.colors + + self.aliases = {} + + # Create color table: we copy the default one from the traceback + # module and add a few attributes needed for debugging + self.color_scheme_table = exception_colors() + + # shorthands + C = coloransi.TermColors + cst = self.color_scheme_table + + cst['NoColor'].colors.prompt = C.NoColor + cst['NoColor'].colors.breakpoint_enabled = C.NoColor + cst['NoColor'].colors.breakpoint_disabled = C.NoColor + + cst['Linux'].colors.prompt = C.Green + cst['Linux'].colors.breakpoint_enabled = C.LightRed + cst['Linux'].colors.breakpoint_disabled = C.Red + + cst['LightBG'].colors.prompt = C.Blue + cst['LightBG'].colors.breakpoint_enabled = C.LightRed + cst['LightBG'].colors.breakpoint_disabled = C.Red + + cst['Neutral'].colors.prompt = C.Blue + cst['Neutral'].colors.breakpoint_enabled = C.LightRed + cst['Neutral'].colors.breakpoint_disabled = C.Red + + + # Add a python parser so we can syntax highlight source while + # debugging. + self.parser = PyColorize.Parser(style=color_scheme) + self.set_colors(color_scheme) + + # Set the prompt - the default prompt is '(Pdb)' + self.prompt = prompt + self.skip_hidden = True + self.report_skipped = True + + # list of predicates we use to skip frames + self._predicates = self.default_predicates + + # + def set_colors(self, scheme): + """Shorthand access to the color table scheme selector method.""" + self.color_scheme_table.set_active_scheme(scheme) + self.parser.style = scheme + + def set_trace(self, frame=None): + if frame is None: + frame = sys._getframe().f_back + self.initial_frame = frame + return super().set_trace(frame) + + def _hidden_predicate(self, frame): + """ + Given a frame return whether it it should be hidden or not by IPython. + """ + + if self._predicates["readonly"]: + fname = frame.f_code.co_filename + # we need to check for file existence and interactively define + # function would otherwise appear as RO. + if os.path.isfile(fname) and not os.access(fname, os.W_OK): + return True + + if self._predicates["tbhide"]: + if frame in (self.curframe, getattr(self, "initial_frame", None)): + return False + frame_locals = self._get_frame_locals(frame) + if "__tracebackhide__" not in frame_locals: + return False + return frame_locals["__tracebackhide__"] + return False + + def hidden_frames(self, stack): + """ + Given an index in the stack return wether it should be skipped. + + This is used in up/down and where to skip frames. + """ + # The f_locals dictionary is updated from the actual frame + # locals whenever the .f_locals accessor is called, so we + # avoid calling it here to preserve self.curframe_locals. + # Futhermore, there is no good reason to hide the current frame. 
+ ip_hide = [self._hidden_predicate(s[0]) for s in stack] + ip_start = [i for i, s in enumerate(ip_hide) if s == "__ipython_bottom__"] + if ip_start and self._predicates["ipython_internal"]: + ip_hide = [h if i > ip_start[0] else True for (i, h) in enumerate(ip_hide)] + return ip_hide + + def interaction(self, frame, traceback): + try: + OldPdb.interaction(self, frame, traceback) + except KeyboardInterrupt: + self.stdout.write("\n" + self.shell.get_exception_only()) + + def new_do_frame(self, arg): + OldPdb.do_frame(self, arg) + + def new_do_quit(self, arg): + + if hasattr(self, 'old_all_completions'): + self.shell.Completer.all_completions=self.old_all_completions + + return OldPdb.do_quit(self, arg) + + do_q = do_quit = decorate_fn_with_doc(new_do_quit, OldPdb.do_quit) + + def new_do_restart(self, arg): + """Restart command. In the context of ipython this is exactly the same + thing as 'quit'.""" + self.msg("Restart doesn't make sense here. Using 'quit' instead.") + return self.do_quit(arg) + + def print_stack_trace(self, context=None): + Colors = self.color_scheme_table.active_colors + ColorsNormal = Colors.Normal + if context is None: + context = self.context + try: + context=int(context) + if context <= 0: + raise ValueError("Context must be a positive integer") + except (TypeError, ValueError): + raise ValueError("Context must be a positive integer") + try: + skipped = 0 + for hidden, frame_lineno in zip(self.hidden_frames(self.stack), self.stack): + if hidden and self.skip_hidden: + skipped += 1 + continue + if skipped: + print( + f"{Colors.excName} [... skipping {skipped} hidden frame(s)]{ColorsNormal}\n" + ) + skipped = 0 + self.print_stack_entry(frame_lineno, context=context) + if skipped: + print( + f"{Colors.excName} [... skipping {skipped} hidden frame(s)]{ColorsNormal}\n" + ) + except KeyboardInterrupt: + pass + + def print_stack_entry(self, frame_lineno, prompt_prefix='\n-> ', + context=None): + if context is None: + context = self.context + try: + context=int(context) + if context <= 0: + raise ValueError("Context must be a positive integer") + except (TypeError, ValueError): + raise ValueError("Context must be a positive integer") + print(self.format_stack_entry(frame_lineno, '', context), file=self.stdout) + + # vds: >> + frame, lineno = frame_lineno + filename = frame.f_code.co_filename + self.shell.hooks.synchronize_with_editor(filename, lineno, 0) + # vds: << + + def _get_frame_locals(self, frame): + """ " + Acessing f_local of current frame reset the namespace, so we want to avoid + that or the following can happend + + ipdb> foo + "old" + ipdb> foo = "new" + ipdb> foo + "new" + ipdb> where + ipdb> foo + "old" + + So if frame is self.current_frame we instead return self.curframe_locals + + """ + if frame is self.curframe: + return self.curframe_locals + else: + return frame.f_locals + + def format_stack_entry(self, frame_lineno, lprefix=': ', context=None): + if context is None: + context = self.context + try: + context=int(context) + if context <= 0: + print("Context must be a positive integer", file=self.stdout) + except (TypeError, ValueError): + print("Context must be a positive integer", file=self.stdout) + try: + import reprlib # Py 3 + except ImportError: + import repr as reprlib # Py 2 + + ret = [] + + Colors = self.color_scheme_table.active_colors + ColorsNormal = Colors.Normal + tpl_link = u'%s%%s%s' % (Colors.filenameEm, ColorsNormal) + tpl_call = u'%s%%s%s%%s%s' % (Colors.vName, Colors.valEm, ColorsNormal) + tpl_line = u'%%s%s%%s %s%%s' % (Colors.lineno, 
ColorsNormal) + tpl_line_em = u'%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line, + ColorsNormal) + + frame, lineno = frame_lineno + + return_value = '' + loc_frame = self._get_frame_locals(frame) + if "__return__" in loc_frame: + rv = loc_frame["__return__"] + # return_value += '->' + return_value += reprlib.repr(rv) + "\n" + ret.append(return_value) + + #s = filename + '(' + `lineno` + ')' + filename = self.canonic(frame.f_code.co_filename) + link = tpl_link % py3compat.cast_unicode(filename) + + if frame.f_code.co_name: + func = frame.f_code.co_name + else: + func = "<lambda>" + + call = "" + if func != "?": + if "__args__" in loc_frame: + args = reprlib.repr(loc_frame["__args__"]) + else: + args = '()' + call = tpl_call % (func, args) + + # The level info should be generated in the same format pdb uses, to + # avoid breaking the pdbtrack functionality of python-mode in *emacs. + if frame is self.curframe: + ret.append('> ') + else: + ret.append(' ') + ret.append(u'%s(%s)%s\n' % (link,lineno,call)) + + start = lineno - 1 - context//2 + lines = linecache.getlines(filename) + start = min(start, len(lines) - context) + start = max(start, 0) + lines = lines[start : start + context] + + for i,line in enumerate(lines): + show_arrow = (start + 1 + i == lineno) + linetpl = (frame is self.curframe or show_arrow) \ + and tpl_line_em \ + or tpl_line + ret.append(self.__format_line(linetpl, filename, + start + 1 + i, line, + arrow = show_arrow) ) + return ''.join(ret) + + def __format_line(self, tpl_line, filename, lineno, line, arrow = False): + bp_mark = "" + bp_mark_color = "" + + new_line, err = self.parser.format2(line, 'str') + if not err: + line = new_line + + bp = None + if lineno in self.get_file_breaks(filename): + bps = self.get_breaks(filename, lineno) + bp = bps[-1] + + if bp: + Colors = self.color_scheme_table.active_colors + bp_mark = str(bp.number) + bp_mark_color = Colors.breakpoint_enabled + if not bp.enabled: + bp_mark_color = Colors.breakpoint_disabled + + numbers_width = 7 + if arrow: + # This is the line with the error + pad = numbers_width - len(str(lineno)) - len(bp_mark) + num = '%s%s' % (make_arrow(pad), str(lineno)) + else: + num = '%*s' % (numbers_width - len(bp_mark), str(lineno)) + + return tpl_line % (bp_mark_color + bp_mark, num, line) + + + def print_list_lines(self, filename, first, last): + """The printing (as opposed to the parsing part of a 'list' + command.""" + try: + Colors = self.color_scheme_table.active_colors + ColorsNormal = Colors.Normal + tpl_line = '%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal) + tpl_line_em = '%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line, ColorsNormal) + src = [] + if filename == "<string>" and hasattr(self, "_exec_filename"): + filename = self._exec_filename + + for lineno in range(first, last+1): + line = linecache.getline(filename, lineno) + if not line: + break + + if lineno == self.curframe.f_lineno: + line = self.__format_line(tpl_line_em, filename, lineno, line, arrow = True) + else: + line = self.__format_line(tpl_line, filename, lineno, line, arrow = False) + + src.append(line) + self.lineno = lineno + + print(''.join(src), file=self.stdout) + + except KeyboardInterrupt: + pass + + def do_skip_predicates(self, args): + """ + Turn on/off individual predicates as to whether a frame should be hidden/skip. + + The global option to skip (or not) hidden frames is set with skip_hidden + + To change the value of a predicate + + skip_predicates key [true|false] + + Call without arguments to see the current values. 
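As the help text goes on to note, these defaults can also be changed outside of an interactive session, either from ``~/.pdbrc`` or programmatically via the ``default_predicates`` class attribute. A sketch of the programmatic route (the chosen value is only an example):

from IPython.core.debugger import Pdb

# hide frames coming from read-only files (e.g. installed packages) in every new instance
Pdb.default_predicates = {**Pdb.default_predicates, "readonly": True}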
+ + To permanently change the value of an option add the corresponding + command to your ``~/.pdbrc`` file. If you are programmatically using the + Pdb instance you can also change the ``default_predicates`` class + attribute. + """ + if not args.strip(): + print("current predicates:") + for (p, v) in self._predicates.items(): + print(" ", p, ":", v) + return + type_value = args.strip().split(" ") + if len(type_value) != 2: + print( + f"Usage: skip_predicates <type> <value>, with <type> one of {set(self._predicates.keys())}" + ) + return + + type_, value = type_value + if type_ not in self._predicates: + print(f"{type_!r} not in {set(self._predicates.keys())}") + return + if value.lower() not in ("true", "yes", "1", "no", "false", "0"): + print( + f"{value!r} is invalid - use one of ('true', 'yes', '1', 'no', 'false', '0')" + ) + return + + self._predicates[type_] = value.lower() in ("true", "yes", "1") + if not any(self._predicates.values()): + print( + "Warning, all predicates set to False, skip_hidden may not have any effects." + ) + + def do_skip_hidden(self, arg): + """ + Change whether or not we should skip frames with the + __tracebackhide__ attribute. + """ + if not arg.strip(): + print( + f"skip_hidden = {self.skip_hidden}, use 'yes','no', 'true', or 'false' to change." + ) + elif arg.strip().lower() in ("true", "yes"): + self.skip_hidden = True + elif arg.strip().lower() in ("false", "no"): + self.skip_hidden = False + if not any(self._predicates.values()): + print( + "Warning, all predicates set to False, skip_hidden may not have any effects." + ) + + def do_list(self, arg): + """Print lines of code from the current stack frame + """ + self.lastcmd = 'list' + last = None + if arg: + try: + x = eval(arg, {}, {}) + if type(x) == type(()): + first, last = x + first = int(first) + last = int(last) + if last < first: + # Assume it's a count + last = first + last + else: + first = max(1, int(x) - 5) + except: + print('*** Error in argument:', repr(arg), file=self.stdout) + return + elif self.lineno is None: + first = max(1, self.curframe.f_lineno - 5) + else: + first = self.lineno + 1 + if last is None: + last = first + 10 + self.print_list_lines(self.curframe.f_code.co_filename, first, last) + + # vds: >> + lineno = first + filename = self.curframe.f_code.co_filename + self.shell.hooks.synchronize_with_editor(filename, lineno, 0) + # vds: << + + do_l = do_list + + def getsourcelines(self, obj): + lines, lineno = inspect.findsource(obj) + if inspect.isframe(obj) and obj.f_globals is self._get_frame_locals(obj): + # must be a module frame: do not try to cut a block out of it + return lines, 1 + elif inspect.ismodule(obj): + return lines, 1 + return inspect.getblock(lines[lineno:]), lineno+1 + + def do_longlist(self, arg): + """Print lines of code from the current stack frame. + + Shows more lines than 'list' does. + """ + self.lastcmd = 'longlist' + try: + lines, lineno = self.getsourcelines(self.curframe) + except OSError as err: + self.error(err) + return + last = lineno + len(lines) + self.print_list_lines(self.curframe.f_code.co_filename, lineno, last) + do_ll = do_longlist + + def do_debug(self, arg): + """debug code + Enter a recursive debugger that steps through the code + argument (which is an arbitrary expression or statement to be + executed in the current environment). 
+ """ + trace_function = sys.gettrace() + sys.settrace(None) + globals = self.curframe.f_globals + locals = self.curframe_locals + p = self.__class__(completekey=self.completekey, + stdin=self.stdin, stdout=self.stdout) + p.use_rawinput = self.use_rawinput + p.prompt = "(%s) " % self.prompt.strip() + self.message("ENTERING RECURSIVE DEBUGGER") + sys.call_tracing(p.run, (arg, globals, locals)) + self.message("LEAVING RECURSIVE DEBUGGER") + sys.settrace(trace_function) + self.lastcmd = p.lastcmd + + def do_pdef(self, arg): + """Print the call signature for any callable object. + + The debugger interface to %pdef""" + namespaces = [ + ("Locals", self.curframe_locals), + ("Globals", self.curframe.f_globals), + ] + self.shell.find_line_magic("pdef")(arg, namespaces=namespaces) + + def do_pdoc(self, arg): + """Print the docstring for an object. + + The debugger interface to %pdoc.""" + namespaces = [ + ("Locals", self.curframe_locals), + ("Globals", self.curframe.f_globals), + ] + self.shell.find_line_magic("pdoc")(arg, namespaces=namespaces) + + def do_pfile(self, arg): + """Print (or run through pager) the file where an object is defined. + + The debugger interface to %pfile. + """ + namespaces = [ + ("Locals", self.curframe_locals), + ("Globals", self.curframe.f_globals), + ] + self.shell.find_line_magic("pfile")(arg, namespaces=namespaces) + + def do_pinfo(self, arg): + """Provide detailed information about an object. + + The debugger interface to %pinfo, i.e., obj?.""" + namespaces = [ + ("Locals", self.curframe_locals), + ("Globals", self.curframe.f_globals), + ] + self.shell.find_line_magic("pinfo")(arg, namespaces=namespaces) + + def do_pinfo2(self, arg): + """Provide extra detailed information about an object. + + The debugger interface to %pinfo2, i.e., obj??.""" + namespaces = [ + ("Locals", self.curframe_locals), + ("Globals", self.curframe.f_globals), + ] + self.shell.find_line_magic("pinfo2")(arg, namespaces=namespaces) + + def do_psource(self, arg): + """Print (or run through pager) the source code for an object.""" + namespaces = [ + ("Locals", self.curframe_locals), + ("Globals", self.curframe.f_globals), + ] + self.shell.find_line_magic("psource")(arg, namespaces=namespaces) + + def do_where(self, arg): + """w(here) + Print a stack trace, with the most recent frame at the bottom. + An arrow indicates the "current frame", which determines the + context of most commands. 'bt' is an alias for this command. + + Take a number as argument as an (optional) number of context line to + print""" + if arg: + try: + context = int(arg) + except ValueError as err: + self.error(err) + return + self.print_stack_trace(context) + else: + self.print_stack_trace() + + do_w = do_where + + def break_anywhere(self, frame): + """ + + _stop_in_decorator_internals is overly restrictive, as we may still want + to trace function calls, so we need to also update break_anywhere so + that is we don't `stop_here`, because of debugger skip, we may still + stop at any point inside the function + + """ + + sup = super().break_anywhere(frame) + if sup: + return sup + if self._predicates["debuggerskip"]: + if DEBUGGERSKIP in frame.f_code.co_varnames: + return True + if frame.f_back and self._get_frame_locals(frame.f_back).get(DEBUGGERSKIP): + return True + return False + + @skip_doctest + def _is_in_decorator_internal_and_should_skip(self, frame): + """ + Utility to tell us whether we are in a decorator internal and should stop. 
+ + + + """ + + # if we are disabled don't skip + if not self._predicates["debuggerskip"]: + return False + + # if frame is tagged, skip by default. + if DEBUGGERSKIP in frame.f_code.co_varnames: + return True + + # if one of the parent frame value set to True skip as well. + + cframe = frame + while getattr(cframe, "f_back", None): + cframe = cframe.f_back + if self._get_frame_locals(cframe).get(DEBUGGERSKIP): + return True + + return False + + def stop_here(self, frame): + """Check if pdb should stop here""" + if not super().stop_here(frame): + return False + + if self._is_in_decorator_internal_and_should_skip(frame) is True: + return False + + hidden = False + if self.skip_hidden: + hidden = self._hidden_predicate(frame) + if hidden: + if self.report_skipped: + Colors = self.color_scheme_table.active_colors + ColorsNormal = Colors.Normal + print(f"{Colors.excName} [... skipped 1 hidden frame]{ColorsNormal}\n") + return False + return True + + def do_up(self, arg): + """u(p) [count] + Move the current frame count (default one) levels up in the + stack trace (to an older frame). + + Will skip hidden frames. + """ + # modified version of upstream that skips + # frames with __tracebackhide__ + if self.curindex == 0: + self.error("Oldest frame") + return + try: + count = int(arg or 1) + except ValueError: + self.error("Invalid frame count (%s)" % arg) + return + skipped = 0 + if count < 0: + _newframe = 0 + else: + _newindex = self.curindex + counter = 0 + hidden_frames = self.hidden_frames(self.stack) + for i in range(self.curindex - 1, -1, -1): + frame = self.stack[i][0] + if hidden_frames[i] and self.skip_hidden: + skipped += 1 + continue + counter += 1 + if counter >= count: + break + else: + # if no break occured. + self.error("all frames above hidden") + return + + Colors = self.color_scheme_table.active_colors + ColorsNormal = Colors.Normal + _newframe = i + self._select_frame(_newframe) + if skipped: + print( + f"{Colors.excName} [... skipped {skipped} hidden frame(s)]{ColorsNormal}\n" + ) + + def do_down(self, arg): + """d(own) [count] + Move the current frame count (default one) levels down in the + stack trace (to a newer frame). + + Will skip hidden frames. + """ + if self.curindex + 1 == len(self.stack): + self.error("Newest frame") + return + try: + count = int(arg or 1) + except ValueError: + self.error("Invalid frame count (%s)" % arg) + return + if count < 0: + _newframe = len(self.stack) - 1 + else: + _newindex = self.curindex + counter = 0 + skipped = 0 + hidden_frames = self.hidden_frames(self.stack) + for i in range(self.curindex + 1, len(self.stack)): + frame = self.stack[i][0] + if hidden_frames[i] and self.skip_hidden: + skipped += 1 + continue + counter += 1 + if counter >= count: + break + else: + self.error("all frames bellow hidden") + return + + Colors = self.color_scheme_table.active_colors + ColorsNormal = Colors.Normal + if skipped: + print( + f"{Colors.excName} [... skipped {skipped} hidden frame(s)]{ColorsNormal}\n" + ) + _newframe = i + + self._select_frame(_newframe) + + do_d = do_down + do_u = do_up + + def do_context(self, context): + """context number_of_lines + Set the number of lines of source code to show when displaying + stacktrace information. 
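The ``debuggerskip`` predicate consulted in ``break_anywhere`` and ``stop_here`` above is driven by a ``__debuggerskip__`` local. A concrete, runnable version of the decorator pattern sketched in the module docstring; the helper names here are illustrative:

import functools

def noisy_setup():
    print("boilerplate the user rarely wants to single-step through")

def skip_helpers(function):
    @functools.wraps(function)
    def wrapped(*args, **kwargs):
        __debuggerskip__ = True      # the debugger steps over this helper call
        noisy_setup()
        __debuggerskip__ = False     # ...and stops again inside `function`
        return function(*args, **kwargs)
    return wrapped

@skip_helpers
def bar(x, y):
    return x * y

Under ipdb, stepping into ``bar(2, 3)`` then lands directly in ``bar``'s body rather than in ``wrapped`` or ``noisy_setup``, unless ``skip_predicates debuggerskip false`` has been issued.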
+ """ + try: + new_context = int(context) + if new_context <= 0: + raise ValueError() + self.context = new_context + except ValueError: + self.error("The 'context' command requires a positive integer argument.") + + +class InterruptiblePdb(Pdb): + """Version of debugger where KeyboardInterrupt exits the debugger altogether.""" + + def cmdloop(self, intro=None): + """Wrap cmdloop() such that KeyboardInterrupt stops the debugger.""" + try: + return OldPdb.cmdloop(self, intro=intro) + except KeyboardInterrupt: + self.stop_here = lambda frame: False + self.do_quit("") + sys.settrace(None) + self.quitting = False + raise + + def _cmdloop(self): + while True: + try: + # keyboard interrupts allow for an easy way to cancel + # the current command, so allow them during interactive input + self.allow_kbdint = True + self.cmdloop() + self.allow_kbdint = False + break + except KeyboardInterrupt: + self.message('--KeyboardInterrupt--') + raise + + +def set_trace(frame=None): + """ + Start debugging from `frame`. + + If frame is not specified, debugging starts from caller's frame. + """ + Pdb().set_trace(frame or sys._getframe().f_back) diff --git a/contrib/python/ipython/py3/IPython/core/display.py b/contrib/python/ipython/py3/IPython/core/display.py index f5a5a90e4a9..f45e7599c9e 100644 --- a/contrib/python/ipython/py3/IPython/core/display.py +++ b/contrib/python/ipython/py3/IPython/core/display.py @@ -1,1556 +1,1556 @@ -# -*- coding: utf-8 -*- -"""Top-level display functions for displaying object in different formats.""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - - -from binascii import b2a_hex, b2a_base64, hexlify -import json -import mimetypes -import os -import struct -import sys -import warnings -from copy import deepcopy -from os.path import splitext -from pathlib import Path, PurePath - -from IPython.utils.py3compat import cast_unicode -from IPython.testing.skipdoctest import skip_doctest - -__all__ = ['display', 'display_pretty', 'display_html', 'display_markdown', -'display_svg', 'display_png', 'display_jpeg', 'display_latex', 'display_json', -'display_javascript', 'display_pdf', 'DisplayObject', 'TextDisplayObject', -'Pretty', 'HTML', 'Markdown', 'Math', 'Latex', 'SVG', 'ProgressBar', 'JSON', -'GeoJSON', 'Javascript', 'Image', 'clear_output', 'set_matplotlib_formats', -'set_matplotlib_close', 'publish_display_data', 'update_display', 'DisplayHandle', -'Video'] - -#----------------------------------------------------------------------------- -# utility functions -#----------------------------------------------------------------------------- - -def _safe_exists(path): - """Check path, but don't let exceptions raise""" - try: - return os.path.exists(path) - except Exception: - return False - -def _merge(d1, d2): - """Like update, but merges sub-dicts instead of clobbering at the top level. - - Updates d1 in-place - """ - - if not isinstance(d2, dict) or not isinstance(d1, dict): - return d2 - for key, value in d2.items(): - d1[key] = _merge(d1.get(key), value) - return d1 - -def _display_mimetype(mimetype, objs, raw=False, metadata=None): - """internal implementation of all display_foo methods - - Parameters - ---------- - mimetype : str - The mimetype to be published (e.g. 'image/png') - *objs : object - The Python objects to display, or if raw=True raw text data to - display. - raw : bool - Are the data objects raw data or Python objects that need to be - formatted before display? 
[default: False] - metadata : dict (optional) - Metadata to be associated with the specific mimetype output. - """ - if metadata: - metadata = {mimetype: metadata} - if raw: - # turn list of pngdata into list of { 'image/png': pngdata } - objs = [ {mimetype: obj} for obj in objs ] - display(*objs, raw=raw, metadata=metadata, include=[mimetype]) - -#----------------------------------------------------------------------------- -# Main functions -#----------------------------------------------------------------------------- - -# use * to indicate transient is keyword-only -def publish_display_data(data, metadata=None, source=None, *, transient=None, **kwargs): - """Publish data and metadata to all frontends. - - See the ``display_data`` message in the messaging documentation for - more details about this message type. - - Keys of data and metadata can be any mime-type. - - Parameters - ---------- - data : dict - A dictionary having keys that are valid MIME types (like - 'text/plain' or 'image/svg+xml') and values that are the data for - that MIME type. The data itself must be a JSON'able data - structure. Minimally all data should have the 'text/plain' data, - which can be displayed by all frontends. If more than the plain - text is given, it is up to the frontend to decide which - representation to use. - metadata : dict - A dictionary for metadata related to the data. This can contain - arbitrary key, value pairs that frontends can use to interpret - the data. mime-type keys matching those in data can be used - to specify metadata about particular representations. - source : str, deprecated - Unused. - transient : dict, keyword-only - A dictionary of transient data, such as display_id. - """ - from IPython.core.interactiveshell import InteractiveShell - - display_pub = InteractiveShell.instance().display_pub - - # only pass transient if supplied, - # to avoid errors with older ipykernel. - # TODO: We could check for ipykernel version and provide a detailed upgrade message. - if transient: - kwargs['transient'] = transient - - display_pub.publish( - data=data, - metadata=metadata, - **kwargs - ) - - -def _new_id(): - """Generate a new random text id with urandom""" - return b2a_hex(os.urandom(16)).decode('ascii') - - -def display(*objs, include=None, exclude=None, metadata=None, transient=None, display_id=None, **kwargs): - """Display a Python object in all frontends. - - By default all representations will be computed and sent to the frontends. - Frontends can decide which representation is used and how. - - In terminal IPython this will be similar to using :func:`print`, for use in richer - frontends see Jupyter notebook examples with rich display logic. - - Parameters - ---------- - *objs : object - The Python objects to display. - raw : bool, optional - Are the objects to be displayed already mimetype-keyed dicts of raw display data, - or Python objects that need to be formatted before display? [default: False] - include : list, tuple or set, optional - A list of format type strings (MIME types) to include in the - format data dict. If this is set *only* the format types included - in this list will be computed. - exclude : list, tuple or set, optional - A list of format type strings (MIME types) to exclude in the format - data dict. If this is set all format types will be computed, - except for those included in this argument. - metadata : dict, optional - A dictionary of metadata to associate with the output. 
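``publish_display_data`` above is the low-level call that ``display`` eventually funnels its formatted mimebundles into; it can be used directly when the bundle is already built. A minimal sketch, only meaningful inside an IPython or Jupyter session (the strings are placeholders):

from IPython.display import publish_display_data

publish_display_data(
    data={
        "text/plain": "3 items processed",          # fallback every frontend can render
        "text/html": "<b>3</b> items processed",    # richer form, used where supported
    }
)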
- mime-type keys in this dictionary will be associated with the individual - representation formats, if they exist. - transient : dict, optional - A dictionary of transient data to associate with the output. - Data in this dict should not be persisted to files (e.g. notebooks). - display_id : str, bool optional - Set an id for the display. - This id can be used for updating this display area later via update_display. - If given as `True`, generate a new `display_id` - clear : bool, optional - Should the output area be cleared before displaying anything? If True, - this will wait for additional output before clearing. [default: False] - kwargs: additional keyword-args, optional - Additional keyword-arguments are passed through to the display publisher. - - Returns - ------- - - handle: DisplayHandle - Returns a handle on updatable displays for use with :func:`update_display`, - if `display_id` is given. Returns :any:`None` if no `display_id` is given - (default). - - Examples - -------- - - >>> class Json(object): - ... def __init__(self, json): - ... self.json = json - ... def _repr_pretty_(self, pp, cycle): - ... import json - ... pp.text(json.dumps(self.json, indent=2)) - ... def __repr__(self): - ... return str(self.json) - ... - - >>> d = Json({1:2, 3: {4:5}}) - - >>> print(d) - {1: 2, 3: {4: 5}} - - >>> display(d) - { - "1": 2, - "3": { - "4": 5 - } - } - - >>> def int_formatter(integer, pp, cycle): - ... pp.text('I'*integer) - - >>> plain = get_ipython().display_formatter.formatters['text/plain'] - >>> plain.for_type(int, int_formatter) - <function _repr_pprint at 0x...> - >>> display(7-5) - II - - >>> del plain.type_printers[int] - >>> display(7-5) - 2 - - See Also - -------- - - :func:`update_display` - - Notes - ----- - - In Python, objects can declare their textual representation using the - `__repr__` method. IPython expands on this idea and allows objects to declare - other, rich representations including: - - - HTML - - JSON - - PNG - - JPEG - - SVG - - LaTeX - - A single object can declare some or all of these representations; all are - handled by IPython's display system. - - The main idea of the first approach is that you have to implement special - display methods when you define your class, one for each representation you - want to use. Here is a list of the names of the special methods and the - values they must return: - - - `_repr_html_`: return raw HTML as a string, or a tuple (see below). - - `_repr_json_`: return a JSONable dict, or a tuple (see below). - - `_repr_jpeg_`: return raw JPEG data, or a tuple (see below). - - `_repr_png_`: return raw PNG data, or a tuple (see below). - - `_repr_svg_`: return raw SVG data as a string, or a tuple (see below). - - `_repr_latex_`: return LaTeX commands in a string surrounded by "$", - or a tuple (see below). - - `_repr_mimebundle_`: return a full mimebundle containing the mapping - from all mimetypes to data. - Use this for any mime-type not listed above. - - The above functions may also return the object's metadata alonside the - data. If the metadata is available, the functions will return a tuple - containing the data and metadata, in that order. If there is no metadata - available, then the functions will return the data only. - - When you are directly writing your own classes, you can adapt them for - display in IPython by following the above approach. But in practice, you - often need to work with existing classes that you can't easily modify. 
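A minimal sketch of that first approach, with an illustrative class name: the object advertises an HTML form alongside its plain ``repr``, and the frontend picks whichever representation it supports.

class Money:
    """Toy value object with a rich HTML representation."""
    def __init__(self, amount, currency="EUR"):
        self.amount = amount
        self.currency = currency

    def __repr__(self):              # plain-text fallback
        return f"{self.amount:.2f} {self.currency}"

    def _repr_html_(self):           # picked up by HTML-capable frontends
        return f"<b>{self.amount:.2f}</b> {self.currency}"

``display(Money(12.5))`` then renders bold HTML in a notebook and falls back to the plain repr in a terminal.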
- - You can refer to the documentation on integrating with the display system in - order to register custom formatters for already existing types - (:ref:`integrating_rich_display`). - - .. versionadded:: 5.4 display available without import - .. versionadded:: 6.1 display available without import - - Since IPython 5.4 and 6.1 :func:`display` is automatically made available to - the user without import. If you are using display in a document that might - be used in a pure python context or with older version of IPython, use the - following import at the top of your file:: - - from IPython.display import display - - """ - from IPython.core.interactiveshell import InteractiveShell - - if not InteractiveShell.initialized(): - # Directly print objects. - print(*objs) - return - - raw = kwargs.pop("raw", False) - clear = kwargs.pop("clear", False) - if transient is None: - transient = {} - if metadata is None: - metadata={} - if display_id: - if display_id is True: - display_id = _new_id() - transient['display_id'] = display_id - if kwargs.get('update') and 'display_id' not in transient: - raise TypeError('display_id required for update_display') - if transient: - kwargs['transient'] = transient - - if not objs and display_id: - # if given no objects, but still a request for a display_id, - # we assume the user wants to insert an empty output that - # can be updated later - objs = [{}] - raw = True - - if not raw: - format = InteractiveShell.instance().display_formatter.format - - if clear: - clear_output(wait=True) - - for obj in objs: - if raw: - publish_display_data(data=obj, metadata=metadata, **kwargs) - else: - format_dict, md_dict = format(obj, include=include, exclude=exclude) - if not format_dict: - # nothing to display (e.g. _ipython_display_ took over) - continue - if metadata: - # kwarg-specified metadata gets precedence - _merge(md_dict, metadata) - publish_display_data(data=format_dict, metadata=md_dict, **kwargs) - if display_id: - return DisplayHandle(display_id) - - -# use * for keyword-only display_id arg -def update_display(obj, *, display_id, **kwargs): - """Update an existing display by id - - Parameters - ---------- - - obj: - The object with which to update the display - display_id: keyword-only - The id of the display to update - - See Also - -------- - - :func:`display` - """ - kwargs['update'] = True - display(obj, display_id=display_id, **kwargs) - - -class DisplayHandle(object): - """A handle on an updatable display - - Call `.update(obj)` to display a new object. - - Call `.display(obj`) to add a new instance of this display, - and update existing instances. - - See Also - -------- - - :func:`display`, :func:`update_display` - - """ - - def __init__(self, display_id=None): - if display_id is None: - display_id = _new_id() - self.display_id = display_id - - def __repr__(self): - return "<%s display_id=%s>" % (self.__class__.__name__, self.display_id) - - def display(self, obj, **kwargs): - """Make a new display with my id, updating existing instances. 
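A short sketch of this update flow, assuming a frontend (such as the Jupyter notebook) that honours ``display_id``:

from IPython.display import display, update_display

handle = display("status: starting...", display_id=True)   # returns a DisplayHandle
# later, rewrite the same output area in place:
handle.update("status: done")
# equivalent, via the module-level helper:
update_display("status: done", display_id=handle.display_id)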
- - Parameters - ---------- - - obj: - object to display - **kwargs: - additional keyword arguments passed to display - """ - display(obj, display_id=self.display_id, **kwargs) - - def update(self, obj, **kwargs): - """Update existing displays with my id - - Parameters - ---------- - - obj: - object to display - **kwargs: - additional keyword arguments passed to update_display - """ - update_display(obj, display_id=self.display_id, **kwargs) - - -def display_pretty(*objs, **kwargs): - """Display the pretty (default) representation of an object. - - Parameters - ---------- - *objs : object - The Python objects to display, or if raw=True raw text data to - display. - raw : bool - Are the data objects raw data or Python objects that need to be - formatted before display? [default: False] - metadata : dict (optional) - Metadata to be associated with the specific mimetype output. - """ - _display_mimetype('text/plain', objs, **kwargs) - - -def display_html(*objs, **kwargs): - """Display the HTML representation of an object. - - Note: If raw=False and the object does not have a HTML - representation, no HTML will be shown. - - Parameters - ---------- - *objs : object - The Python objects to display, or if raw=True raw HTML data to - display. - raw : bool - Are the data objects raw data or Python objects that need to be - formatted before display? [default: False] - metadata : dict (optional) - Metadata to be associated with the specific mimetype output. - """ - _display_mimetype('text/html', objs, **kwargs) - - -def display_markdown(*objs, **kwargs): - """Displays the Markdown representation of an object. - - Parameters - ---------- - *objs : object - The Python objects to display, or if raw=True raw markdown data to - display. - raw : bool - Are the data objects raw data or Python objects that need to be - formatted before display? [default: False] - metadata : dict (optional) - Metadata to be associated with the specific mimetype output. - """ - - _display_mimetype('text/markdown', objs, **kwargs) - - -def display_svg(*objs, **kwargs): - """Display the SVG representation of an object. - - Parameters - ---------- - *objs : object - The Python objects to display, or if raw=True raw svg data to - display. - raw : bool - Are the data objects raw data or Python objects that need to be - formatted before display? [default: False] - metadata : dict (optional) - Metadata to be associated with the specific mimetype output. - """ - _display_mimetype('image/svg+xml', objs, **kwargs) - - -def display_png(*objs, **kwargs): - """Display the PNG representation of an object. - - Parameters - ---------- - *objs : object - The Python objects to display, or if raw=True raw png data to - display. - raw : bool - Are the data objects raw data or Python objects that need to be - formatted before display? [default: False] - metadata : dict (optional) - Metadata to be associated with the specific mimetype output. - """ - _display_mimetype('image/png', objs, **kwargs) - - -def display_jpeg(*objs, **kwargs): - """Display the JPEG representation of an object. - - Parameters - ---------- - *objs : object - The Python objects to display, or if raw=True raw JPEG data to - display. - raw : bool - Are the data objects raw data or Python objects that need to be - formatted before display? [default: False] - metadata : dict (optional) - Metadata to be associated with the specific mimetype output. 
- """ - _display_mimetype('image/jpeg', objs, **kwargs) - - -def display_latex(*objs, **kwargs): - """Display the LaTeX representation of an object. - - Parameters - ---------- - *objs : object - The Python objects to display, or if raw=True raw latex data to - display. - raw : bool - Are the data objects raw data or Python objects that need to be - formatted before display? [default: False] - metadata : dict (optional) - Metadata to be associated with the specific mimetype output. - """ - _display_mimetype('text/latex', objs, **kwargs) - - -def display_json(*objs, **kwargs): - """Display the JSON representation of an object. - - Note that not many frontends support displaying JSON. - - Parameters - ---------- - *objs : object - The Python objects to display, or if raw=True raw json data to - display. - raw : bool - Are the data objects raw data or Python objects that need to be - formatted before display? [default: False] - metadata : dict (optional) - Metadata to be associated with the specific mimetype output. - """ - _display_mimetype('application/json', objs, **kwargs) - - -def display_javascript(*objs, **kwargs): - """Display the Javascript representation of an object. - - Parameters - ---------- - *objs : object - The Python objects to display, or if raw=True raw javascript data to - display. - raw : bool - Are the data objects raw data or Python objects that need to be - formatted before display? [default: False] - metadata : dict (optional) - Metadata to be associated with the specific mimetype output. - """ - _display_mimetype('application/javascript', objs, **kwargs) - - -def display_pdf(*objs, **kwargs): - """Display the PDF representation of an object. - - Parameters - ---------- - *objs : object - The Python objects to display, or if raw=True raw javascript data to - display. - raw : bool - Are the data objects raw data or Python objects that need to be - formatted before display? [default: False] - metadata : dict (optional) - Metadata to be associated with the specific mimetype output. - """ - _display_mimetype('application/pdf', objs, **kwargs) - - -#----------------------------------------------------------------------------- -# Smart classes -#----------------------------------------------------------------------------- - - -class DisplayObject(object): - """An object that wraps data to be displayed.""" - - _read_flags = 'r' - _show_mem_addr = False - metadata = None - - def __init__(self, data=None, url=None, filename=None, metadata=None): - """Create a display object given raw data. - - When this object is returned by an expression or passed to the - display function, it will result in the data being displayed - in the frontend. The MIME type of the data should match the - subclasses used, so the Png subclass should be used for 'image/png' - data. If the data is a URL, the data will first be downloaded - and then displayed. If - - Parameters - ---------- - data : unicode, str or bytes - The raw data or a URL or file to load the data from - url : unicode - A URL to download the data from. - filename : unicode - Path to a local file to load the data from. 
- metadata : dict - Dict of metadata associated to be the object when displayed - """ - if isinstance(data, (Path, PurePath)): - data = str(data) - - if data is not None and isinstance(data, str): - if data.startswith('http') and url is None: - url = data - filename = None - data = None - elif _safe_exists(data) and filename is None: - url = None - filename = data - data = None - - self.url = url - self.filename = filename - # because of @data.setter methods in - # subclasses ensure url and filename are set - # before assigning to self.data - self.data = data - - if metadata is not None: - self.metadata = metadata - elif self.metadata is None: - self.metadata = {} - - self.reload() - self._check_data() - - def __repr__(self): - if not self._show_mem_addr: - cls = self.__class__ - r = "<%s.%s object>" % (cls.__module__, cls.__name__) - else: - r = super(DisplayObject, self).__repr__() - return r - - def _check_data(self): - """Override in subclasses if there's something to check.""" - pass - - def _data_and_metadata(self): - """shortcut for returning metadata with shape information, if defined""" - if self.metadata: - return self.data, deepcopy(self.metadata) - else: - return self.data - - def reload(self): - """Reload the raw data from file or URL.""" - if self.filename is not None: - with open(self.filename, self._read_flags) as f: - self.data = f.read() - elif self.url is not None: - # Deferred import - from urllib.request import urlopen - response = urlopen(self.url) - data = response.read() - # extract encoding from header, if there is one: - encoding = None - if 'content-type' in response.headers: - for sub in response.headers['content-type'].split(';'): - sub = sub.strip() - if sub.startswith('charset'): - encoding = sub.split('=')[-1].strip() - break - if 'content-encoding' in response.headers: - # TODO: do deflate? - if 'gzip' in response.headers['content-encoding']: - import gzip - from io import BytesIO - with gzip.open(BytesIO(data), 'rt', encoding=encoding) as fp: - encoding = None - data = fp.read() - - # decode data, if an encoding was specified - # We only touch self.data once since - # subclasses such as SVG have @data.setter methods - # that transform self.data into ... well svg. - if encoding: - self.data = data.decode(encoding, 'replace') - else: - self.data = data - - -class TextDisplayObject(DisplayObject): - """Validate that display data is text""" - def _check_data(self): - if self.data is not None and not isinstance(self.data, str): - raise TypeError("%s expects text, not %r" % (self.__class__.__name__, self.data)) - -class Pretty(TextDisplayObject): - - def _repr_pretty_(self, pp, cycle): - return pp.text(self.data) - - -class HTML(TextDisplayObject): - - def __init__(self, data=None, url=None, filename=None, metadata=None): - def warn(): - if not data: - return False - - # - # Avoid calling lower() on the entire data, because it could be a - # long string and we're only interested in its beginning and end. - # - prefix = data[:10].lower() - suffix = data[-10:].lower() - return prefix.startswith("<iframe ") and suffix.endswith("</iframe>") - - if warn(): - warnings.warn("Consider using IPython.display.IFrame instead") - super(HTML, self).__init__(data=data, url=url, filename=filename, metadata=metadata) - - def _repr_html_(self): - return self._data_and_metadata() - - def __html__(self): - """ - This method exists to inform other HTML-using modules (e.g. 
Markupsafe, - htmltag, etc) that this object is HTML and does not need things like - special characters (<>&) escaped. - """ - return self._repr_html_() - - -class Markdown(TextDisplayObject): - - def _repr_markdown_(self): - return self._data_and_metadata() - - -class Math(TextDisplayObject): - - def _repr_latex_(self): - s = r"$\displaystyle %s$" % self.data.strip('$') - if self.metadata: - return s, deepcopy(self.metadata) - else: - return s - - -class Latex(TextDisplayObject): - - def _repr_latex_(self): - return self._data_and_metadata() - - -class SVG(DisplayObject): - """Embed an SVG into the display. - - Note if you just want to view a svg image via a URL use `:class:Image` with - a url=URL keyword argument. - """ - - _read_flags = 'rb' - # wrap data in a property, which extracts the <svg> tag, discarding - # document headers - _data = None - - @property - def data(self): - return self._data - - @data.setter - def data(self, svg): - if svg is None: - self._data = None - return - # parse into dom object - from xml.dom import minidom - x = minidom.parseString(svg) - # get svg tag (should be 1) - found_svg = x.getElementsByTagName('svg') - if found_svg: - svg = found_svg[0].toxml() - else: - # fallback on the input, trust the user - # but this is probably an error. - pass - svg = cast_unicode(svg) - self._data = svg - - def _repr_svg_(self): - return self._data_and_metadata() - -class ProgressBar(DisplayObject): - """Progressbar supports displaying a progressbar like element - """ - def __init__(self, total): - """Creates a new progressbar - - Parameters - ---------- - total : int - maximum size of the progressbar - """ - self.total = total - self._progress = 0 - self.html_width = '60ex' - self.text_width = 60 - self._display_id = hexlify(os.urandom(8)).decode('ascii') - - def __repr__(self): - fraction = self.progress / self.total - filled = '=' * int(fraction * self.text_width) - rest = ' ' * (self.text_width - len(filled)) - return '[{}{}] {}/{}'.format( - filled, rest, - self.progress, self.total, - ) - - def _repr_html_(self): - return "<progress style='width:{}' max='{}' value='{}'></progress>".format( - self.html_width, self.total, self.progress) - - def display(self): - display(self, display_id=self._display_id) - - def update(self): - display(self, display_id=self._display_id, update=True) - - @property - def progress(self): - return self._progress - - @progress.setter - def progress(self, value): - self._progress = value - self.update() - - def __iter__(self): - self.display() - self._progress = -1 # First iteration is 0 - return self - - def __next__(self): - """Returns current value and increments display by one.""" - self.progress += 1 - if self.progress < self.total: - return self.progress - else: - raise StopIteration() - -class JSON(DisplayObject): - """JSON expects a JSON-able dict or list - - not an already-serialized JSON string. - - Scalar types (None, number, string) are not allowed, only dict or list containers. - """ - # wrap data in a property, which warns about passing already-serialized JSON - _data = None - def __init__(self, data=None, url=None, filename=None, expanded=False, metadata=None, root='root', **kwargs): - """Create a JSON display object given raw data. - - Parameters - ---------- - data : dict or list - JSON data to display. Not an already-serialized JSON string. - Scalar types (None, number, string) are not allowed, only dict - or list containers. - url : unicode - A URL to download the data from. 
- filename : unicode - Path to a local file to load the data from. - expanded : boolean - Metadata to control whether a JSON display component is expanded. - metadata: dict - Specify extra metadata to attach to the json display object. - root : str - The name of the root element of the JSON tree - """ - self.metadata = { - 'expanded': expanded, - 'root': root, - } - if metadata: - self.metadata.update(metadata) - if kwargs: - self.metadata.update(kwargs) - super(JSON, self).__init__(data=data, url=url, filename=filename) - - def _check_data(self): - if self.data is not None and not isinstance(self.data, (dict, list)): - raise TypeError("%s expects JSONable dict or list, not %r" % (self.__class__.__name__, self.data)) - - @property - def data(self): - return self._data - - @data.setter - def data(self, data): - if isinstance(data, (Path, PurePath)): - data = str(data) - - if isinstance(data, str): - if self.filename is None and self.url is None: - warnings.warn("JSON expects JSONable dict or list, not JSON strings") - data = json.loads(data) - self._data = data - - def _data_and_metadata(self): - return self.data, self.metadata - - def _repr_json_(self): - return self._data_and_metadata() - -_css_t = """var link = document.createElement("link"); - link.ref = "stylesheet"; - link.type = "text/css"; - link.href = "%s"; - document.head.appendChild(link); -""" - -_lib_t1 = """new Promise(function(resolve, reject) { - var script = document.createElement("script"); - script.onload = resolve; - script.onerror = reject; - script.src = "%s"; - document.head.appendChild(script); -}).then(() => { -""" - -_lib_t2 = """ -});""" - -class GeoJSON(JSON): - """GeoJSON expects JSON-able dict - - not an already-serialized JSON string. - - Scalar types (None, number, string) are not allowed, only dict containers. - """ - - def __init__(self, *args, **kwargs): - """Create a GeoJSON display object given raw data. - - Parameters - ---------- - data : dict or list - VegaLite data. Not an already-serialized JSON string. - Scalar types (None, number, string) are not allowed, only dict - or list containers. - url_template : string - Leaflet TileLayer URL template: http://leafletjs.com/reference.html#url-template - layer_options : dict - Leaflet TileLayer options: http://leafletjs.com/reference.html#tilelayer-options - url : unicode - A URL to download the data from. - filename : unicode - Path to a local file to load the data from. - metadata: dict - Specify extra metadata to attach to the json display object. - - Examples - -------- - - The following will display an interactive map of Mars with a point of - interest on frontend that do support GeoJSON display. - - >>> from IPython.display import GeoJSON - - >>> GeoJSON(data={ - ... "type": "Feature", - ... "geometry": { - ... "type": "Point", - ... "coordinates": [-81.327, 296.038] - ... } - ... }, - ... url_template="http://s3-eu-west-1.amazonaws.com/whereonmars.cartodb.net/{basemap_id}/{z}/{x}/{y}.png", - ... layer_options={ - ... "basemap_id": "celestia_mars-shaded-16k_global", - ... "attribution" : "Celestia/praesepe", - ... "minZoom" : 0, - ... "maxZoom" : 18, - ... }) - <IPython.core.display.GeoJSON object> - - In the terminal IPython, you will only see the text representation of - the GeoJSON object. 
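For reference, a rough sketch of what the `_ipython_display_` method defined just below does: it publishes a raw mimebundle keyed by the GeoJSON mimetype, so an equivalent manual call (reusing the sample data from the example above) would look like::

    bundle = {
        'application/geo+json': {
            "type": "Feature",
            "geometry": {"type": "Point", "coordinates": [-81.327, 296.038]},
        },
        'text/plain': '<IPython.display.GeoJSON object>',
    }
    display(bundle, raw=True)  # frontends choose the richest representation they support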
- - """ - - super(GeoJSON, self).__init__(*args, **kwargs) - - - def _ipython_display_(self): - bundle = { - 'application/geo+json': self.data, - 'text/plain': '<IPython.display.GeoJSON object>' - } - metadata = { - 'application/geo+json': self.metadata - } - display(bundle, metadata=metadata, raw=True) - -class Javascript(TextDisplayObject): - - def __init__(self, data=None, url=None, filename=None, lib=None, css=None): - """Create a Javascript display object given raw data. - - When this object is returned by an expression or passed to the - display function, it will result in the data being displayed - in the frontend. If the data is a URL, the data will first be - downloaded and then displayed. - - In the Notebook, the containing element will be available as `element`, - and jQuery will be available. Content appended to `element` will be - visible in the output area. - - Parameters - ---------- - data : unicode, str or bytes - The Javascript source code or a URL to download it from. - url : unicode - A URL to download the data from. - filename : unicode - Path to a local file to load the data from. - lib : list or str - A sequence of Javascript library URLs to load asynchronously before - running the source code. The full URLs of the libraries should - be given. A single Javascript library URL can also be given as a - string. - css: : list or str - A sequence of css files to load before running the source code. - The full URLs of the css files should be given. A single css URL - can also be given as a string. - """ - if isinstance(lib, str): - lib = [lib] - elif lib is None: - lib = [] - if isinstance(css, str): - css = [css] - elif css is None: - css = [] - if not isinstance(lib, (list,tuple)): - raise TypeError('expected sequence, got: %r' % lib) - if not isinstance(css, (list,tuple)): - raise TypeError('expected sequence, got: %r' % css) - self.lib = lib - self.css = css - super(Javascript, self).__init__(data=data, url=url, filename=filename) - - def _repr_javascript_(self): - r = '' - for c in self.css: - r += _css_t % c - for l in self.lib: - r += _lib_t1 % l - r += self.data - r += _lib_t2*len(self.lib) - return r - -# constants for identifying png/jpeg data -_PNG = b'\x89PNG\r\n\x1a\n' -_JPEG = b'\xff\xd8' - -def _pngxy(data): - """read the (width, height) from a PNG header""" - ihdr = data.index(b'IHDR') - # next 8 bytes are width/height - return struct.unpack('>ii', data[ihdr+4:ihdr+12]) - -def _jpegxy(data): - """read the (width, height) from a JPEG header""" - # adapted from http://www.64lines.com/jpeg-width-height - - idx = 4 - while True: - block_size = struct.unpack('>H', data[idx:idx+2])[0] - idx = idx + block_size - if data[idx:idx+2] == b'\xFF\xC0': - # found Start of Frame - iSOF = idx - break - else: - # read another block - idx += 2 - - h, w = struct.unpack('>HH', data[iSOF+5:iSOF+9]) - return w, h - -def _gifxy(data): - """read the (width, height) from a GIF header""" - return struct.unpack('<HH', data[6:10]) - - -class Image(DisplayObject): - - _read_flags = 'rb' - _FMT_JPEG = u'jpeg' - _FMT_PNG = u'png' - _FMT_GIF = u'gif' - _ACCEPTABLE_EMBEDDINGS = [_FMT_JPEG, _FMT_PNG, _FMT_GIF] - _MIMETYPES = { - _FMT_PNG: 'image/png', - _FMT_JPEG: 'image/jpeg', - _FMT_GIF: 'image/gif', - } - - def __init__(self, data=None, url=None, filename=None, format=None, - embed=None, width=None, height=None, retina=False, - unconfined=False, metadata=None): - """Create a PNG/JPEG/GIF image object given raw data. 
- - When this object is returned by an input cell or passed to the - display function, it will result in the image being displayed - in the frontend. - - Parameters - ---------- - data : unicode, str or bytes - The raw image data or a URL or filename to load the data from. - This always results in embedded image data. - url : unicode - A URL to download the data from. If you specify `url=`, - the image data will not be embedded unless you also specify `embed=True`. - filename : unicode - Path to a local file to load the data from. - Images from a file are always embedded. - format : unicode - The format of the image data (png/jpeg/jpg/gif). If a filename or URL is given - for format will be inferred from the filename extension. - embed : bool - Should the image data be embedded using a data URI (True) or be - loaded using an <img> tag. Set this to True if you want the image - to be viewable later with no internet connection in the notebook. - - Default is `True`, unless the keyword argument `url` is set, then - default value is `False`. - - Note that QtConsole is not able to display images if `embed` is set to `False` - width : int - Width in pixels to which to constrain the image in html - height : int - Height in pixels to which to constrain the image in html - retina : bool - Automatically set the width and height to half of the measured - width and height. - This only works for embedded images because it reads the width/height - from image data. - For non-embedded images, you can just set the desired display width - and height directly. - unconfined: bool - Set unconfined=True to disable max-width confinement of the image. - metadata: dict - Specify extra metadata to attach to the image. - - Examples - -------- - # embedded image data, works in qtconsole and notebook - # when passed positionally, the first arg can be any of raw image data, - # a URL, or a filename from which to load image data. - # The result is always embedding image data for inline images. - Image('http://www.google.fr/images/srpr/logo3w.png') - Image('/path/to/image.jpg') - Image(b'RAW_PNG_DATA...') - - # Specifying Image(url=...) does not embed the image data, - # it only generates `<img>` tag with a link to the source. - # This will not work in the qtconsole or offline. - Image(url='http://www.google.fr/images/srpr/logo3w.png') - - """ - if isinstance(data, (Path, PurePath)): - data = str(data) - - if filename is not None: - ext = self._find_ext(filename) - elif url is not None: - ext = self._find_ext(url) - elif data is None: - raise ValueError("No image data found. Expecting filename, url, or data.") - elif isinstance(data, str) and ( - data.startswith('http') or _safe_exists(data) - ): - ext = self._find_ext(data) - else: - ext = None - - if format is None: - if ext is not None: - if ext == u'jpg' or ext == u'jpeg': - format = self._FMT_JPEG - elif ext == u'png': - format = self._FMT_PNG - elif ext == u'gif': - format = self._FMT_GIF - else: - format = ext.lower() - elif isinstance(data, bytes): - # infer image type from image data header, - # only if format has not been specified. 
- if data[:2] == _JPEG: - format = self._FMT_JPEG - - # failed to detect format, default png - if format is None: - format = self._FMT_PNG - - if format.lower() == 'jpg': - # jpg->jpeg - format = self._FMT_JPEG - - self.format = format.lower() - self.embed = embed if embed is not None else (url is None) - - if self.embed and self.format not in self._ACCEPTABLE_EMBEDDINGS: - raise ValueError("Cannot embed the '%s' image format" % (self.format)) - if self.embed: - self._mimetype = self._MIMETYPES.get(self.format) - - self.width = width - self.height = height - self.retina = retina - self.unconfined = unconfined - super(Image, self).__init__(data=data, url=url, filename=filename, - metadata=metadata) - - if self.width is None and self.metadata.get('width', {}): - self.width = metadata['width'] - - if self.height is None and self.metadata.get('height', {}): - self.height = metadata['height'] - - if retina: - self._retina_shape() - - - def _retina_shape(self): - """load pixel-doubled width and height from image data""" - if not self.embed: - return - if self.format == self._FMT_PNG: - w, h = _pngxy(self.data) - elif self.format == self._FMT_JPEG: - w, h = _jpegxy(self.data) - elif self.format == self._FMT_GIF: - w, h = _gifxy(self.data) - else: - # retina only supports png - return - self.width = w // 2 - self.height = h // 2 - - def reload(self): - """Reload the raw data from file or URL.""" - if self.embed: - super(Image,self).reload() - if self.retina: - self._retina_shape() - - def _repr_html_(self): - if not self.embed: - width = height = klass = '' - if self.width: - width = ' width="%d"' % self.width - if self.height: - height = ' height="%d"' % self.height - if self.unconfined: - klass = ' class="unconfined"' - return u'<img src="{url}"{width}{height}{klass}/>'.format( - url=self.url, - width=width, - height=height, - klass=klass, - ) - - def _repr_mimebundle_(self, include=None, exclude=None): - """Return the image as a mimebundle - - Any new mimetype support should be implemented here. - """ - if self.embed: - mimetype = self._mimetype - data, metadata = self._data_and_metadata(always_both=True) - if metadata: - metadata = {mimetype: metadata} - return {mimetype: data}, metadata - else: - return {'text/html': self._repr_html_()} - - def _data_and_metadata(self, always_both=False): - """shortcut for returning metadata with shape information, if defined""" - try: - b64_data = b2a_base64(self.data).decode('ascii') - except TypeError: - raise FileNotFoundError( - "No such file or directory: '%s'" % (self.data)) - md = {} - if self.metadata: - md.update(self.metadata) - if self.width: - md['width'] = self.width - if self.height: - md['height'] = self.height - if self.unconfined: - md['unconfined'] = self.unconfined - if md or always_both: - return b64_data, md - else: - return b64_data - - def _repr_png_(self): - if self.embed and self.format == self._FMT_PNG: - return self._data_and_metadata() - - def _repr_jpeg_(self): - if self.embed and self.format == self._FMT_JPEG: - return self._data_and_metadata() - - def _find_ext(self, s): - base, ext = splitext(s) - - if not ext: - return base - - # `splitext` includes leading period, so we skip it - return ext[1:].lower() - - -class Video(DisplayObject): - - def __init__(self, data=None, url=None, filename=None, embed=False, - mimetype=None, width=None, height=None, html_attributes="controls"): - """Create a video object given raw data or an URL. 
- - When this object is returned by an input cell or passed to the - display function, it will result in the video being displayed - in the frontend. - - Parameters - ---------- - data : unicode, str or bytes - The raw video data or a URL or filename to load the data from. - Raw data will require passing ``embed=True``. - url : unicode - A URL for the video. If you specify ``url=``, - the image data will not be embedded. - filename : unicode - Path to a local file containing the video. - Will be interpreted as a local URL unless ``embed=True``. - embed : bool - Should the video be embedded using a data URI (True) or be - loaded using a <video> tag (False). - - Since videos are large, embedding them should be avoided, if possible. - You must confirm embedding as your intention by passing ``embed=True``. - - Local files can be displayed with URLs without embedding the content, via:: - - Video('./video.mp4') - - mimetype: unicode - Specify the mimetype for embedded videos. - Default will be guessed from file extension, if available. - width : int - Width in pixels to which to constrain the video in HTML. - If not supplied, defaults to the width of the video. - height : int - Height in pixels to which to constrain the video in html. - If not supplied, defaults to the height of the video. - html_attributes : str - Attributes for the HTML ``<video>`` block. - Default: ``"controls"`` to get video controls. - Other examples: ``"controls muted"`` for muted video with controls, - ``"loop autoplay"`` for looping autoplaying video without controls. - - Examples - -------- - - :: - - Video('https://archive.org/download/Sita_Sings_the_Blues/Sita_Sings_the_Blues_small.mp4') - Video('path/to/video.mp4') - Video('path/to/video.mp4', embed=True) - Video('path/to/video.mp4', embed=True, html_attributes="controls muted autoplay") - Video(b'raw-videodata', embed=True) - """ - if isinstance(data, (Path, PurePath)): - data = str(data) - - if url is None and isinstance(data, str) and data.startswith(('http:', 'https:')): - url = data - data = None - elif os.path.exists(data): - filename = data - data = None - - if data and not embed: - msg = ''.join([ - "To embed videos, you must pass embed=True ", - "(this may make your notebook files huge)\n", - "Consider passing Video(url='...')", - ]) - raise ValueError(msg) - - self.mimetype = mimetype - self.embed = embed - self.width = width - self.height = height - self.html_attributes = html_attributes - super(Video, self).__init__(data=data, url=url, filename=filename) - - def _repr_html_(self): - width = height = '' - if self.width: - width = ' width="%d"' % self.width - if self.height: - height = ' height="%d"' % self.height - - # External URLs and potentially local files are not embedded into the - # notebook output. - if not self.embed: - url = self.url if self.url is not None else self.filename - output = """<video src="{0}" {1} {2} {3}> - Your browser does not support the <code>video</code> element. - </video>""".format(url, self.html_attributes, width, height) - return output - - # Embedded videos are base64-encoded. 
- mimetype = self.mimetype - if self.filename is not None: - if not mimetype: - mimetype, _ = mimetypes.guess_type(self.filename) - - with open(self.filename, 'rb') as f: - video = f.read() - else: - video = self.data - if isinstance(video, str): - # unicode input is already b64-encoded - b64_video = video - else: - b64_video = b2a_base64(video).decode('ascii').rstrip() - - output = """<video {0} {1} {2}> - <source src="data:{3};base64,{4}" type="{3}"> - Your browser does not support the video tag. - </video>""".format(self.html_attributes, width, height, mimetype, b64_video) - return output - - def reload(self): - # TODO - pass - - -def clear_output(wait=False): - """Clear the output of the current cell receiving output. - - Parameters - ---------- - wait : bool [default: false] - Wait to clear the output until new output is available to replace it.""" - from IPython.core.interactiveshell import InteractiveShell - if InteractiveShell.initialized(): - InteractiveShell.instance().display_pub.clear_output(wait) - else: - print('\033[2K\r', end='') - sys.stdout.flush() - print('\033[2K\r', end='') - sys.stderr.flush() - - -@skip_doctest -def set_matplotlib_formats(*formats, **kwargs): - """ - .. deprecated:: 7.23 - - use `matplotlib_inline.backend_inline.set_matplotlib_formats()` - - Select figure formats for the inline backend. Optionally pass quality for JPEG. - - For example, this enables PNG and JPEG output with a JPEG quality of 90%:: - - In [1]: set_matplotlib_formats('png', 'jpeg', quality=90) - - To set this in your config files use the following:: - - c.InlineBackend.figure_formats = {'png', 'jpeg'} - c.InlineBackend.print_figure_kwargs.update({'quality' : 90}) - - Parameters - ---------- - *formats : strs - One or more figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'. - **kwargs : - Keyword args will be relayed to ``figure.canvas.print_figure``. - """ - warnings.warn( - "`set_matplotlib_formats` is deprecated since IPython 7.23, directly " - "use `matplotlib_inline.backend_inline.set_matplotlib_formats()`", - DeprecationWarning, - stacklevel=2, - ) - - from matplotlib_inline.backend_inline import ( - set_matplotlib_formats as set_matplotlib_formats_orig, - ) - - set_matplotlib_formats_orig(*formats, **kwargs) - -@skip_doctest -def set_matplotlib_close(close=True): - """ - .. deprecated:: 7.23 - - use `matplotlib_inline.backend_inline.set_matplotlib_close()` - - - Set whether the inline backend closes all figures automatically or not. - - By default, the inline backend used in the IPython Notebook will close all - matplotlib figures automatically after each cell is run. This means that - plots in different cells won't interfere. Sometimes, you may want to make - a plot in one cell and then refine it in later cells. This can be accomplished - by:: - - In [1]: set_matplotlib_close(False) - - To set this in your config files use the following:: - - c.InlineBackend.close_figures = False - - Parameters - ---------- - close : bool - Should all matplotlib figures be automatically closed after each cell is - run? 
- """ - warnings.warn( - "`set_matplotlib_close` is deprecated since IPython 7.23, directly " - "use `matplotlib_inline.backend_inline.set_matplotlib_close()`", - DeprecationWarning, - stacklevel=2, - ) - - from matplotlib_inline.backend_inline import ( - set_matplotlib_close as set_matplotlib_close_orig, - ) - - set_matplotlib_close_orig(close) +# -*- coding: utf-8 -*- +"""Top-level display functions for displaying object in different formats.""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + + +from binascii import b2a_hex, b2a_base64, hexlify +import json +import mimetypes +import os +import struct +import sys +import warnings +from copy import deepcopy +from os.path import splitext +from pathlib import Path, PurePath + +from IPython.utils.py3compat import cast_unicode +from IPython.testing.skipdoctest import skip_doctest + +__all__ = ['display', 'display_pretty', 'display_html', 'display_markdown', +'display_svg', 'display_png', 'display_jpeg', 'display_latex', 'display_json', +'display_javascript', 'display_pdf', 'DisplayObject', 'TextDisplayObject', +'Pretty', 'HTML', 'Markdown', 'Math', 'Latex', 'SVG', 'ProgressBar', 'JSON', +'GeoJSON', 'Javascript', 'Image', 'clear_output', 'set_matplotlib_formats', +'set_matplotlib_close', 'publish_display_data', 'update_display', 'DisplayHandle', +'Video'] + +#----------------------------------------------------------------------------- +# utility functions +#----------------------------------------------------------------------------- + +def _safe_exists(path): + """Check path, but don't let exceptions raise""" + try: + return os.path.exists(path) + except Exception: + return False + +def _merge(d1, d2): + """Like update, but merges sub-dicts instead of clobbering at the top level. + + Updates d1 in-place + """ + + if not isinstance(d2, dict) or not isinstance(d1, dict): + return d2 + for key, value in d2.items(): + d1[key] = _merge(d1.get(key), value) + return d1 + +def _display_mimetype(mimetype, objs, raw=False, metadata=None): + """internal implementation of all display_foo methods + + Parameters + ---------- + mimetype : str + The mimetype to be published (e.g. 'image/png') + *objs : object + The Python objects to display, or if raw=True raw text data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + if metadata: + metadata = {mimetype: metadata} + if raw: + # turn list of pngdata into list of { 'image/png': pngdata } + objs = [ {mimetype: obj} for obj in objs ] + display(*objs, raw=raw, metadata=metadata, include=[mimetype]) + +#----------------------------------------------------------------------------- +# Main functions +#----------------------------------------------------------------------------- + +# use * to indicate transient is keyword-only +def publish_display_data(data, metadata=None, source=None, *, transient=None, **kwargs): + """Publish data and metadata to all frontends. + + See the ``display_data`` message in the messaging documentation for + more details about this message type. + + Keys of data and metadata can be any mime-type. + + Parameters + ---------- + data : dict + A dictionary having keys that are valid MIME types (like + 'text/plain' or 'image/svg+xml') and values that are the data for + that MIME type. The data itself must be a JSON'able data + structure. 
Minimally all data should have the 'text/plain' data, + which can be displayed by all frontends. If more than the plain + text is given, it is up to the frontend to decide which + representation to use. + metadata : dict + A dictionary for metadata related to the data. This can contain + arbitrary key, value pairs that frontends can use to interpret + the data. mime-type keys matching those in data can be used + to specify metadata about particular representations. + source : str, deprecated + Unused. + transient : dict, keyword-only + A dictionary of transient data, such as display_id. + """ + from IPython.core.interactiveshell import InteractiveShell + + display_pub = InteractiveShell.instance().display_pub + + # only pass transient if supplied, + # to avoid errors with older ipykernel. + # TODO: We could check for ipykernel version and provide a detailed upgrade message. + if transient: + kwargs['transient'] = transient + + display_pub.publish( + data=data, + metadata=metadata, + **kwargs + ) + + +def _new_id(): + """Generate a new random text id with urandom""" + return b2a_hex(os.urandom(16)).decode('ascii') + + +def display(*objs, include=None, exclude=None, metadata=None, transient=None, display_id=None, **kwargs): + """Display a Python object in all frontends. + + By default all representations will be computed and sent to the frontends. + Frontends can decide which representation is used and how. + + In terminal IPython this will be similar to using :func:`print`, for use in richer + frontends see Jupyter notebook examples with rich display logic. + + Parameters + ---------- + *objs : object + The Python objects to display. + raw : bool, optional + Are the objects to be displayed already mimetype-keyed dicts of raw display data, + or Python objects that need to be formatted before display? [default: False] + include : list, tuple or set, optional + A list of format type strings (MIME types) to include in the + format data dict. If this is set *only* the format types included + in this list will be computed. + exclude : list, tuple or set, optional + A list of format type strings (MIME types) to exclude in the format + data dict. If this is set all format types will be computed, + except for those included in this argument. + metadata : dict, optional + A dictionary of metadata to associate with the output. + mime-type keys in this dictionary will be associated with the individual + representation formats, if they exist. + transient : dict, optional + A dictionary of transient data to associate with the output. + Data in this dict should not be persisted to files (e.g. notebooks). + display_id : str, bool optional + Set an id for the display. + This id can be used for updating this display area later via update_display. + If given as `True`, generate a new `display_id` + clear : bool, optional + Should the output area be cleared before displaying anything? If True, + this will wait for additional output before clearing. [default: False] + kwargs: additional keyword-args, optional + Additional keyword-arguments are passed through to the display publisher. + + Returns + ------- + + handle: DisplayHandle + Returns a handle on updatable displays for use with :func:`update_display`, + if `display_id` is given. Returns :any:`None` if no `display_id` is given + (default). + + Examples + -------- + + >>> class Json(object): + ... def __init__(self, json): + ... self.json = json + ... def _repr_pretty_(self, pp, cycle): + ... import json + ... pp.text(json.dumps(self.json, indent=2)) + ... 
def __repr__(self): + ... return str(self.json) + ... + + >>> d = Json({1:2, 3: {4:5}}) + + >>> print(d) + {1: 2, 3: {4: 5}} + + >>> display(d) + { + "1": 2, + "3": { + "4": 5 + } + } + + >>> def int_formatter(integer, pp, cycle): + ... pp.text('I'*integer) + + >>> plain = get_ipython().display_formatter.formatters['text/plain'] + >>> plain.for_type(int, int_formatter) + <function _repr_pprint at 0x...> + >>> display(7-5) + II + + >>> del plain.type_printers[int] + >>> display(7-5) + 2 + + See Also + -------- + + :func:`update_display` + + Notes + ----- + + In Python, objects can declare their textual representation using the + `__repr__` method. IPython expands on this idea and allows objects to declare + other, rich representations including: + + - HTML + - JSON + - PNG + - JPEG + - SVG + - LaTeX + + A single object can declare some or all of these representations; all are + handled by IPython's display system. + + The main idea of the first approach is that you have to implement special + display methods when you define your class, one for each representation you + want to use. Here is a list of the names of the special methods and the + values they must return: + + - `_repr_html_`: return raw HTML as a string, or a tuple (see below). + - `_repr_json_`: return a JSONable dict, or a tuple (see below). + - `_repr_jpeg_`: return raw JPEG data, or a tuple (see below). + - `_repr_png_`: return raw PNG data, or a tuple (see below). + - `_repr_svg_`: return raw SVG data as a string, or a tuple (see below). + - `_repr_latex_`: return LaTeX commands in a string surrounded by "$", + or a tuple (see below). + - `_repr_mimebundle_`: return a full mimebundle containing the mapping + from all mimetypes to data. + Use this for any mime-type not listed above. + + The above functions may also return the object's metadata alonside the + data. If the metadata is available, the functions will return a tuple + containing the data and metadata, in that order. If there is no metadata + available, then the functions will return the data only. + + When you are directly writing your own classes, you can adapt them for + display in IPython by following the above approach. But in practice, you + often need to work with existing classes that you can't easily modify. + + You can refer to the documentation on integrating with the display system in + order to register custom formatters for already existing types + (:ref:`integrating_rich_display`). + + .. versionadded:: 5.4 display available without import + .. versionadded:: 6.1 display available without import + + Since IPython 5.4 and 6.1 :func:`display` is automatically made available to + the user without import. If you are using display in a document that might + be used in a pure python context or with older version of IPython, use the + following import at the top of your file:: + + from IPython.display import display + + """ + from IPython.core.interactiveshell import InteractiveShell + + if not InteractiveShell.initialized(): + # Directly print objects. 
+ print(*objs) + return + + raw = kwargs.pop("raw", False) + clear = kwargs.pop("clear", False) + if transient is None: + transient = {} + if metadata is None: + metadata={} + if display_id: + if display_id is True: + display_id = _new_id() + transient['display_id'] = display_id + if kwargs.get('update') and 'display_id' not in transient: + raise TypeError('display_id required for update_display') + if transient: + kwargs['transient'] = transient + + if not objs and display_id: + # if given no objects, but still a request for a display_id, + # we assume the user wants to insert an empty output that + # can be updated later + objs = [{}] + raw = True + + if not raw: + format = InteractiveShell.instance().display_formatter.format + + if clear: + clear_output(wait=True) + + for obj in objs: + if raw: + publish_display_data(data=obj, metadata=metadata, **kwargs) + else: + format_dict, md_dict = format(obj, include=include, exclude=exclude) + if not format_dict: + # nothing to display (e.g. _ipython_display_ took over) + continue + if metadata: + # kwarg-specified metadata gets precedence + _merge(md_dict, metadata) + publish_display_data(data=format_dict, metadata=md_dict, **kwargs) + if display_id: + return DisplayHandle(display_id) + + +# use * for keyword-only display_id arg +def update_display(obj, *, display_id, **kwargs): + """Update an existing display by id + + Parameters + ---------- + + obj: + The object with which to update the display + display_id: keyword-only + The id of the display to update + + See Also + -------- + + :func:`display` + """ + kwargs['update'] = True + display(obj, display_id=display_id, **kwargs) + + +class DisplayHandle(object): + """A handle on an updatable display + + Call `.update(obj)` to display a new object. + + Call `.display(obj`) to add a new instance of this display, + and update existing instances. + + See Also + -------- + + :func:`display`, :func:`update_display` + + """ + + def __init__(self, display_id=None): + if display_id is None: + display_id = _new_id() + self.display_id = display_id + + def __repr__(self): + return "<%s display_id=%s>" % (self.__class__.__name__, self.display_id) + + def display(self, obj, **kwargs): + """Make a new display with my id, updating existing instances. + + Parameters + ---------- + + obj: + object to display + **kwargs: + additional keyword arguments passed to display + """ + display(obj, display_id=self.display_id, **kwargs) + + def update(self, obj, **kwargs): + """Update existing displays with my id + + Parameters + ---------- + + obj: + object to display + **kwargs: + additional keyword arguments passed to update_display + """ + update_display(obj, display_id=self.display_id, **kwargs) + + +def display_pretty(*objs, **kwargs): + """Display the pretty (default) representation of an object. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw text data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('text/plain', objs, **kwargs) + + +def display_html(*objs, **kwargs): + """Display the HTML representation of an object. + + Note: If raw=False and the object does not have a HTML + representation, no HTML will be shown. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw HTML data to + display. 
+ raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('text/html', objs, **kwargs) + + +def display_markdown(*objs, **kwargs): + """Displays the Markdown representation of an object. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw markdown data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + + _display_mimetype('text/markdown', objs, **kwargs) + + +def display_svg(*objs, **kwargs): + """Display the SVG representation of an object. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw svg data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('image/svg+xml', objs, **kwargs) + + +def display_png(*objs, **kwargs): + """Display the PNG representation of an object. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw png data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('image/png', objs, **kwargs) + + +def display_jpeg(*objs, **kwargs): + """Display the JPEG representation of an object. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw JPEG data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('image/jpeg', objs, **kwargs) + + +def display_latex(*objs, **kwargs): + """Display the LaTeX representation of an object. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw latex data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('text/latex', objs, **kwargs) + + +def display_json(*objs, **kwargs): + """Display the JSON representation of an object. + + Note that not many frontends support displaying JSON. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw json data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('application/json', objs, **kwargs) + + +def display_javascript(*objs, **kwargs): + """Display the Javascript representation of an object. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw javascript data to + display. 
+ raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('application/javascript', objs, **kwargs) + + +def display_pdf(*objs, **kwargs): + """Display the PDF representation of an object. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw javascript data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('application/pdf', objs, **kwargs) + + +#----------------------------------------------------------------------------- +# Smart classes +#----------------------------------------------------------------------------- + + +class DisplayObject(object): + """An object that wraps data to be displayed.""" + + _read_flags = 'r' + _show_mem_addr = False + metadata = None + + def __init__(self, data=None, url=None, filename=None, metadata=None): + """Create a display object given raw data. + + When this object is returned by an expression or passed to the + display function, it will result in the data being displayed + in the frontend. The MIME type of the data should match the + subclasses used, so the Png subclass should be used for 'image/png' + data. If the data is a URL, the data will first be downloaded + and then displayed. If + + Parameters + ---------- + data : unicode, str or bytes + The raw data or a URL or file to load the data from + url : unicode + A URL to download the data from. + filename : unicode + Path to a local file to load the data from. 
+ metadata : dict + Dict of metadata associated to be the object when displayed + """ + if isinstance(data, (Path, PurePath)): + data = str(data) + + if data is not None and isinstance(data, str): + if data.startswith('http') and url is None: + url = data + filename = None + data = None + elif _safe_exists(data) and filename is None: + url = None + filename = data + data = None + + self.url = url + self.filename = filename + # because of @data.setter methods in + # subclasses ensure url and filename are set + # before assigning to self.data + self.data = data + + if metadata is not None: + self.metadata = metadata + elif self.metadata is None: + self.metadata = {} + + self.reload() + self._check_data() + + def __repr__(self): + if not self._show_mem_addr: + cls = self.__class__ + r = "<%s.%s object>" % (cls.__module__, cls.__name__) + else: + r = super(DisplayObject, self).__repr__() + return r + + def _check_data(self): + """Override in subclasses if there's something to check.""" + pass + + def _data_and_metadata(self): + """shortcut for returning metadata with shape information, if defined""" + if self.metadata: + return self.data, deepcopy(self.metadata) + else: + return self.data + + def reload(self): + """Reload the raw data from file or URL.""" + if self.filename is not None: + with open(self.filename, self._read_flags) as f: + self.data = f.read() + elif self.url is not None: + # Deferred import + from urllib.request import urlopen + response = urlopen(self.url) + data = response.read() + # extract encoding from header, if there is one: + encoding = None + if 'content-type' in response.headers: + for sub in response.headers['content-type'].split(';'): + sub = sub.strip() + if sub.startswith('charset'): + encoding = sub.split('=')[-1].strip() + break + if 'content-encoding' in response.headers: + # TODO: do deflate? + if 'gzip' in response.headers['content-encoding']: + import gzip + from io import BytesIO + with gzip.open(BytesIO(data), 'rt', encoding=encoding) as fp: + encoding = None + data = fp.read() + + # decode data, if an encoding was specified + # We only touch self.data once since + # subclasses such as SVG have @data.setter methods + # that transform self.data into ... well svg. + if encoding: + self.data = data.decode(encoding, 'replace') + else: + self.data = data + + +class TextDisplayObject(DisplayObject): + """Validate that display data is text""" + def _check_data(self): + if self.data is not None and not isinstance(self.data, str): + raise TypeError("%s expects text, not %r" % (self.__class__.__name__, self.data)) + +class Pretty(TextDisplayObject): + + def _repr_pretty_(self, pp, cycle): + return pp.text(self.data) + + +class HTML(TextDisplayObject): + + def __init__(self, data=None, url=None, filename=None, metadata=None): + def warn(): + if not data: + return False + + # + # Avoid calling lower() on the entire data, because it could be a + # long string and we're only interested in its beginning and end. + # + prefix = data[:10].lower() + suffix = data[-10:].lower() + return prefix.startswith("<iframe ") and suffix.endswith("</iframe>") + + if warn(): + warnings.warn("Consider using IPython.display.IFrame instead") + super(HTML, self).__init__(data=data, url=url, filename=filename, metadata=metadata) + + def _repr_html_(self): + return self._data_and_metadata() + + def __html__(self): + """ + This method exists to inform other HTML-using modules (e.g. 
Markupsafe, + htmltag, etc) that this object is HTML and does not need things like + special characters (<>&) escaped. + """ + return self._repr_html_() + + +class Markdown(TextDisplayObject): + + def _repr_markdown_(self): + return self._data_and_metadata() + + +class Math(TextDisplayObject): + + def _repr_latex_(self): + s = r"$\displaystyle %s$" % self.data.strip('$') + if self.metadata: + return s, deepcopy(self.metadata) + else: + return s + + +class Latex(TextDisplayObject): + + def _repr_latex_(self): + return self._data_and_metadata() + + +class SVG(DisplayObject): + """Embed an SVG into the display. + + Note if you just want to view a svg image via a URL use `:class:Image` with + a url=URL keyword argument. + """ + + _read_flags = 'rb' + # wrap data in a property, which extracts the <svg> tag, discarding + # document headers + _data = None + + @property + def data(self): + return self._data + + @data.setter + def data(self, svg): + if svg is None: + self._data = None + return + # parse into dom object + from xml.dom import minidom + x = minidom.parseString(svg) + # get svg tag (should be 1) + found_svg = x.getElementsByTagName('svg') + if found_svg: + svg = found_svg[0].toxml() + else: + # fallback on the input, trust the user + # but this is probably an error. + pass + svg = cast_unicode(svg) + self._data = svg + + def _repr_svg_(self): + return self._data_and_metadata() + +class ProgressBar(DisplayObject): + """Progressbar supports displaying a progressbar like element + """ + def __init__(self, total): + """Creates a new progressbar + + Parameters + ---------- + total : int + maximum size of the progressbar + """ + self.total = total + self._progress = 0 + self.html_width = '60ex' + self.text_width = 60 + self._display_id = hexlify(os.urandom(8)).decode('ascii') + + def __repr__(self): + fraction = self.progress / self.total + filled = '=' * int(fraction * self.text_width) + rest = ' ' * (self.text_width - len(filled)) + return '[{}{}] {}/{}'.format( + filled, rest, + self.progress, self.total, + ) + + def _repr_html_(self): + return "<progress style='width:{}' max='{}' value='{}'></progress>".format( + self.html_width, self.total, self.progress) + + def display(self): + display(self, display_id=self._display_id) + + def update(self): + display(self, display_id=self._display_id, update=True) + + @property + def progress(self): + return self._progress + + @progress.setter + def progress(self, value): + self._progress = value + self.update() + + def __iter__(self): + self.display() + self._progress = -1 # First iteration is 0 + return self + + def __next__(self): + """Returns current value and increments display by one.""" + self.progress += 1 + if self.progress < self.total: + return self.progress + else: + raise StopIteration() + +class JSON(DisplayObject): + """JSON expects a JSON-able dict or list + + not an already-serialized JSON string. + + Scalar types (None, number, string) are not allowed, only dict or list containers. + """ + # wrap data in a property, which warns about passing already-serialized JSON + _data = None + def __init__(self, data=None, url=None, filename=None, expanded=False, metadata=None, root='root', **kwargs): + """Create a JSON display object given raw data. + + Parameters + ---------- + data : dict or list + JSON data to display. Not an already-serialized JSON string. + Scalar types (None, number, string) are not allowed, only dict + or list containers. + url : unicode + A URL to download the data from. 
+ filename : unicode + Path to a local file to load the data from. + expanded : boolean + Metadata to control whether a JSON display component is expanded. + metadata: dict + Specify extra metadata to attach to the json display object. + root : str + The name of the root element of the JSON tree + """ + self.metadata = { + 'expanded': expanded, + 'root': root, + } + if metadata: + self.metadata.update(metadata) + if kwargs: + self.metadata.update(kwargs) + super(JSON, self).__init__(data=data, url=url, filename=filename) + + def _check_data(self): + if self.data is not None and not isinstance(self.data, (dict, list)): + raise TypeError("%s expects JSONable dict or list, not %r" % (self.__class__.__name__, self.data)) + + @property + def data(self): + return self._data + + @data.setter + def data(self, data): + if isinstance(data, (Path, PurePath)): + data = str(data) + + if isinstance(data, str): + if self.filename is None and self.url is None: + warnings.warn("JSON expects JSONable dict or list, not JSON strings") + data = json.loads(data) + self._data = data + + def _data_and_metadata(self): + return self.data, self.metadata + + def _repr_json_(self): + return self._data_and_metadata() + +_css_t = """var link = document.createElement("link"); + link.ref = "stylesheet"; + link.type = "text/css"; + link.href = "%s"; + document.head.appendChild(link); +""" + +_lib_t1 = """new Promise(function(resolve, reject) { + var script = document.createElement("script"); + script.onload = resolve; + script.onerror = reject; + script.src = "%s"; + document.head.appendChild(script); +}).then(() => { +""" + +_lib_t2 = """ +});""" + +class GeoJSON(JSON): + """GeoJSON expects JSON-able dict + + not an already-serialized JSON string. + + Scalar types (None, number, string) are not allowed, only dict containers. + """ + + def __init__(self, *args, **kwargs): + """Create a GeoJSON display object given raw data. + + Parameters + ---------- + data : dict or list + VegaLite data. Not an already-serialized JSON string. + Scalar types (None, number, string) are not allowed, only dict + or list containers. + url_template : string + Leaflet TileLayer URL template: http://leafletjs.com/reference.html#url-template + layer_options : dict + Leaflet TileLayer options: http://leafletjs.com/reference.html#tilelayer-options + url : unicode + A URL to download the data from. + filename : unicode + Path to a local file to load the data from. + metadata: dict + Specify extra metadata to attach to the json display object. + + Examples + -------- + + The following will display an interactive map of Mars with a point of + interest on frontend that do support GeoJSON display. + + >>> from IPython.display import GeoJSON + + >>> GeoJSON(data={ + ... "type": "Feature", + ... "geometry": { + ... "type": "Point", + ... "coordinates": [-81.327, 296.038] + ... } + ... }, + ... url_template="http://s3-eu-west-1.amazonaws.com/whereonmars.cartodb.net/{basemap_id}/{z}/{x}/{y}.png", + ... layer_options={ + ... "basemap_id": "celestia_mars-shaded-16k_global", + ... "attribution" : "Celestia/praesepe", + ... "minZoom" : 0, + ... "maxZoom" : 18, + ... }) + <IPython.core.display.GeoJSON object> + + In the terminal IPython, you will only see the text representation of + the GeoJSON object. 
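A rough sketch of how the constructor arguments above end up split between data and metadata; this assumes only that IPython is importable, and reuses the illustrative tile URL from the example:

from IPython.display import GeoJSON

gj = GeoJSON(
    data={
        "type": "Feature",
        "geometry": {"type": "Point", "coordinates": [-81.327, 296.038]},
    },
    url_template="http://s3-eu-west-1.amazonaws.com/whereonmars.cartodb.net/{basemap_id}/{z}/{x}/{y}.png",
    layer_options={"basemap_id": "celestia_mars-shaded-16k_global", "minZoom": 0, "maxZoom": 18},
)
# The GeoJSON payload stays in .data; url_template/layer_options land in .metadata
# next to the default 'expanded' and 'root' keys.
print(gj.data["geometry"]["type"])   # Point
print(sorted(gj.metadata))           # ['expanded', 'layer_options', 'root', 'url_template']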
+ + """ + + super(GeoJSON, self).__init__(*args, **kwargs) + + + def _ipython_display_(self): + bundle = { + 'application/geo+json': self.data, + 'text/plain': '<IPython.display.GeoJSON object>' + } + metadata = { + 'application/geo+json': self.metadata + } + display(bundle, metadata=metadata, raw=True) + +class Javascript(TextDisplayObject): + + def __init__(self, data=None, url=None, filename=None, lib=None, css=None): + """Create a Javascript display object given raw data. + + When this object is returned by an expression or passed to the + display function, it will result in the data being displayed + in the frontend. If the data is a URL, the data will first be + downloaded and then displayed. + + In the Notebook, the containing element will be available as `element`, + and jQuery will be available. Content appended to `element` will be + visible in the output area. + + Parameters + ---------- + data : unicode, str or bytes + The Javascript source code or a URL to download it from. + url : unicode + A URL to download the data from. + filename : unicode + Path to a local file to load the data from. + lib : list or str + A sequence of Javascript library URLs to load asynchronously before + running the source code. The full URLs of the libraries should + be given. A single Javascript library URL can also be given as a + string. + css: : list or str + A sequence of css files to load before running the source code. + The full URLs of the css files should be given. A single css URL + can also be given as a string. + """ + if isinstance(lib, str): + lib = [lib] + elif lib is None: + lib = [] + if isinstance(css, str): + css = [css] + elif css is None: + css = [] + if not isinstance(lib, (list,tuple)): + raise TypeError('expected sequence, got: %r' % lib) + if not isinstance(css, (list,tuple)): + raise TypeError('expected sequence, got: %r' % css) + self.lib = lib + self.css = css + super(Javascript, self).__init__(data=data, url=url, filename=filename) + + def _repr_javascript_(self): + r = '' + for c in self.css: + r += _css_t % c + for l in self.lib: + r += _lib_t1 % l + r += self.data + r += _lib_t2*len(self.lib) + return r + +# constants for identifying png/jpeg data +_PNG = b'\x89PNG\r\n\x1a\n' +_JPEG = b'\xff\xd8' + +def _pngxy(data): + """read the (width, height) from a PNG header""" + ihdr = data.index(b'IHDR') + # next 8 bytes are width/height + return struct.unpack('>ii', data[ihdr+4:ihdr+12]) + +def _jpegxy(data): + """read the (width, height) from a JPEG header""" + # adapted from http://www.64lines.com/jpeg-width-height + + idx = 4 + while True: + block_size = struct.unpack('>H', data[idx:idx+2])[0] + idx = idx + block_size + if data[idx:idx+2] == b'\xFF\xC0': + # found Start of Frame + iSOF = idx + break + else: + # read another block + idx += 2 + + h, w = struct.unpack('>HH', data[iSOF+5:iSOF+9]) + return w, h + +def _gifxy(data): + """read the (width, height) from a GIF header""" + return struct.unpack('<HH', data[6:10]) + + +class Image(DisplayObject): + + _read_flags = 'rb' + _FMT_JPEG = u'jpeg' + _FMT_PNG = u'png' + _FMT_GIF = u'gif' + _ACCEPTABLE_EMBEDDINGS = [_FMT_JPEG, _FMT_PNG, _FMT_GIF] + _MIMETYPES = { + _FMT_PNG: 'image/png', + _FMT_JPEG: 'image/jpeg', + _FMT_GIF: 'image/gif', + } + + def __init__(self, data=None, url=None, filename=None, format=None, + embed=None, width=None, height=None, retina=False, + unconfined=False, metadata=None): + """Create a PNG/JPEG/GIF image object given raw data. 
+ + When this object is returned by an input cell or passed to the + display function, it will result in the image being displayed + in the frontend. + + Parameters + ---------- + data : unicode, str or bytes + The raw image data or a URL or filename to load the data from. + This always results in embedded image data. + url : unicode + A URL to download the data from. If you specify `url=`, + the image data will not be embedded unless you also specify `embed=True`. + filename : unicode + Path to a local file to load the data from. + Images from a file are always embedded. + format : unicode + The format of the image data (png/jpeg/jpg/gif). If a filename or URL is given + for format will be inferred from the filename extension. + embed : bool + Should the image data be embedded using a data URI (True) or be + loaded using an <img> tag. Set this to True if you want the image + to be viewable later with no internet connection in the notebook. + + Default is `True`, unless the keyword argument `url` is set, then + default value is `False`. + + Note that QtConsole is not able to display images if `embed` is set to `False` + width : int + Width in pixels to which to constrain the image in html + height : int + Height in pixels to which to constrain the image in html + retina : bool + Automatically set the width and height to half of the measured + width and height. + This only works for embedded images because it reads the width/height + from image data. + For non-embedded images, you can just set the desired display width + and height directly. + unconfined: bool + Set unconfined=True to disable max-width confinement of the image. + metadata: dict + Specify extra metadata to attach to the image. + + Examples + -------- + # embedded image data, works in qtconsole and notebook + # when passed positionally, the first arg can be any of raw image data, + # a URL, or a filename from which to load image data. + # The result is always embedding image data for inline images. + Image('http://www.google.fr/images/srpr/logo3w.png') + Image('/path/to/image.jpg') + Image(b'RAW_PNG_DATA...') + + # Specifying Image(url=...) does not embed the image data, + # it only generates `<img>` tag with a link to the source. + # This will not work in the qtconsole or offline. + Image(url='http://www.google.fr/images/srpr/logo3w.png') + + """ + if isinstance(data, (Path, PurePath)): + data = str(data) + + if filename is not None: + ext = self._find_ext(filename) + elif url is not None: + ext = self._find_ext(url) + elif data is None: + raise ValueError("No image data found. Expecting filename, url, or data.") + elif isinstance(data, str) and ( + data.startswith('http') or _safe_exists(data) + ): + ext = self._find_ext(data) + else: + ext = None + + if format is None: + if ext is not None: + if ext == u'jpg' or ext == u'jpeg': + format = self._FMT_JPEG + elif ext == u'png': + format = self._FMT_PNG + elif ext == u'gif': + format = self._FMT_GIF + else: + format = ext.lower() + elif isinstance(data, bytes): + # infer image type from image data header, + # only if format has not been specified. 
+ if data[:2] == _JPEG: + format = self._FMT_JPEG + + # failed to detect format, default png + if format is None: + format = self._FMT_PNG + + if format.lower() == 'jpg': + # jpg->jpeg + format = self._FMT_JPEG + + self.format = format.lower() + self.embed = embed if embed is not None else (url is None) + + if self.embed and self.format not in self._ACCEPTABLE_EMBEDDINGS: + raise ValueError("Cannot embed the '%s' image format" % (self.format)) + if self.embed: + self._mimetype = self._MIMETYPES.get(self.format) + + self.width = width + self.height = height + self.retina = retina + self.unconfined = unconfined + super(Image, self).__init__(data=data, url=url, filename=filename, + metadata=metadata) + + if self.width is None and self.metadata.get('width', {}): + self.width = metadata['width'] + + if self.height is None and self.metadata.get('height', {}): + self.height = metadata['height'] + + if retina: + self._retina_shape() + + + def _retina_shape(self): + """load pixel-doubled width and height from image data""" + if not self.embed: + return + if self.format == self._FMT_PNG: + w, h = _pngxy(self.data) + elif self.format == self._FMT_JPEG: + w, h = _jpegxy(self.data) + elif self.format == self._FMT_GIF: + w, h = _gifxy(self.data) + else: + # retina only supports png + return + self.width = w // 2 + self.height = h // 2 + + def reload(self): + """Reload the raw data from file or URL.""" + if self.embed: + super(Image,self).reload() + if self.retina: + self._retina_shape() + + def _repr_html_(self): + if not self.embed: + width = height = klass = '' + if self.width: + width = ' width="%d"' % self.width + if self.height: + height = ' height="%d"' % self.height + if self.unconfined: + klass = ' class="unconfined"' + return u'<img src="{url}"{width}{height}{klass}/>'.format( + url=self.url, + width=width, + height=height, + klass=klass, + ) + + def _repr_mimebundle_(self, include=None, exclude=None): + """Return the image as a mimebundle + + Any new mimetype support should be implemented here. + """ + if self.embed: + mimetype = self._mimetype + data, metadata = self._data_and_metadata(always_both=True) + if metadata: + metadata = {mimetype: metadata} + return {mimetype: data}, metadata + else: + return {'text/html': self._repr_html_()} + + def _data_and_metadata(self, always_both=False): + """shortcut for returning metadata with shape information, if defined""" + try: + b64_data = b2a_base64(self.data).decode('ascii') + except TypeError: + raise FileNotFoundError( + "No such file or directory: '%s'" % (self.data)) + md = {} + if self.metadata: + md.update(self.metadata) + if self.width: + md['width'] = self.width + if self.height: + md['height'] = self.height + if self.unconfined: + md['unconfined'] = self.unconfined + if md or always_both: + return b64_data, md + else: + return b64_data + + def _repr_png_(self): + if self.embed and self.format == self._FMT_PNG: + return self._data_and_metadata() + + def _repr_jpeg_(self): + if self.embed and self.format == self._FMT_JPEG: + return self._data_and_metadata() + + def _find_ext(self, s): + base, ext = splitext(s) + + if not ext: + return base + + # `splitext` includes leading period, so we skip it + return ext[1:].lower() + + +class Video(DisplayObject): + + def __init__(self, data=None, url=None, filename=None, embed=False, + mimetype=None, width=None, height=None, html_attributes="controls"): + """Create a video object given raw data or an URL. 
+ + When this object is returned by an input cell or passed to the + display function, it will result in the video being displayed + in the frontend. + + Parameters + ---------- + data : unicode, str or bytes + The raw video data or a URL or filename to load the data from. + Raw data will require passing ``embed=True``. + url : unicode + A URL for the video. If you specify ``url=``, + the image data will not be embedded. + filename : unicode + Path to a local file containing the video. + Will be interpreted as a local URL unless ``embed=True``. + embed : bool + Should the video be embedded using a data URI (True) or be + loaded using a <video> tag (False). + + Since videos are large, embedding them should be avoided, if possible. + You must confirm embedding as your intention by passing ``embed=True``. + + Local files can be displayed with URLs without embedding the content, via:: + + Video('./video.mp4') + + mimetype: unicode + Specify the mimetype for embedded videos. + Default will be guessed from file extension, if available. + width : int + Width in pixels to which to constrain the video in HTML. + If not supplied, defaults to the width of the video. + height : int + Height in pixels to which to constrain the video in html. + If not supplied, defaults to the height of the video. + html_attributes : str + Attributes for the HTML ``<video>`` block. + Default: ``"controls"`` to get video controls. + Other examples: ``"controls muted"`` for muted video with controls, + ``"loop autoplay"`` for looping autoplaying video without controls. + + Examples + -------- + + :: + + Video('https://archive.org/download/Sita_Sings_the_Blues/Sita_Sings_the_Blues_small.mp4') + Video('path/to/video.mp4') + Video('path/to/video.mp4', embed=True) + Video('path/to/video.mp4', embed=True, html_attributes="controls muted autoplay") + Video(b'raw-videodata', embed=True) + """ + if isinstance(data, (Path, PurePath)): + data = str(data) + + if url is None and isinstance(data, str) and data.startswith(('http:', 'https:')): + url = data + data = None + elif os.path.exists(data): + filename = data + data = None + + if data and not embed: + msg = ''.join([ + "To embed videos, you must pass embed=True ", + "(this may make your notebook files huge)\n", + "Consider passing Video(url='...')", + ]) + raise ValueError(msg) + + self.mimetype = mimetype + self.embed = embed + self.width = width + self.height = height + self.html_attributes = html_attributes + super(Video, self).__init__(data=data, url=url, filename=filename) + + def _repr_html_(self): + width = height = '' + if self.width: + width = ' width="%d"' % self.width + if self.height: + height = ' height="%d"' % self.height + + # External URLs and potentially local files are not embedded into the + # notebook output. + if not self.embed: + url = self.url if self.url is not None else self.filename + output = """<video src="{0}" {1} {2} {3}> + Your browser does not support the <code>video</code> element. + </video>""".format(url, self.html_attributes, width, height) + return output + + # Embedded videos are base64-encoded. 
+ mimetype = self.mimetype + if self.filename is not None: + if not mimetype: + mimetype, _ = mimetypes.guess_type(self.filename) + + with open(self.filename, 'rb') as f: + video = f.read() + else: + video = self.data + if isinstance(video, str): + # unicode input is already b64-encoded + b64_video = video + else: + b64_video = b2a_base64(video).decode('ascii').rstrip() + + output = """<video {0} {1} {2}> + <source src="data:{3};base64,{4}" type="{3}"> + Your browser does not support the video tag. + </video>""".format(self.html_attributes, width, height, mimetype, b64_video) + return output + + def reload(self): + # TODO + pass + + +def clear_output(wait=False): + """Clear the output of the current cell receiving output. + + Parameters + ---------- + wait : bool [default: false] + Wait to clear the output until new output is available to replace it.""" + from IPython.core.interactiveshell import InteractiveShell + if InteractiveShell.initialized(): + InteractiveShell.instance().display_pub.clear_output(wait) + else: + print('\033[2K\r', end='') + sys.stdout.flush() + print('\033[2K\r', end='') + sys.stderr.flush() + + +@skip_doctest +def set_matplotlib_formats(*formats, **kwargs): + """ + .. deprecated:: 7.23 + + use `matplotlib_inline.backend_inline.set_matplotlib_formats()` + + Select figure formats for the inline backend. Optionally pass quality for JPEG. + + For example, this enables PNG and JPEG output with a JPEG quality of 90%:: + + In [1]: set_matplotlib_formats('png', 'jpeg', quality=90) + + To set this in your config files use the following:: + + c.InlineBackend.figure_formats = {'png', 'jpeg'} + c.InlineBackend.print_figure_kwargs.update({'quality' : 90}) + + Parameters + ---------- + *formats : strs + One or more figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'. + **kwargs : + Keyword args will be relayed to ``figure.canvas.print_figure``. + """ + warnings.warn( + "`set_matplotlib_formats` is deprecated since IPython 7.23, directly " + "use `matplotlib_inline.backend_inline.set_matplotlib_formats()`", + DeprecationWarning, + stacklevel=2, + ) + + from matplotlib_inline.backend_inline import ( + set_matplotlib_formats as set_matplotlib_formats_orig, + ) + + set_matplotlib_formats_orig(*formats, **kwargs) + +@skip_doctest +def set_matplotlib_close(close=True): + """ + .. deprecated:: 7.23 + + use `matplotlib_inline.backend_inline.set_matplotlib_close()` + + + Set whether the inline backend closes all figures automatically or not. + + By default, the inline backend used in the IPython Notebook will close all + matplotlib figures automatically after each cell is run. This means that + plots in different cells won't interfere. Sometimes, you may want to make + a plot in one cell and then refine it in later cells. This can be accomplished + by:: + + In [1]: set_matplotlib_close(False) + + To set this in your config files use the following:: + + c.InlineBackend.close_figures = False + + Parameters + ---------- + close : bool + Should all matplotlib figures be automatically closed after each cell is + run? 
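Since both of these helpers are deprecated shims, a short sketch of the replacement calls may be useful; it assumes the matplotlib-inline package is installed (it is the backend these wrappers delegate to):

from matplotlib_inline.backend_inline import (
    set_matplotlib_close,
    set_matplotlib_formats,
)

set_matplotlib_formats("png", "svg")  # enable PNG and SVG inline figure formats
set_matplotlib_close(False)           # keep figures open so later cells can refine them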
+ """ + warnings.warn( + "`set_matplotlib_close` is deprecated since IPython 7.23, directly " + "use `matplotlib_inline.backend_inline.set_matplotlib_close()`", + DeprecationWarning, + stacklevel=2, + ) + + from matplotlib_inline.backend_inline import ( + set_matplotlib_close as set_matplotlib_close_orig, + ) + + set_matplotlib_close_orig(close) diff --git a/contrib/python/ipython/py3/IPython/core/display_trap.py b/contrib/python/ipython/py3/IPython/core/display_trap.py index 7a48a5e1196..9931dfe2dfc 100644 --- a/contrib/python/ipython/py3/IPython/core/display_trap.py +++ b/contrib/python/ipython/py3/IPython/core/display_trap.py @@ -1,70 +1,70 @@ -# encoding: utf-8 -""" -A context manager for handling sys.displayhook. - -Authors: - -* Robert Kern -* Brian Granger -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import sys - -from traitlets.config.configurable import Configurable -from traitlets import Any - -#----------------------------------------------------------------------------- -# Classes and functions -#----------------------------------------------------------------------------- - - -class DisplayTrap(Configurable): - """Object to manage sys.displayhook. - - This came from IPython.core.kernel.display_hook, but is simplified - (no callbacks or formatters) until more of the core is refactored. - """ - - hook = Any() - - def __init__(self, hook=None): - super(DisplayTrap, self).__init__(hook=hook, config=None) - self.old_hook = None - # We define this to track if a single BuiltinTrap is nested. - # Only turn off the trap when the outermost call to __exit__ is made. - self._nested_level = 0 - - def __enter__(self): - if self._nested_level == 0: - self.set() - self._nested_level += 1 - return self - - def __exit__(self, type, value, traceback): - if self._nested_level == 1: - self.unset() - self._nested_level -= 1 - # Returning False will cause exceptions to propagate - return False - - def set(self): - """Set the hook.""" - if sys.displayhook is not self.hook: - self.old_hook = sys.displayhook - sys.displayhook = self.hook - - def unset(self): - """Unset the hook.""" - sys.displayhook = self.old_hook - +# encoding: utf-8 +""" +A context manager for handling sys.displayhook. + +Authors: + +* Robert Kern +* Brian Granger +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import sys + +from traitlets.config.configurable import Configurable +from traitlets import Any + +#----------------------------------------------------------------------------- +# Classes and functions +#----------------------------------------------------------------------------- + + +class DisplayTrap(Configurable): + """Object to manage sys.displayhook. + + This came from IPython.core.kernel.display_hook, but is simplified + (no callbacks or formatters) until more of the core is refactored. + """ + + hook = Any() + + def __init__(self, hook=None): + super(DisplayTrap, self).__init__(hook=hook, config=None) + self.old_hook = None + # We define this to track if a single BuiltinTrap is nested. + # Only turn off the trap when the outermost call to __exit__ is made. + self._nested_level = 0 + + def __enter__(self): + if self._nested_level == 0: + self.set() + self._nested_level += 1 + return self + + def __exit__(self, type, value, traceback): + if self._nested_level == 1: + self.unset() + self._nested_level -= 1 + # Returning False will cause exceptions to propagate + return False + + def set(self): + """Set the hook.""" + if sys.displayhook is not self.hook: + self.old_hook = sys.displayhook + sys.displayhook = self.hook + + def unset(self): + """Unset the hook.""" + sys.displayhook = self.old_hook + diff --git a/contrib/python/ipython/py3/IPython/core/displayhook.py b/contrib/python/ipython/py3/IPython/core/displayhook.py index a825b585324..3c06675e86e 100644 --- a/contrib/python/ipython/py3/IPython/core/displayhook.py +++ b/contrib/python/ipython/py3/IPython/core/displayhook.py @@ -1,325 +1,325 @@ -# -*- coding: utf-8 -*- -"""Displayhook for IPython. - -This defines a callable class that IPython uses for `sys.displayhook`. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import builtins as builtin_mod -import sys -import io as _io -import tokenize - -from traitlets.config.configurable import Configurable -from traitlets import Instance, Float -from warnings import warn - -# TODO: Move the various attributes (cache_size, [others now moved]). Some -# of these are also attributes of InteractiveShell. They should be on ONE object -# only and the other objects should ask that one object for their values. - -class DisplayHook(Configurable): - """The custom IPython displayhook to replace sys.displayhook. - - This class does many things, but the basic idea is that it is a callable - that gets called anytime user code returns a value. - """ - - shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', - allow_none=True) - exec_result = Instance('IPython.core.interactiveshell.ExecutionResult', - allow_none=True) - cull_fraction = Float(0.2) - - def __init__(self, shell=None, cache_size=1000, **kwargs): - super(DisplayHook, self).__init__(shell=shell, **kwargs) - cache_size_min = 3 - if cache_size <= 0: - self.do_full_cache = 0 - cache_size = 0 - elif cache_size < cache_size_min: - self.do_full_cache = 0 - cache_size = 0 - warn('caching was disabled (min value for cache size is %s).' 
% - cache_size_min,stacklevel=3) - else: - self.do_full_cache = 1 - - self.cache_size = cache_size - - # we need a reference to the user-level namespace - self.shell = shell - - self._,self.__,self.___ = '','','' - - # these are deliberately global: - to_user_ns = {'_':self._,'__':self.__,'___':self.___} - self.shell.user_ns.update(to_user_ns) - - @property - def prompt_count(self): - return self.shell.execution_count - - #------------------------------------------------------------------------- - # Methods used in __call__. Override these methods to modify the behavior - # of the displayhook. - #------------------------------------------------------------------------- - - def check_for_underscore(self): - """Check if the user has set the '_' variable by hand.""" - # If something injected a '_' variable in __builtin__, delete - # ipython's automatic one so we don't clobber that. gettext() in - # particular uses _, so we need to stay away from it. - if '_' in builtin_mod.__dict__: - try: - user_value = self.shell.user_ns['_'] - if user_value is not self._: - return - del self.shell.user_ns['_'] - except KeyError: - pass - - def quiet(self): - """Should we silence the display hook because of ';'?""" - # do not print output if input ends in ';' - - try: - cell = self.shell.history_manager.input_hist_parsed[-1] - except IndexError: - # some uses of ipshellembed may fail here - return False - - sio = _io.StringIO(cell) - tokens = list(tokenize.generate_tokens(sio.readline)) - - for token in reversed(tokens): - if token[0] in (tokenize.ENDMARKER, tokenize.NL, tokenize.NEWLINE, tokenize.COMMENT): - continue - if (token[0] == tokenize.OP) and (token[1] == ';'): - return True - else: - return False - - def start_displayhook(self): - """Start the displayhook, initializing resources.""" - pass - - def write_output_prompt(self): - """Write the output prompt. - - The default implementation simply writes the prompt to - ``sys.stdout``. - """ - # Use write, not print which adds an extra space. - sys.stdout.write(self.shell.separate_out) - outprompt = 'Out[{}]: '.format(self.shell.execution_count) - if self.do_full_cache: - sys.stdout.write(outprompt) - - def compute_format_data(self, result): - """Compute format data of the object to be displayed. - - The format data is a generalization of the :func:`repr` of an object. - In the default implementation the format data is a :class:`dict` of - key value pair where the keys are valid MIME types and the values - are JSON'able data structure containing the raw data for that MIME - type. It is up to frontends to determine pick a MIME to to use and - display that data in an appropriate manner. - - This method only computes the format data for the object and should - NOT actually print or write that to a stream. - - Parameters - ---------- - result : object - The Python object passed to the display hook, whose format will be - computed. - - Returns - ------- - (format_dict, md_dict) : dict - format_dict is a :class:`dict` whose keys are valid MIME types and values are - JSON'able raw data for that MIME type. It is recommended that - all return values of this should always include the "text/plain" - MIME type representation of the object. - md_dict is a :class:`dict` with the same MIME type keys - of metadata associated with each output. 
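To make the (format_dict, md_dict) shape concrete, here is a small sketch that calls the display formatter directly; it assumes only that IPython is importable, and the exact keys depend on which formatters are enabled:

from IPython.core.formatters import DisplayFormatter

formatter = DisplayFormatter()
format_dict, md_dict = formatter.format(3.14)
print(format_dict)  # typically {'text/plain': '3.14'}
print(md_dict)      # typically {}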
- - """ - return self.shell.display_formatter.format(result) - - # This can be set to True by the write_output_prompt method in a subclass - prompt_end_newline = False - - def write_format_data(self, format_dict, md_dict=None) -> None: - """Write the format data dict to the frontend. - - This default version of this method simply writes the plain text - representation of the object to ``sys.stdout``. Subclasses should - override this method to send the entire `format_dict` to the - frontends. - - Parameters - ---------- - format_dict : dict - The format dict for the object passed to `sys.displayhook`. - md_dict : dict (optional) - The metadata dict to be associated with the display data. - """ - if 'text/plain' not in format_dict: - # nothing to do - return - # We want to print because we want to always make sure we have a - # newline, even if all the prompt separators are ''. This is the - # standard IPython behavior. - result_repr = format_dict['text/plain'] - if '\n' in result_repr: - # So that multi-line strings line up with the left column of - # the screen, instead of having the output prompt mess up - # their first line. - # We use the prompt template instead of the expanded prompt - # because the expansion may add ANSI escapes that will interfere - # with our ability to determine whether or not we should add - # a newline. - if not self.prompt_end_newline: - # But avoid extraneous empty lines. - result_repr = '\n' + result_repr - - try: - print(result_repr) - except UnicodeEncodeError: - # If a character is not supported by the terminal encoding replace - # it with its \u or \x representation - print(result_repr.encode(sys.stdout.encoding,'backslashreplace').decode(sys.stdout.encoding)) - - def update_user_ns(self, result): - """Update user_ns with various things like _, __, _1, etc.""" - - # Avoid recursive reference when displaying _oh/Out - if self.cache_size and result is not self.shell.user_ns['_oh']: - if len(self.shell.user_ns['_oh']) >= self.cache_size and self.do_full_cache: - self.cull_cache() - - # Don't overwrite '_' and friends if '_' is in __builtin__ - # (otherwise we cause buggy behavior for things like gettext). and - # do not overwrite _, __ or ___ if one of these has been assigned - # by the user. - update_unders = True - for unders in ['_'*i for i in range(1,4)]: - if not unders in self.shell.user_ns: - continue - if getattr(self, unders) is not self.shell.user_ns.get(unders): - update_unders = False - - self.___ = self.__ - self.__ = self._ - self._ = result - - if ('_' not in builtin_mod.__dict__) and (update_unders): - self.shell.push({'_':self._, - '__':self.__, - '___':self.___}, interactive=False) - - # hackish access to top-level namespace to create _1,_2... 
dynamically - to_main = {} - if self.do_full_cache: - new_result = '_%s' % self.prompt_count - to_main[new_result] = result - self.shell.push(to_main, interactive=False) - self.shell.user_ns['_oh'][self.prompt_count] = result - - def fill_exec_result(self, result): - if self.exec_result is not None: - self.exec_result.result = result - - def log_output(self, format_dict): - """Log the output.""" - if 'text/plain' not in format_dict: - # nothing to do - return - if self.shell.logger.log_output: - self.shell.logger.log_write(format_dict['text/plain'], 'output') - self.shell.history_manager.output_hist_reprs[self.prompt_count] = \ - format_dict['text/plain'] - - def finish_displayhook(self): - """Finish up all displayhook activities.""" - sys.stdout.write(self.shell.separate_out2) - sys.stdout.flush() - - def __call__(self, result=None): - """Printing with history cache management. - - This is invoked every time the interpreter needs to print, and is - activated by setting the variable sys.displayhook to it. - """ - self.check_for_underscore() - if result is not None and not self.quiet(): - self.start_displayhook() - self.write_output_prompt() - format_dict, md_dict = self.compute_format_data(result) - self.update_user_ns(result) - self.fill_exec_result(result) - if format_dict: - self.write_format_data(format_dict, md_dict) - self.log_output(format_dict) - self.finish_displayhook() - - def cull_cache(self): - """Output cache is full, cull the oldest entries""" - oh = self.shell.user_ns.get('_oh', {}) - sz = len(oh) - cull_count = max(int(sz * self.cull_fraction), 2) - warn('Output cache limit (currently {sz} entries) hit.\n' - 'Flushing oldest {cull_count} entries.'.format(sz=sz, cull_count=cull_count)) - - for i, n in enumerate(sorted(oh)): - if i >= cull_count: - break - self.shell.user_ns.pop('_%i' % n, None) - oh.pop(n, None) - - - def flush(self): - if not self.do_full_cache: - raise ValueError("You shouldn't have reached the cache flush " - "if full caching is not enabled!") - # delete auto-generated vars from global namespace - - for n in range(1,self.prompt_count + 1): - key = '_'+repr(n) - try: - del self.shell.user_ns[key] - except: pass - # In some embedded circumstances, the user_ns doesn't have the - # '_oh' key set up. - oh = self.shell.user_ns.get('_oh', None) - if oh is not None: - oh.clear() - - # Release our own references to objects: - self._, self.__, self.___ = '', '', '' - - if '_' not in builtin_mod.__dict__: - self.shell.user_ns.update({'_':self._,'__':self.__,'___':self.___}) - import gc - # TODO: Is this really needed? - # IronPython blocks here forever - if sys.platform != "cli": - gc.collect() - - -class CapturingDisplayHook(object): - def __init__(self, shell, outputs=None): - self.shell = shell - if outputs is None: - outputs = [] - self.outputs = outputs - - def __call__(self, result=None): - if result is None: - return - format_dict, md_dict = self.shell.display_formatter.format(result) - self.outputs.append({ 'data': format_dict, 'metadata': md_dict }) +# -*- coding: utf-8 -*- +"""Displayhook for IPython. + +This defines a callable class that IPython uses for `sys.displayhook`. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import builtins as builtin_mod +import sys +import io as _io +import tokenize + +from traitlets.config.configurable import Configurable +from traitlets import Instance, Float +from warnings import warn + +# TODO: Move the various attributes (cache_size, [others now moved]). 
Some +# of these are also attributes of InteractiveShell. They should be on ONE object +# only and the other objects should ask that one object for their values. + +class DisplayHook(Configurable): + """The custom IPython displayhook to replace sys.displayhook. + + This class does many things, but the basic idea is that it is a callable + that gets called anytime user code returns a value. + """ + + shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', + allow_none=True) + exec_result = Instance('IPython.core.interactiveshell.ExecutionResult', + allow_none=True) + cull_fraction = Float(0.2) + + def __init__(self, shell=None, cache_size=1000, **kwargs): + super(DisplayHook, self).__init__(shell=shell, **kwargs) + cache_size_min = 3 + if cache_size <= 0: + self.do_full_cache = 0 + cache_size = 0 + elif cache_size < cache_size_min: + self.do_full_cache = 0 + cache_size = 0 + warn('caching was disabled (min value for cache size is %s).' % + cache_size_min,stacklevel=3) + else: + self.do_full_cache = 1 + + self.cache_size = cache_size + + # we need a reference to the user-level namespace + self.shell = shell + + self._,self.__,self.___ = '','','' + + # these are deliberately global: + to_user_ns = {'_':self._,'__':self.__,'___':self.___} + self.shell.user_ns.update(to_user_ns) + + @property + def prompt_count(self): + return self.shell.execution_count + + #------------------------------------------------------------------------- + # Methods used in __call__. Override these methods to modify the behavior + # of the displayhook. + #------------------------------------------------------------------------- + + def check_for_underscore(self): + """Check if the user has set the '_' variable by hand.""" + # If something injected a '_' variable in __builtin__, delete + # ipython's automatic one so we don't clobber that. gettext() in + # particular uses _, so we need to stay away from it. + if '_' in builtin_mod.__dict__: + try: + user_value = self.shell.user_ns['_'] + if user_value is not self._: + return + del self.shell.user_ns['_'] + except KeyError: + pass + + def quiet(self): + """Should we silence the display hook because of ';'?""" + # do not print output if input ends in ';' + + try: + cell = self.shell.history_manager.input_hist_parsed[-1] + except IndexError: + # some uses of ipshellembed may fail here + return False + + sio = _io.StringIO(cell) + tokens = list(tokenize.generate_tokens(sio.readline)) + + for token in reversed(tokens): + if token[0] in (tokenize.ENDMARKER, tokenize.NL, tokenize.NEWLINE, tokenize.COMMENT): + continue + if (token[0] == tokenize.OP) and (token[1] == ';'): + return True + else: + return False + + def start_displayhook(self): + """Start the displayhook, initializing resources.""" + pass + + def write_output_prompt(self): + """Write the output prompt. + + The default implementation simply writes the prompt to + ``sys.stdout``. + """ + # Use write, not print which adds an extra space. + sys.stdout.write(self.shell.separate_out) + outprompt = 'Out[{}]: '.format(self.shell.execution_count) + if self.do_full_cache: + sys.stdout.write(outprompt) + + def compute_format_data(self, result): + """Compute format data of the object to be displayed. + + The format data is a generalization of the :func:`repr` of an object. + In the default implementation the format data is a :class:`dict` of + key value pair where the keys are valid MIME types and the values + are JSON'able data structure containing the raw data for that MIME + type. 
It is up to frontends to determine pick a MIME to to use and + display that data in an appropriate manner. + + This method only computes the format data for the object and should + NOT actually print or write that to a stream. + + Parameters + ---------- + result : object + The Python object passed to the display hook, whose format will be + computed. + + Returns + ------- + (format_dict, md_dict) : dict + format_dict is a :class:`dict` whose keys are valid MIME types and values are + JSON'able raw data for that MIME type. It is recommended that + all return values of this should always include the "text/plain" + MIME type representation of the object. + md_dict is a :class:`dict` with the same MIME type keys + of metadata associated with each output. + + """ + return self.shell.display_formatter.format(result) + + # This can be set to True by the write_output_prompt method in a subclass + prompt_end_newline = False + + def write_format_data(self, format_dict, md_dict=None) -> None: + """Write the format data dict to the frontend. + + This default version of this method simply writes the plain text + representation of the object to ``sys.stdout``. Subclasses should + override this method to send the entire `format_dict` to the + frontends. + + Parameters + ---------- + format_dict : dict + The format dict for the object passed to `sys.displayhook`. + md_dict : dict (optional) + The metadata dict to be associated with the display data. + """ + if 'text/plain' not in format_dict: + # nothing to do + return + # We want to print because we want to always make sure we have a + # newline, even if all the prompt separators are ''. This is the + # standard IPython behavior. + result_repr = format_dict['text/plain'] + if '\n' in result_repr: + # So that multi-line strings line up with the left column of + # the screen, instead of having the output prompt mess up + # their first line. + # We use the prompt template instead of the expanded prompt + # because the expansion may add ANSI escapes that will interfere + # with our ability to determine whether or not we should add + # a newline. + if not self.prompt_end_newline: + # But avoid extraneous empty lines. + result_repr = '\n' + result_repr + + try: + print(result_repr) + except UnicodeEncodeError: + # If a character is not supported by the terminal encoding replace + # it with its \u or \x representation + print(result_repr.encode(sys.stdout.encoding,'backslashreplace').decode(sys.stdout.encoding)) + + def update_user_ns(self, result): + """Update user_ns with various things like _, __, _1, etc.""" + + # Avoid recursive reference when displaying _oh/Out + if self.cache_size and result is not self.shell.user_ns['_oh']: + if len(self.shell.user_ns['_oh']) >= self.cache_size and self.do_full_cache: + self.cull_cache() + + # Don't overwrite '_' and friends if '_' is in __builtin__ + # (otherwise we cause buggy behavior for things like gettext). and + # do not overwrite _, __ or ___ if one of these has been assigned + # by the user. + update_unders = True + for unders in ['_'*i for i in range(1,4)]: + if not unders in self.shell.user_ns: + continue + if getattr(self, unders) is not self.shell.user_ns.get(unders): + update_unders = False + + self.___ = self.__ + self.__ = self._ + self._ = result + + if ('_' not in builtin_mod.__dict__) and (update_unders): + self.shell.push({'_':self._, + '__':self.__, + '___':self.___}, interactive=False) + + # hackish access to top-level namespace to create _1,_2... 
dynamically + to_main = {} + if self.do_full_cache: + new_result = '_%s' % self.prompt_count + to_main[new_result] = result + self.shell.push(to_main, interactive=False) + self.shell.user_ns['_oh'][self.prompt_count] = result + + def fill_exec_result(self, result): + if self.exec_result is not None: + self.exec_result.result = result + + def log_output(self, format_dict): + """Log the output.""" + if 'text/plain' not in format_dict: + # nothing to do + return + if self.shell.logger.log_output: + self.shell.logger.log_write(format_dict['text/plain'], 'output') + self.shell.history_manager.output_hist_reprs[self.prompt_count] = \ + format_dict['text/plain'] + + def finish_displayhook(self): + """Finish up all displayhook activities.""" + sys.stdout.write(self.shell.separate_out2) + sys.stdout.flush() + + def __call__(self, result=None): + """Printing with history cache management. + + This is invoked every time the interpreter needs to print, and is + activated by setting the variable sys.displayhook to it. + """ + self.check_for_underscore() + if result is not None and not self.quiet(): + self.start_displayhook() + self.write_output_prompt() + format_dict, md_dict = self.compute_format_data(result) + self.update_user_ns(result) + self.fill_exec_result(result) + if format_dict: + self.write_format_data(format_dict, md_dict) + self.log_output(format_dict) + self.finish_displayhook() + + def cull_cache(self): + """Output cache is full, cull the oldest entries""" + oh = self.shell.user_ns.get('_oh', {}) + sz = len(oh) + cull_count = max(int(sz * self.cull_fraction), 2) + warn('Output cache limit (currently {sz} entries) hit.\n' + 'Flushing oldest {cull_count} entries.'.format(sz=sz, cull_count=cull_count)) + + for i, n in enumerate(sorted(oh)): + if i >= cull_count: + break + self.shell.user_ns.pop('_%i' % n, None) + oh.pop(n, None) + + + def flush(self): + if not self.do_full_cache: + raise ValueError("You shouldn't have reached the cache flush " + "if full caching is not enabled!") + # delete auto-generated vars from global namespace + + for n in range(1,self.prompt_count + 1): + key = '_'+repr(n) + try: + del self.shell.user_ns[key] + except: pass + # In some embedded circumstances, the user_ns doesn't have the + # '_oh' key set up. + oh = self.shell.user_ns.get('_oh', None) + if oh is not None: + oh.clear() + + # Release our own references to objects: + self._, self.__, self.___ = '', '', '' + + if '_' not in builtin_mod.__dict__: + self.shell.user_ns.update({'_':self._,'__':self.__,'___':self.___}) + import gc + # TODO: Is this really needed? + # IronPython blocks here forever + if sys.platform != "cli": + gc.collect() + + +class CapturingDisplayHook(object): + def __init__(self, shell, outputs=None): + self.shell = shell + if outputs is None: + outputs = [] + self.outputs = outputs + + def __call__(self, result=None): + if result is None: + return + format_dict, md_dict = self.shell.display_formatter.format(result) + self.outputs.append({ 'data': format_dict, 'metadata': md_dict }) diff --git a/contrib/python/ipython/py3/IPython/core/displaypub.py b/contrib/python/ipython/py3/IPython/core/displaypub.py index 01bc2389feb..1da0458cf08 100644 --- a/contrib/python/ipython/py3/IPython/core/displaypub.py +++ b/contrib/python/ipython/py3/IPython/core/displaypub.py @@ -1,138 +1,138 @@ -"""An interface for publishing rich data to frontends. 
- -There are two components of the display system: - -* Display formatters, which take a Python object and compute the - representation of the object in various formats (text, HTML, SVG, etc.). -* The display publisher that is used to send the representation data to the - various frontends. - -This module defines the logic display publishing. The display publisher uses -the ``display_data`` message type that is defined in the IPython messaging -spec. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - - -import sys - -from traitlets.config.configurable import Configurable -from traitlets import List - -# This used to be defined here - it is imported for backwards compatibility -from .display import publish_display_data - -#----------------------------------------------------------------------------- -# Main payload class -#----------------------------------------------------------------------------- - - -class DisplayPublisher(Configurable): - """A traited class that publishes display data to frontends. - - Instances of this class are created by the main IPython object and should - be accessed there. - """ - - def __init__(self, shell=None, *args, **kwargs): - self.shell = shell - super().__init__(*args, **kwargs) - - def _validate_data(self, data, metadata=None): - """Validate the display data. - - Parameters - ---------- - data : dict - The formata data dictionary. - metadata : dict - Any metadata for the data. - """ - - if not isinstance(data, dict): - raise TypeError('data must be a dict, got: %r' % data) - if metadata is not None: - if not isinstance(metadata, dict): - raise TypeError('metadata must be a dict, got: %r' % data) - - # use * to indicate transient, update are keyword-only - def publish(self, data, metadata=None, source=None, *, transient=None, update=False, **kwargs) -> None: - """Publish data and metadata to all frontends. - - See the ``display_data`` message in the messaging documentation for - more details about this message type. - - The following MIME types are currently implemented: - - * text/plain - * text/html - * text/markdown - * text/latex - * application/json - * application/javascript - * image/png - * image/jpeg - * image/svg+xml - - Parameters - ---------- - data : dict - A dictionary having keys that are valid MIME types (like - 'text/plain' or 'image/svg+xml') and values that are the data for - that MIME type. The data itself must be a JSON'able data - structure. Minimally all data should have the 'text/plain' data, - which can be displayed by all frontends. If more than the plain - text is given, it is up to the frontend to decide which - representation to use. - metadata : dict - A dictionary for metadata related to the data. This can contain - arbitrary key, value pairs that frontends can use to interpret - the data. Metadata specific to each mime-type can be specified - in the metadata dict with the same mime-type keys as - the data itself. - source : str, deprecated - Unused. - transient: dict, keyword-only - A dictionary for transient data. - Data in this dictionary should not be persisted as part of saving this output. - Examples include 'display_id'. - update: bool, keyword-only, default: False - If True, only update existing outputs with the same display_id, - rather than creating a new output. 
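The usual way user code reaches this publish() path is through IPython.display.publish_display_data; a minimal sketch, which simply prints the text/plain entry when no rich frontend is attached:

from IPython.display import publish_display_data

publish_display_data(
    data={"text/plain": "hello", "text/html": "<b>hello</b>"},
    metadata={"text/html": {}},  # per-mime-type metadata is optional
)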
- """ - - handlers = {} - if self.shell is not None: - handlers = getattr(self.shell, 'mime_renderers', {}) - - for mime, handler in handlers.items(): - if mime in data: - handler(data[mime], metadata.get(mime, None)) - return - - if 'text/plain' in data: - print(data['text/plain']) - - def clear_output(self, wait=False): - """Clear the output of the cell receiving output.""" - print('\033[2K\r', end='') - sys.stdout.flush() - print('\033[2K\r', end='') - sys.stderr.flush() - - -class CapturingDisplayPublisher(DisplayPublisher): - """A DisplayPublisher that stores""" - outputs = List() - - def publish(self, data, metadata=None, source=None, *, transient=None, update=False): - self.outputs.append({'data':data, 'metadata':metadata, - 'transient':transient, 'update':update}) - - def clear_output(self, wait=False): - super(CapturingDisplayPublisher, self).clear_output(wait) - - # empty the list, *do not* reassign a new list - self.outputs.clear() +"""An interface for publishing rich data to frontends. + +There are two components of the display system: + +* Display formatters, which take a Python object and compute the + representation of the object in various formats (text, HTML, SVG, etc.). +* The display publisher that is used to send the representation data to the + various frontends. + +This module defines the logic display publishing. The display publisher uses +the ``display_data`` message type that is defined in the IPython messaging +spec. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + + +import sys + +from traitlets.config.configurable import Configurable +from traitlets import List + +# This used to be defined here - it is imported for backwards compatibility +from .display import publish_display_data + +#----------------------------------------------------------------------------- +# Main payload class +#----------------------------------------------------------------------------- + + +class DisplayPublisher(Configurable): + """A traited class that publishes display data to frontends. + + Instances of this class are created by the main IPython object and should + be accessed there. + """ + + def __init__(self, shell=None, *args, **kwargs): + self.shell = shell + super().__init__(*args, **kwargs) + + def _validate_data(self, data, metadata=None): + """Validate the display data. + + Parameters + ---------- + data : dict + The formata data dictionary. + metadata : dict + Any metadata for the data. + """ + + if not isinstance(data, dict): + raise TypeError('data must be a dict, got: %r' % data) + if metadata is not None: + if not isinstance(metadata, dict): + raise TypeError('metadata must be a dict, got: %r' % data) + + # use * to indicate transient, update are keyword-only + def publish(self, data, metadata=None, source=None, *, transient=None, update=False, **kwargs) -> None: + """Publish data and metadata to all frontends. + + See the ``display_data`` message in the messaging documentation for + more details about this message type. + + The following MIME types are currently implemented: + + * text/plain + * text/html + * text/markdown + * text/latex + * application/json + * application/javascript + * image/png + * image/jpeg + * image/svg+xml + + Parameters + ---------- + data : dict + A dictionary having keys that are valid MIME types (like + 'text/plain' or 'image/svg+xml') and values that are the data for + that MIME type. The data itself must be a JSON'able data + structure. 
Minimally all data should have the 'text/plain' data, + which can be displayed by all frontends. If more than the plain + text is given, it is up to the frontend to decide which + representation to use. + metadata : dict + A dictionary for metadata related to the data. This can contain + arbitrary key, value pairs that frontends can use to interpret + the data. Metadata specific to each mime-type can be specified + in the metadata dict with the same mime-type keys as + the data itself. + source : str, deprecated + Unused. + transient: dict, keyword-only + A dictionary for transient data. + Data in this dictionary should not be persisted as part of saving this output. + Examples include 'display_id'. + update: bool, keyword-only, default: False + If True, only update existing outputs with the same display_id, + rather than creating a new output. + """ + + handlers = {} + if self.shell is not None: + handlers = getattr(self.shell, 'mime_renderers', {}) + + for mime, handler in handlers.items(): + if mime in data: + handler(data[mime], metadata.get(mime, None)) + return + + if 'text/plain' in data: + print(data['text/plain']) + + def clear_output(self, wait=False): + """Clear the output of the cell receiving output.""" + print('\033[2K\r', end='') + sys.stdout.flush() + print('\033[2K\r', end='') + sys.stderr.flush() + + +class CapturingDisplayPublisher(DisplayPublisher): + """A DisplayPublisher that stores""" + outputs = List() + + def publish(self, data, metadata=None, source=None, *, transient=None, update=False): + self.outputs.append({'data':data, 'metadata':metadata, + 'transient':transient, 'update':update}) + + def clear_output(self, wait=False): + super(CapturingDisplayPublisher, self).clear_output(wait) + + # empty the list, *do not* reassign a new list + self.outputs.clear() diff --git a/contrib/python/ipython/py3/IPython/core/error.py b/contrib/python/ipython/py3/IPython/core/error.py index 66d67a6ba6a..684cbc8da6a 100644 --- a/contrib/python/ipython/py3/IPython/core/error.py +++ b/contrib/python/ipython/py3/IPython/core/error.py @@ -1,60 +1,60 @@ -# encoding: utf-8 -""" -Global exception classes for IPython.core. - -Authors: - -* Brian Granger -* Fernando Perez -* Min Ragan-Kelley - -Notes ------ -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Exception classes -#----------------------------------------------------------------------------- - -class IPythonCoreError(Exception): - pass - - -class TryNext(IPythonCoreError): - """Try next hook exception. - - Raise this in your hook function to indicate that the next hook handler - should be used to handle the operation. - """ - -class UsageError(IPythonCoreError): - """Error in magic function arguments, etc. - - Something that probably won't warrant a full traceback, but should - nevertheless interrupt a macro / batch file. 
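A typical use of UsageError is in a custom magic, where a bad argument should abort with a short message rather than a full traceback. A sketch, assuming it runs inside an IPython session (register_line_magic needs an active shell); the magic name is made up for illustration:

from IPython.core.error import UsageError
from IPython.core.magic import register_line_magic

@register_line_magic
def shout(line):
    """Hypothetical magic: upper-case its argument."""
    if not line.strip():
        raise UsageError("usage: %shout <some text>")
    return line.strip().upper()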
- """ - -class StdinNotImplementedError(IPythonCoreError, NotImplementedError): - """raw_input was requested in a context where it is not supported - - For use in IPython kernels, where only some frontends may support - stdin requests. - """ - -class InputRejected(Exception): - """Input rejected by ast transformer. - - Raise this in your NodeTransformer to indicate that InteractiveShell should - not execute the supplied input. - """ +# encoding: utf-8 +""" +Global exception classes for IPython.core. + +Authors: + +* Brian Granger +* Fernando Perez +* Min Ragan-Kelley + +Notes +----- +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Exception classes +#----------------------------------------------------------------------------- + +class IPythonCoreError(Exception): + pass + + +class TryNext(IPythonCoreError): + """Try next hook exception. + + Raise this in your hook function to indicate that the next hook handler + should be used to handle the operation. + """ + +class UsageError(IPythonCoreError): + """Error in magic function arguments, etc. + + Something that probably won't warrant a full traceback, but should + nevertheless interrupt a macro / batch file. + """ + +class StdinNotImplementedError(IPythonCoreError, NotImplementedError): + """raw_input was requested in a context where it is not supported + + For use in IPython kernels, where only some frontends may support + stdin requests. + """ + +class InputRejected(Exception): + """Input rejected by ast transformer. + + Raise this in your NodeTransformer to indicate that InteractiveShell should + not execute the supplied input. + """ diff --git a/contrib/python/ipython/py3/IPython/core/events.py b/contrib/python/ipython/py3/IPython/core/events.py index 7f3e177a18f..1af13ca406f 100644 --- a/contrib/python/ipython/py3/IPython/core/events.py +++ b/contrib/python/ipython/py3/IPython/core/events.py @@ -1,161 +1,161 @@ -"""Infrastructure for registering and firing callbacks on application events. - -Unlike :mod:`IPython.core.hooks`, which lets end users set single functions to -be called at specific times, or a collection of alternative methods to try, -callbacks are designed to be used by extension authors. A number of callbacks -can be registered for the same event without needing to be aware of one another. - -The functions defined in this module are no-ops indicating the names of available -events and the arguments which will be passed to them. - -.. note:: - - This API is experimental in IPython 2.0, and may be revised in future versions. -""" - -from backcall import callback_prototype - - -class EventManager(object): - """Manage a collection of events and a sequence of callbacks for each. - - This is attached to :class:`~IPython.core.interactiveshell.InteractiveShell` - instances as an ``events`` attribute. - - .. note:: - - This API is experimental in IPython 2.0, and may be revised in future versions. 
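Extension authors normally reach this class through shell.events; a minimal sketch of registering and later removing a callback, assuming an active IPython session where get_ipython() returns the shell:

ip = get_ipython()

def announce_cell(info):
    # info is an ExecutionInfo; raw_cell holds the source about to run
    first_line = info.raw_cell.splitlines()[0] if info.raw_cell else ""
    print("running:", first_line)

ip.events.register("pre_run_cell", announce_cell)
# later, to detach it again:
# ip.events.unregister("pre_run_cell", announce_cell)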
- """ - def __init__(self, shell, available_events): - """Initialise the :class:`CallbackManager`. - - Parameters - ---------- - shell - The :class:`~IPython.core.interactiveshell.InteractiveShell` instance - available_callbacks - An iterable of names for callback events. - """ - self.shell = shell - self.callbacks = {n:[] for n in available_events} - - def register(self, event, function): - """Register a new event callback. - - Parameters - ---------- - event : str - The event for which to register this callback. - function : callable - A function to be called on the given event. It should take the same - parameters as the appropriate callback prototype. - - Raises - ------ - TypeError - If ``function`` is not callable. - KeyError - If ``event`` is not one of the known events. - """ - if not callable(function): - raise TypeError('Need a callable, got %r' % function) - callback_proto = available_events.get(event) - if function not in self.callbacks[event]: - self.callbacks[event].append(callback_proto.adapt(function)) - - def unregister(self, event, function): - """Remove a callback from the given event.""" - if function in self.callbacks[event]: - return self.callbacks[event].remove(function) - - # Remove callback in case ``function`` was adapted by `backcall`. - for callback in self.callbacks[event]: - try: - if callback.__wrapped__ is function: - return self.callbacks[event].remove(callback) - except AttributeError: - pass - - raise ValueError('Function {!r} is not registered as a {} callback'.format(function, event)) - - def trigger(self, event, *args, **kwargs): - """Call callbacks for ``event``. - - Any additional arguments are passed to all callbacks registered for this - event. Exceptions raised by callbacks are caught, and a message printed. - """ - for func in self.callbacks[event][:]: - try: - func(*args, **kwargs) - except (Exception, KeyboardInterrupt): - print("Error in callback {} (for {}):".format(func, event)) - self.shell.showtraceback() - -# event_name -> prototype mapping -available_events = {} - -def _define_event(callback_function): - callback_proto = callback_prototype(callback_function) - available_events[callback_function.__name__] = callback_proto - return callback_proto - -# ------------------------------------------------------------------------------ -# Callback prototypes -# -# No-op functions which describe the names of available events and the -# signatures of callbacks for those events. -# ------------------------------------------------------------------------------ - -@_define_event -def pre_execute(): - """Fires before code is executed in response to user/frontend action. - - This includes comm and widget messages and silent execution, as well as user - code cells. - """ - pass - -@_define_event -def pre_run_cell(info): - """Fires before user-entered code runs. - - Parameters - ---------- - info : :class:`~IPython.core.interactiveshell.ExecutionInfo` - An object containing information used for the code execution. - """ - pass - -@_define_event -def post_execute(): - """Fires after code is executed in response to user/frontend action. - - This includes comm and widget messages and silent execution, as well as user - code cells. - """ - pass - -@_define_event -def post_run_cell(result): - """Fires after user-entered code runs. - - Parameters - ---------- - result : :class:`~IPython.core.interactiveshell.ExecutionResult` - The object which will be returned as the execution result. 
- """ - pass - -@_define_event -def shell_initialized(ip): - """Fires after initialisation of :class:`~IPython.core.interactiveshell.InteractiveShell`. - - This is before extensions and startup scripts are loaded, so it can only be - set by subclassing. - - Parameters - ---------- - ip : :class:`~IPython.core.interactiveshell.InteractiveShell` - The newly initialised shell. - """ - pass +"""Infrastructure for registering and firing callbacks on application events. + +Unlike :mod:`IPython.core.hooks`, which lets end users set single functions to +be called at specific times, or a collection of alternative methods to try, +callbacks are designed to be used by extension authors. A number of callbacks +can be registered for the same event without needing to be aware of one another. + +The functions defined in this module are no-ops indicating the names of available +events and the arguments which will be passed to them. + +.. note:: + + This API is experimental in IPython 2.0, and may be revised in future versions. +""" + +from backcall import callback_prototype + + +class EventManager(object): + """Manage a collection of events and a sequence of callbacks for each. + + This is attached to :class:`~IPython.core.interactiveshell.InteractiveShell` + instances as an ``events`` attribute. + + .. note:: + + This API is experimental in IPython 2.0, and may be revised in future versions. + """ + def __init__(self, shell, available_events): + """Initialise the :class:`CallbackManager`. + + Parameters + ---------- + shell + The :class:`~IPython.core.interactiveshell.InteractiveShell` instance + available_callbacks + An iterable of names for callback events. + """ + self.shell = shell + self.callbacks = {n:[] for n in available_events} + + def register(self, event, function): + """Register a new event callback. + + Parameters + ---------- + event : str + The event for which to register this callback. + function : callable + A function to be called on the given event. It should take the same + parameters as the appropriate callback prototype. + + Raises + ------ + TypeError + If ``function`` is not callable. + KeyError + If ``event`` is not one of the known events. + """ + if not callable(function): + raise TypeError('Need a callable, got %r' % function) + callback_proto = available_events.get(event) + if function not in self.callbacks[event]: + self.callbacks[event].append(callback_proto.adapt(function)) + + def unregister(self, event, function): + """Remove a callback from the given event.""" + if function in self.callbacks[event]: + return self.callbacks[event].remove(function) + + # Remove callback in case ``function`` was adapted by `backcall`. + for callback in self.callbacks[event]: + try: + if callback.__wrapped__ is function: + return self.callbacks[event].remove(callback) + except AttributeError: + pass + + raise ValueError('Function {!r} is not registered as a {} callback'.format(function, event)) + + def trigger(self, event, *args, **kwargs): + """Call callbacks for ``event``. + + Any additional arguments are passed to all callbacks registered for this + event. Exceptions raised by callbacks are caught, and a message printed. 
+ """ + for func in self.callbacks[event][:]: + try: + func(*args, **kwargs) + except (Exception, KeyboardInterrupt): + print("Error in callback {} (for {}):".format(func, event)) + self.shell.showtraceback() + +# event_name -> prototype mapping +available_events = {} + +def _define_event(callback_function): + callback_proto = callback_prototype(callback_function) + available_events[callback_function.__name__] = callback_proto + return callback_proto + +# ------------------------------------------------------------------------------ +# Callback prototypes +# +# No-op functions which describe the names of available events and the +# signatures of callbacks for those events. +# ------------------------------------------------------------------------------ + +@_define_event +def pre_execute(): + """Fires before code is executed in response to user/frontend action. + + This includes comm and widget messages and silent execution, as well as user + code cells. + """ + pass + +@_define_event +def pre_run_cell(info): + """Fires before user-entered code runs. + + Parameters + ---------- + info : :class:`~IPython.core.interactiveshell.ExecutionInfo` + An object containing information used for the code execution. + """ + pass + +@_define_event +def post_execute(): + """Fires after code is executed in response to user/frontend action. + + This includes comm and widget messages and silent execution, as well as user + code cells. + """ + pass + +@_define_event +def post_run_cell(result): + """Fires after user-entered code runs. + + Parameters + ---------- + result : :class:`~IPython.core.interactiveshell.ExecutionResult` + The object which will be returned as the execution result. + """ + pass + +@_define_event +def shell_initialized(ip): + """Fires after initialisation of :class:`~IPython.core.interactiveshell.InteractiveShell`. + + This is before extensions and startup scripts are loaded, so it can only be + set by subclassing. + + Parameters + ---------- + ip : :class:`~IPython.core.interactiveshell.InteractiveShell` + The newly initialised shell. + """ + pass diff --git a/contrib/python/ipython/py3/IPython/core/excolors.py b/contrib/python/ipython/py3/IPython/core/excolors.py index 32498382f8e..487bde18c88 100644 --- a/contrib/python/ipython/py3/IPython/core/excolors.py +++ b/contrib/python/ipython/py3/IPython/core/excolors.py @@ -1,184 +1,184 @@ -# -*- coding: utf-8 -*- -""" -Color schemes for exception handling code in IPython. -""" - -import os -import warnings - -#***************************************************************************** -# Copyright (C) 2005-2006 Fernando Perez <fperez@colorado.edu> -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#***************************************************************************** - -from IPython.utils.coloransi import ColorSchemeTable, TermColors, ColorScheme - -def exception_colors(): - """Return a color table with fields for exception reporting. - - The table is an instance of ColorSchemeTable with schemes added for - 'Neutral', 'Linux', 'LightBG' and 'NoColor' and fields for exception handling filled - in. 
- - Examples: - - >>> ec = exception_colors() - >>> ec.active_scheme_name - '' - >>> print(ec.active_colors) - None - - Now we activate a color scheme: - >>> ec.set_active_scheme('NoColor') - >>> ec.active_scheme_name - 'NoColor' - >>> sorted(ec.active_colors.keys()) - ['Normal', 'caret', 'em', 'excName', 'filename', 'filenameEm', 'line', - 'lineno', 'linenoEm', 'name', 'nameEm', 'normalEm', 'topline', 'vName', - 'val', 'valEm'] - """ - - ex_colors = ColorSchemeTable() - - # Populate it with color schemes - C = TermColors # shorthand and local lookup - ex_colors.add_scheme(ColorScheme( - 'NoColor', - # The color to be used for the top line - topline = C.NoColor, - - # The colors to be used in the traceback - filename = C.NoColor, - lineno = C.NoColor, - name = C.NoColor, - vName = C.NoColor, - val = C.NoColor, - em = C.NoColor, - - # Emphasized colors for the last frame of the traceback - normalEm = C.NoColor, - filenameEm = C.NoColor, - linenoEm = C.NoColor, - nameEm = C.NoColor, - valEm = C.NoColor, - - # Colors for printing the exception - excName = C.NoColor, - line = C.NoColor, - caret = C.NoColor, - Normal = C.NoColor - )) - - # make some schemes as instances so we can copy them for modification easily - ex_colors.add_scheme(ColorScheme( - 'Linux', - # The color to be used for the top line - topline = C.LightRed, - - # The colors to be used in the traceback - filename = C.Green, - lineno = C.Green, - name = C.Purple, - vName = C.Cyan, - val = C.Green, - em = C.LightCyan, - - # Emphasized colors for the last frame of the traceback - normalEm = C.LightCyan, - filenameEm = C.LightGreen, - linenoEm = C.LightGreen, - nameEm = C.LightPurple, - valEm = C.LightBlue, - - # Colors for printing the exception - excName = C.LightRed, - line = C.Yellow, - caret = C.White, - Normal = C.Normal - )) - - # For light backgrounds, swap dark/light colors - ex_colors.add_scheme(ColorScheme( - 'LightBG', - # The color to be used for the top line - topline = C.Red, - - # The colors to be used in the traceback - filename = C.LightGreen, - lineno = C.LightGreen, - name = C.LightPurple, - vName = C.Cyan, - val = C.LightGreen, - em = C.Cyan, - - # Emphasized colors for the last frame of the traceback - normalEm = C.Cyan, - filenameEm = C.Green, - linenoEm = C.Green, - nameEm = C.Purple, - valEm = C.Blue, - - # Colors for printing the exception - excName = C.Red, - #line = C.Brown, # brown often is displayed as yellow - line = C.Red, - caret = C.Normal, - Normal = C.Normal, - )) - - ex_colors.add_scheme(ColorScheme( - 'Neutral', - # The color to be used for the top line - topline = C.Red, - - # The colors to be used in the traceback - filename = C.LightGreen, - lineno = C.LightGreen, - name = C.LightPurple, - vName = C.Cyan, - val = C.LightGreen, - em = C.Cyan, - - # Emphasized colors for the last frame of the traceback - normalEm = C.Cyan, - filenameEm = C.Green, - linenoEm = C.Green, - nameEm = C.Purple, - valEm = C.Blue, - - # Colors for printing the exception - excName = C.Red, - #line = C.Brown, # brown often is displayed as yellow - line = C.Red, - caret = C.Normal, - Normal = C.Normal, - )) - - # Hack: the 'neutral' colours are not very visible on a dark background on - # Windows. Since Windows command prompts have a dark background by default, and - # relatively few users are likely to alter that, we will use the 'Linux' colours, - # designed for a dark background, as the default on Windows. 
- if os.name == "nt": - ex_colors.add_scheme(ex_colors['Linux'].copy('Neutral')) - - return ex_colors - -class Deprec(object): - - def __init__(self, wrapped_obj): - self.wrapped=wrapped_obj - - def __getattr__(self, name): - val = getattr(self.wrapped, name) - warnings.warn("Using ExceptionColors global is deprecated and will be removed in IPython 6.0", - DeprecationWarning, stacklevel=2) - # using getattr after warnings break ipydoctest in weird way for 3.5 - return val - -# For backwards compatibility, keep around a single global object. Note that -# this should NOT be used, the factory function should be used instead, since -# these objects are stateful and it's very easy to get strange bugs if any code -# modifies the module-level object's state. -ExceptionColors = Deprec(exception_colors()) +# -*- coding: utf-8 -*- +""" +Color schemes for exception handling code in IPython. +""" + +import os +import warnings + +#***************************************************************************** +# Copyright (C) 2005-2006 Fernando Perez <fperez@colorado.edu> +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#***************************************************************************** + +from IPython.utils.coloransi import ColorSchemeTable, TermColors, ColorScheme + +def exception_colors(): + """Return a color table with fields for exception reporting. + + The table is an instance of ColorSchemeTable with schemes added for + 'Neutral', 'Linux', 'LightBG' and 'NoColor' and fields for exception handling filled + in. + + Examples: + + >>> ec = exception_colors() + >>> ec.active_scheme_name + '' + >>> print(ec.active_colors) + None + + Now we activate a color scheme: + >>> ec.set_active_scheme('NoColor') + >>> ec.active_scheme_name + 'NoColor' + >>> sorted(ec.active_colors.keys()) + ['Normal', 'caret', 'em', 'excName', 'filename', 'filenameEm', 'line', + 'lineno', 'linenoEm', 'name', 'nameEm', 'normalEm', 'topline', 'vName', + 'val', 'valEm'] + """ + + ex_colors = ColorSchemeTable() + + # Populate it with color schemes + C = TermColors # shorthand and local lookup + ex_colors.add_scheme(ColorScheme( + 'NoColor', + # The color to be used for the top line + topline = C.NoColor, + + # The colors to be used in the traceback + filename = C.NoColor, + lineno = C.NoColor, + name = C.NoColor, + vName = C.NoColor, + val = C.NoColor, + em = C.NoColor, + + # Emphasized colors for the last frame of the traceback + normalEm = C.NoColor, + filenameEm = C.NoColor, + linenoEm = C.NoColor, + nameEm = C.NoColor, + valEm = C.NoColor, + + # Colors for printing the exception + excName = C.NoColor, + line = C.NoColor, + caret = C.NoColor, + Normal = C.NoColor + )) + + # make some schemes as instances so we can copy them for modification easily + ex_colors.add_scheme(ColorScheme( + 'Linux', + # The color to be used for the top line + topline = C.LightRed, + + # The colors to be used in the traceback + filename = C.Green, + lineno = C.Green, + name = C.Purple, + vName = C.Cyan, + val = C.Green, + em = C.LightCyan, + + # Emphasized colors for the last frame of the traceback + normalEm = C.LightCyan, + filenameEm = C.LightGreen, + linenoEm = C.LightGreen, + nameEm = C.LightPurple, + valEm = C.LightBlue, + + # Colors for printing the exception + excName = C.LightRed, + line = C.Yellow, + caret = C.White, + Normal = C.Normal + )) + + # For light backgrounds, swap dark/light colors + ex_colors.add_scheme(ColorScheme( + 'LightBG', + # The 
color to be used for the top line + topline = C.Red, + + # The colors to be used in the traceback + filename = C.LightGreen, + lineno = C.LightGreen, + name = C.LightPurple, + vName = C.Cyan, + val = C.LightGreen, + em = C.Cyan, + + # Emphasized colors for the last frame of the traceback + normalEm = C.Cyan, + filenameEm = C.Green, + linenoEm = C.Green, + nameEm = C.Purple, + valEm = C.Blue, + + # Colors for printing the exception + excName = C.Red, + #line = C.Brown, # brown often is displayed as yellow + line = C.Red, + caret = C.Normal, + Normal = C.Normal, + )) + + ex_colors.add_scheme(ColorScheme( + 'Neutral', + # The color to be used for the top line + topline = C.Red, + + # The colors to be used in the traceback + filename = C.LightGreen, + lineno = C.LightGreen, + name = C.LightPurple, + vName = C.Cyan, + val = C.LightGreen, + em = C.Cyan, + + # Emphasized colors for the last frame of the traceback + normalEm = C.Cyan, + filenameEm = C.Green, + linenoEm = C.Green, + nameEm = C.Purple, + valEm = C.Blue, + + # Colors for printing the exception + excName = C.Red, + #line = C.Brown, # brown often is displayed as yellow + line = C.Red, + caret = C.Normal, + Normal = C.Normal, + )) + + # Hack: the 'neutral' colours are not very visible on a dark background on + # Windows. Since Windows command prompts have a dark background by default, and + # relatively few users are likely to alter that, we will use the 'Linux' colours, + # designed for a dark background, as the default on Windows. + if os.name == "nt": + ex_colors.add_scheme(ex_colors['Linux'].copy('Neutral')) + + return ex_colors + +class Deprec(object): + + def __init__(self, wrapped_obj): + self.wrapped=wrapped_obj + + def __getattr__(self, name): + val = getattr(self.wrapped, name) + warnings.warn("Using ExceptionColors global is deprecated and will be removed in IPython 6.0", + DeprecationWarning, stacklevel=2) + # using getattr after warnings break ipydoctest in weird way for 3.5 + return val + +# For backwards compatibility, keep around a single global object. Note that +# this should NOT be used, the factory function should be used instead, since +# these objects are stateful and it's very easy to get strange bugs if any code +# modifies the module-level object's state. +ExceptionColors = Deprec(exception_colors()) diff --git a/contrib/python/ipython/py3/IPython/core/extensions.py b/contrib/python/ipython/py3/IPython/core/extensions.py index 650bd5eb024..bf5e0ad06c0 100644 --- a/contrib/python/ipython/py3/IPython/core/extensions.py +++ b/contrib/python/ipython/py3/IPython/core/extensions.py @@ -1,150 +1,150 @@ -# encoding: utf-8 -"""A class for managing IPython extensions.""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import os -import os.path -import sys -from importlib import import_module, reload - -from traitlets.config.configurable import Configurable -from IPython.utils.path import ensure_dir_exists, compress_user -from IPython.utils.decorators import undoc -from traitlets import Instance - - -#----------------------------------------------------------------------------- -# Main class -#----------------------------------------------------------------------------- - -class ExtensionManager(Configurable): - """A class to manage IPython extensions. 
- - An IPython extension is an importable Python module that has - a function with the signature:: - - def load_ipython_extension(ipython): - # Do things with ipython - - This function is called after your extension is imported and the - currently active :class:`InteractiveShell` instance is passed as - the only argument. You can do anything you want with IPython at - that point, including defining new magic and aliases, adding new - components, etc. - - You can also optionally define an :func:`unload_ipython_extension(ipython)` - function, which will be called if the user unloads or reloads the extension. - The extension manager will only call :func:`load_ipython_extension` again - if the extension is reloaded. - - You can put your extension modules anywhere you want, as long as - they can be imported by Python's standard import mechanism. However, - to make it easy to write extensions, you can also put your extensions - in ``os.path.join(self.ipython_dir, 'extensions')``. This directory - is added to ``sys.path`` automatically. - """ - - shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) - - def __init__(self, shell=None, **kwargs): - super(ExtensionManager, self).__init__(shell=shell, **kwargs) - self.shell.observe( - self._on_ipython_dir_changed, names=('ipython_dir',) - ) - self.loaded = set() - - @property - def ipython_extension_dir(self): - return os.path.join(self.shell.ipython_dir, u'extensions') - - def _on_ipython_dir_changed(self, change): - ensure_dir_exists(self.ipython_extension_dir) - - def load_extension(self, module_str): - """Load an IPython extension by its module name. - - Returns the string "already loaded" if the extension is already loaded, - "no load function" if the module doesn't have a load_ipython_extension - function, or None if it succeeded. - """ - if module_str in self.loaded: - return "already loaded" - - with self.shell.builtin_trap: - if module_str not in sys.modules: - try: - sys.modules[module_str] = __import__('IPython.extensions.' + module_str) - except ImportError: - mod = import_module(module_str) - if mod.__file__.startswith(self.ipython_extension_dir): - print(("Loading extensions from {dir} is deprecated. " - "We recommend managing extensions like any " - "other Python packages, in site-packages.").format( - dir=compress_user(self.ipython_extension_dir))) - mod = sys.modules[module_str] - if self._call_load_ipython_extension(mod): - self.loaded.add(module_str) - else: - return "no load function" - - def unload_extension(self, module_str): - """Unload an IPython extension by its module name. - - This function looks up the extension's name in ``sys.modules`` and - simply calls ``mod.unload_ipython_extension(self)``. - - Returns the string "no unload function" if the extension doesn't define - a function to unload itself, "not loaded" if the extension isn't loaded, - otherwise None. - """ - if module_str not in self.loaded: - return "not loaded" - - if module_str in sys.modules: - mod = sys.modules[module_str] - if self._call_unload_ipython_extension(mod): - self.loaded.discard(module_str) - else: - return "no unload function" - - def reload_extension(self, module_str): - """Reload an IPython extension by calling reload. - - If the module has not been loaded before, - :meth:`InteractiveShell.load_extension` is called. Otherwise - :func:`reload` is called and then the :func:`load_ipython_extension` - function of the module, if it exists is called. 
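# --- Editor's note: illustrative sketch, not part of the patch above ---------
# A minimal extension module of the shape ExtensionManager expects. Saved as
# e.g. my_ext.py somewhere importable, it is loaded with %load_ext my_ext;
# the module name and magic name are made up for illustration.

def load_ipython_extension(ipython):
    def hello(line):
        print('hello from my_ext:', line)
    # register_magic_function is part of the InteractiveShell API
    ipython.register_magic_function(hello, 'line', 'hello')

def unload_ipython_extension(ipython):
    # optional: called by %unload_ext my_ext; nothing to tear down here
    pass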
- """ - from IPython.utils.syspathcontext import prepended_to_syspath - - if (module_str in self.loaded) and (module_str in sys.modules): - self.unload_extension(module_str) - mod = sys.modules[module_str] - with prepended_to_syspath(self.ipython_extension_dir): - reload(mod) - if self._call_load_ipython_extension(mod): - self.loaded.add(module_str) - else: - self.load_extension(module_str) - - def _call_load_ipython_extension(self, mod): - if hasattr(mod, 'load_ipython_extension'): - mod.load_ipython_extension(self.shell) - return True - - def _call_unload_ipython_extension(self, mod): - if hasattr(mod, 'unload_ipython_extension'): - mod.unload_ipython_extension(self.shell) - return True - - @undoc - def install_extension(self, url, filename=None): - """ - Deprecated. - """ - # Ensure the extension directory exists - raise DeprecationWarning( - '`install_extension` and the `install_ext` magic have been deprecated since IPython 4.0' - 'Use pip or other package managers to manage ipython extensions.') +# encoding: utf-8 +"""A class for managing IPython extensions.""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import os +import os.path +import sys +from importlib import import_module, reload + +from traitlets.config.configurable import Configurable +from IPython.utils.path import ensure_dir_exists, compress_user +from IPython.utils.decorators import undoc +from traitlets import Instance + + +#----------------------------------------------------------------------------- +# Main class +#----------------------------------------------------------------------------- + +class ExtensionManager(Configurable): + """A class to manage IPython extensions. + + An IPython extension is an importable Python module that has + a function with the signature:: + + def load_ipython_extension(ipython): + # Do things with ipython + + This function is called after your extension is imported and the + currently active :class:`InteractiveShell` instance is passed as + the only argument. You can do anything you want with IPython at + that point, including defining new magic and aliases, adding new + components, etc. + + You can also optionally define an :func:`unload_ipython_extension(ipython)` + function, which will be called if the user unloads or reloads the extension. + The extension manager will only call :func:`load_ipython_extension` again + if the extension is reloaded. + + You can put your extension modules anywhere you want, as long as + they can be imported by Python's standard import mechanism. However, + to make it easy to write extensions, you can also put your extensions + in ``os.path.join(self.ipython_dir, 'extensions')``. This directory + is added to ``sys.path`` automatically. + """ + + shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) + + def __init__(self, shell=None, **kwargs): + super(ExtensionManager, self).__init__(shell=shell, **kwargs) + self.shell.observe( + self._on_ipython_dir_changed, names=('ipython_dir',) + ) + self.loaded = set() + + @property + def ipython_extension_dir(self): + return os.path.join(self.shell.ipython_dir, u'extensions') + + def _on_ipython_dir_changed(self, change): + ensure_dir_exists(self.ipython_extension_dir) + + def load_extension(self, module_str): + """Load an IPython extension by its module name. 
+ + Returns the string "already loaded" if the extension is already loaded, + "no load function" if the module doesn't have a load_ipython_extension + function, or None if it succeeded. + """ + if module_str in self.loaded: + return "already loaded" + + with self.shell.builtin_trap: + if module_str not in sys.modules: + try: + sys.modules[module_str] = __import__('IPython.extensions.' + module_str) + except ImportError: + mod = import_module(module_str) + if mod.__file__.startswith(self.ipython_extension_dir): + print(("Loading extensions from {dir} is deprecated. " + "We recommend managing extensions like any " + "other Python packages, in site-packages.").format( + dir=compress_user(self.ipython_extension_dir))) + mod = sys.modules[module_str] + if self._call_load_ipython_extension(mod): + self.loaded.add(module_str) + else: + return "no load function" + + def unload_extension(self, module_str): + """Unload an IPython extension by its module name. + + This function looks up the extension's name in ``sys.modules`` and + simply calls ``mod.unload_ipython_extension(self)``. + + Returns the string "no unload function" if the extension doesn't define + a function to unload itself, "not loaded" if the extension isn't loaded, + otherwise None. + """ + if module_str not in self.loaded: + return "not loaded" + + if module_str in sys.modules: + mod = sys.modules[module_str] + if self._call_unload_ipython_extension(mod): + self.loaded.discard(module_str) + else: + return "no unload function" + + def reload_extension(self, module_str): + """Reload an IPython extension by calling reload. + + If the module has not been loaded before, + :meth:`InteractiveShell.load_extension` is called. Otherwise + :func:`reload` is called and then the :func:`load_ipython_extension` + function of the module, if it exists is called. + """ + from IPython.utils.syspathcontext import prepended_to_syspath + + if (module_str in self.loaded) and (module_str in sys.modules): + self.unload_extension(module_str) + mod = sys.modules[module_str] + with prepended_to_syspath(self.ipython_extension_dir): + reload(mod) + if self._call_load_ipython_extension(mod): + self.loaded.add(module_str) + else: + self.load_extension(module_str) + + def _call_load_ipython_extension(self, mod): + if hasattr(mod, 'load_ipython_extension'): + mod.load_ipython_extension(self.shell) + return True + + def _call_unload_ipython_extension(self, mod): + if hasattr(mod, 'unload_ipython_extension'): + mod.unload_ipython_extension(self.shell) + return True + + @undoc + def install_extension(self, url, filename=None): + """ + Deprecated. + """ + # Ensure the extension directory exists + raise DeprecationWarning( + '`install_extension` and the `install_ext` magic have been deprecated since IPython 4.0' + 'Use pip or other package managers to manage ipython extensions.') diff --git a/contrib/python/ipython/py3/IPython/core/formatters.py b/contrib/python/ipython/py3/IPython/core/formatters.py index e77ffc720a3..c13caab91ac 100644 --- a/contrib/python/ipython/py3/IPython/core/formatters.py +++ b/contrib/python/ipython/py3/IPython/core/formatters.py @@ -1,1024 +1,1024 @@ -# -*- coding: utf-8 -*- -"""Display formatters. - -Inheritance diagram: - -.. inheritance-diagram:: IPython.core.formatters - :parts: 3 -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. 
- -import abc -import json -import sys -import traceback -import warnings -from io import StringIO - -from decorator import decorator - -from traitlets.config.configurable import Configurable -from .getipython import get_ipython -from ..utils.sentinel import Sentinel -from ..utils.dir2 import get_real_method -from ..lib import pretty -from traitlets import ( - Bool, Dict, Integer, Unicode, CUnicode, ObjectName, List, - ForwardDeclaredInstance, - default, observe, -) - - -class DisplayFormatter(Configurable): - - active_types = List(Unicode(), - help="""List of currently active mime-types to display. - You can use this to set a white-list for formats to display. - - Most users will not need to change this value. - """).tag(config=True) - - @default('active_types') - def _active_types_default(self): - return self.format_types - - @observe('active_types') - def _active_types_changed(self, change): - for key, formatter in self.formatters.items(): - if key in change['new']: - formatter.enabled = True - else: - formatter.enabled = False - - ipython_display_formatter = ForwardDeclaredInstance('FormatterABC') - @default('ipython_display_formatter') - def _default_formatter(self): - return IPythonDisplayFormatter(parent=self) - - mimebundle_formatter = ForwardDeclaredInstance('FormatterABC') - @default('mimebundle_formatter') - def _default_mime_formatter(self): - return MimeBundleFormatter(parent=self) - - # A dict of formatter whose keys are format types (MIME types) and whose - # values are subclasses of BaseFormatter. - formatters = Dict() - @default('formatters') - def _formatters_default(self): - """Activate the default formatters.""" - formatter_classes = [ - PlainTextFormatter, - HTMLFormatter, - MarkdownFormatter, - SVGFormatter, - PNGFormatter, - PDFFormatter, - JPEGFormatter, - LatexFormatter, - JSONFormatter, - JavascriptFormatter - ] - d = {} - for cls in formatter_classes: - f = cls(parent=self) - d[f.format_type] = f - return d - - def format(self, obj, include=None, exclude=None): - """Return a format data dict for an object. - - By default all format types will be computed. - - The following MIME types are usually implemented: - - * text/plain - * text/html - * text/markdown - * text/latex - * application/json - * application/javascript - * application/pdf - * image/png - * image/jpeg - * image/svg+xml - - Parameters - ---------- - obj : object - The Python object whose format data will be computed. - include : list, tuple or set; optional - A list of format type strings (MIME types) to include in the - format data dict. If this is set *only* the format types included - in this list will be computed. - exclude : list, tuple or set; optional - A list of format type string (MIME types) to exclude in the format - data dict. If this is set all format types will be computed, - except for those included in this argument. - Mimetypes present in exclude will take precedence over the ones in include - - Returns - ------- - (format_dict, metadata_dict) : tuple of two dicts - - format_dict is a dictionary of key/value pairs, one of each format that was - generated for the object. The keys are the format types, which - will usually be MIME type strings and the values and JSON'able - data structure containing the raw data for the representation in - that format. - - metadata_dict is a dictionary of metadata about each mime-type output. - Its keys will be a strict subset of the keys in format_dict. 
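# --- Editor's note: illustrative sketch, not part of the patch above ---------
# Caller-side view of the (format_dict, metadata_dict) contract described in
# DisplayFormatter.format() above, restricted to two mime-types via `include`.
# The Fraction class is a made-up example type.
from IPython.core.interactiveshell import InteractiveShell

class Fraction:
    def __init__(self, num, den):
        self.num, self.den = num, den
    def _repr_html_(self):
        return '<b>%d/%d</b>' % (self.num, self.den)

fmt = InteractiveShell.instance().display_formatter
data, metadata = fmt.format(Fraction(1, 3), include={'text/plain', 'text/html'})
# data holds both representations; metadata stays empty here because neither
# representation attached any metadata of its own.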
- - Notes - ----- - - If an object implement `_repr_mimebundle_` as well as various - `_repr_*_`, the data returned by `_repr_mimebundle_` will take - precedence and the corresponding `_repr_*_` for this mimetype will - not be called. - - """ - format_dict = {} - md_dict = {} - - if self.ipython_display_formatter(obj): - # object handled itself, don't proceed - return {}, {} - - format_dict, md_dict = self.mimebundle_formatter(obj, include=include, exclude=exclude) - - if format_dict or md_dict: - if include: - format_dict = {k:v for k,v in format_dict.items() if k in include} - md_dict = {k:v for k,v in md_dict.items() if k in include} - if exclude: - format_dict = {k:v for k,v in format_dict.items() if k not in exclude} - md_dict = {k:v for k,v in md_dict.items() if k not in exclude} - - for format_type, formatter in self.formatters.items(): - if format_type in format_dict: - # already got it from mimebundle, maybe don't render again. - # exception: manually registered per-mime renderer - # check priority: - # 1. user-registered per-mime formatter - # 2. mime-bundle (user-registered or repr method) - # 3. default per-mime formatter (e.g. repr method) - try: - formatter.lookup(obj) - except KeyError: - # no special formatter, use mime-bundle-provided value - continue - if include and format_type not in include: - continue - if exclude and format_type in exclude: - continue - - md = None - try: - data = formatter(obj) - except: - # FIXME: log the exception - raise - - # formatters can return raw data or (data, metadata) - if isinstance(data, tuple) and len(data) == 2: - data, md = data - - if data is not None: - format_dict[format_type] = data - if md is not None: - md_dict[format_type] = md - return format_dict, md_dict - - @property - def format_types(self): - """Return the format types (MIME types) of the active formatters.""" - return list(self.formatters.keys()) - - -#----------------------------------------------------------------------------- -# Formatters for specific format types (text, html, svg, etc.) -#----------------------------------------------------------------------------- - - -def _safe_repr(obj): - """Try to return a repr of an object - - always returns a string, at least. - """ - try: - return repr(obj) - except Exception as e: - return "un-repr-able object (%r)" % e - - -class FormatterWarning(UserWarning): - """Warning class for errors in formatters""" - -@decorator -def catch_format_error(method, self, *args, **kwargs): - """show traceback on failed format call""" - try: - r = method(self, *args, **kwargs) - except NotImplementedError: - # don't warn on NotImplementedErrors - return self._check_return(None, args[0]) - except Exception: - exc_info = sys.exc_info() - ip = get_ipython() - if ip is not None: - ip.showtraceback(exc_info) - else: - traceback.print_exception(*exc_info) - return self._check_return(None, args[0]) - return self._check_return(r, args[0]) - - -class FormatterABC(metaclass=abc.ABCMeta): - """ Abstract base class for Formatters. - - A formatter is a callable class that is responsible for computing the - raw format data for a particular format type (MIME type). For example, - an HTML formatter would have a format type of `text/html` and would return - the HTML representation of the object when called. - """ - - # The format type of the data returned, usually a MIME type. - format_type = 'text/plain' - - # Is the formatter enabled... 
- enabled = True - - @abc.abstractmethod - def __call__(self, obj): - """Return a JSON'able representation of the object. - - If the object cannot be formatted by this formatter, - warn and return None. - """ - return repr(obj) - - -def _mod_name_key(typ): - """Return a (__module__, __name__) tuple for a type. - - Used as key in Formatter.deferred_printers. - """ - module = getattr(typ, '__module__', None) - name = getattr(typ, '__name__', None) - return (module, name) - - -def _get_type(obj): - """Return the type of an instance (old and new-style)""" - return getattr(obj, '__class__', None) or type(obj) - - -_raise_key_error = Sentinel('_raise_key_error', __name__, -""" -Special value to raise a KeyError - -Raise KeyError in `BaseFormatter.pop` if passed as the default value to `pop` -""") - - -class BaseFormatter(Configurable): - """A base formatter class that is configurable. - - This formatter should usually be used as the base class of all formatters. - It is a traited :class:`Configurable` class and includes an extensible - API for users to determine how their objects are formatted. The following - logic is used to find a function to format an given object. - - 1. The object is introspected to see if it has a method with the name - :attr:`print_method`. If is does, that object is passed to that method - for formatting. - 2. If no print method is found, three internal dictionaries are consulted - to find print method: :attr:`singleton_printers`, :attr:`type_printers` - and :attr:`deferred_printers`. - - Users should use these dictionaries to register functions that will be - used to compute the format data for their objects (if those objects don't - have the special print methods). The easiest way of using these - dictionaries is through the :meth:`for_type` and :meth:`for_type_by_name` - methods. - - If no function/callable is found to compute the format data, ``None`` is - returned and this format type is not used. - """ - - format_type = Unicode('text/plain') - _return_type = str - - enabled = Bool(True).tag(config=True) - - print_method = ObjectName('__repr__') - - # The singleton printers. - # Maps the IDs of the builtin singleton objects to the format functions. - singleton_printers = Dict().tag(config=True) - - # The type-specific printers. - # Map type objects to the format functions. - type_printers = Dict().tag(config=True) - - # The deferred-import type-specific printers. - # Map (modulename, classname) pairs to the format functions. - deferred_printers = Dict().tag(config=True) - - @catch_format_error - def __call__(self, obj): - """Compute the format for an object.""" - if self.enabled: - # lookup registered printer - try: - printer = self.lookup(obj) - except KeyError: - pass - else: - return printer(obj) - # Finally look for special method names - method = get_real_method(obj, self.print_method) - if method is not None: - return method() - return None - else: - return None - - def __contains__(self, typ): - """map in to lookup_by_type""" - try: - self.lookup_by_type(typ) - except KeyError: - return False - else: - return True - - def _check_return(self, r, obj): - """Check that a return value is appropriate - - Return the value if so, None otherwise, warning if invalid. 
- """ - if r is None or isinstance(r, self._return_type) or \ - (isinstance(r, tuple) and r and isinstance(r[0], self._return_type)): - return r - else: - warnings.warn( - "%s formatter returned invalid type %s (expected %s) for object: %s" % \ - (self.format_type, type(r), self._return_type, _safe_repr(obj)), - FormatterWarning - ) - - def lookup(self, obj): - """Look up the formatter for a given instance. - - Parameters - ---------- - obj : object instance - - Returns - ------- - f : callable - The registered formatting callable for the type. - - Raises - ------ - KeyError if the type has not been registered. - """ - # look for singleton first - obj_id = id(obj) - if obj_id in self.singleton_printers: - return self.singleton_printers[obj_id] - # then lookup by type - return self.lookup_by_type(_get_type(obj)) - - def lookup_by_type(self, typ): - """Look up the registered formatter for a type. - - Parameters - ---------- - typ : type or '__module__.__name__' string for a type - - Returns - ------- - f : callable - The registered formatting callable for the type. - - Raises - ------ - KeyError if the type has not been registered. - """ - if isinstance(typ, str): - typ_key = tuple(typ.rsplit('.',1)) - if typ_key not in self.deferred_printers: - # We may have it cached in the type map. We will have to - # iterate over all of the types to check. - for cls in self.type_printers: - if _mod_name_key(cls) == typ_key: - return self.type_printers[cls] - else: - return self.deferred_printers[typ_key] - else: - for cls in pretty._get_mro(typ): - if cls in self.type_printers or self._in_deferred_types(cls): - return self.type_printers[cls] - - # If we have reached here, the lookup failed. - raise KeyError("No registered printer for {0!r}".format(typ)) - - def for_type(self, typ, func=None): - """Add a format function for a given type. - - Parameters - ---------- - typ : type or '__module__.__name__' string for a type - The class of the object that will be formatted using `func`. - func : callable - A callable for computing the format data. - `func` will be called with the object to be formatted, - and will return the raw data in this formatter's format. - Subclasses may use a different call signature for the - `func` argument. - - If `func` is None or not specified, there will be no change, - only returning the current value. - - Returns - ------- - oldfunc : callable - The currently registered callable. - If you are registering a new formatter, - this will be the previous value (to enable restoring later). - """ - # if string given, interpret as 'pkg.module.class_name' - if isinstance(typ, str): - type_module, type_name = typ.rsplit('.', 1) - return self.for_type_by_name(type_module, type_name, func) - - try: - oldfunc = self.lookup_by_type(typ) - except KeyError: - oldfunc = None - - if func is not None: - self.type_printers[typ] = func - - return oldfunc - - def for_type_by_name(self, type_module, type_name, func=None): - """Add a format function for a type specified by the full dotted - module and name of the type, rather than the type of the object. - - Parameters - ---------- - type_module : str - The full dotted name of the module the type is defined in, like - ``numpy``. - type_name : str - The name of the type (the class name), like ``dtype`` - func : callable - A callable for computing the format data. - `func` will be called with the object to be formatted, - and will return the raw data in this formatter's format. - Subclasses may use a different call signature for the - `func` argument. 
- - If `func` is None or unspecified, there will be no change, - only returning the current value. - - Returns - ------- - oldfunc : callable - The currently registered callable. - If you are registering a new formatter, - this will be the previous value (to enable restoring later). - """ - key = (type_module, type_name) - - try: - oldfunc = self.lookup_by_type("%s.%s" % key) - except KeyError: - oldfunc = None - - if func is not None: - self.deferred_printers[key] = func - return oldfunc - - def pop(self, typ, default=_raise_key_error): - """Pop a formatter for the given type. - - Parameters - ---------- - typ : type or '__module__.__name__' string for a type - default : object - value to be returned if no formatter is registered for typ. - - Returns - ------- - obj : object - The last registered object for the type. - - Raises - ------ - KeyError if the type is not registered and default is not specified. - """ - - if isinstance(typ, str): - typ_key = tuple(typ.rsplit('.',1)) - if typ_key not in self.deferred_printers: - # We may have it cached in the type map. We will have to - # iterate over all of the types to check. - for cls in self.type_printers: - if _mod_name_key(cls) == typ_key: - old = self.type_printers.pop(cls) - break - else: - old = default - else: - old = self.deferred_printers.pop(typ_key) - else: - if typ in self.type_printers: - old = self.type_printers.pop(typ) - else: - old = self.deferred_printers.pop(_mod_name_key(typ), default) - if old is _raise_key_error: - raise KeyError("No registered value for {0!r}".format(typ)) - return old - - def _in_deferred_types(self, cls): - """ - Check if the given class is specified in the deferred type registry. - - Successful matches will be moved to the regular type registry for future use. - """ - mod = getattr(cls, '__module__', None) - name = getattr(cls, '__name__', None) - key = (mod, name) - if key in self.deferred_printers: - # Move the printer over to the regular registry. - printer = self.deferred_printers.pop(key) - self.type_printers[cls] = printer - return True - return False - - -class PlainTextFormatter(BaseFormatter): - """The default pretty-printer. - - This uses :mod:`IPython.lib.pretty` to compute the format data of - the object. If the object cannot be pretty printed, :func:`repr` is used. - See the documentation of :mod:`IPython.lib.pretty` for details on - how to write pretty printers. Here is a simple example:: - - def dtype_pprinter(obj, p, cycle): - if cycle: - return p.text('dtype(...)') - if hasattr(obj, 'fields'): - if obj.fields is None: - p.text(repr(obj)) - else: - p.begin_group(7, 'dtype([') - for i, field in enumerate(obj.descr): - if i > 0: - p.text(',') - p.breakable() - p.pretty(field) - p.end_group(7, '])') - """ - - # The format type of data returned. - format_type = Unicode('text/plain') - - # This subclass ignores this attribute as it always need to return - # something. - enabled = Bool(True).tag(config=False) - - max_seq_length = Integer(pretty.MAX_SEQ_LENGTH, - help="""Truncate large collections (lists, dicts, tuples, sets) to this size. - - Set to 0 to disable truncation. - """ - ).tag(config=True) - - # Look for a _repr_pretty_ methods to use for pretty printing. - print_method = ObjectName('_repr_pretty_') - - # Whether to pretty-print or not. - pprint = Bool(True).tag(config=True) - - # Whether to be verbose or not. - verbose = Bool(False).tag(config=True) - - # The maximum width. - max_width = Integer(79).tag(config=True) - - # The newline character. 
- newline = Unicode('\n').tag(config=True) - - # format-string for pprinting floats - float_format = Unicode('%r') - # setter for float precision, either int or direct format-string - float_precision = CUnicode('').tag(config=True) - - @observe('float_precision') - def _float_precision_changed(self, change): - """float_precision changed, set float_format accordingly. - - float_precision can be set by int or str. - This will set float_format, after interpreting input. - If numpy has been imported, numpy print precision will also be set. - - integer `n` sets format to '%.nf', otherwise, format set directly. - - An empty string returns to defaults (repr for float, 8 for numpy). - - This parameter can be set via the '%precision' magic. - """ - - new = change['new'] - if '%' in new: - # got explicit format string - fmt = new - try: - fmt%3.14159 - except Exception: - raise ValueError("Precision must be int or format string, not %r"%new) - elif new: - # otherwise, should be an int - try: - i = int(new) - assert i >= 0 - except ValueError: - raise ValueError("Precision must be int or format string, not %r"%new) - except AssertionError: - raise ValueError("int precision must be non-negative, not %r"%i) - - fmt = '%%.%if'%i - if 'numpy' in sys.modules: - # set numpy precision if it has been imported - import numpy - numpy.set_printoptions(precision=i) - else: - # default back to repr - fmt = '%r' - if 'numpy' in sys.modules: - import numpy - # numpy default is 8 - numpy.set_printoptions(precision=8) - self.float_format = fmt - - # Use the default pretty printers from IPython.lib.pretty. - @default('singleton_printers') - def _singleton_printers_default(self): - return pretty._singleton_pprinters.copy() - - @default('type_printers') - def _type_printers_default(self): - d = pretty._type_pprinters.copy() - d[float] = lambda obj,p,cycle: p.text(self.float_format%obj) - return d - - @default('deferred_printers') - def _deferred_printers_default(self): - return pretty._deferred_type_pprinters.copy() - - #### FormatterABC interface #### - - @catch_format_error - def __call__(self, obj): - """Compute the pretty representation of the object.""" - if not self.pprint: - return repr(obj) - else: - stream = StringIO() - printer = pretty.RepresentationPrinter(stream, self.verbose, - self.max_width, self.newline, - max_seq_length=self.max_seq_length, - singleton_pprinters=self.singleton_printers, - type_pprinters=self.type_printers, - deferred_pprinters=self.deferred_printers) - printer.pretty(obj) - printer.flush() - return stream.getvalue() - - -class HTMLFormatter(BaseFormatter): - """An HTML formatter. - - To define the callables that compute the HTML representation of your - objects, define a :meth:`_repr_html_` method or use the :meth:`for_type` - or :meth:`for_type_by_name` methods to register functions that handle - this. - - The return value of this formatter should be a valid HTML snippet that - could be injected into an existing DOM. It should *not* include the - ```<html>`` or ```<body>`` tags. - """ - format_type = Unicode('text/html') - - print_method = ObjectName('_repr_html_') - - -class MarkdownFormatter(BaseFormatter): - """A Markdown formatter. - - To define the callables that compute the Markdown representation of your - objects, define a :meth:`_repr_markdown_` method or use the :meth:`for_type` - or :meth:`for_type_by_name` methods to register functions that handle - this. - - The return value of this formatter should be a valid Markdown. 
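# --- Editor's note: illustrative sketch, not part of the patch above ---------
# The float_precision trait described above accepts either an integer-like
# string or an explicit %-style format; interactively the same knob is exposed
# as the %precision magic. A standalone PlainTextFormatter is used here.
from IPython.core.formatters import PlainTextFormatter

ptf = PlainTextFormatter()
ptf.float_precision = '3'          # becomes float_format = '%.3f'
assert ptf(3.14159265) == '3.142'
ptf.float_precision = '%.1e'       # explicit format strings are used verbatim
assert ptf(3.14159265) == '3.1e+00'
ptf.float_precision = ''           # empty string falls back to repr()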
- """ - format_type = Unicode('text/markdown') - - print_method = ObjectName('_repr_markdown_') - -class SVGFormatter(BaseFormatter): - """An SVG formatter. - - To define the callables that compute the SVG representation of your - objects, define a :meth:`_repr_svg_` method or use the :meth:`for_type` - or :meth:`for_type_by_name` methods to register functions that handle - this. - - The return value of this formatter should be valid SVG enclosed in - ```<svg>``` tags, that could be injected into an existing DOM. It should - *not* include the ```<html>`` or ```<body>`` tags. - """ - format_type = Unicode('image/svg+xml') - - print_method = ObjectName('_repr_svg_') - - -class PNGFormatter(BaseFormatter): - """A PNG formatter. - - To define the callables that compute the PNG representation of your - objects, define a :meth:`_repr_png_` method or use the :meth:`for_type` - or :meth:`for_type_by_name` methods to register functions that handle - this. - - The return value of this formatter should be raw PNG data, *not* - base64 encoded. - """ - format_type = Unicode('image/png') - - print_method = ObjectName('_repr_png_') - - _return_type = (bytes, str) - - -class JPEGFormatter(BaseFormatter): - """A JPEG formatter. - - To define the callables that compute the JPEG representation of your - objects, define a :meth:`_repr_jpeg_` method or use the :meth:`for_type` - or :meth:`for_type_by_name` methods to register functions that handle - this. - - The return value of this formatter should be raw JPEG data, *not* - base64 encoded. - """ - format_type = Unicode('image/jpeg') - - print_method = ObjectName('_repr_jpeg_') - - _return_type = (bytes, str) - - -class LatexFormatter(BaseFormatter): - """A LaTeX formatter. - - To define the callables that compute the LaTeX representation of your - objects, define a :meth:`_repr_latex_` method or use the :meth:`for_type` - or :meth:`for_type_by_name` methods to register functions that handle - this. - - The return value of this formatter should be a valid LaTeX equation, - enclosed in either ```$```, ```$$``` or another LaTeX equation - environment. - """ - format_type = Unicode('text/latex') - - print_method = ObjectName('_repr_latex_') - - -class JSONFormatter(BaseFormatter): - """A JSON string formatter. - - To define the callables that compute the JSONable representation of - your objects, define a :meth:`_repr_json_` method or use the :meth:`for_type` - or :meth:`for_type_by_name` methods to register functions that handle - this. - - The return value of this formatter should be a JSONable list or dict. - JSON scalars (None, number, string) are not allowed, only dict or list containers. - """ - format_type = Unicode('application/json') - _return_type = (list, dict) - - print_method = ObjectName('_repr_json_') - - def _check_return(self, r, obj): - """Check that a return value is appropriate - - Return the value if so, None otherwise, warning if invalid. - """ - if r is None: - return - md = None - if isinstance(r, tuple): - # unpack data, metadata tuple for type checking on first element - r, md = r - - # handle deprecated JSON-as-string form from IPython < 3 - if isinstance(r, str): - warnings.warn("JSON expects JSONable list/dict containers, not JSON strings", - FormatterWarning) - r = json.loads(r) - - if md is not None: - # put the tuple back together - r = (r, md) - return super(JSONFormatter, self)._check_return(r, obj) - - -class JavascriptFormatter(BaseFormatter): - """A Javascript formatter. 
- - To define the callables that compute the Javascript representation of - your objects, define a :meth:`_repr_javascript_` method or use the - :meth:`for_type` or :meth:`for_type_by_name` methods to register functions - that handle this. - - The return value of this formatter should be valid Javascript code and - should *not* be enclosed in ```<script>``` tags. - """ - format_type = Unicode('application/javascript') - - print_method = ObjectName('_repr_javascript_') - - -class PDFFormatter(BaseFormatter): - """A PDF formatter. - - To define the callables that compute the PDF representation of your - objects, define a :meth:`_repr_pdf_` method or use the :meth:`for_type` - or :meth:`for_type_by_name` methods to register functions that handle - this. - - The return value of this formatter should be raw PDF data, *not* - base64 encoded. - """ - format_type = Unicode('application/pdf') - - print_method = ObjectName('_repr_pdf_') - - _return_type = (bytes, str) - -class IPythonDisplayFormatter(BaseFormatter): - """An escape-hatch Formatter for objects that know how to display themselves. - - To define the callables that compute the representation of your - objects, define a :meth:`_ipython_display_` method or use the :meth:`for_type` - or :meth:`for_type_by_name` methods to register functions that handle - this. Unlike mime-type displays, this method should not return anything, - instead calling any appropriate display methods itself. - - This display formatter has highest priority. - If it fires, no other display formatter will be called. - - Prior to IPython 6.1, `_ipython_display_` was the only way to display custom mime-types - without registering a new Formatter. - - IPython 6.1 introduces `_repr_mimebundle_` for displaying custom mime-types, - so `_ipython_display_` should only be used for objects that require unusual - display patterns, such as multiple display calls. - """ - print_method = ObjectName('_ipython_display_') - _return_type = (type(None), bool) - - @catch_format_error - def __call__(self, obj): - """Compute the format for an object.""" - if self.enabled: - # lookup registered printer - try: - printer = self.lookup(obj) - except KeyError: - pass - else: - printer(obj) - return True - # Finally look for special method names - method = get_real_method(obj, self.print_method) - if method is not None: - method() - return True - - -class MimeBundleFormatter(BaseFormatter): - """A Formatter for arbitrary mime-types. - - Unlike other `_repr_<mimetype>_` methods, - `_repr_mimebundle_` should return mime-bundle data, - either the mime-keyed `data` dictionary or the tuple `(data, metadata)`. - Any mime-type is valid. - - To define the callables that compute the mime-bundle representation of your - objects, define a :meth:`_repr_mimebundle_` method or use the :meth:`for_type` - or :meth:`for_type_by_name` methods to register functions that handle - this. - - .. versionadded:: 6.1 - """ - print_method = ObjectName('_repr_mimebundle_') - _return_type = dict - - def _check_return(self, r, obj): - r = super(MimeBundleFormatter, self)._check_return(r, obj) - # always return (data, metadata): - if r is None: - return {}, {} - if not isinstance(r, tuple): - return r, {} - return r - - @catch_format_error - def __call__(self, obj, include=None, exclude=None): - """Compute the format for an object. - - Identical to parent's method but we pass extra parameters to the method. - - Unlike other _repr_*_ `_repr_mimebundle_` should allow extra kwargs, in - particular `include` and `exclude`. 
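# --- Editor's note: illustrative sketch, not part of the patch above ---------
# A class exposing _repr_mimebundle_, the hook MimeBundleFormatter looks for.
# Returning a (data, metadata) tuple lets one method serve several mime-types;
# the Report class and the 'isolated' metadata key are assumed examples.
from IPython.core.formatters import MimeBundleFormatter

class Report:
    def __init__(self, title):
        self.title = title
    def _repr_mimebundle_(self, include=None, exclude=None):
        data = {'text/plain': 'Report: %s' % self.title,
                'text/html': '<h1>%s</h1>' % self.title}
        return data, {'text/html': {'isolated': False}}

bundle, md = MimeBundleFormatter()(Report('Q3'))
assert 'text/html' in bundle and 'text/html' in md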
- """ - if self.enabled: - # lookup registered printer - try: - printer = self.lookup(obj) - except KeyError: - pass - else: - return printer(obj) - # Finally look for special method names - method = get_real_method(obj, self.print_method) - - if method is not None: - return method(include=include, exclude=exclude) - return None - else: - return None - - -FormatterABC.register(BaseFormatter) -FormatterABC.register(PlainTextFormatter) -FormatterABC.register(HTMLFormatter) -FormatterABC.register(MarkdownFormatter) -FormatterABC.register(SVGFormatter) -FormatterABC.register(PNGFormatter) -FormatterABC.register(PDFFormatter) -FormatterABC.register(JPEGFormatter) -FormatterABC.register(LatexFormatter) -FormatterABC.register(JSONFormatter) -FormatterABC.register(JavascriptFormatter) -FormatterABC.register(IPythonDisplayFormatter) -FormatterABC.register(MimeBundleFormatter) - - -def format_display_data(obj, include=None, exclude=None): - """Return a format data dict for an object. - - By default all format types will be computed. - - Parameters - ---------- - obj : object - The Python object whose format data will be computed. - - Returns - ------- - format_dict : dict - A dictionary of key/value pairs, one or each format that was - generated for the object. The keys are the format types, which - will usually be MIME type strings and the values and JSON'able - data structure containing the raw data for the representation in - that format. - include : list or tuple, optional - A list of format type strings (MIME types) to include in the - format data dict. If this is set *only* the format types included - in this list will be computed. - exclude : list or tuple, optional - A list of format type string (MIME types) to exclude in the format - data dict. If this is set all format types will be computed, - except for those included in this argument. - """ - from .interactiveshell import InteractiveShell - - return InteractiveShell.instance().display_formatter.format( - obj, - include, - exclude - ) +# -*- coding: utf-8 -*- +"""Display formatters. + +Inheritance diagram: + +.. inheritance-diagram:: IPython.core.formatters + :parts: 3 +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import abc +import json +import sys +import traceback +import warnings +from io import StringIO + +from decorator import decorator + +from traitlets.config.configurable import Configurable +from .getipython import get_ipython +from ..utils.sentinel import Sentinel +from ..utils.dir2 import get_real_method +from ..lib import pretty +from traitlets import ( + Bool, Dict, Integer, Unicode, CUnicode, ObjectName, List, + ForwardDeclaredInstance, + default, observe, +) + + +class DisplayFormatter(Configurable): + + active_types = List(Unicode(), + help="""List of currently active mime-types to display. + You can use this to set a white-list for formats to display. + + Most users will not need to change this value. 
+ """).tag(config=True) + + @default('active_types') + def _active_types_default(self): + return self.format_types + + @observe('active_types') + def _active_types_changed(self, change): + for key, formatter in self.formatters.items(): + if key in change['new']: + formatter.enabled = True + else: + formatter.enabled = False + + ipython_display_formatter = ForwardDeclaredInstance('FormatterABC') + @default('ipython_display_formatter') + def _default_formatter(self): + return IPythonDisplayFormatter(parent=self) + + mimebundle_formatter = ForwardDeclaredInstance('FormatterABC') + @default('mimebundle_formatter') + def _default_mime_formatter(self): + return MimeBundleFormatter(parent=self) + + # A dict of formatter whose keys are format types (MIME types) and whose + # values are subclasses of BaseFormatter. + formatters = Dict() + @default('formatters') + def _formatters_default(self): + """Activate the default formatters.""" + formatter_classes = [ + PlainTextFormatter, + HTMLFormatter, + MarkdownFormatter, + SVGFormatter, + PNGFormatter, + PDFFormatter, + JPEGFormatter, + LatexFormatter, + JSONFormatter, + JavascriptFormatter + ] + d = {} + for cls in formatter_classes: + f = cls(parent=self) + d[f.format_type] = f + return d + + def format(self, obj, include=None, exclude=None): + """Return a format data dict for an object. + + By default all format types will be computed. + + The following MIME types are usually implemented: + + * text/plain + * text/html + * text/markdown + * text/latex + * application/json + * application/javascript + * application/pdf + * image/png + * image/jpeg + * image/svg+xml + + Parameters + ---------- + obj : object + The Python object whose format data will be computed. + include : list, tuple or set; optional + A list of format type strings (MIME types) to include in the + format data dict. If this is set *only* the format types included + in this list will be computed. + exclude : list, tuple or set; optional + A list of format type string (MIME types) to exclude in the format + data dict. If this is set all format types will be computed, + except for those included in this argument. + Mimetypes present in exclude will take precedence over the ones in include + + Returns + ------- + (format_dict, metadata_dict) : tuple of two dicts + + format_dict is a dictionary of key/value pairs, one of each format that was + generated for the object. The keys are the format types, which + will usually be MIME type strings and the values and JSON'able + data structure containing the raw data for the representation in + that format. + + metadata_dict is a dictionary of metadata about each mime-type output. + Its keys will be a strict subset of the keys in format_dict. + + Notes + ----- + + If an object implement `_repr_mimebundle_` as well as various + `_repr_*_`, the data returned by `_repr_mimebundle_` will take + precedence and the corresponding `_repr_*_` for this mimetype will + not be called. 
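+
+        For illustration, a minimal sketch of that precedence rule (the class
+        name and payloads are made up)::
+
+            class Both:
+                def _repr_mimebundle_(self, include=None, exclude=None):
+                    # mime-keyed data dict; a (data, metadata) tuple also works
+                    return {"text/html": "<b>from the bundle</b>"}
+
+                def _repr_html_(self):
+                    # skipped for text/html here, because the bundle above
+                    # already provides that mimetype
+                    return "<b>from _repr_html_</b>"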
+ + """ + format_dict = {} + md_dict = {} + + if self.ipython_display_formatter(obj): + # object handled itself, don't proceed + return {}, {} + + format_dict, md_dict = self.mimebundle_formatter(obj, include=include, exclude=exclude) + + if format_dict or md_dict: + if include: + format_dict = {k:v for k,v in format_dict.items() if k in include} + md_dict = {k:v for k,v in md_dict.items() if k in include} + if exclude: + format_dict = {k:v for k,v in format_dict.items() if k not in exclude} + md_dict = {k:v for k,v in md_dict.items() if k not in exclude} + + for format_type, formatter in self.formatters.items(): + if format_type in format_dict: + # already got it from mimebundle, maybe don't render again. + # exception: manually registered per-mime renderer + # check priority: + # 1. user-registered per-mime formatter + # 2. mime-bundle (user-registered or repr method) + # 3. default per-mime formatter (e.g. repr method) + try: + formatter.lookup(obj) + except KeyError: + # no special formatter, use mime-bundle-provided value + continue + if include and format_type not in include: + continue + if exclude and format_type in exclude: + continue + + md = None + try: + data = formatter(obj) + except: + # FIXME: log the exception + raise + + # formatters can return raw data or (data, metadata) + if isinstance(data, tuple) and len(data) == 2: + data, md = data + + if data is not None: + format_dict[format_type] = data + if md is not None: + md_dict[format_type] = md + return format_dict, md_dict + + @property + def format_types(self): + """Return the format types (MIME types) of the active formatters.""" + return list(self.formatters.keys()) + + +#----------------------------------------------------------------------------- +# Formatters for specific format types (text, html, svg, etc.) +#----------------------------------------------------------------------------- + + +def _safe_repr(obj): + """Try to return a repr of an object + + always returns a string, at least. + """ + try: + return repr(obj) + except Exception as e: + return "un-repr-able object (%r)" % e + + +class FormatterWarning(UserWarning): + """Warning class for errors in formatters""" + +@decorator +def catch_format_error(method, self, *args, **kwargs): + """show traceback on failed format call""" + try: + r = method(self, *args, **kwargs) + except NotImplementedError: + # don't warn on NotImplementedErrors + return self._check_return(None, args[0]) + except Exception: + exc_info = sys.exc_info() + ip = get_ipython() + if ip is not None: + ip.showtraceback(exc_info) + else: + traceback.print_exception(*exc_info) + return self._check_return(None, args[0]) + return self._check_return(r, args[0]) + + +class FormatterABC(metaclass=abc.ABCMeta): + """ Abstract base class for Formatters. + + A formatter is a callable class that is responsible for computing the + raw format data for a particular format type (MIME type). For example, + an HTML formatter would have a format type of `text/html` and would return + the HTML representation of the object when called. + """ + + # The format type of the data returned, usually a MIME type. + format_type = 'text/plain' + + # Is the formatter enabled... + enabled = True + + @abc.abstractmethod + def __call__(self, obj): + """Return a JSON'able representation of the object. + + If the object cannot be formatted by this formatter, + warn and return None. + """ + return repr(obj) + + +def _mod_name_key(typ): + """Return a (__module__, __name__) tuple for a type. 
+ + Used as key in Formatter.deferred_printers. + """ + module = getattr(typ, '__module__', None) + name = getattr(typ, '__name__', None) + return (module, name) + + +def _get_type(obj): + """Return the type of an instance (old and new-style)""" + return getattr(obj, '__class__', None) or type(obj) + + +_raise_key_error = Sentinel('_raise_key_error', __name__, +""" +Special value to raise a KeyError + +Raise KeyError in `BaseFormatter.pop` if passed as the default value to `pop` +""") + + +class BaseFormatter(Configurable): + """A base formatter class that is configurable. + + This formatter should usually be used as the base class of all formatters. + It is a traited :class:`Configurable` class and includes an extensible + API for users to determine how their objects are formatted. The following + logic is used to find a function to format an given object. + + 1. The object is introspected to see if it has a method with the name + :attr:`print_method`. If is does, that object is passed to that method + for formatting. + 2. If no print method is found, three internal dictionaries are consulted + to find print method: :attr:`singleton_printers`, :attr:`type_printers` + and :attr:`deferred_printers`. + + Users should use these dictionaries to register functions that will be + used to compute the format data for their objects (if those objects don't + have the special print methods). The easiest way of using these + dictionaries is through the :meth:`for_type` and :meth:`for_type_by_name` + methods. + + If no function/callable is found to compute the format data, ``None`` is + returned and this format type is not used. + """ + + format_type = Unicode('text/plain') + _return_type = str + + enabled = Bool(True).tag(config=True) + + print_method = ObjectName('__repr__') + + # The singleton printers. + # Maps the IDs of the builtin singleton objects to the format functions. + singleton_printers = Dict().tag(config=True) + + # The type-specific printers. + # Map type objects to the format functions. + type_printers = Dict().tag(config=True) + + # The deferred-import type-specific printers. + # Map (modulename, classname) pairs to the format functions. + deferred_printers = Dict().tag(config=True) + + @catch_format_error + def __call__(self, obj): + """Compute the format for an object.""" + if self.enabled: + # lookup registered printer + try: + printer = self.lookup(obj) + except KeyError: + pass + else: + return printer(obj) + # Finally look for special method names + method = get_real_method(obj, self.print_method) + if method is not None: + return method() + return None + else: + return None + + def __contains__(self, typ): + """map in to lookup_by_type""" + try: + self.lookup_by_type(typ) + except KeyError: + return False + else: + return True + + def _check_return(self, r, obj): + """Check that a return value is appropriate + + Return the value if so, None otherwise, warning if invalid. + """ + if r is None or isinstance(r, self._return_type) or \ + (isinstance(r, tuple) and r and isinstance(r[0], self._return_type)): + return r + else: + warnings.warn( + "%s formatter returned invalid type %s (expected %s) for object: %s" % \ + (self.format_type, type(r), self._return_type, _safe_repr(obj)), + FormatterWarning + ) + + def lookup(self, obj): + """Look up the formatter for a given instance. + + Parameters + ---------- + obj : object instance + + Returns + ------- + f : callable + The registered formatting callable for the type. 
+ + Raises + ------ + KeyError if the type has not been registered. + """ + # look for singleton first + obj_id = id(obj) + if obj_id in self.singleton_printers: + return self.singleton_printers[obj_id] + # then lookup by type + return self.lookup_by_type(_get_type(obj)) + + def lookup_by_type(self, typ): + """Look up the registered formatter for a type. + + Parameters + ---------- + typ : type or '__module__.__name__' string for a type + + Returns + ------- + f : callable + The registered formatting callable for the type. + + Raises + ------ + KeyError if the type has not been registered. + """ + if isinstance(typ, str): + typ_key = tuple(typ.rsplit('.',1)) + if typ_key not in self.deferred_printers: + # We may have it cached in the type map. We will have to + # iterate over all of the types to check. + for cls in self.type_printers: + if _mod_name_key(cls) == typ_key: + return self.type_printers[cls] + else: + return self.deferred_printers[typ_key] + else: + for cls in pretty._get_mro(typ): + if cls in self.type_printers or self._in_deferred_types(cls): + return self.type_printers[cls] + + # If we have reached here, the lookup failed. + raise KeyError("No registered printer for {0!r}".format(typ)) + + def for_type(self, typ, func=None): + """Add a format function for a given type. + + Parameters + ---------- + typ : type or '__module__.__name__' string for a type + The class of the object that will be formatted using `func`. + func : callable + A callable for computing the format data. + `func` will be called with the object to be formatted, + and will return the raw data in this formatter's format. + Subclasses may use a different call signature for the + `func` argument. + + If `func` is None or not specified, there will be no change, + only returning the current value. + + Returns + ------- + oldfunc : callable + The currently registered callable. + If you are registering a new formatter, + this will be the previous value (to enable restoring later). + """ + # if string given, interpret as 'pkg.module.class_name' + if isinstance(typ, str): + type_module, type_name = typ.rsplit('.', 1) + return self.for_type_by_name(type_module, type_name, func) + + try: + oldfunc = self.lookup_by_type(typ) + except KeyError: + oldfunc = None + + if func is not None: + self.type_printers[typ] = func + + return oldfunc + + def for_type_by_name(self, type_module, type_name, func=None): + """Add a format function for a type specified by the full dotted + module and name of the type, rather than the type of the object. + + Parameters + ---------- + type_module : str + The full dotted name of the module the type is defined in, like + ``numpy``. + type_name : str + The name of the type (the class name), like ``dtype`` + func : callable + A callable for computing the format data. + `func` will be called with the object to be formatted, + and will return the raw data in this formatter's format. + Subclasses may use a different call signature for the + `func` argument. + + If `func` is None or unspecified, there will be no change, + only returning the current value. + + Returns + ------- + oldfunc : callable + The currently registered callable. + If you are registering a new formatter, + this will be the previous value (to enable restoring later). 
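+
+        A short sketch of registering a formatter by name, without importing
+        the module first (the module, class and callable shown are only an
+        illustration, run from an interactive IPython session)::
+
+            html = get_ipython().display_formatter.formatters['text/html']
+            html.for_type_by_name('pandas.core.frame', 'DataFrame',
+                                  lambda df: df.to_html())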
+ """ + key = (type_module, type_name) + + try: + oldfunc = self.lookup_by_type("%s.%s" % key) + except KeyError: + oldfunc = None + + if func is not None: + self.deferred_printers[key] = func + return oldfunc + + def pop(self, typ, default=_raise_key_error): + """Pop a formatter for the given type. + + Parameters + ---------- + typ : type or '__module__.__name__' string for a type + default : object + value to be returned if no formatter is registered for typ. + + Returns + ------- + obj : object + The last registered object for the type. + + Raises + ------ + KeyError if the type is not registered and default is not specified. + """ + + if isinstance(typ, str): + typ_key = tuple(typ.rsplit('.',1)) + if typ_key not in self.deferred_printers: + # We may have it cached in the type map. We will have to + # iterate over all of the types to check. + for cls in self.type_printers: + if _mod_name_key(cls) == typ_key: + old = self.type_printers.pop(cls) + break + else: + old = default + else: + old = self.deferred_printers.pop(typ_key) + else: + if typ in self.type_printers: + old = self.type_printers.pop(typ) + else: + old = self.deferred_printers.pop(_mod_name_key(typ), default) + if old is _raise_key_error: + raise KeyError("No registered value for {0!r}".format(typ)) + return old + + def _in_deferred_types(self, cls): + """ + Check if the given class is specified in the deferred type registry. + + Successful matches will be moved to the regular type registry for future use. + """ + mod = getattr(cls, '__module__', None) + name = getattr(cls, '__name__', None) + key = (mod, name) + if key in self.deferred_printers: + # Move the printer over to the regular registry. + printer = self.deferred_printers.pop(key) + self.type_printers[cls] = printer + return True + return False + + +class PlainTextFormatter(BaseFormatter): + """The default pretty-printer. + + This uses :mod:`IPython.lib.pretty` to compute the format data of + the object. If the object cannot be pretty printed, :func:`repr` is used. + See the documentation of :mod:`IPython.lib.pretty` for details on + how to write pretty printers. Here is a simple example:: + + def dtype_pprinter(obj, p, cycle): + if cycle: + return p.text('dtype(...)') + if hasattr(obj, 'fields'): + if obj.fields is None: + p.text(repr(obj)) + else: + p.begin_group(7, 'dtype([') + for i, field in enumerate(obj.descr): + if i > 0: + p.text(',') + p.breakable() + p.pretty(field) + p.end_group(7, '])') + """ + + # The format type of data returned. + format_type = Unicode('text/plain') + + # This subclass ignores this attribute as it always need to return + # something. + enabled = Bool(True).tag(config=False) + + max_seq_length = Integer(pretty.MAX_SEQ_LENGTH, + help="""Truncate large collections (lists, dicts, tuples, sets) to this size. + + Set to 0 to disable truncation. + """ + ).tag(config=True) + + # Look for a _repr_pretty_ methods to use for pretty printing. + print_method = ObjectName('_repr_pretty_') + + # Whether to pretty-print or not. + pprint = Bool(True).tag(config=True) + + # Whether to be verbose or not. + verbose = Bool(False).tag(config=True) + + # The maximum width. + max_width = Integer(79).tag(config=True) + + # The newline character. 
+ newline = Unicode('\n').tag(config=True) + + # format-string for pprinting floats + float_format = Unicode('%r') + # setter for float precision, either int or direct format-string + float_precision = CUnicode('').tag(config=True) + + @observe('float_precision') + def _float_precision_changed(self, change): + """float_precision changed, set float_format accordingly. + + float_precision can be set by int or str. + This will set float_format, after interpreting input. + If numpy has been imported, numpy print precision will also be set. + + integer `n` sets format to '%.nf', otherwise, format set directly. + + An empty string returns to defaults (repr for float, 8 for numpy). + + This parameter can be set via the '%precision' magic. + """ + + new = change['new'] + if '%' in new: + # got explicit format string + fmt = new + try: + fmt%3.14159 + except Exception: + raise ValueError("Precision must be int or format string, not %r"%new) + elif new: + # otherwise, should be an int + try: + i = int(new) + assert i >= 0 + except ValueError: + raise ValueError("Precision must be int or format string, not %r"%new) + except AssertionError: + raise ValueError("int precision must be non-negative, not %r"%i) + + fmt = '%%.%if'%i + if 'numpy' in sys.modules: + # set numpy precision if it has been imported + import numpy + numpy.set_printoptions(precision=i) + else: + # default back to repr + fmt = '%r' + if 'numpy' in sys.modules: + import numpy + # numpy default is 8 + numpy.set_printoptions(precision=8) + self.float_format = fmt + + # Use the default pretty printers from IPython.lib.pretty. + @default('singleton_printers') + def _singleton_printers_default(self): + return pretty._singleton_pprinters.copy() + + @default('type_printers') + def _type_printers_default(self): + d = pretty._type_pprinters.copy() + d[float] = lambda obj,p,cycle: p.text(self.float_format%obj) + return d + + @default('deferred_printers') + def _deferred_printers_default(self): + return pretty._deferred_type_pprinters.copy() + + #### FormatterABC interface #### + + @catch_format_error + def __call__(self, obj): + """Compute the pretty representation of the object.""" + if not self.pprint: + return repr(obj) + else: + stream = StringIO() + printer = pretty.RepresentationPrinter(stream, self.verbose, + self.max_width, self.newline, + max_seq_length=self.max_seq_length, + singleton_pprinters=self.singleton_printers, + type_pprinters=self.type_printers, + deferred_pprinters=self.deferred_printers) + printer.pretty(obj) + printer.flush() + return stream.getvalue() + + +class HTMLFormatter(BaseFormatter): + """An HTML formatter. + + To define the callables that compute the HTML representation of your + objects, define a :meth:`_repr_html_` method or use the :meth:`for_type` + or :meth:`for_type_by_name` methods to register functions that handle + this. + + The return value of this formatter should be a valid HTML snippet that + could be injected into an existing DOM. It should *not* include the + ```<html>`` or ```<body>`` tags. + """ + format_type = Unicode('text/html') + + print_method = ObjectName('_repr_html_') + + +class MarkdownFormatter(BaseFormatter): + """A Markdown formatter. + + To define the callables that compute the Markdown representation of your + objects, define a :meth:`_repr_markdown_` method or use the :meth:`for_type` + or :meth:`for_type_by_name` methods to register functions that handle + this. + + The return value of this formatter should be a valid Markdown. 
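+
+    For example, a minimal (illustrative) implementation of the hook::
+
+        class Report:
+            def _repr_markdown_(self):
+                return "### Results\n\n* accuracy: **0.93**"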
+ """ + format_type = Unicode('text/markdown') + + print_method = ObjectName('_repr_markdown_') + +class SVGFormatter(BaseFormatter): + """An SVG formatter. + + To define the callables that compute the SVG representation of your + objects, define a :meth:`_repr_svg_` method or use the :meth:`for_type` + or :meth:`for_type_by_name` methods to register functions that handle + this. + + The return value of this formatter should be valid SVG enclosed in + ```<svg>``` tags, that could be injected into an existing DOM. It should + *not* include the ```<html>`` or ```<body>`` tags. + """ + format_type = Unicode('image/svg+xml') + + print_method = ObjectName('_repr_svg_') + + +class PNGFormatter(BaseFormatter): + """A PNG formatter. + + To define the callables that compute the PNG representation of your + objects, define a :meth:`_repr_png_` method or use the :meth:`for_type` + or :meth:`for_type_by_name` methods to register functions that handle + this. + + The return value of this formatter should be raw PNG data, *not* + base64 encoded. + """ + format_type = Unicode('image/png') + + print_method = ObjectName('_repr_png_') + + _return_type = (bytes, str) + + +class JPEGFormatter(BaseFormatter): + """A JPEG formatter. + + To define the callables that compute the JPEG representation of your + objects, define a :meth:`_repr_jpeg_` method or use the :meth:`for_type` + or :meth:`for_type_by_name` methods to register functions that handle + this. + + The return value of this formatter should be raw JPEG data, *not* + base64 encoded. + """ + format_type = Unicode('image/jpeg') + + print_method = ObjectName('_repr_jpeg_') + + _return_type = (bytes, str) + + +class LatexFormatter(BaseFormatter): + """A LaTeX formatter. + + To define the callables that compute the LaTeX representation of your + objects, define a :meth:`_repr_latex_` method or use the :meth:`for_type` + or :meth:`for_type_by_name` methods to register functions that handle + this. + + The return value of this formatter should be a valid LaTeX equation, + enclosed in either ```$```, ```$$``` or another LaTeX equation + environment. + """ + format_type = Unicode('text/latex') + + print_method = ObjectName('_repr_latex_') + + +class JSONFormatter(BaseFormatter): + """A JSON string formatter. + + To define the callables that compute the JSONable representation of + your objects, define a :meth:`_repr_json_` method or use the :meth:`for_type` + or :meth:`for_type_by_name` methods to register functions that handle + this. + + The return value of this formatter should be a JSONable list or dict. + JSON scalars (None, number, string) are not allowed, only dict or list containers. + """ + format_type = Unicode('application/json') + _return_type = (list, dict) + + print_method = ObjectName('_repr_json_') + + def _check_return(self, r, obj): + """Check that a return value is appropriate + + Return the value if so, None otherwise, warning if invalid. + """ + if r is None: + return + md = None + if isinstance(r, tuple): + # unpack data, metadata tuple for type checking on first element + r, md = r + + # handle deprecated JSON-as-string form from IPython < 3 + if isinstance(r, str): + warnings.warn("JSON expects JSONable list/dict containers, not JSON strings", + FormatterWarning) + r = json.loads(r) + + if md is not None: + # put the tuple back together + r = (r, md) + return super(JSONFormatter, self)._check_return(r, obj) + + +class JavascriptFormatter(BaseFormatter): + """A Javascript formatter. 
+ + To define the callables that compute the Javascript representation of + your objects, define a :meth:`_repr_javascript_` method or use the + :meth:`for_type` or :meth:`for_type_by_name` methods to register functions + that handle this. + + The return value of this formatter should be valid Javascript code and + should *not* be enclosed in ```<script>``` tags. + """ + format_type = Unicode('application/javascript') + + print_method = ObjectName('_repr_javascript_') + + +class PDFFormatter(BaseFormatter): + """A PDF formatter. + + To define the callables that compute the PDF representation of your + objects, define a :meth:`_repr_pdf_` method or use the :meth:`for_type` + or :meth:`for_type_by_name` methods to register functions that handle + this. + + The return value of this formatter should be raw PDF data, *not* + base64 encoded. + """ + format_type = Unicode('application/pdf') + + print_method = ObjectName('_repr_pdf_') + + _return_type = (bytes, str) + +class IPythonDisplayFormatter(BaseFormatter): + """An escape-hatch Formatter for objects that know how to display themselves. + + To define the callables that compute the representation of your + objects, define a :meth:`_ipython_display_` method or use the :meth:`for_type` + or :meth:`for_type_by_name` methods to register functions that handle + this. Unlike mime-type displays, this method should not return anything, + instead calling any appropriate display methods itself. + + This display formatter has highest priority. + If it fires, no other display formatter will be called. + + Prior to IPython 6.1, `_ipython_display_` was the only way to display custom mime-types + without registering a new Formatter. + + IPython 6.1 introduces `_repr_mimebundle_` for displaying custom mime-types, + so `_ipython_display_` should only be used for objects that require unusual + display patterns, such as multiple display calls. + """ + print_method = ObjectName('_ipython_display_') + _return_type = (type(None), bool) + + @catch_format_error + def __call__(self, obj): + """Compute the format for an object.""" + if self.enabled: + # lookup registered printer + try: + printer = self.lookup(obj) + except KeyError: + pass + else: + printer(obj) + return True + # Finally look for special method names + method = get_real_method(obj, self.print_method) + if method is not None: + method() + return True + + +class MimeBundleFormatter(BaseFormatter): + """A Formatter for arbitrary mime-types. + + Unlike other `_repr_<mimetype>_` methods, + `_repr_mimebundle_` should return mime-bundle data, + either the mime-keyed `data` dictionary or the tuple `(data, metadata)`. + Any mime-type is valid. + + To define the callables that compute the mime-bundle representation of your + objects, define a :meth:`_repr_mimebundle_` method or use the :meth:`for_type` + or :meth:`for_type_by_name` methods to register functions that handle + this. + + .. versionadded:: 6.1 + """ + print_method = ObjectName('_repr_mimebundle_') + _return_type = dict + + def _check_return(self, r, obj): + r = super(MimeBundleFormatter, self)._check_return(r, obj) + # always return (data, metadata): + if r is None: + return {}, {} + if not isinstance(r, tuple): + return r, {} + return r + + @catch_format_error + def __call__(self, obj, include=None, exclude=None): + """Compute the format for an object. + + Identical to parent's method but we pass extra parameters to the method. + + Unlike other _repr_*_ `_repr_mimebundle_` should allow extra kwargs, in + particular `include` and `exclude`. 
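+
+        A minimal sketch of such a method (the class and payloads are
+        illustrative only)::
+
+            class Figure:
+                def _repr_mimebundle_(self, include=None, exclude=None):
+                    data = {"text/plain": "<Figure>",
+                            "text/html": "<b>Figure</b>"}
+                    if include:
+                        data = {k: v for k, v in data.items() if k in include}
+                    if exclude:
+                        data = {k: v for k, v in data.items()
+                                if k not in exclude}
+                    return data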
+ """ + if self.enabled: + # lookup registered printer + try: + printer = self.lookup(obj) + except KeyError: + pass + else: + return printer(obj) + # Finally look for special method names + method = get_real_method(obj, self.print_method) + + if method is not None: + return method(include=include, exclude=exclude) + return None + else: + return None + + +FormatterABC.register(BaseFormatter) +FormatterABC.register(PlainTextFormatter) +FormatterABC.register(HTMLFormatter) +FormatterABC.register(MarkdownFormatter) +FormatterABC.register(SVGFormatter) +FormatterABC.register(PNGFormatter) +FormatterABC.register(PDFFormatter) +FormatterABC.register(JPEGFormatter) +FormatterABC.register(LatexFormatter) +FormatterABC.register(JSONFormatter) +FormatterABC.register(JavascriptFormatter) +FormatterABC.register(IPythonDisplayFormatter) +FormatterABC.register(MimeBundleFormatter) + + +def format_display_data(obj, include=None, exclude=None): + """Return a format data dict for an object. + + By default all format types will be computed. + + Parameters + ---------- + obj : object + The Python object whose format data will be computed. + + Returns + ------- + format_dict : dict + A dictionary of key/value pairs, one or each format that was + generated for the object. The keys are the format types, which + will usually be MIME type strings and the values and JSON'able + data structure containing the raw data for the representation in + that format. + include : list or tuple, optional + A list of format type strings (MIME types) to include in the + format data dict. If this is set *only* the format types included + in this list will be computed. + exclude : list or tuple, optional + A list of format type string (MIME types) to exclude in the format + data dict. If this is set all format types will be computed, + except for those included in this argument. + """ + from .interactiveshell import InteractiveShell + + return InteractiveShell.instance().display_formatter.format( + obj, + include, + exclude + ) diff --git a/contrib/python/ipython/py3/IPython/core/getipython.py b/contrib/python/ipython/py3/IPython/core/getipython.py index 9a127418add..e6d8a4c91d7 100644 --- a/contrib/python/ipython/py3/IPython/core/getipython.py +++ b/contrib/python/ipython/py3/IPython/core/getipython.py @@ -1,24 +1,24 @@ -# encoding: utf-8 -"""Simple function to call to get the current InteractiveShell instance -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2013 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Classes and functions -#----------------------------------------------------------------------------- - - -def get_ipython(): - """Get the global InteractiveShell instance. - - Returns None if no InteractiveShell instance is registered. - """ - from IPython.core.interactiveshell import InteractiveShell - if InteractiveShell.initialized(): - return InteractiveShell.instance() +# encoding: utf-8 +"""Simple function to call to get the current InteractiveShell instance +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2013 The IPython Development Team +# +# Distributed under the terms of the BSD License. 
The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Classes and functions +#----------------------------------------------------------------------------- + + +def get_ipython(): + """Get the global InteractiveShell instance. + + Returns None if no InteractiveShell instance is registered. + """ + from IPython.core.interactiveshell import InteractiveShell + if InteractiveShell.initialized(): + return InteractiveShell.instance() diff --git a/contrib/python/ipython/py3/IPython/core/history.py b/contrib/python/ipython/py3/IPython/core/history.py index baf9d92cc7b..98373f279c9 100644 --- a/contrib/python/ipython/py3/IPython/core/history.py +++ b/contrib/python/ipython/py3/IPython/core/history.py @@ -1,906 +1,906 @@ -""" History related magics and functionality """ - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - - -import atexit -import datetime -import os -import re -try: - import sqlite3 -except ImportError: - try: - from pysqlite2 import dbapi2 as sqlite3 - except ImportError: - sqlite3 = None -import threading - -from traitlets.config.configurable import LoggingConfigurable -from decorator import decorator -from IPython.utils.decorators import undoc -from IPython.paths import locate_profile -from traitlets import ( - Any, Bool, Dict, Instance, Integer, List, Unicode, TraitError, - default, observe, -) -from warnings import warn - -#----------------------------------------------------------------------------- -# Classes and functions -#----------------------------------------------------------------------------- - -@undoc -class DummyDB(object): - """Dummy DB that will act as a black hole for history. - - Only used in the absence of sqlite""" - def execute(*args, **kwargs): - return [] - - def commit(self, *args, **kwargs): - pass - - def __enter__(self, *args, **kwargs): - pass - - def __exit__(self, *args, **kwargs): - pass - - -@decorator -def needs_sqlite(f, self, *a, **kw): - """Decorator: return an empty list in the absence of sqlite.""" - if sqlite3 is None or not self.enabled: - return [] - else: - return f(self, *a, **kw) - - -if sqlite3 is not None: - DatabaseError = sqlite3.DatabaseError - OperationalError = sqlite3.OperationalError -else: - @undoc - class DatabaseError(Exception): - "Dummy exception when sqlite could not be imported. Should never occur." - - @undoc - class OperationalError(Exception): - "Dummy exception when sqlite could not be imported. Should never occur." - -# use 16kB as threshold for whether a corrupt history db should be saved -# that should be at least 100 entries or so -_SAVE_DB_SIZE = 16384 - -@decorator -def catch_corrupt_db(f, self, *a, **kw): - """A decorator which wraps HistoryAccessor method calls to catch errors from - a corrupt SQLite database, move the old database out of the way, and create - a new one. - - We avoid clobbering larger databases because this may be triggered due to filesystem issues, - not just a corrupt file. 
- """ - try: - return f(self, *a, **kw) - except (DatabaseError, OperationalError) as e: - self._corrupt_db_counter += 1 - self.log.error("Failed to open SQLite history %s (%s).", self.hist_file, e) - if self.hist_file != ':memory:': - if self._corrupt_db_counter > self._corrupt_db_limit: - self.hist_file = ':memory:' - self.log.error("Failed to load history too many times, history will not be saved.") - elif os.path.isfile(self.hist_file): - # move the file out of the way - base, ext = os.path.splitext(self.hist_file) - size = os.stat(self.hist_file).st_size - if size >= _SAVE_DB_SIZE: - # if there's significant content, avoid clobbering - now = datetime.datetime.now().isoformat().replace(':', '.') - newpath = base + '-corrupt-' + now + ext - # don't clobber previous corrupt backups - for i in range(100): - if not os.path.isfile(newpath): - break - else: - newpath = base + '-corrupt-' + now + (u'-%i' % i) + ext - else: - # not much content, possibly empty; don't worry about clobbering - # maybe we should just delete it? - newpath = base + '-corrupt' + ext - os.rename(self.hist_file, newpath) - self.log.error("History file was moved to %s and a new file created.", newpath) - self.init_db() - return [] - else: - # Failed with :memory:, something serious is wrong - raise - -class HistoryAccessorBase(LoggingConfigurable): - """An abstract class for History Accessors """ - - def get_tail(self, n=10, raw=True, output=False, include_latest=False): - raise NotImplementedError - - def search(self, pattern="*", raw=True, search_raw=True, - output=False, n=None, unique=False): - raise NotImplementedError - - def get_range(self, session, start=1, stop=None, raw=True,output=False): - raise NotImplementedError - - def get_range_by_str(self, rangestr, raw=True, output=False): - raise NotImplementedError - - -class HistoryAccessor(HistoryAccessorBase): - """Access the history database without adding to it. - - This is intended for use by standalone history tools. IPython shells use - HistoryManager, below, which is a subclass of this.""" - - # counter for init_db retries, so we don't keep trying over and over - _corrupt_db_counter = 0 - # after two failures, fallback on :memory: - _corrupt_db_limit = 2 - - # String holding the path to the history file - hist_file = Unicode( - help="""Path to file to use for SQLite history database. - - By default, IPython will put the history database in the IPython - profile directory. If you would rather share one history among - profiles, you can set this value in each, so that they are consistent. - - Due to an issue with fcntl, SQLite is known to misbehave on some NFS - mounts. If you see IPython hanging, try setting this to something on a - local disk, e.g:: - - ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite - - you can also use the specific value `:memory:` (including the colon - at both end but not the back ticks), to avoid creating an history file. - - """).tag(config=True) - - enabled = Bool(True, - help="""enable the SQLite history - - set enabled=False to disable the SQLite history, - in which case there will be no stored history, no SQLite connection, - and no background saving thread. This may be necessary in some - threaded environments where IPython is embedded. - """ - ).tag(config=True) - - connection_options = Dict( - help="""Options for configuring the SQLite connection - - These options are passed as keyword args to sqlite3.connect - when establishing database connections. 
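-
-        For example, in an ipython_config.py file (the timeout value is
-        illustrative; it is passed straight through to sqlite3.connect)::
-
-            c.HistoryAccessor.connection_options = {'timeout': 20.0}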
- """ - ).tag(config=True) - - # The SQLite database - db = Any() - @observe('db') - def _db_changed(self, change): - """validate the db, since it can be an Instance of two different types""" - new = change['new'] - connection_types = (DummyDB,) - if sqlite3 is not None: - connection_types = (DummyDB, sqlite3.Connection) - if not isinstance(new, connection_types): - msg = "%s.db must be sqlite3 Connection or DummyDB, not %r" % \ - (self.__class__.__name__, new) - raise TraitError(msg) - - def __init__(self, profile='default', hist_file=u'', **traits): - """Create a new history accessor. - - Parameters - ---------- - profile : str - The name of the profile from which to open history. - hist_file : str - Path to an SQLite history database stored by IPython. If specified, - hist_file overrides profile. - config : :class:`~traitlets.config.loader.Config` - Config object. hist_file can also be set through this. - """ - # We need a pointer back to the shell for various tasks. - super(HistoryAccessor, self).__init__(**traits) - # defer setting hist_file from kwarg until after init, - # otherwise the default kwarg value would clobber any value - # set by config - if hist_file: - self.hist_file = hist_file - - if self.hist_file == u'': - # No one has set the hist_file, yet. - self.hist_file = self._get_hist_file_name(profile) - - if sqlite3 is None and self.enabled: - warn("IPython History requires SQLite, your history will not be saved") - self.enabled = False - - self.init_db() - - def _get_hist_file_name(self, profile='default'): - """Find the history file for the given profile name. - - This is overridden by the HistoryManager subclass, to use the shell's - active profile. - - Parameters - ---------- - profile : str - The name of a profile which has a history file. - """ - return os.path.join(locate_profile(profile), 'history.sqlite') - - @catch_corrupt_db - def init_db(self): - """Connect to the database, and create tables if necessary.""" - if not self.enabled: - self.db = DummyDB() - return - - # use detect_types so that timestamps return datetime objects - kwargs = dict(detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES) - kwargs.update(self.connection_options) - self.db = sqlite3.connect(self.hist_file, **kwargs) - self.db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer - primary key autoincrement, start timestamp, - end timestamp, num_cmds integer, remark text)""") - self.db.execute("""CREATE TABLE IF NOT EXISTS history - (session integer, line integer, source text, source_raw text, - PRIMARY KEY (session, line))""") - # Output history is optional, but ensure the table's there so it can be - # enabled later. - self.db.execute("""CREATE TABLE IF NOT EXISTS output_history - (session integer, line integer, output text, - PRIMARY KEY (session, line))""") - self.db.commit() - # success! reset corrupt db count - self._corrupt_db_counter = 0 - - def writeout_cache(self): - """Overridden by HistoryManager to dump the cache before certain - database lookups.""" - pass - - ## ------------------------------- - ## Methods for retrieving history: - ## ------------------------------- - def _run_sql(self, sql, params, raw=True, output=False): - """Prepares and runs an SQL query for the history database. - - Parameters - ---------- - sql : str - Any filtering expressions to go after SELECT ... FROM ... 
- params : tuple - Parameters passed to the SQL query (to replace "?") - raw, output : bool - See :meth:`get_range` - - Returns - ------- - Tuples as :meth:`get_range` - """ - toget = 'source_raw' if raw else 'source' - sqlfrom = "history" - if output: - sqlfrom = "history LEFT JOIN output_history USING (session, line)" - toget = "history.%s, output_history.output" % toget - cur = self.db.execute("SELECT session, line, %s FROM %s " %\ - (toget, sqlfrom) + sql, params) - if output: # Regroup into 3-tuples, and parse JSON - return ((ses, lin, (inp, out)) for ses, lin, inp, out in cur) - return cur - - @needs_sqlite - @catch_corrupt_db - def get_session_info(self, session): - """Get info about a session. - - Parameters - ---------- - - session : int - Session number to retrieve. - - Returns - ------- - - session_id : int - Session ID number - start : datetime - Timestamp for the start of the session. - end : datetime - Timestamp for the end of the session, or None if IPython crashed. - num_cmds : int - Number of commands run, or None if IPython crashed. - remark : unicode - A manually set description. - """ - query = "SELECT * from sessions where session == ?" - return self.db.execute(query, (session,)).fetchone() - - @catch_corrupt_db - def get_last_session_id(self): - """Get the last session ID currently in the database. - - Within IPython, this should be the same as the value stored in - :attr:`HistoryManager.session_number`. - """ - for record in self.get_tail(n=1, include_latest=True): - return record[0] - - @catch_corrupt_db - def get_tail(self, n=10, raw=True, output=False, include_latest=False): - """Get the last n lines from the history database. - - Parameters - ---------- - n : int - The number of lines to get - raw, output : bool - See :meth:`get_range` - include_latest : bool - If False (default), n+1 lines are fetched, and the latest one - is discarded. This is intended to be used where the function - is called by a user command, which it should not return. - - Returns - ------- - Tuples as :meth:`get_range` - """ - self.writeout_cache() - if not include_latest: - n += 1 - cur = self._run_sql("ORDER BY session DESC, line DESC LIMIT ?", - (n,), raw=raw, output=output) - if not include_latest: - return reversed(list(cur)[1:]) - return reversed(list(cur)) - - @catch_corrupt_db - def search(self, pattern="*", raw=True, search_raw=True, - output=False, n=None, unique=False): - """Search the database using unix glob-style matching (wildcards - * and ?). - - Parameters - ---------- - pattern : str - The wildcarded pattern to match when searching - search_raw : bool - If True, search the raw input, otherwise, the parsed input - raw, output : bool - See :meth:`get_range` - n : None or int - If an integer is given, it defines the limit of - returned entries. - unique : bool - When it is true, return only unique entries. - - Returns - ------- - Tuples as :meth:`get_range` - """ - tosearch = "source_raw" if search_raw else "source" - if output: - tosearch = "history." + tosearch - self.writeout_cache() - sqlform = "WHERE %s GLOB ?" % tosearch - params = (pattern,) - if unique: - sqlform += ' GROUP BY {0}'.format(tosearch) - if n is not None: - sqlform += " ORDER BY session DESC, line DESC LIMIT ?" 
- params += (n,) - elif unique: - sqlform += " ORDER BY session, line" - cur = self._run_sql(sqlform, params, raw=raw, output=output) - if n is not None: - return reversed(list(cur)) - return cur - - @catch_corrupt_db - def get_range(self, session, start=1, stop=None, raw=True,output=False): - """Retrieve input by session. - - Parameters - ---------- - session : int - Session number to retrieve. - start : int - First line to retrieve. - stop : int - End of line range (excluded from output itself). If None, retrieve - to the end of the session. - raw : bool - If True, return untranslated input - output : bool - If True, attempt to include output. This will be 'real' Python - objects for the current session, or text reprs from previous - sessions if db_log_output was enabled at the time. Where no output - is found, None is used. - - Returns - ------- - entries - An iterator over the desired lines. Each line is a 3-tuple, either - (session, line, input) if output is False, or - (session, line, (input, output)) if output is True. - """ - if stop: - lineclause = "line >= ? AND line < ?" - params = (session, start, stop) - else: - lineclause = "line>=?" - params = (session, start) - - return self._run_sql("WHERE session==? AND %s" % lineclause, - params, raw=raw, output=output) - - def get_range_by_str(self, rangestr, raw=True, output=False): - """Get lines of history from a string of ranges, as used by magic - commands %hist, %save, %macro, etc. - - Parameters - ---------- - rangestr : str - A string specifying ranges, e.g. "5 ~2/1-4". See - :func:`magic_history` for full details. - raw, output : bool - As :meth:`get_range` - - Returns - ------- - Tuples as :meth:`get_range` - """ - for sess, s, e in extract_hist_ranges(rangestr): - for line in self.get_range(sess, s, e, raw=raw, output=output): - yield line - - -class HistoryManager(HistoryAccessor): - """A class to organize all history-related functionality in one place. - """ - # Public interface - - # An instance of the IPython shell we are attached to - shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', - allow_none=True) - # Lists to hold processed and raw history. These start with a blank entry - # so that we can index them starting from 1 - input_hist_parsed = List([""]) - input_hist_raw = List([""]) - # A list of directories visited during session - dir_hist = List() - @default('dir_hist') - def _dir_hist_default(self): - try: - return [os.getcwd()] - except OSError: - return [] - - # A dict of output history, keyed with ints from the shell's - # execution count. - output_hist = Dict() - # The text/plain repr of outputs. - output_hist_reprs = Dict() - - # The number of the current session in the history database - session_number = Integer() - - db_log_output = Bool(False, - help="Should the history database include output? (default: no)" - ).tag(config=True) - db_cache_size = Integer(0, - help="Write to database every x commands (higher values save disk access & power).\n" - "Values of 1 or less effectively disable caching." - ).tag(config=True) - # The input and output caches - db_input_cache = List() - db_output_cache = List() - - # History saving in separate thread - save_thread = Instance('IPython.core.history.HistorySavingThread', - allow_none=True) - save_flag = Instance(threading.Event, allow_none=True) - - # Private interface - # Variables used to store the three last inputs from the user. On each new - # history update, we populate the user's namespace with these, shifted as - # necessary. 
- _i00 = Unicode(u'') - _i = Unicode(u'') - _ii = Unicode(u'') - _iii = Unicode(u'') - - # A regex matching all forms of the exit command, so that we don't store - # them in the history (it's annoying to rewind the first entry and land on - # an exit call). - _exit_re = re.compile(r"(exit|quit)(\s*\(.*\))?$") - - def __init__(self, shell=None, config=None, **traits): - """Create a new history manager associated with a shell instance. - """ - # We need a pointer back to the shell for various tasks. - super(HistoryManager, self).__init__(shell=shell, config=config, - **traits) - self.save_flag = threading.Event() - self.db_input_cache_lock = threading.Lock() - self.db_output_cache_lock = threading.Lock() - - try: - self.new_session() - except OperationalError: - self.log.error("Failed to create history session in %s. History will not be saved.", - self.hist_file, exc_info=True) - self.hist_file = ':memory:' - - if self.enabled and self.hist_file != ':memory:': - self.save_thread = HistorySavingThread(self) - self.save_thread.start() - - def _get_hist_file_name(self, profile=None): - """Get default history file name based on the Shell's profile. - - The profile parameter is ignored, but must exist for compatibility with - the parent class.""" - profile_dir = self.shell.profile_dir.location - return os.path.join(profile_dir, 'history.sqlite') - - @needs_sqlite - def new_session(self, conn=None): - """Get a new session number.""" - if conn is None: - conn = self.db - - with conn: - cur = conn.execute("""INSERT INTO sessions VALUES (NULL, ?, NULL, - NULL, "") """, (datetime.datetime.now(),)) - self.session_number = cur.lastrowid - - def end_session(self): - """Close the database session, filling in the end time and line count.""" - self.writeout_cache() - with self.db: - self.db.execute("""UPDATE sessions SET end=?, num_cmds=? WHERE - session==?""", (datetime.datetime.now(), - len(self.input_hist_parsed)-1, self.session_number)) - self.session_number = 0 - - def name_session(self, name): - """Give the current session a name in the history database.""" - with self.db: - self.db.execute("UPDATE sessions SET remark=? WHERE session==?", - (name, self.session_number)) - - def reset(self, new_session=True): - """Clear the session history, releasing all object references, and - optionally open a new session.""" - self.output_hist.clear() - # The directory history can't be completely empty - self.dir_hist[:] = [os.getcwd()] - - if new_session: - if self.session_number: - self.end_session() - self.input_hist_parsed[:] = [""] - self.input_hist_raw[:] = [""] - self.new_session() - - # ------------------------------ - # Methods for retrieving history - # ------------------------------ - def get_session_info(self, session=0): - """Get info about a session. - - Parameters - ---------- - - session : int - Session number to retrieve. The current session is 0, and negative - numbers count back from current session, so -1 is the previous session. - - Returns - ------- - - session_id : int - Session ID number - start : datetime - Timestamp for the start of the session. - end : datetime - Timestamp for the end of the session, or None if IPython crashed. - num_cmds : int - Number of commands run, or None if IPython crashed. - remark : unicode - A manually set description. 
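-
-        A brief usage sketch, assuming at least one earlier session exists::
-
-            hm = get_ipython().history_manager
-            session_id, start, end, num_cmds, remark = hm.get_session_info(-1)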
- """ - if session <= 0: - session += self.session_number - - return super(HistoryManager, self).get_session_info(session=session) - - def _get_range_session(self, start=1, stop=None, raw=True, output=False): - """Get input and output history from the current session. Called by - get_range, and takes similar parameters.""" - input_hist = self.input_hist_raw if raw else self.input_hist_parsed - - n = len(input_hist) - if start < 0: - start += n - if not stop or (stop > n): - stop = n - elif stop < 0: - stop += n - - for i in range(start, stop): - if output: - line = (input_hist[i], self.output_hist_reprs.get(i)) - else: - line = input_hist[i] - yield (0, i, line) - - def get_range(self, session=0, start=1, stop=None, raw=True,output=False): - """Retrieve input by session. - - Parameters - ---------- - session : int - Session number to retrieve. The current session is 0, and negative - numbers count back from current session, so -1 is previous session. - start : int - First line to retrieve. - stop : int - End of line range (excluded from output itself). If None, retrieve - to the end of the session. - raw : bool - If True, return untranslated input - output : bool - If True, attempt to include output. This will be 'real' Python - objects for the current session, or text reprs from previous - sessions if db_log_output was enabled at the time. Where no output - is found, None is used. - - Returns - ------- - entries - An iterator over the desired lines. Each line is a 3-tuple, either - (session, line, input) if output is False, or - (session, line, (input, output)) if output is True. - """ - if session <= 0: - session += self.session_number - if session==self.session_number: # Current session - return self._get_range_session(start, stop, raw, output) - return super(HistoryManager, self).get_range(session, start, stop, raw, - output) - - ## ---------------------------- - ## Methods for storing history: - ## ---------------------------- - def store_inputs(self, line_num, source, source_raw=None): - """Store source and raw input in history and create input cache - variables ``_i*``. - - Parameters - ---------- - line_num : int - The prompt number of this input. - - source : str - Python input. - - source_raw : str, optional - If given, this is the raw input without any IPython transformations - applied to it. If not given, ``source`` is used. - """ - if source_raw is None: - source_raw = source - source = source.rstrip('\n') - source_raw = source_raw.rstrip('\n') - - # do not store exit/quit commands - if self._exit_re.match(source_raw.strip()): - return - - self.input_hist_parsed.append(source) - self.input_hist_raw.append(source_raw) - - with self.db_input_cache_lock: - self.db_input_cache.append((line_num, source, source_raw)) - # Trigger to flush cache and write to DB. - if len(self.db_input_cache) >= self.db_cache_size: - self.save_flag.set() - - # update the auto _i variables - self._iii = self._ii - self._ii = self._i - self._i = self._i00 - self._i00 = source_raw - - # hackish access to user namespace to create _i1,_i2... dynamically - new_i = '_i%s' % line_num - to_main = {'_i': self._i, - '_ii': self._ii, - '_iii': self._iii, - new_i : self._i00 } - - if self.shell is not None: - self.shell.push(to_main, interactive=False) - - def store_output(self, line_num): - """If database output logging is enabled, this saves all the - outputs from the indicated prompt number to the database. It's - called by run_cell after code has been executed. 
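-
-        Output logging is disabled by default; a configuration sketch to turn
-        it on (the cache size shown is illustrative)::
-
-            c.HistoryManager.db_log_output = True
-            c.HistoryManager.db_cache_size = 10  # write every 10 commands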
- - Parameters - ---------- - line_num : int - The line number from which to save outputs - """ - if (not self.db_log_output) or (line_num not in self.output_hist_reprs): - return - output = self.output_hist_reprs[line_num] - - with self.db_output_cache_lock: - self.db_output_cache.append((line_num, output)) - if self.db_cache_size <= 1: - self.save_flag.set() - - def _writeout_input_cache(self, conn): - with conn: - for line in self.db_input_cache: - conn.execute("INSERT INTO history VALUES (?, ?, ?, ?)", - (self.session_number,)+line) - - def _writeout_output_cache(self, conn): - with conn: - for line in self.db_output_cache: - conn.execute("INSERT INTO output_history VALUES (?, ?, ?)", - (self.session_number,)+line) - - @needs_sqlite - def writeout_cache(self, conn=None): - """Write any entries in the cache to the database.""" - if conn is None: - conn = self.db - - with self.db_input_cache_lock: - try: - self._writeout_input_cache(conn) - except sqlite3.IntegrityError: - self.new_session(conn) - print("ERROR! Session/line number was not unique in", - "database. History logging moved to new session", - self.session_number) - try: - # Try writing to the new session. If this fails, don't - # recurse - self._writeout_input_cache(conn) - except sqlite3.IntegrityError: - pass - finally: - self.db_input_cache = [] - - with self.db_output_cache_lock: - try: - self._writeout_output_cache(conn) - except sqlite3.IntegrityError: - print("!! Session/line number for output was not unique", - "in database. Output will not be stored.") - finally: - self.db_output_cache = [] - - -class HistorySavingThread(threading.Thread): - """This thread takes care of writing history to the database, so that - the UI isn't held up while that happens. - - It waits for the HistoryManager's save_flag to be set, then writes out - the history cache. The main thread is responsible for setting the flag when - the cache size reaches a defined threshold.""" - daemon = True - stop_now = False - enabled = True - def __init__(self, history_manager): - super(HistorySavingThread, self).__init__(name="IPythonHistorySavingThread") - self.history_manager = history_manager - self.enabled = history_manager.enabled - atexit.register(self.stop) - - @needs_sqlite - def run(self): - # We need a separate db connection per thread: - try: - self.db = sqlite3.connect(self.history_manager.hist_file, - **self.history_manager.connection_options - ) - while True: - self.history_manager.save_flag.wait() - if self.stop_now: - self.db.close() - return - self.history_manager.save_flag.clear() - self.history_manager.writeout_cache(self.db) - except Exception as e: - print(("The history saving thread hit an unexpected error (%s)." - "History will not be written to the database.") % repr(e)) - - def stop(self): - """This can be called from the main thread to safely stop this thread. - - Note that it does not attempt to write out remaining history before - exiting. That should be done by calling the HistoryManager's - end_session method.""" - self.stop_now = True - self.history_manager.save_flag.set() - self.join() - - -# To match, e.g. ~5/8-~2/3 -range_re = re.compile(r""" -((?P<startsess>~?\d+)/)? -(?P<start>\d+)? -((?P<sep>[\-:]) - ((?P<endsess>~?\d+)/)? - (?P<end>\d+))? -$""", re.VERBOSE) - - -def extract_hist_ranges(ranges_str): - """Turn a string of history ranges into 3-tuples of (session, start, stop). 
- - Examples - -------- - >>> list(extract_hist_ranges("~8/5-~7/4 2")) - [(-8, 5, None), (-7, 1, 5), (0, 2, 3)] - """ - for range_str in ranges_str.split(): - rmatch = range_re.match(range_str) - if not rmatch: - continue - start = rmatch.group("start") - if start: - start = int(start) - end = rmatch.group("end") - # If no end specified, get (a, a + 1) - end = int(end) if end else start + 1 - else: # start not specified - if not rmatch.group('startsess'): # no startsess - continue - start = 1 - end = None # provide the entire session hist - - if rmatch.group("sep") == "-": # 1-3 == 1:4 --> [1, 2, 3] - end += 1 - startsess = rmatch.group("startsess") or "0" - endsess = rmatch.group("endsess") or startsess - startsess = int(startsess.replace("~","-")) - endsess = int(endsess.replace("~","-")) - assert endsess >= startsess, "start session must be earlier than end session" - - if endsess == startsess: - yield (startsess, start, end) - continue - # Multiple sessions in one range: - yield (startsess, start, None) - for sess in range(startsess+1, endsess): - yield (sess, 1, None) - yield (endsess, 1, end) - - -def _format_lineno(session, line): - """Helper function to format line numbers properly.""" - if session == 0: - return str(line) - return "%s#%s" % (session, line) +""" History related magics and functionality """ + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + + +import atexit +import datetime +import os +import re +try: + import sqlite3 +except ImportError: + try: + from pysqlite2 import dbapi2 as sqlite3 + except ImportError: + sqlite3 = None +import threading + +from traitlets.config.configurable import LoggingConfigurable +from decorator import decorator +from IPython.utils.decorators import undoc +from IPython.paths import locate_profile +from traitlets import ( + Any, Bool, Dict, Instance, Integer, List, Unicode, TraitError, + default, observe, +) +from warnings import warn + +#----------------------------------------------------------------------------- +# Classes and functions +#----------------------------------------------------------------------------- + +@undoc +class DummyDB(object): + """Dummy DB that will act as a black hole for history. + + Only used in the absence of sqlite""" + def execute(*args, **kwargs): + return [] + + def commit(self, *args, **kwargs): + pass + + def __enter__(self, *args, **kwargs): + pass + + def __exit__(self, *args, **kwargs): + pass + + +@decorator +def needs_sqlite(f, self, *a, **kw): + """Decorator: return an empty list in the absence of sqlite.""" + if sqlite3 is None or not self.enabled: + return [] + else: + return f(self, *a, **kw) + + +if sqlite3 is not None: + DatabaseError = sqlite3.DatabaseError + OperationalError = sqlite3.OperationalError +else: + @undoc + class DatabaseError(Exception): + "Dummy exception when sqlite could not be imported. Should never occur." + + @undoc + class OperationalError(Exception): + "Dummy exception when sqlite could not be imported. Should never occur." + +# use 16kB as threshold for whether a corrupt history db should be saved +# that should be at least 100 entries or so +_SAVE_DB_SIZE = 16384 + +@decorator +def catch_corrupt_db(f, self, *a, **kw): + """A decorator which wraps HistoryAccessor method calls to catch errors from + a corrupt SQLite database, move the old database out of the way, and create + a new one. + + We avoid clobbering larger databases because this may be triggered due to filesystem issues, + not just a corrupt file. 
+ """ + try: + return f(self, *a, **kw) + except (DatabaseError, OperationalError) as e: + self._corrupt_db_counter += 1 + self.log.error("Failed to open SQLite history %s (%s).", self.hist_file, e) + if self.hist_file != ':memory:': + if self._corrupt_db_counter > self._corrupt_db_limit: + self.hist_file = ':memory:' + self.log.error("Failed to load history too many times, history will not be saved.") + elif os.path.isfile(self.hist_file): + # move the file out of the way + base, ext = os.path.splitext(self.hist_file) + size = os.stat(self.hist_file).st_size + if size >= _SAVE_DB_SIZE: + # if there's significant content, avoid clobbering + now = datetime.datetime.now().isoformat().replace(':', '.') + newpath = base + '-corrupt-' + now + ext + # don't clobber previous corrupt backups + for i in range(100): + if not os.path.isfile(newpath): + break + else: + newpath = base + '-corrupt-' + now + (u'-%i' % i) + ext + else: + # not much content, possibly empty; don't worry about clobbering + # maybe we should just delete it? + newpath = base + '-corrupt' + ext + os.rename(self.hist_file, newpath) + self.log.error("History file was moved to %s and a new file created.", newpath) + self.init_db() + return [] + else: + # Failed with :memory:, something serious is wrong + raise + +class HistoryAccessorBase(LoggingConfigurable): + """An abstract class for History Accessors """ + + def get_tail(self, n=10, raw=True, output=False, include_latest=False): + raise NotImplementedError + + def search(self, pattern="*", raw=True, search_raw=True, + output=False, n=None, unique=False): + raise NotImplementedError + + def get_range(self, session, start=1, stop=None, raw=True,output=False): + raise NotImplementedError + + def get_range_by_str(self, rangestr, raw=True, output=False): + raise NotImplementedError + + +class HistoryAccessor(HistoryAccessorBase): + """Access the history database without adding to it. + + This is intended for use by standalone history tools. IPython shells use + HistoryManager, below, which is a subclass of this.""" + + # counter for init_db retries, so we don't keep trying over and over + _corrupt_db_counter = 0 + # after two failures, fallback on :memory: + _corrupt_db_limit = 2 + + # String holding the path to the history file + hist_file = Unicode( + help="""Path to file to use for SQLite history database. + + By default, IPython will put the history database in the IPython + profile directory. If you would rather share one history among + profiles, you can set this value in each, so that they are consistent. + + Due to an issue with fcntl, SQLite is known to misbehave on some NFS + mounts. If you see IPython hanging, try setting this to something on a + local disk, e.g:: + + ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite + + you can also use the specific value `:memory:` (including the colon + at both end but not the back ticks), to avoid creating an history file. + + """).tag(config=True) + + enabled = Bool(True, + help="""enable the SQLite history + + set enabled=False to disable the SQLite history, + in which case there will be no stored history, no SQLite connection, + and no background saving thread. This may be necessary in some + threaded environments where IPython is embedded. + """ + ).tag(config=True) + + connection_options = Dict( + help="""Options for configuring the SQLite connection + + These options are passed as keyword args to sqlite3.connect + when establishing database connections. 
+ """ + ).tag(config=True) + + # The SQLite database + db = Any() + @observe('db') + def _db_changed(self, change): + """validate the db, since it can be an Instance of two different types""" + new = change['new'] + connection_types = (DummyDB,) + if sqlite3 is not None: + connection_types = (DummyDB, sqlite3.Connection) + if not isinstance(new, connection_types): + msg = "%s.db must be sqlite3 Connection or DummyDB, not %r" % \ + (self.__class__.__name__, new) + raise TraitError(msg) + + def __init__(self, profile='default', hist_file=u'', **traits): + """Create a new history accessor. + + Parameters + ---------- + profile : str + The name of the profile from which to open history. + hist_file : str + Path to an SQLite history database stored by IPython. If specified, + hist_file overrides profile. + config : :class:`~traitlets.config.loader.Config` + Config object. hist_file can also be set through this. + """ + # We need a pointer back to the shell for various tasks. + super(HistoryAccessor, self).__init__(**traits) + # defer setting hist_file from kwarg until after init, + # otherwise the default kwarg value would clobber any value + # set by config + if hist_file: + self.hist_file = hist_file + + if self.hist_file == u'': + # No one has set the hist_file, yet. + self.hist_file = self._get_hist_file_name(profile) + + if sqlite3 is None and self.enabled: + warn("IPython History requires SQLite, your history will not be saved") + self.enabled = False + + self.init_db() + + def _get_hist_file_name(self, profile='default'): + """Find the history file for the given profile name. + + This is overridden by the HistoryManager subclass, to use the shell's + active profile. + + Parameters + ---------- + profile : str + The name of a profile which has a history file. + """ + return os.path.join(locate_profile(profile), 'history.sqlite') + + @catch_corrupt_db + def init_db(self): + """Connect to the database, and create tables if necessary.""" + if not self.enabled: + self.db = DummyDB() + return + + # use detect_types so that timestamps return datetime objects + kwargs = dict(detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES) + kwargs.update(self.connection_options) + self.db = sqlite3.connect(self.hist_file, **kwargs) + self.db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer + primary key autoincrement, start timestamp, + end timestamp, num_cmds integer, remark text)""") + self.db.execute("""CREATE TABLE IF NOT EXISTS history + (session integer, line integer, source text, source_raw text, + PRIMARY KEY (session, line))""") + # Output history is optional, but ensure the table's there so it can be + # enabled later. + self.db.execute("""CREATE TABLE IF NOT EXISTS output_history + (session integer, line integer, output text, + PRIMARY KEY (session, line))""") + self.db.commit() + # success! reset corrupt db count + self._corrupt_db_counter = 0 + + def writeout_cache(self): + """Overridden by HistoryManager to dump the cache before certain + database lookups.""" + pass + + ## ------------------------------- + ## Methods for retrieving history: + ## ------------------------------- + def _run_sql(self, sql, params, raw=True, output=False): + """Prepares and runs an SQL query for the history database. + + Parameters + ---------- + sql : str + Any filtering expressions to go after SELECT ... FROM ... 
+ params : tuple + Parameters passed to the SQL query (to replace "?") + raw, output : bool + See :meth:`get_range` + + Returns + ------- + Tuples as :meth:`get_range` + """ + toget = 'source_raw' if raw else 'source' + sqlfrom = "history" + if output: + sqlfrom = "history LEFT JOIN output_history USING (session, line)" + toget = "history.%s, output_history.output" % toget + cur = self.db.execute("SELECT session, line, %s FROM %s " %\ + (toget, sqlfrom) + sql, params) + if output: # Regroup into 3-tuples, and parse JSON + return ((ses, lin, (inp, out)) for ses, lin, inp, out in cur) + return cur + + @needs_sqlite + @catch_corrupt_db + def get_session_info(self, session): + """Get info about a session. + + Parameters + ---------- + + session : int + Session number to retrieve. + + Returns + ------- + + session_id : int + Session ID number + start : datetime + Timestamp for the start of the session. + end : datetime + Timestamp for the end of the session, or None if IPython crashed. + num_cmds : int + Number of commands run, or None if IPython crashed. + remark : unicode + A manually set description. + """ + query = "SELECT * from sessions where session == ?" + return self.db.execute(query, (session,)).fetchone() + + @catch_corrupt_db + def get_last_session_id(self): + """Get the last session ID currently in the database. + + Within IPython, this should be the same as the value stored in + :attr:`HistoryManager.session_number`. + """ + for record in self.get_tail(n=1, include_latest=True): + return record[0] + + @catch_corrupt_db + def get_tail(self, n=10, raw=True, output=False, include_latest=False): + """Get the last n lines from the history database. + + Parameters + ---------- + n : int + The number of lines to get + raw, output : bool + See :meth:`get_range` + include_latest : bool + If False (default), n+1 lines are fetched, and the latest one + is discarded. This is intended to be used where the function + is called by a user command, which it should not return. + + Returns + ------- + Tuples as :meth:`get_range` + """ + self.writeout_cache() + if not include_latest: + n += 1 + cur = self._run_sql("ORDER BY session DESC, line DESC LIMIT ?", + (n,), raw=raw, output=output) + if not include_latest: + return reversed(list(cur)[1:]) + return reversed(list(cur)) + + @catch_corrupt_db + def search(self, pattern="*", raw=True, search_raw=True, + output=False, n=None, unique=False): + """Search the database using unix glob-style matching (wildcards + * and ?). + + Parameters + ---------- + pattern : str + The wildcarded pattern to match when searching + search_raw : bool + If True, search the raw input, otherwise, the parsed input + raw, output : bool + See :meth:`get_range` + n : None or int + If an integer is given, it defines the limit of + returned entries. + unique : bool + When it is true, return only unique entries. + + Returns + ------- + Tuples as :meth:`get_range` + """ + tosearch = "source_raw" if search_raw else "source" + if output: + tosearch = "history." + tosearch + self.writeout_cache() + sqlform = "WHERE %s GLOB ?" % tosearch + params = (pattern,) + if unique: + sqlform += ' GROUP BY {0}'.format(tosearch) + if n is not None: + sqlform += " ORDER BY session DESC, line DESC LIMIT ?" 
+ params += (n,) + elif unique: + sqlform += " ORDER BY session, line" + cur = self._run_sql(sqlform, params, raw=raw, output=output) + if n is not None: + return reversed(list(cur)) + return cur + + @catch_corrupt_db + def get_range(self, session, start=1, stop=None, raw=True,output=False): + """Retrieve input by session. + + Parameters + ---------- + session : int + Session number to retrieve. + start : int + First line to retrieve. + stop : int + End of line range (excluded from output itself). If None, retrieve + to the end of the session. + raw : bool + If True, return untranslated input + output : bool + If True, attempt to include output. This will be 'real' Python + objects for the current session, or text reprs from previous + sessions if db_log_output was enabled at the time. Where no output + is found, None is used. + + Returns + ------- + entries + An iterator over the desired lines. Each line is a 3-tuple, either + (session, line, input) if output is False, or + (session, line, (input, output)) if output is True. + """ + if stop: + lineclause = "line >= ? AND line < ?" + params = (session, start, stop) + else: + lineclause = "line>=?" + params = (session, start) + + return self._run_sql("WHERE session==? AND %s" % lineclause, + params, raw=raw, output=output) + + def get_range_by_str(self, rangestr, raw=True, output=False): + """Get lines of history from a string of ranges, as used by magic + commands %hist, %save, %macro, etc. + + Parameters + ---------- + rangestr : str + A string specifying ranges, e.g. "5 ~2/1-4". See + :func:`magic_history` for full details. + raw, output : bool + As :meth:`get_range` + + Returns + ------- + Tuples as :meth:`get_range` + """ + for sess, s, e in extract_hist_ranges(rangestr): + for line in self.get_range(sess, s, e, raw=raw, output=output): + yield line + + +class HistoryManager(HistoryAccessor): + """A class to organize all history-related functionality in one place. + """ + # Public interface + + # An instance of the IPython shell we are attached to + shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', + allow_none=True) + # Lists to hold processed and raw history. These start with a blank entry + # so that we can index them starting from 1 + input_hist_parsed = List([""]) + input_hist_raw = List([""]) + # A list of directories visited during session + dir_hist = List() + @default('dir_hist') + def _dir_hist_default(self): + try: + return [os.getcwd()] + except OSError: + return [] + + # A dict of output history, keyed with ints from the shell's + # execution count. + output_hist = Dict() + # The text/plain repr of outputs. + output_hist_reprs = Dict() + + # The number of the current session in the history database + session_number = Integer() + + db_log_output = Bool(False, + help="Should the history database include output? (default: no)" + ).tag(config=True) + db_cache_size = Integer(0, + help="Write to database every x commands (higher values save disk access & power).\n" + "Values of 1 or less effectively disable caching." + ).tag(config=True) + # The input and output caches + db_input_cache = List() + db_output_cache = List() + + # History saving in separate thread + save_thread = Instance('IPython.core.history.HistorySavingThread', + allow_none=True) + save_flag = Instance(threading.Event, allow_none=True) + + # Private interface + # Variables used to store the three last inputs from the user. On each new + # history update, we populate the user's namespace with these, shifted as + # necessary. 
+ _i00 = Unicode(u'') + _i = Unicode(u'') + _ii = Unicode(u'') + _iii = Unicode(u'') + + # A regex matching all forms of the exit command, so that we don't store + # them in the history (it's annoying to rewind the first entry and land on + # an exit call). + _exit_re = re.compile(r"(exit|quit)(\s*\(.*\))?$") + + def __init__(self, shell=None, config=None, **traits): + """Create a new history manager associated with a shell instance. + """ + # We need a pointer back to the shell for various tasks. + super(HistoryManager, self).__init__(shell=shell, config=config, + **traits) + self.save_flag = threading.Event() + self.db_input_cache_lock = threading.Lock() + self.db_output_cache_lock = threading.Lock() + + try: + self.new_session() + except OperationalError: + self.log.error("Failed to create history session in %s. History will not be saved.", + self.hist_file, exc_info=True) + self.hist_file = ':memory:' + + if self.enabled and self.hist_file != ':memory:': + self.save_thread = HistorySavingThread(self) + self.save_thread.start() + + def _get_hist_file_name(self, profile=None): + """Get default history file name based on the Shell's profile. + + The profile parameter is ignored, but must exist for compatibility with + the parent class.""" + profile_dir = self.shell.profile_dir.location + return os.path.join(profile_dir, 'history.sqlite') + + @needs_sqlite + def new_session(self, conn=None): + """Get a new session number.""" + if conn is None: + conn = self.db + + with conn: + cur = conn.execute("""INSERT INTO sessions VALUES (NULL, ?, NULL, + NULL, "") """, (datetime.datetime.now(),)) + self.session_number = cur.lastrowid + + def end_session(self): + """Close the database session, filling in the end time and line count.""" + self.writeout_cache() + with self.db: + self.db.execute("""UPDATE sessions SET end=?, num_cmds=? WHERE + session==?""", (datetime.datetime.now(), + len(self.input_hist_parsed)-1, self.session_number)) + self.session_number = 0 + + def name_session(self, name): + """Give the current session a name in the history database.""" + with self.db: + self.db.execute("UPDATE sessions SET remark=? WHERE session==?", + (name, self.session_number)) + + def reset(self, new_session=True): + """Clear the session history, releasing all object references, and + optionally open a new session.""" + self.output_hist.clear() + # The directory history can't be completely empty + self.dir_hist[:] = [os.getcwd()] + + if new_session: + if self.session_number: + self.end_session() + self.input_hist_parsed[:] = [""] + self.input_hist_raw[:] = [""] + self.new_session() + + # ------------------------------ + # Methods for retrieving history + # ------------------------------ + def get_session_info(self, session=0): + """Get info about a session. + + Parameters + ---------- + + session : int + Session number to retrieve. The current session is 0, and negative + numbers count back from current session, so -1 is the previous session. + + Returns + ------- + + session_id : int + Session ID number + start : datetime + Timestamp for the start of the session. + end : datetime + Timestamp for the end of the session, or None if IPython crashed. + num_cmds : int + Number of commands run, or None if IPython crashed. + remark : unicode + A manually set description. 
+ """ + if session <= 0: + session += self.session_number + + return super(HistoryManager, self).get_session_info(session=session) + + def _get_range_session(self, start=1, stop=None, raw=True, output=False): + """Get input and output history from the current session. Called by + get_range, and takes similar parameters.""" + input_hist = self.input_hist_raw if raw else self.input_hist_parsed + + n = len(input_hist) + if start < 0: + start += n + if not stop or (stop > n): + stop = n + elif stop < 0: + stop += n + + for i in range(start, stop): + if output: + line = (input_hist[i], self.output_hist_reprs.get(i)) + else: + line = input_hist[i] + yield (0, i, line) + + def get_range(self, session=0, start=1, stop=None, raw=True,output=False): + """Retrieve input by session. + + Parameters + ---------- + session : int + Session number to retrieve. The current session is 0, and negative + numbers count back from current session, so -1 is previous session. + start : int + First line to retrieve. + stop : int + End of line range (excluded from output itself). If None, retrieve + to the end of the session. + raw : bool + If True, return untranslated input + output : bool + If True, attempt to include output. This will be 'real' Python + objects for the current session, or text reprs from previous + sessions if db_log_output was enabled at the time. Where no output + is found, None is used. + + Returns + ------- + entries + An iterator over the desired lines. Each line is a 3-tuple, either + (session, line, input) if output is False, or + (session, line, (input, output)) if output is True. + """ + if session <= 0: + session += self.session_number + if session==self.session_number: # Current session + return self._get_range_session(start, stop, raw, output) + return super(HistoryManager, self).get_range(session, start, stop, raw, + output) + + ## ---------------------------- + ## Methods for storing history: + ## ---------------------------- + def store_inputs(self, line_num, source, source_raw=None): + """Store source and raw input in history and create input cache + variables ``_i*``. + + Parameters + ---------- + line_num : int + The prompt number of this input. + + source : str + Python input. + + source_raw : str, optional + If given, this is the raw input without any IPython transformations + applied to it. If not given, ``source`` is used. + """ + if source_raw is None: + source_raw = source + source = source.rstrip('\n') + source_raw = source_raw.rstrip('\n') + + # do not store exit/quit commands + if self._exit_re.match(source_raw.strip()): + return + + self.input_hist_parsed.append(source) + self.input_hist_raw.append(source_raw) + + with self.db_input_cache_lock: + self.db_input_cache.append((line_num, source, source_raw)) + # Trigger to flush cache and write to DB. + if len(self.db_input_cache) >= self.db_cache_size: + self.save_flag.set() + + # update the auto _i variables + self._iii = self._ii + self._ii = self._i + self._i = self._i00 + self._i00 = source_raw + + # hackish access to user namespace to create _i1,_i2... dynamically + new_i = '_i%s' % line_num + to_main = {'_i': self._i, + '_ii': self._ii, + '_iii': self._iii, + new_i : self._i00 } + + if self.shell is not None: + self.shell.push(to_main, interactive=False) + + def store_output(self, line_num): + """If database output logging is enabled, this saves all the + outputs from the indicated prompt number to the database. It's + called by run_cell after code has been executed. 
+ + Parameters + ---------- + line_num : int + The line number from which to save outputs + """ + if (not self.db_log_output) or (line_num not in self.output_hist_reprs): + return + output = self.output_hist_reprs[line_num] + + with self.db_output_cache_lock: + self.db_output_cache.append((line_num, output)) + if self.db_cache_size <= 1: + self.save_flag.set() + + def _writeout_input_cache(self, conn): + with conn: + for line in self.db_input_cache: + conn.execute("INSERT INTO history VALUES (?, ?, ?, ?)", + (self.session_number,)+line) + + def _writeout_output_cache(self, conn): + with conn: + for line in self.db_output_cache: + conn.execute("INSERT INTO output_history VALUES (?, ?, ?)", + (self.session_number,)+line) + + @needs_sqlite + def writeout_cache(self, conn=None): + """Write any entries in the cache to the database.""" + if conn is None: + conn = self.db + + with self.db_input_cache_lock: + try: + self._writeout_input_cache(conn) + except sqlite3.IntegrityError: + self.new_session(conn) + print("ERROR! Session/line number was not unique in", + "database. History logging moved to new session", + self.session_number) + try: + # Try writing to the new session. If this fails, don't + # recurse + self._writeout_input_cache(conn) + except sqlite3.IntegrityError: + pass + finally: + self.db_input_cache = [] + + with self.db_output_cache_lock: + try: + self._writeout_output_cache(conn) + except sqlite3.IntegrityError: + print("!! Session/line number for output was not unique", + "in database. Output will not be stored.") + finally: + self.db_output_cache = [] + + +class HistorySavingThread(threading.Thread): + """This thread takes care of writing history to the database, so that + the UI isn't held up while that happens. + + It waits for the HistoryManager's save_flag to be set, then writes out + the history cache. The main thread is responsible for setting the flag when + the cache size reaches a defined threshold.""" + daemon = True + stop_now = False + enabled = True + def __init__(self, history_manager): + super(HistorySavingThread, self).__init__(name="IPythonHistorySavingThread") + self.history_manager = history_manager + self.enabled = history_manager.enabled + atexit.register(self.stop) + + @needs_sqlite + def run(self): + # We need a separate db connection per thread: + try: + self.db = sqlite3.connect(self.history_manager.hist_file, + **self.history_manager.connection_options + ) + while True: + self.history_manager.save_flag.wait() + if self.stop_now: + self.db.close() + return + self.history_manager.save_flag.clear() + self.history_manager.writeout_cache(self.db) + except Exception as e: + print(("The history saving thread hit an unexpected error (%s)." + "History will not be written to the database.") % repr(e)) + + def stop(self): + """This can be called from the main thread to safely stop this thread. + + Note that it does not attempt to write out remaining history before + exiting. That should be done by calling the HistoryManager's + end_session method.""" + self.stop_now = True + self.history_manager.save_flag.set() + self.join() + + +# To match, e.g. ~5/8-~2/3 +range_re = re.compile(r""" +((?P<startsess>~?\d+)/)? +(?P<start>\d+)? +((?P<sep>[\-:]) + ((?P<endsess>~?\d+)/)? + (?P<end>\d+))? +$""", re.VERBOSE) + + +def extract_hist_ranges(ranges_str): + """Turn a string of history ranges into 3-tuples of (session, start, stop). 
+ + Examples + -------- + >>> list(extract_hist_ranges("~8/5-~7/4 2")) + [(-8, 5, None), (-7, 1, 5), (0, 2, 3)] + """ + for range_str in ranges_str.split(): + rmatch = range_re.match(range_str) + if not rmatch: + continue + start = rmatch.group("start") + if start: + start = int(start) + end = rmatch.group("end") + # If no end specified, get (a, a + 1) + end = int(end) if end else start + 1 + else: # start not specified + if not rmatch.group('startsess'): # no startsess + continue + start = 1 + end = None # provide the entire session hist + + if rmatch.group("sep") == "-": # 1-3 == 1:4 --> [1, 2, 3] + end += 1 + startsess = rmatch.group("startsess") or "0" + endsess = rmatch.group("endsess") or startsess + startsess = int(startsess.replace("~","-")) + endsess = int(endsess.replace("~","-")) + assert endsess >= startsess, "start session must be earlier than end session" + + if endsess == startsess: + yield (startsess, start, end) + continue + # Multiple sessions in one range: + yield (startsess, start, None) + for sess in range(startsess+1, endsess): + yield (sess, 1, None) + yield (endsess, 1, end) + + +def _format_lineno(session, line): + """Helper function to format line numbers properly.""" + if session == 0: + return str(line) + return "%s#%s" % (session, line) diff --git a/contrib/python/ipython/py3/IPython/core/historyapp.py b/contrib/python/ipython/py3/IPython/core/historyapp.py index ec1bb46d7c1..a6437eff26e 100644 --- a/contrib/python/ipython/py3/IPython/core/historyapp.py +++ b/contrib/python/ipython/py3/IPython/core/historyapp.py @@ -1,161 +1,161 @@ -# encoding: utf-8 -""" -An application for managing IPython history. - -To be invoked as the `ipython history` subcommand. -""" - -import os -import sqlite3 - -from traitlets.config.application import Application -from .application import BaseIPythonApplication -from traitlets import Bool, Int, Dict -from ..utils.io import ask_yes_no - -trim_hist_help = """Trim the IPython history database to the last 1000 entries. - -This actually copies the last 1000 entries to a new database, and then replaces -the old file with the new. Use the `--keep=` argument to specify a number -other than 1000. -""" - -clear_hist_help = """Clear the IPython history database, deleting all entries. - -Because this is a destructive operation, IPython will prompt the user if they -really want to do this. Passing a `-f` flag will force clearing without a -prompt. - -This is an handy alias to `ipython history trim --keep=0` -""" - - -class HistoryTrim(BaseIPythonApplication): - description = trim_hist_help - - backup = Bool(False, - help="Keep the old history file as history.sqlite.<N>" - ).tag(config=True) - - keep = Int(1000, - help="Number of recent lines to keep in the database." - ).tag(config=True) - - flags = Dict(dict( - backup = ({'HistoryTrim' : {'backup' : True}}, - backup.help - ) - )) - - aliases=Dict(dict( - keep = 'HistoryTrim.keep' - )) - - def start(self): - profile_dir = self.profile_dir.location - hist_file = os.path.join(profile_dir, 'history.sqlite') - con = sqlite3.connect(hist_file) - - # Grab the recent history from the current database. - inputs = list(con.execute('SELECT session, line, source, source_raw FROM ' - 'history ORDER BY session DESC, line DESC LIMIT ?', (self.keep+1,))) - if len(inputs) <= self.keep: - print("There are already at most %d entries in the history database." % self.keep) - print("Not doing anything. Use --keep= argument to keep fewer entries") - return - - print("Trimming history to the most recent %d entries." 
% self.keep) - - inputs.pop() # Remove the extra element we got to check the length. - inputs.reverse() - if inputs: - first_session = inputs[0][0] - outputs = list(con.execute('SELECT session, line, output FROM ' - 'output_history WHERE session >= ?', (first_session,))) - sessions = list(con.execute('SELECT session, start, end, num_cmds, remark FROM ' - 'sessions WHERE session >= ?', (first_session,))) - con.close() - - # Create the new history database. - new_hist_file = os.path.join(profile_dir, 'history.sqlite.new') - i = 0 - while os.path.exists(new_hist_file): - # Make sure we don't interfere with an existing file. - i += 1 - new_hist_file = os.path.join(profile_dir, 'history.sqlite.new'+str(i)) - new_db = sqlite3.connect(new_hist_file) - new_db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer - primary key autoincrement, start timestamp, - end timestamp, num_cmds integer, remark text)""") - new_db.execute("""CREATE TABLE IF NOT EXISTS history - (session integer, line integer, source text, source_raw text, - PRIMARY KEY (session, line))""") - new_db.execute("""CREATE TABLE IF NOT EXISTS output_history - (session integer, line integer, output text, - PRIMARY KEY (session, line))""") - new_db.commit() - - - if inputs: - with new_db: - # Add the recent history into the new database. - new_db.executemany('insert into sessions values (?,?,?,?,?)', sessions) - new_db.executemany('insert into history values (?,?,?,?)', inputs) - new_db.executemany('insert into output_history values (?,?,?)', outputs) - new_db.close() - - if self.backup: - i = 1 - backup_hist_file = os.path.join(profile_dir, 'history.sqlite.old.%d' % i) - while os.path.exists(backup_hist_file): - i += 1 - backup_hist_file = os.path.join(profile_dir, 'history.sqlite.old.%d' % i) - os.rename(hist_file, backup_hist_file) - print("Backed up longer history file to", backup_hist_file) - else: - os.remove(hist_file) - - os.rename(new_hist_file, hist_file) - -class HistoryClear(HistoryTrim): - description = clear_hist_help - keep = Int(0, - help="Number of recent lines to keep in the database.") - - force = Bool(False, - help="Don't prompt user for confirmation" - ).tag(config=True) - - flags = Dict(dict( - force = ({'HistoryClear' : {'force' : True}}, - force.help), - f = ({'HistoryTrim' : {'force' : True}}, - force.help - ) - )) - aliases = Dict() - - def start(self): - if self.force or ask_yes_no("Really delete all ipython history? ", - default="no", interrupt="no"): - HistoryTrim.start(self) - -class HistoryApp(Application): - name = u'ipython-history' - description = "Manage the IPython history database." - - subcommands = Dict(dict( - trim = (HistoryTrim, HistoryTrim.description.splitlines()[0]), - clear = (HistoryClear, HistoryClear.description.splitlines()[0]), - )) - - def start(self): - if self.subapp is None: - print("No subcommand specified. Must specify one of: %s" % \ - (self.subcommands.keys())) - print() - self.print_description() - self.print_subcommands() - self.exit(1) - else: - return self.subapp.start() +# encoding: utf-8 +""" +An application for managing IPython history. + +To be invoked as the `ipython history` subcommand. +""" + +import os +import sqlite3 + +from traitlets.config.application import Application +from .application import BaseIPythonApplication +from traitlets import Bool, Int, Dict +from ..utils.io import ask_yes_no + +trim_hist_help = """Trim the IPython history database to the last 1000 entries. 
+ +This actually copies the last 1000 entries to a new database, and then replaces +the old file with the new. Use the `--keep=` argument to specify a number +other than 1000. +""" + +clear_hist_help = """Clear the IPython history database, deleting all entries. + +Because this is a destructive operation, IPython will prompt the user if they +really want to do this. Passing a `-f` flag will force clearing without a +prompt. + +This is an handy alias to `ipython history trim --keep=0` +""" + + +class HistoryTrim(BaseIPythonApplication): + description = trim_hist_help + + backup = Bool(False, + help="Keep the old history file as history.sqlite.<N>" + ).tag(config=True) + + keep = Int(1000, + help="Number of recent lines to keep in the database." + ).tag(config=True) + + flags = Dict(dict( + backup = ({'HistoryTrim' : {'backup' : True}}, + backup.help + ) + )) + + aliases=Dict(dict( + keep = 'HistoryTrim.keep' + )) + + def start(self): + profile_dir = self.profile_dir.location + hist_file = os.path.join(profile_dir, 'history.sqlite') + con = sqlite3.connect(hist_file) + + # Grab the recent history from the current database. + inputs = list(con.execute('SELECT session, line, source, source_raw FROM ' + 'history ORDER BY session DESC, line DESC LIMIT ?', (self.keep+1,))) + if len(inputs) <= self.keep: + print("There are already at most %d entries in the history database." % self.keep) + print("Not doing anything. Use --keep= argument to keep fewer entries") + return + + print("Trimming history to the most recent %d entries." % self.keep) + + inputs.pop() # Remove the extra element we got to check the length. + inputs.reverse() + if inputs: + first_session = inputs[0][0] + outputs = list(con.execute('SELECT session, line, output FROM ' + 'output_history WHERE session >= ?', (first_session,))) + sessions = list(con.execute('SELECT session, start, end, num_cmds, remark FROM ' + 'sessions WHERE session >= ?', (first_session,))) + con.close() + + # Create the new history database. + new_hist_file = os.path.join(profile_dir, 'history.sqlite.new') + i = 0 + while os.path.exists(new_hist_file): + # Make sure we don't interfere with an existing file. + i += 1 + new_hist_file = os.path.join(profile_dir, 'history.sqlite.new'+str(i)) + new_db = sqlite3.connect(new_hist_file) + new_db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer + primary key autoincrement, start timestamp, + end timestamp, num_cmds integer, remark text)""") + new_db.execute("""CREATE TABLE IF NOT EXISTS history + (session integer, line integer, source text, source_raw text, + PRIMARY KEY (session, line))""") + new_db.execute("""CREATE TABLE IF NOT EXISTS output_history + (session integer, line integer, output text, + PRIMARY KEY (session, line))""") + new_db.commit() + + + if inputs: + with new_db: + # Add the recent history into the new database. 
+ new_db.executemany('insert into sessions values (?,?,?,?,?)', sessions) + new_db.executemany('insert into history values (?,?,?,?)', inputs) + new_db.executemany('insert into output_history values (?,?,?)', outputs) + new_db.close() + + if self.backup: + i = 1 + backup_hist_file = os.path.join(profile_dir, 'history.sqlite.old.%d' % i) + while os.path.exists(backup_hist_file): + i += 1 + backup_hist_file = os.path.join(profile_dir, 'history.sqlite.old.%d' % i) + os.rename(hist_file, backup_hist_file) + print("Backed up longer history file to", backup_hist_file) + else: + os.remove(hist_file) + + os.rename(new_hist_file, hist_file) + +class HistoryClear(HistoryTrim): + description = clear_hist_help + keep = Int(0, + help="Number of recent lines to keep in the database.") + + force = Bool(False, + help="Don't prompt user for confirmation" + ).tag(config=True) + + flags = Dict(dict( + force = ({'HistoryClear' : {'force' : True}}, + force.help), + f = ({'HistoryTrim' : {'force' : True}}, + force.help + ) + )) + aliases = Dict() + + def start(self): + if self.force or ask_yes_no("Really delete all ipython history? ", + default="no", interrupt="no"): + HistoryTrim.start(self) + +class HistoryApp(Application): + name = u'ipython-history' + description = "Manage the IPython history database." + + subcommands = Dict(dict( + trim = (HistoryTrim, HistoryTrim.description.splitlines()[0]), + clear = (HistoryClear, HistoryClear.description.splitlines()[0]), + )) + + def start(self): + if self.subapp is None: + print("No subcommand specified. Must specify one of: %s" % \ + (self.subcommands.keys())) + print() + self.print_description() + self.print_subcommands() + self.exit(1) + else: + return self.subapp.start() diff --git a/contrib/python/ipython/py3/IPython/core/hooks.py b/contrib/python/ipython/py3/IPython/core/hooks.py index 7b9552a8930..fa732f7ba82 100644 --- a/contrib/python/ipython/py3/IPython/core/hooks.py +++ b/contrib/python/ipython/py3/IPython/core/hooks.py @@ -1,190 +1,190 @@ -"""Hooks for IPython. - -In Python, it is possible to overwrite any method of any object if you really -want to. But IPython exposes a few 'hooks', methods which are *designed* to -be overwritten by users for customization purposes. This module defines the -default versions of all such hooks, which get used by IPython if not -overridden by the user. - -Hooks are simple functions, but they should be declared with ``self`` as their -first argument, because when activated they are registered into IPython as -instance methods. The self argument will be the IPython running instance -itself, so hooks have full access to the entire IPython object. - -If you wish to define a new hook and activate it, you can make an :doc:`extension -</config/extensions/index>` or a :ref:`startup script <startup_files>`. For -example, you could use a startup file like this:: - - import os - - def calljed(self,filename, linenum): - "My editor hook calls the jed editor directly." - print "Calling my own editor, jed ..." - if os.system('jed +%d %s' % (linenum,filename)) != 0: - raise TryNext() - - def load_ipython_extension(ip): - ip.set_hook('editor', calljed) - -""" - -#***************************************************************************** -# Copyright (C) 2005 Fernando Perez. <fperez@colorado.edu> -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. 
-#***************************************************************************** - -import os -import subprocess -import sys - -from .error import TryNext - -# List here all the default hooks. For now it's just the editor functions -# but over time we'll move here all the public API for user-accessible things. - -__all__ = ['editor', 'synchronize_with_editor', - 'shutdown_hook', 'late_startup_hook', - 'show_in_pager','pre_prompt_hook', - 'pre_run_code_hook', 'clipboard_get'] - -deprecated = {'pre_run_code_hook': "a callback for the 'pre_execute' or 'pre_run_cell' event", - 'late_startup_hook': "a callback for the 'shell_initialized' event", - 'shutdown_hook': "the atexit module", - } - -def editor(self, filename, linenum=None, wait=True): - """Open the default editor at the given filename and linenumber. - - This is IPython's default editor hook, you can use it as an example to - write your own modified one. To set your own editor function as the - new editor hook, call ip.set_hook('editor',yourfunc).""" - - # IPython configures a default editor at startup by reading $EDITOR from - # the environment, and falling back on vi (unix) or notepad (win32). - editor = self.editor - - # marker for at which line to open the file (for existing objects) - if linenum is None or editor=='notepad': - linemark = '' - else: - linemark = '+%d' % int(linenum) - - # Enclose in quotes if necessary and legal - if ' ' in editor and os.path.isfile(editor) and editor[0] != '"': - editor = '"%s"' % editor - - # Call the actual editor - proc = subprocess.Popen('%s %s %s' % (editor, linemark, filename), - shell=True) - if wait and proc.wait() != 0: - raise TryNext() - - -def synchronize_with_editor(self, filename, linenum, column): - pass - - -class CommandChainDispatcher: - """ Dispatch calls to a chain of commands until some func can handle it - - Usage: instantiate, execute "add" to add commands (with optional - priority), execute normally via f() calling mechanism. - - """ - def __init__(self,commands=None): - if commands is None: - self.chain = [] - else: - self.chain = commands - - - def __call__(self,*args, **kw): - """ Command chain is called just like normal func. - - This will call all funcs in chain with the same args as were given to - this function, and return the result of first func that didn't raise - TryNext""" - last_exc = TryNext() - for prio,cmd in self.chain: - #print "prio",prio,"cmd",cmd #dbg - try: - return cmd(*args, **kw) - except TryNext as exc: - last_exc = exc - # if no function will accept it, raise TryNext up to the caller - raise last_exc - - def __str__(self): - return str(self.chain) - - def add(self, func, priority=0): - """ Add a func to the cmd chain with given priority """ - self.chain.append((priority, func)) - self.chain.sort(key=lambda x: x[0]) - - def __iter__(self): - """ Return all objects in chain. - - Handy if the objects are not callable. 
- """ - return iter(self.chain) - - -def shutdown_hook(self): - """ default shutdown hook - - Typically, shutdown hooks should raise TryNext so all shutdown ops are done - """ - - #print "default shutdown hook ok" # dbg - return - - -def late_startup_hook(self): - """ Executed after ipython has been constructed and configured - - """ - #print "default startup hook ok" # dbg - - -def show_in_pager(self, data, start, screen_lines): - """ Run a string through pager """ - # raising TryNext here will use the default paging functionality - raise TryNext - - -def pre_prompt_hook(self): - """ Run before displaying the next prompt - - Use this e.g. to display output from asynchronous operations (in order - to not mess up text entry) - """ - - return None - - -def pre_run_code_hook(self): - """ Executed before running the (prefiltered) code in IPython """ - return None - - -def clipboard_get(self): - """ Get text from the clipboard. - """ - from ..lib.clipboard import ( - osx_clipboard_get, tkinter_clipboard_get, - win32_clipboard_get - ) - if sys.platform == 'win32': - chain = [win32_clipboard_get, tkinter_clipboard_get] - elif sys.platform == 'darwin': - chain = [osx_clipboard_get, tkinter_clipboard_get] - else: - chain = [tkinter_clipboard_get] - dispatcher = CommandChainDispatcher() - for func in chain: - dispatcher.add(func) - text = dispatcher() - return text +"""Hooks for IPython. + +In Python, it is possible to overwrite any method of any object if you really +want to. But IPython exposes a few 'hooks', methods which are *designed* to +be overwritten by users for customization purposes. This module defines the +default versions of all such hooks, which get used by IPython if not +overridden by the user. + +Hooks are simple functions, but they should be declared with ``self`` as their +first argument, because when activated they are registered into IPython as +instance methods. The self argument will be the IPython running instance +itself, so hooks have full access to the entire IPython object. + +If you wish to define a new hook and activate it, you can make an :doc:`extension +</config/extensions/index>` or a :ref:`startup script <startup_files>`. For +example, you could use a startup file like this:: + + import os + + def calljed(self,filename, linenum): + "My editor hook calls the jed editor directly." + print "Calling my own editor, jed ..." + if os.system('jed +%d %s' % (linenum,filename)) != 0: + raise TryNext() + + def load_ipython_extension(ip): + ip.set_hook('editor', calljed) + +""" + +#***************************************************************************** +# Copyright (C) 2005 Fernando Perez. <fperez@colorado.edu> +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#***************************************************************************** + +import os +import subprocess +import sys + +from .error import TryNext + +# List here all the default hooks. For now it's just the editor functions +# but over time we'll move here all the public API for user-accessible things. 
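# A minimal usage sketch (not part of this patch) of the CommandChainDispatcher
# defined further down in this module: the shell chains hooks of the same name
# through it, tries them in ascending priority order, and a hook that raises
# TryNext simply hands control to the next one. The names `primary` and
# `fallback` below are illustrative placeholders, not names from this file.
from IPython.core.hooks import CommandChainDispatcher
from IPython.core.error import TryNext

def primary(text):
    # Decline to handle the call; the dispatcher moves on to the next hook.
    raise TryNext()

def fallback(text):
    return text.upper()

dispatch = CommandChainDispatcher()
dispatch.add(primary, priority=0)     # lower priority values are tried first
dispatch.add(fallback, priority=10)
print(dispatch("hello"))              # -> 'HELLO', served by the fallback hook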
+ +__all__ = ['editor', 'synchronize_with_editor', + 'shutdown_hook', 'late_startup_hook', + 'show_in_pager','pre_prompt_hook', + 'pre_run_code_hook', 'clipboard_get'] + +deprecated = {'pre_run_code_hook': "a callback for the 'pre_execute' or 'pre_run_cell' event", + 'late_startup_hook': "a callback for the 'shell_initialized' event", + 'shutdown_hook': "the atexit module", + } + +def editor(self, filename, linenum=None, wait=True): + """Open the default editor at the given filename and linenumber. + + This is IPython's default editor hook, you can use it as an example to + write your own modified one. To set your own editor function as the + new editor hook, call ip.set_hook('editor',yourfunc).""" + + # IPython configures a default editor at startup by reading $EDITOR from + # the environment, and falling back on vi (unix) or notepad (win32). + editor = self.editor + + # marker for at which line to open the file (for existing objects) + if linenum is None or editor=='notepad': + linemark = '' + else: + linemark = '+%d' % int(linenum) + + # Enclose in quotes if necessary and legal + if ' ' in editor and os.path.isfile(editor) and editor[0] != '"': + editor = '"%s"' % editor + + # Call the actual editor + proc = subprocess.Popen('%s %s %s' % (editor, linemark, filename), + shell=True) + if wait and proc.wait() != 0: + raise TryNext() + + +def synchronize_with_editor(self, filename, linenum, column): + pass + + +class CommandChainDispatcher: + """ Dispatch calls to a chain of commands until some func can handle it + + Usage: instantiate, execute "add" to add commands (with optional + priority), execute normally via f() calling mechanism. + + """ + def __init__(self,commands=None): + if commands is None: + self.chain = [] + else: + self.chain = commands + + + def __call__(self,*args, **kw): + """ Command chain is called just like normal func. + + This will call all funcs in chain with the same args as were given to + this function, and return the result of first func that didn't raise + TryNext""" + last_exc = TryNext() + for prio,cmd in self.chain: + #print "prio",prio,"cmd",cmd #dbg + try: + return cmd(*args, **kw) + except TryNext as exc: + last_exc = exc + # if no function will accept it, raise TryNext up to the caller + raise last_exc + + def __str__(self): + return str(self.chain) + + def add(self, func, priority=0): + """ Add a func to the cmd chain with given priority """ + self.chain.append((priority, func)) + self.chain.sort(key=lambda x: x[0]) + + def __iter__(self): + """ Return all objects in chain. + + Handy if the objects are not callable. + """ + return iter(self.chain) + + +def shutdown_hook(self): + """ default shutdown hook + + Typically, shutdown hooks should raise TryNext so all shutdown ops are done + """ + + #print "default shutdown hook ok" # dbg + return + + +def late_startup_hook(self): + """ Executed after ipython has been constructed and configured + + """ + #print "default startup hook ok" # dbg + + +def show_in_pager(self, data, start, screen_lines): + """ Run a string through pager """ + # raising TryNext here will use the default paging functionality + raise TryNext + + +def pre_prompt_hook(self): + """ Run before displaying the next prompt + + Use this e.g. to display output from asynchronous operations (in order + to not mess up text entry) + """ + + return None + + +def pre_run_code_hook(self): + """ Executed before running the (prefiltered) code in IPython """ + return None + + +def clipboard_get(self): + """ Get text from the clipboard. 
+ """ + from ..lib.clipboard import ( + osx_clipboard_get, tkinter_clipboard_get, + win32_clipboard_get + ) + if sys.platform == 'win32': + chain = [win32_clipboard_get, tkinter_clipboard_get] + elif sys.platform == 'darwin': + chain = [osx_clipboard_get, tkinter_clipboard_get] + else: + chain = [tkinter_clipboard_get] + dispatcher = CommandChainDispatcher() + for func in chain: + dispatcher.add(func) + text = dispatcher() + return text diff --git a/contrib/python/ipython/py3/IPython/core/inputsplitter.py b/contrib/python/ipython/py3/IPython/core/inputsplitter.py index 5682b092a43..e7bc6e7f5a3 100644 --- a/contrib/python/ipython/py3/IPython/core/inputsplitter.py +++ b/contrib/python/ipython/py3/IPython/core/inputsplitter.py @@ -1,772 +1,772 @@ -"""DEPRECATED: Input handling and transformation machinery. - -This module was deprecated in IPython 7.0, in favour of inputtransformer2. - -The first class in this module, :class:`InputSplitter`, is designed to tell when -input from a line-oriented frontend is complete and should be executed, and when -the user should be prompted for another line of code instead. The name 'input -splitter' is largely for historical reasons. - -A companion, :class:`IPythonInputSplitter`, provides the same functionality but -with full support for the extended IPython syntax (magics, system calls, etc). -The code to actually do these transformations is in :mod:`IPython.core.inputtransformer`. -:class:`IPythonInputSplitter` feeds the raw code to the transformers in order -and stores the results. - -For more details, see the class docstrings below. -""" - -from warnings import warn - -warn('IPython.core.inputsplitter is deprecated since IPython 7 in favor of `IPython.core.inputtransformer2`', - DeprecationWarning) - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. -import ast -import codeop -import io -import re -import sys -import tokenize -import warnings - -from IPython.core.inputtransformer import (leading_indent, - classic_prompt, - ipy_prompt, - cellmagic, - assemble_logical_lines, - help_end, - escaped_commands, - assign_from_magic, - assign_from_system, - assemble_python_lines, - ) - -# These are available in this module for backwards compatibility. -from IPython.core.inputtransformer import (ESC_SHELL, ESC_SH_CAP, ESC_HELP, - ESC_HELP2, ESC_MAGIC, ESC_MAGIC2, - ESC_QUOTE, ESC_QUOTE2, ESC_PAREN, ESC_SEQUENCES) - -#----------------------------------------------------------------------------- -# Utilities -#----------------------------------------------------------------------------- - -# FIXME: These are general-purpose utilities that later can be moved to the -# general ward. Kept here for now because we're being very strict about test -# coverage with this code, and this lets us ensure that we keep 100% coverage -# while developing. 
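# A small, illustrative sketch (not part of this patch) of the utility helpers
# defined just below. Note that the module itself is deprecated and emits a
# DeprecationWarning on import, so this is only a reading aid for the code.
from IPython.core.inputsplitter import num_ini_spaces, remove_comments, last_blank

print(num_ini_spaces("    if x:"))            # 4 leading spaces
print(num_ini_spaces("\tx = 1"))              # a tab counts as a single space -> 1
print(remove_comments("x = 1  # set x"))      # comment stripped -> 'x = 1  '
print(last_blank("if True:\n    pass\n\n"))   # trailing blank line -> True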
- -# compiled regexps for autoindent management -dedent_re = re.compile('|'.join([ - r'^\s+raise(\s.*)?$', # raise statement (+ space + other stuff, maybe) - r'^\s+raise\([^\)]*\).*$', # wacky raise with immediate open paren - r'^\s+return(\s.*)?$', # normal return (+ space + other stuff, maybe) - r'^\s+return\([^\)]*\).*$', # wacky return with immediate open paren - r'^\s+pass\s*$', # pass (optionally followed by trailing spaces) - r'^\s+break\s*$', # break (optionally followed by trailing spaces) - r'^\s+continue\s*$', # continue (optionally followed by trailing spaces) -])) -ini_spaces_re = re.compile(r'^([ \t\r\f\v]+)') - -# regexp to match pure comment lines so we don't accidentally insert 'if 1:' -# before pure comments -comment_line_re = re.compile(r'^\s*\#') - - -def num_ini_spaces(s): - """Return the number of initial spaces in a string. - - Note that tabs are counted as a single space. For now, we do *not* support - mixing of tabs and spaces in the user's input. - - Parameters - ---------- - s : string - - Returns - ------- - n : int - """ - - ini_spaces = ini_spaces_re.match(s) - if ini_spaces: - return ini_spaces.end() - else: - return 0 - -# Fake token types for partial_tokenize: -INCOMPLETE_STRING = tokenize.N_TOKENS -IN_MULTILINE_STATEMENT = tokenize.N_TOKENS + 1 - -# The 2 classes below have the same API as TokenInfo, but don't try to look up -# a token type name that they won't find. -class IncompleteString: - type = exact_type = INCOMPLETE_STRING - def __init__(self, s, start, end, line): - self.s = s - self.start = start - self.end = end - self.line = line - -class InMultilineStatement: - type = exact_type = IN_MULTILINE_STATEMENT - def __init__(self, pos, line): - self.s = '' - self.start = self.end = pos - self.line = line - -def partial_tokens(s): - """Iterate over tokens from a possibly-incomplete string of code. - - This adds two special token types: INCOMPLETE_STRING and - IN_MULTILINE_STATEMENT. These can only occur as the last token yielded, and - represent the two main ways for code to be incomplete. - """ - readline = io.StringIO(s).readline - token = tokenize.TokenInfo(tokenize.NEWLINE, '', (1, 0), (1, 0), '') - try: - for token in tokenize.generate_tokens(readline): - yield token - except tokenize.TokenError as e: - # catch EOF error - lines = s.splitlines(keepends=True) - end = len(lines), len(lines[-1]) - if 'multi-line string' in e.args[0]: - l, c = start = token.end - s = lines[l-1][c:] + ''.join(lines[l:]) - yield IncompleteString(s, start, end, lines[-1]) - elif 'multi-line statement' in e.args[0]: - yield InMultilineStatement(end, lines[-1]) - else: - raise - -def find_next_indent(code): - """Find the number of spaces for the next line of indentation""" - tokens = list(partial_tokens(code)) - if tokens[-1].type == tokenize.ENDMARKER: - tokens.pop() - if not tokens: - return 0 - while (tokens[-1].type in {tokenize.DEDENT, tokenize.NEWLINE, tokenize.COMMENT}): - tokens.pop() - - if tokens[-1].type == INCOMPLETE_STRING: - # Inside a multiline string - return 0 - - # Find the indents used before - prev_indents = [0] - def _add_indent(n): - if n != prev_indents[-1]: - prev_indents.append(n) - - tokiter = iter(tokens) - for tok in tokiter: - if tok.type in {tokenize.INDENT, tokenize.DEDENT}: - _add_indent(tok.end[1]) - elif (tok.type == tokenize.NL): - try: - _add_indent(next(tokiter).start[1]) - except StopIteration: - break - - last_indent = prev_indents.pop() - - # If we've just opened a multiline statement (e.g. 
'a = ['), indent more - if tokens[-1].type == IN_MULTILINE_STATEMENT: - if tokens[-2].exact_type in {tokenize.LPAR, tokenize.LSQB, tokenize.LBRACE}: - return last_indent + 4 - return last_indent - - if tokens[-1].exact_type == tokenize.COLON: - # Line ends with colon - indent - return last_indent + 4 - - if last_indent: - # Examine the last line for dedent cues - statements like return or - # raise which normally end a block of code. - last_line_starts = 0 - for i, tok in enumerate(tokens): - if tok.type == tokenize.NEWLINE: - last_line_starts = i + 1 - - last_line_tokens = tokens[last_line_starts:] - names = [t.string for t in last_line_tokens if t.type == tokenize.NAME] - if names and names[0] in {'raise', 'return', 'pass', 'break', 'continue'}: - # Find the most recent indentation less than the current level - for indent in reversed(prev_indents): - if indent < last_indent: - return indent - - return last_indent - - -def last_blank(src): - """Determine if the input source ends in a blank. - - A blank is either a newline or a line consisting of whitespace. - - Parameters - ---------- - src : string - A single or multiline string. - """ - if not src: return False - ll = src.splitlines()[-1] - return (ll == '') or ll.isspace() - - -last_two_blanks_re = re.compile(r'\n\s*\n\s*$', re.MULTILINE) -last_two_blanks_re2 = re.compile(r'.+\n\s*\n\s+$', re.MULTILINE) - -def last_two_blanks(src): - """Determine if the input source ends in two blanks. - - A blank is either a newline or a line consisting of whitespace. - - Parameters - ---------- - src : string - A single or multiline string. - """ - if not src: return False - # The logic here is tricky: I couldn't get a regexp to work and pass all - # the tests, so I took a different approach: split the source by lines, - # grab the last two and prepend '###\n' as a stand-in for whatever was in - # the body before the last two lines. Then, with that structure, it's - # possible to analyze with two regexps. Not the most elegant solution, but - # it works. If anyone tries to change this logic, make sure to validate - # the whole test suite first! - new_src = '\n'.join(['###\n'] + src.splitlines()[-2:]) - return (bool(last_two_blanks_re.match(new_src)) or - bool(last_two_blanks_re2.match(new_src)) ) - - -def remove_comments(src): - """Remove all comments from input source. - - Note: comments are NOT recognized inside of strings! - - Parameters - ---------- - src : string - A single or multiline input string. - - Returns - ------- - String with all Python comments removed. - """ - - return re.sub('#.*', '', src) - - -def get_input_encoding(): - """Return the default standard input encoding. - - If sys.stdin has no encoding, 'ascii' is returned.""" - # There are strange environments for which sys.stdin.encoding is None. We - # ensure that a valid encoding is returned. - encoding = getattr(sys.stdin, 'encoding', None) - if encoding is None: - encoding = 'ascii' - return encoding - -#----------------------------------------------------------------------------- -# Classes and functions for normal Python syntax handling -#----------------------------------------------------------------------------- - -class InputSplitter(object): - r"""An object that can accumulate lines of Python source before execution. - - This object is designed to be fed python source line-by-line, using - :meth:`push`. It will return on each push whether the currently pushed - code could be executed already. 
In addition, it provides a method called - :meth:`push_accepts_more` that can be used to query whether more input - can be pushed into a single interactive block. - - This is a simple example of how an interactive terminal-based client can use - this tool:: - - isp = InputSplitter() - while isp.push_accepts_more(): - indent = ' '*isp.indent_spaces - prompt = '>>> ' + indent - line = indent + raw_input(prompt) - isp.push(line) - print 'Input source was:\n', isp.source_reset(), - """ - # A cache for storing the current indentation - # The first value stores the most recently processed source input - # The second value is the number of spaces for the current indentation - # If self.source matches the first value, the second value is a valid - # current indentation. Otherwise, the cache is invalid and the indentation - # must be recalculated. - _indent_spaces_cache = None, None - # String, indicating the default input encoding. It is computed by default - # at initialization time via get_input_encoding(), but it can be reset by a - # client with specific knowledge of the encoding. - encoding = '' - # String where the current full source input is stored, properly encoded. - # Reading this attribute is the normal way of querying the currently pushed - # source code, that has been properly encoded. - source = '' - # Code object corresponding to the current source. It is automatically - # synced to the source, so it can be queried at any time to obtain the code - # object; it will be None if the source doesn't compile to valid Python. - code = None - - # Private attributes - - # List with lines of input accumulated so far - _buffer = None - # Command compiler - _compile = None - # Boolean indicating whether the current block is complete - _is_complete = None - # Boolean indicating whether the current block has an unrecoverable syntax error - _is_invalid = False - - def __init__(self): - """Create a new InputSplitter instance. - """ - self._buffer = [] - self._compile = codeop.CommandCompiler() - self.encoding = get_input_encoding() - - def reset(self): - """Reset the input buffer and associated state.""" - self._buffer[:] = [] - self.source = '' - self.code = None - self._is_complete = False - self._is_invalid = False - - def source_reset(self): - """Return the input source and perform a full reset. - """ - out = self.source - self.reset() - return out - - def check_complete(self, source): - """Return whether a block of code is ready to execute, or should be continued - - This is a non-stateful API, and will reset the state of this InputSplitter. - - Parameters - ---------- - source : string - Python input code, which can be multiline. - - Returns - ------- - status : str - One of 'complete', 'incomplete', or 'invalid' if source is not a - prefix of valid code. - indent_spaces : int or None - The number of spaces by which to indent the next line of code. If - status is not 'incomplete', this is None. - """ - self.reset() - try: - self.push(source) - except SyntaxError: - # Transformers in IPythonInputSplitter can raise SyntaxError, - # which push() will not catch. - return 'invalid', None - else: - if self._is_invalid: - return 'invalid', None - elif self.push_accepts_more(): - return 'incomplete', self.get_indent_spaces() - else: - return 'complete', None - finally: - self.reset() - - def push(self, lines:str) -> bool: - """Push one or more lines of input. - - This stores the given lines and returns a status code indicating - whether the code forms a complete Python block or not. 
- - Any exceptions generated in compilation are swallowed, but if an - exception was produced, the method returns True. - - Parameters - ---------- - lines : string - One or more lines of Python input. - - Returns - ------- - is_complete : boolean - True if the current input source (the result of the current input - plus prior inputs) forms a complete Python execution block. Note that - this value is also stored as a private attribute (``_is_complete``), so it - can be queried at any time. - """ - assert isinstance(lines, str) - self._store(lines) - source = self.source - - # Before calling _compile(), reset the code object to None so that if an - # exception is raised in compilation, we don't mislead by having - # inconsistent code/source attributes. - self.code, self._is_complete = None, None - self._is_invalid = False - - # Honor termination lines properly - if source.endswith('\\\n'): - return False - - try: - with warnings.catch_warnings(): - warnings.simplefilter('error', SyntaxWarning) - self.code = self._compile(source, symbol="exec") - # Invalid syntax can produce any of a number of different errors from - # inside the compiler, so we have to catch them all. Syntax errors - # immediately produce a 'ready' block, so the invalid Python can be - # sent to the kernel for evaluation with possible ipython - # special-syntax conversion. - except (SyntaxError, OverflowError, ValueError, TypeError, - MemoryError, SyntaxWarning): - self._is_complete = True - self._is_invalid = True - else: - # Compilation didn't produce any exceptions (though it may not have - # given a complete code object) - self._is_complete = self.code is not None - - return self._is_complete - - def push_accepts_more(self): - """Return whether a block of interactive input can accept more input. - - This method is meant to be used by line-oriented frontends, who need to - guess whether a block is complete or not based solely on prior and - current input lines. The InputSplitter considers it has a complete - interactive block and will not accept more input when either: - - * A SyntaxError is raised - - * The code is complete and consists of a single line or a single - non-compound statement - - * The code is complete and has a blank line at the end - - If the current input produces a syntax error, this method immediately - returns False but does *not* raise the syntax error exception, as - typically clients will want to send invalid syntax to an execution - backend which might convert the invalid syntax into valid Python via - one of the dynamic IPython mechanisms. - """ - - # With incomplete input, unconditionally accept more - # A syntax error also sets _is_complete to True - see push() - if not self._is_complete: - #print("Not complete") # debug - return True - - # The user can make any (complete) input execute by leaving a blank line - last_line = self.source.splitlines()[-1] - if (not last_line) or last_line.isspace(): - #print("Blank line") # debug - return False - - # If there's just a single line or AST node, and we're flush left, as is - # the case after a simple statement such as 'a=1', we want to execute it - # straight away. 
- if self.get_indent_spaces() == 0: - if len(self.source.splitlines()) <= 1: - return False - - try: - code_ast = ast.parse(u''.join(self._buffer)) - except Exception: - #print("Can't parse AST") # debug - return False - else: - if len(code_ast.body) == 1 and \ - not hasattr(code_ast.body[0], 'body'): - #print("Simple statement") # debug - return False - - # General fallback - accept more code - return True - - def get_indent_spaces(self): - sourcefor, n = self._indent_spaces_cache - if sourcefor == self.source: - return n - - # self.source always has a trailing newline - n = find_next_indent(self.source[:-1]) - self._indent_spaces_cache = (self.source, n) - return n - - # Backwards compatibility. I think all code that used .indent_spaces was - # inside IPython, but we can leave this here until IPython 7 in case any - # other modules are using it. -TK, November 2017 - indent_spaces = property(get_indent_spaces) - - def _store(self, lines, buffer=None, store='source'): - """Store one or more lines of input. - - If input lines are not newline-terminated, a newline is automatically - appended.""" - - if buffer is None: - buffer = self._buffer - - if lines.endswith('\n'): - buffer.append(lines) - else: - buffer.append(lines+'\n') - setattr(self, store, self._set_source(buffer)) - - def _set_source(self, buffer): - return u''.join(buffer) - - -class IPythonInputSplitter(InputSplitter): - """An input splitter that recognizes all of IPython's special syntax.""" - - # String with raw, untransformed input. - source_raw = '' - - # Flag to track when a transformer has stored input that it hasn't given - # back yet. - transformer_accumulating = False - - # Flag to track when assemble_python_lines has stored input that it hasn't - # given back yet. - within_python_line = False - - # Private attributes - - # List with lines of raw input accumulated so far. - _buffer_raw = None - - def __init__(self, line_input_checker=True, physical_line_transforms=None, - logical_line_transforms=None, python_line_transforms=None): - super(IPythonInputSplitter, self).__init__() - self._buffer_raw = [] - self._validate = True - - if physical_line_transforms is not None: - self.physical_line_transforms = physical_line_transforms - else: - self.physical_line_transforms = [ - leading_indent(), - classic_prompt(), - ipy_prompt(), - cellmagic(end_on_blank_line=line_input_checker), - ] - - self.assemble_logical_lines = assemble_logical_lines() - if logical_line_transforms is not None: - self.logical_line_transforms = logical_line_transforms - else: - self.logical_line_transforms = [ - help_end(), - escaped_commands(), - assign_from_magic(), - assign_from_system(), - ] - - self.assemble_python_lines = assemble_python_lines() - if python_line_transforms is not None: - self.python_line_transforms = python_line_transforms - else: - # We don't use any of these at present - self.python_line_transforms = [] - - @property - def transforms(self): - "Quick access to all transformers." 
- return self.physical_line_transforms + \ - [self.assemble_logical_lines] + self.logical_line_transforms + \ - [self.assemble_python_lines] + self.python_line_transforms - - @property - def transforms_in_use(self): - """Transformers, excluding logical line transformers if we're in a - Python line.""" - t = self.physical_line_transforms[:] - if not self.within_python_line: - t += [self.assemble_logical_lines] + self.logical_line_transforms - return t + [self.assemble_python_lines] + self.python_line_transforms - - def reset(self): - """Reset the input buffer and associated state.""" - super(IPythonInputSplitter, self).reset() - self._buffer_raw[:] = [] - self.source_raw = '' - self.transformer_accumulating = False - self.within_python_line = False - - for t in self.transforms: - try: - t.reset() - except SyntaxError: - # Nothing that calls reset() expects to handle transformer - # errors - pass - - def flush_transformers(self): - def _flush(transform, outs): - """yield transformed lines - - always strings, never None - - transform: the current transform - outs: an iterable of previously transformed inputs. - Each may be multiline, which will be passed - one line at a time to transform. - """ - for out in outs: - for line in out.splitlines(): - # push one line at a time - tmp = transform.push(line) - if tmp is not None: - yield tmp - - # reset the transform - tmp = transform.reset() - if tmp is not None: - yield tmp - - out = [] - for t in self.transforms_in_use: - out = _flush(t, out) - - out = list(out) - if out: - self._store('\n'.join(out)) - - def raw_reset(self): - """Return raw input only and perform a full reset. - """ - out = self.source_raw - self.reset() - return out - - def source_reset(self): - try: - self.flush_transformers() - return self.source - finally: - self.reset() - - def push_accepts_more(self): - if self.transformer_accumulating: - return True - else: - return super(IPythonInputSplitter, self).push_accepts_more() - - def transform_cell(self, cell): - """Process and translate a cell of input. - """ - self.reset() - try: - self.push(cell) - self.flush_transformers() - return self.source - finally: - self.reset() - - def push(self, lines:str) -> bool: - """Push one or more lines of IPython input. - - This stores the given lines and returns a status code indicating - whether the code forms a complete Python block or not, after processing - all input lines for special IPython syntax. - - Any exceptions generated in compilation are swallowed, but if an - exception was produced, the method returns True. - - Parameters - ---------- - lines : string - One or more lines of Python input. - - Returns - ------- - is_complete : boolean - True if the current input source (the result of the current input - plus prior inputs) forms a complete Python execution block. Note that - this value is also stored as a private attribute (_is_complete), so it - can be queried at any time. - """ - assert isinstance(lines, str) - # We must ensure all input is pure unicode - # ''.splitlines() --> [], but we need to push the empty line to transformers - lines_list = lines.splitlines() - if not lines_list: - lines_list = [''] - - # Store raw source before applying any transformations to it. Note - # that this must be done *after* the reset() call that would otherwise - # flush the buffer. 
- self._store(lines, self._buffer_raw, 'source_raw') - - transformed_lines_list = [] - for line in lines_list: - transformed = self._transform_line(line) - if transformed is not None: - transformed_lines_list.append(transformed) - - if transformed_lines_list: - transformed_lines = '\n'.join(transformed_lines_list) - return super(IPythonInputSplitter, self).push(transformed_lines) - else: - # Got nothing back from transformers - they must be waiting for - # more input. - return False - - def _transform_line(self, line): - """Push a line of input code through the various transformers. - - Returns any output from the transformers, or None if a transformer - is accumulating lines. - - Sets self.transformer_accumulating as a side effect. - """ - def _accumulating(dbg): - #print(dbg) - self.transformer_accumulating = True - return None - - for transformer in self.physical_line_transforms: - line = transformer.push(line) - if line is None: - return _accumulating(transformer) - - if not self.within_python_line: - line = self.assemble_logical_lines.push(line) - if line is None: - return _accumulating('acc logical line') - - for transformer in self.logical_line_transforms: - line = transformer.push(line) - if line is None: - return _accumulating(transformer) - - line = self.assemble_python_lines.push(line) - if line is None: - self.within_python_line = True - return _accumulating('acc python line') - else: - self.within_python_line = False - - for transformer in self.python_line_transforms: - line = transformer.push(line) - if line is None: - return _accumulating(transformer) - - #print("transformers clear") #debug - self.transformer_accumulating = False - return line - +"""DEPRECATED: Input handling and transformation machinery. + +This module was deprecated in IPython 7.0, in favour of inputtransformer2. + +The first class in this module, :class:`InputSplitter`, is designed to tell when +input from a line-oriented frontend is complete and should be executed, and when +the user should be prompted for another line of code instead. The name 'input +splitter' is largely for historical reasons. + +A companion, :class:`IPythonInputSplitter`, provides the same functionality but +with full support for the extended IPython syntax (magics, system calls, etc). +The code to actually do these transformations is in :mod:`IPython.core.inputtransformer`. +:class:`IPythonInputSplitter` feeds the raw code to the transformers in order +and stores the results. + +For more details, see the class docstrings below. +""" + +from warnings import warn + +warn('IPython.core.inputsplitter is deprecated since IPython 7 in favor of `IPython.core.inputtransformer2`', + DeprecationWarning) + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. +import ast +import codeop +import io +import re +import sys +import tokenize +import warnings + +from IPython.core.inputtransformer import (leading_indent, + classic_prompt, + ipy_prompt, + cellmagic, + assemble_logical_lines, + help_end, + escaped_commands, + assign_from_magic, + assign_from_system, + assemble_python_lines, + ) + +# These are available in this module for backwards compatibility. 
+from IPython.core.inputtransformer import (ESC_SHELL, ESC_SH_CAP, ESC_HELP, + ESC_HELP2, ESC_MAGIC, ESC_MAGIC2, + ESC_QUOTE, ESC_QUOTE2, ESC_PAREN, ESC_SEQUENCES) + +#----------------------------------------------------------------------------- +# Utilities +#----------------------------------------------------------------------------- + +# FIXME: These are general-purpose utilities that later can be moved to the +# general ward. Kept here for now because we're being very strict about test +# coverage with this code, and this lets us ensure that we keep 100% coverage +# while developing. + +# compiled regexps for autoindent management +dedent_re = re.compile('|'.join([ + r'^\s+raise(\s.*)?$', # raise statement (+ space + other stuff, maybe) + r'^\s+raise\([^\)]*\).*$', # wacky raise with immediate open paren + r'^\s+return(\s.*)?$', # normal return (+ space + other stuff, maybe) + r'^\s+return\([^\)]*\).*$', # wacky return with immediate open paren + r'^\s+pass\s*$', # pass (optionally followed by trailing spaces) + r'^\s+break\s*$', # break (optionally followed by trailing spaces) + r'^\s+continue\s*$', # continue (optionally followed by trailing spaces) +])) +ini_spaces_re = re.compile(r'^([ \t\r\f\v]+)') + +# regexp to match pure comment lines so we don't accidentally insert 'if 1:' +# before pure comments +comment_line_re = re.compile(r'^\s*\#') + + +def num_ini_spaces(s): + """Return the number of initial spaces in a string. + + Note that tabs are counted as a single space. For now, we do *not* support + mixing of tabs and spaces in the user's input. + + Parameters + ---------- + s : string + + Returns + ------- + n : int + """ + + ini_spaces = ini_spaces_re.match(s) + if ini_spaces: + return ini_spaces.end() + else: + return 0 + +# Fake token types for partial_tokenize: +INCOMPLETE_STRING = tokenize.N_TOKENS +IN_MULTILINE_STATEMENT = tokenize.N_TOKENS + 1 + +# The 2 classes below have the same API as TokenInfo, but don't try to look up +# a token type name that they won't find. +class IncompleteString: + type = exact_type = INCOMPLETE_STRING + def __init__(self, s, start, end, line): + self.s = s + self.start = start + self.end = end + self.line = line + +class InMultilineStatement: + type = exact_type = IN_MULTILINE_STATEMENT + def __init__(self, pos, line): + self.s = '' + self.start = self.end = pos + self.line = line + +def partial_tokens(s): + """Iterate over tokens from a possibly-incomplete string of code. + + This adds two special token types: INCOMPLETE_STRING and + IN_MULTILINE_STATEMENT. These can only occur as the last token yielded, and + represent the two main ways for code to be incomplete. 
+ """ + readline = io.StringIO(s).readline + token = tokenize.TokenInfo(tokenize.NEWLINE, '', (1, 0), (1, 0), '') + try: + for token in tokenize.generate_tokens(readline): + yield token + except tokenize.TokenError as e: + # catch EOF error + lines = s.splitlines(keepends=True) + end = len(lines), len(lines[-1]) + if 'multi-line string' in e.args[0]: + l, c = start = token.end + s = lines[l-1][c:] + ''.join(lines[l:]) + yield IncompleteString(s, start, end, lines[-1]) + elif 'multi-line statement' in e.args[0]: + yield InMultilineStatement(end, lines[-1]) + else: + raise + +def find_next_indent(code): + """Find the number of spaces for the next line of indentation""" + tokens = list(partial_tokens(code)) + if tokens[-1].type == tokenize.ENDMARKER: + tokens.pop() + if not tokens: + return 0 + while (tokens[-1].type in {tokenize.DEDENT, tokenize.NEWLINE, tokenize.COMMENT}): + tokens.pop() + + if tokens[-1].type == INCOMPLETE_STRING: + # Inside a multiline string + return 0 + + # Find the indents used before + prev_indents = [0] + def _add_indent(n): + if n != prev_indents[-1]: + prev_indents.append(n) + + tokiter = iter(tokens) + for tok in tokiter: + if tok.type in {tokenize.INDENT, tokenize.DEDENT}: + _add_indent(tok.end[1]) + elif (tok.type == tokenize.NL): + try: + _add_indent(next(tokiter).start[1]) + except StopIteration: + break + + last_indent = prev_indents.pop() + + # If we've just opened a multiline statement (e.g. 'a = ['), indent more + if tokens[-1].type == IN_MULTILINE_STATEMENT: + if tokens[-2].exact_type in {tokenize.LPAR, tokenize.LSQB, tokenize.LBRACE}: + return last_indent + 4 + return last_indent + + if tokens[-1].exact_type == tokenize.COLON: + # Line ends with colon - indent + return last_indent + 4 + + if last_indent: + # Examine the last line for dedent cues - statements like return or + # raise which normally end a block of code. + last_line_starts = 0 + for i, tok in enumerate(tokens): + if tok.type == tokenize.NEWLINE: + last_line_starts = i + 1 + + last_line_tokens = tokens[last_line_starts:] + names = [t.string for t in last_line_tokens if t.type == tokenize.NAME] + if names and names[0] in {'raise', 'return', 'pass', 'break', 'continue'}: + # Find the most recent indentation less than the current level + for indent in reversed(prev_indents): + if indent < last_indent: + return indent + + return last_indent + + +def last_blank(src): + """Determine if the input source ends in a blank. + + A blank is either a newline or a line consisting of whitespace. + + Parameters + ---------- + src : string + A single or multiline string. + """ + if not src: return False + ll = src.splitlines()[-1] + return (ll == '') or ll.isspace() + + +last_two_blanks_re = re.compile(r'\n\s*\n\s*$', re.MULTILINE) +last_two_blanks_re2 = re.compile(r'.+\n\s*\n\s+$', re.MULTILINE) + +def last_two_blanks(src): + """Determine if the input source ends in two blanks. + + A blank is either a newline or a line consisting of whitespace. + + Parameters + ---------- + src : string + A single or multiline string. + """ + if not src: return False + # The logic here is tricky: I couldn't get a regexp to work and pass all + # the tests, so I took a different approach: split the source by lines, + # grab the last two and prepend '###\n' as a stand-in for whatever was in + # the body before the last two lines. Then, with that structure, it's + # possible to analyze with two regexps. Not the most elegant solution, but + # it works. 
If anyone tries to change this logic, make sure to validate + # the whole test suite first! + new_src = '\n'.join(['###\n'] + src.splitlines()[-2:]) + return (bool(last_two_blanks_re.match(new_src)) or + bool(last_two_blanks_re2.match(new_src)) ) + + +def remove_comments(src): + """Remove all comments from input source. + + Note: comments are NOT recognized inside of strings! + + Parameters + ---------- + src : string + A single or multiline input string. + + Returns + ------- + String with all Python comments removed. + """ + + return re.sub('#.*', '', src) + + +def get_input_encoding(): + """Return the default standard input encoding. + + If sys.stdin has no encoding, 'ascii' is returned.""" + # There are strange environments for which sys.stdin.encoding is None. We + # ensure that a valid encoding is returned. + encoding = getattr(sys.stdin, 'encoding', None) + if encoding is None: + encoding = 'ascii' + return encoding + +#----------------------------------------------------------------------------- +# Classes and functions for normal Python syntax handling +#----------------------------------------------------------------------------- + +class InputSplitter(object): + r"""An object that can accumulate lines of Python source before execution. + + This object is designed to be fed python source line-by-line, using + :meth:`push`. It will return on each push whether the currently pushed + code could be executed already. In addition, it provides a method called + :meth:`push_accepts_more` that can be used to query whether more input + can be pushed into a single interactive block. + + This is a simple example of how an interactive terminal-based client can use + this tool:: + + isp = InputSplitter() + while isp.push_accepts_more(): + indent = ' '*isp.indent_spaces + prompt = '>>> ' + indent + line = indent + raw_input(prompt) + isp.push(line) + print 'Input source was:\n', isp.source_reset(), + """ + # A cache for storing the current indentation + # The first value stores the most recently processed source input + # The second value is the number of spaces for the current indentation + # If self.source matches the first value, the second value is a valid + # current indentation. Otherwise, the cache is invalid and the indentation + # must be recalculated. + _indent_spaces_cache = None, None + # String, indicating the default input encoding. It is computed by default + # at initialization time via get_input_encoding(), but it can be reset by a + # client with specific knowledge of the encoding. + encoding = '' + # String where the current full source input is stored, properly encoded. + # Reading this attribute is the normal way of querying the currently pushed + # source code, that has been properly encoded. + source = '' + # Code object corresponding to the current source. It is automatically + # synced to the source, so it can be queried at any time to obtain the code + # object; it will be None if the source doesn't compile to valid Python. + code = None + + # Private attributes + + # List with lines of input accumulated so far + _buffer = None + # Command compiler + _compile = None + # Boolean indicating whether the current block is complete + _is_complete = None + # Boolean indicating whether the current block has an unrecoverable syntax error + _is_invalid = False + + def __init__(self): + """Create a new InputSplitter instance. 
+ """ + self._buffer = [] + self._compile = codeop.CommandCompiler() + self.encoding = get_input_encoding() + + def reset(self): + """Reset the input buffer and associated state.""" + self._buffer[:] = [] + self.source = '' + self.code = None + self._is_complete = False + self._is_invalid = False + + def source_reset(self): + """Return the input source and perform a full reset. + """ + out = self.source + self.reset() + return out + + def check_complete(self, source): + """Return whether a block of code is ready to execute, or should be continued + + This is a non-stateful API, and will reset the state of this InputSplitter. + + Parameters + ---------- + source : string + Python input code, which can be multiline. + + Returns + ------- + status : str + One of 'complete', 'incomplete', or 'invalid' if source is not a + prefix of valid code. + indent_spaces : int or None + The number of spaces by which to indent the next line of code. If + status is not 'incomplete', this is None. + """ + self.reset() + try: + self.push(source) + except SyntaxError: + # Transformers in IPythonInputSplitter can raise SyntaxError, + # which push() will not catch. + return 'invalid', None + else: + if self._is_invalid: + return 'invalid', None + elif self.push_accepts_more(): + return 'incomplete', self.get_indent_spaces() + else: + return 'complete', None + finally: + self.reset() + + def push(self, lines:str) -> bool: + """Push one or more lines of input. + + This stores the given lines and returns a status code indicating + whether the code forms a complete Python block or not. + + Any exceptions generated in compilation are swallowed, but if an + exception was produced, the method returns True. + + Parameters + ---------- + lines : string + One or more lines of Python input. + + Returns + ------- + is_complete : boolean + True if the current input source (the result of the current input + plus prior inputs) forms a complete Python execution block. Note that + this value is also stored as a private attribute (``_is_complete``), so it + can be queried at any time. + """ + assert isinstance(lines, str) + self._store(lines) + source = self.source + + # Before calling _compile(), reset the code object to None so that if an + # exception is raised in compilation, we don't mislead by having + # inconsistent code/source attributes. + self.code, self._is_complete = None, None + self._is_invalid = False + + # Honor termination lines properly + if source.endswith('\\\n'): + return False + + try: + with warnings.catch_warnings(): + warnings.simplefilter('error', SyntaxWarning) + self.code = self._compile(source, symbol="exec") + # Invalid syntax can produce any of a number of different errors from + # inside the compiler, so we have to catch them all. Syntax errors + # immediately produce a 'ready' block, so the invalid Python can be + # sent to the kernel for evaluation with possible ipython + # special-syntax conversion. + except (SyntaxError, OverflowError, ValueError, TypeError, + MemoryError, SyntaxWarning): + self._is_complete = True + self._is_invalid = True + else: + # Compilation didn't produce any exceptions (though it may not have + # given a complete code object) + self._is_complete = self.code is not None + + return self._is_complete + + def push_accepts_more(self): + """Return whether a block of interactive input can accept more input. + + This method is meant to be used by line-oriented frontends, who need to + guess whether a block is complete or not based solely on prior and + current input lines. 
The InputSplitter considers it has a complete + interactive block and will not accept more input when either: + + * A SyntaxError is raised + + * The code is complete and consists of a single line or a single + non-compound statement + + * The code is complete and has a blank line at the end + + If the current input produces a syntax error, this method immediately + returns False but does *not* raise the syntax error exception, as + typically clients will want to send invalid syntax to an execution + backend which might convert the invalid syntax into valid Python via + one of the dynamic IPython mechanisms. + """ + + # With incomplete input, unconditionally accept more + # A syntax error also sets _is_complete to True - see push() + if not self._is_complete: + #print("Not complete") # debug + return True + + # The user can make any (complete) input execute by leaving a blank line + last_line = self.source.splitlines()[-1] + if (not last_line) or last_line.isspace(): + #print("Blank line") # debug + return False + + # If there's just a single line or AST node, and we're flush left, as is + # the case after a simple statement such as 'a=1', we want to execute it + # straight away. + if self.get_indent_spaces() == 0: + if len(self.source.splitlines()) <= 1: + return False + + try: + code_ast = ast.parse(u''.join(self._buffer)) + except Exception: + #print("Can't parse AST") # debug + return False + else: + if len(code_ast.body) == 1 and \ + not hasattr(code_ast.body[0], 'body'): + #print("Simple statement") # debug + return False + + # General fallback - accept more code + return True + + def get_indent_spaces(self): + sourcefor, n = self._indent_spaces_cache + if sourcefor == self.source: + return n + + # self.source always has a trailing newline + n = find_next_indent(self.source[:-1]) + self._indent_spaces_cache = (self.source, n) + return n + + # Backwards compatibility. I think all code that used .indent_spaces was + # inside IPython, but we can leave this here until IPython 7 in case any + # other modules are using it. -TK, November 2017 + indent_spaces = property(get_indent_spaces) + + def _store(self, lines, buffer=None, store='source'): + """Store one or more lines of input. + + If input lines are not newline-terminated, a newline is automatically + appended.""" + + if buffer is None: + buffer = self._buffer + + if lines.endswith('\n'): + buffer.append(lines) + else: + buffer.append(lines+'\n') + setattr(self, store, self._set_source(buffer)) + + def _set_source(self, buffer): + return u''.join(buffer) + + +class IPythonInputSplitter(InputSplitter): + """An input splitter that recognizes all of IPython's special syntax.""" + + # String with raw, untransformed input. + source_raw = '' + + # Flag to track when a transformer has stored input that it hasn't given + # back yet. + transformer_accumulating = False + + # Flag to track when assemble_python_lines has stored input that it hasn't + # given back yet. + within_python_line = False + + # Private attributes + + # List with lines of raw input accumulated so far. 
+ _buffer_raw = None + + def __init__(self, line_input_checker=True, physical_line_transforms=None, + logical_line_transforms=None, python_line_transforms=None): + super(IPythonInputSplitter, self).__init__() + self._buffer_raw = [] + self._validate = True + + if physical_line_transforms is not None: + self.physical_line_transforms = physical_line_transforms + else: + self.physical_line_transforms = [ + leading_indent(), + classic_prompt(), + ipy_prompt(), + cellmagic(end_on_blank_line=line_input_checker), + ] + + self.assemble_logical_lines = assemble_logical_lines() + if logical_line_transforms is not None: + self.logical_line_transforms = logical_line_transforms + else: + self.logical_line_transforms = [ + help_end(), + escaped_commands(), + assign_from_magic(), + assign_from_system(), + ] + + self.assemble_python_lines = assemble_python_lines() + if python_line_transforms is not None: + self.python_line_transforms = python_line_transforms + else: + # We don't use any of these at present + self.python_line_transforms = [] + + @property + def transforms(self): + "Quick access to all transformers." + return self.physical_line_transforms + \ + [self.assemble_logical_lines] + self.logical_line_transforms + \ + [self.assemble_python_lines] + self.python_line_transforms + + @property + def transforms_in_use(self): + """Transformers, excluding logical line transformers if we're in a + Python line.""" + t = self.physical_line_transforms[:] + if not self.within_python_line: + t += [self.assemble_logical_lines] + self.logical_line_transforms + return t + [self.assemble_python_lines] + self.python_line_transforms + + def reset(self): + """Reset the input buffer and associated state.""" + super(IPythonInputSplitter, self).reset() + self._buffer_raw[:] = [] + self.source_raw = '' + self.transformer_accumulating = False + self.within_python_line = False + + for t in self.transforms: + try: + t.reset() + except SyntaxError: + # Nothing that calls reset() expects to handle transformer + # errors + pass + + def flush_transformers(self): + def _flush(transform, outs): + """yield transformed lines + + always strings, never None + + transform: the current transform + outs: an iterable of previously transformed inputs. + Each may be multiline, which will be passed + one line at a time to transform. + """ + for out in outs: + for line in out.splitlines(): + # push one line at a time + tmp = transform.push(line) + if tmp is not None: + yield tmp + + # reset the transform + tmp = transform.reset() + if tmp is not None: + yield tmp + + out = [] + for t in self.transforms_in_use: + out = _flush(t, out) + + out = list(out) + if out: + self._store('\n'.join(out)) + + def raw_reset(self): + """Return raw input only and perform a full reset. + """ + out = self.source_raw + self.reset() + return out + + def source_reset(self): + try: + self.flush_transformers() + return self.source + finally: + self.reset() + + def push_accepts_more(self): + if self.transformer_accumulating: + return True + else: + return super(IPythonInputSplitter, self).push_accepts_more() + + def transform_cell(self, cell): + """Process and translate a cell of input. + """ + self.reset() + try: + self.push(cell) + self.flush_transformers() + return self.source + finally: + self.reset() + + def push(self, lines:str) -> bool: + """Push one or more lines of IPython input. 
+ + This stores the given lines and returns a status code indicating + whether the code forms a complete Python block or not, after processing + all input lines for special IPython syntax. + + Any exceptions generated in compilation are swallowed, but if an + exception was produced, the method returns True. + + Parameters + ---------- + lines : string + One or more lines of Python input. + + Returns + ------- + is_complete : boolean + True if the current input source (the result of the current input + plus prior inputs) forms a complete Python execution block. Note that + this value is also stored as a private attribute (_is_complete), so it + can be queried at any time. + """ + assert isinstance(lines, str) + # We must ensure all input is pure unicode + # ''.splitlines() --> [], but we need to push the empty line to transformers + lines_list = lines.splitlines() + if not lines_list: + lines_list = [''] + + # Store raw source before applying any transformations to it. Note + # that this must be done *after* the reset() call that would otherwise + # flush the buffer. + self._store(lines, self._buffer_raw, 'source_raw') + + transformed_lines_list = [] + for line in lines_list: + transformed = self._transform_line(line) + if transformed is not None: + transformed_lines_list.append(transformed) + + if transformed_lines_list: + transformed_lines = '\n'.join(transformed_lines_list) + return super(IPythonInputSplitter, self).push(transformed_lines) + else: + # Got nothing back from transformers - they must be waiting for + # more input. + return False + + def _transform_line(self, line): + """Push a line of input code through the various transformers. + + Returns any output from the transformers, or None if a transformer + is accumulating lines. + + Sets self.transformer_accumulating as a side effect. + """ + def _accumulating(dbg): + #print(dbg) + self.transformer_accumulating = True + return None + + for transformer in self.physical_line_transforms: + line = transformer.push(line) + if line is None: + return _accumulating(transformer) + + if not self.within_python_line: + line = self.assemble_logical_lines.push(line) + if line is None: + return _accumulating('acc logical line') + + for transformer in self.logical_line_transforms: + line = transformer.push(line) + if line is None: + return _accumulating(transformer) + + line = self.assemble_python_lines.push(line) + if line is None: + self.within_python_line = True + return _accumulating('acc python line') + else: + self.within_python_line = False + + for transformer in self.python_line_transforms: + line = transformer.push(line) + if line is None: + return _accumulating(transformer) + + #print("transformers clear") #debug + self.transformer_accumulating = False + return line + diff --git a/contrib/python/ipython/py3/IPython/core/inputtransformer.py b/contrib/python/ipython/py3/IPython/core/inputtransformer.py index 2e29b8bf214..afeca93cc0e 100644 --- a/contrib/python/ipython/py3/IPython/core/inputtransformer.py +++ b/contrib/python/ipython/py3/IPython/core/inputtransformer.py @@ -1,536 +1,536 @@ -"""DEPRECATED: Input transformer classes to support IPython special syntax. - -This module was deprecated in IPython 7.0, in favour of inputtransformer2. - -This includes the machinery to recognise and transform ``%magic`` commands, -``!system`` commands, ``help?`` querying, prompt stripping, and so forth. 
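An illustrative aside on the transformer protocol this module's docstring describes: each transformer exposes `push()` (returning the transformed line, or `None` while it is still accumulating input) and `reset()`. The sketch below is hypothetical and not part of the diff; it wraps a throwaway function with `StatelessInputTransformer.wrap`, the same factory pattern the built-in transformers defined later in this file use.

```python
# Hypothetical illustration (not part of the diff) of the push()/reset()
# protocol and the wrap() factory used by the transformers in this module.
import warnings

with warnings.catch_warnings():
    warnings.simplefilter("ignore", DeprecationWarning)
    from IPython.core.inputtransformer import StatelessInputTransformer

@StatelessInputTransformer.wrap
def strip_trailing_semicolons(line):
    """Toy transformer: drop trailing semicolons from a logical line."""
    return line.rstrip(';')

t = strip_trailing_semicolons()   # wrap() returns a factory, not an instance
print(t.push("x = 1;;"))          # -> 'x = 1' (stateless: transforms immediately)
print(t.reset())                  # -> None   (nothing accumulated to flush)
```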
-""" -import abc -import functools -import re -import tokenize -from tokenize import generate_tokens, untokenize, TokenError -from io import StringIO - -from IPython.core.splitinput import LineInfo - -#----------------------------------------------------------------------------- -# Globals -#----------------------------------------------------------------------------- - -# The escape sequences that define the syntax transformations IPython will -# apply to user input. These can NOT be just changed here: many regular -# expressions and other parts of the code may use their hardcoded values, and -# for all intents and purposes they constitute the 'IPython syntax', so they -# should be considered fixed. - -ESC_SHELL = '!' # Send line to underlying system shell -ESC_SH_CAP = '!!' # Send line to system shell and capture output -ESC_HELP = '?' # Find information about object -ESC_HELP2 = '??' # Find extra-detailed information about object -ESC_MAGIC = '%' # Call magic function -ESC_MAGIC2 = '%%' # Call cell-magic function -ESC_QUOTE = ',' # Split args on whitespace, quote each as string and call -ESC_QUOTE2 = ';' # Quote all args as a single string, call -ESC_PAREN = '/' # Call first argument with rest of line as arguments - -ESC_SEQUENCES = [ESC_SHELL, ESC_SH_CAP, ESC_HELP ,\ - ESC_HELP2, ESC_MAGIC, ESC_MAGIC2,\ - ESC_QUOTE, ESC_QUOTE2, ESC_PAREN ] - - -class InputTransformer(metaclass=abc.ABCMeta): - """Abstract base class for line-based input transformers.""" - - @abc.abstractmethod - def push(self, line): - """Send a line of input to the transformer, returning the transformed - input or None if the transformer is waiting for more input. - - Must be overridden by subclasses. - - Implementations may raise ``SyntaxError`` if the input is invalid. No - other exceptions may be raised. - """ - pass - - @abc.abstractmethod - def reset(self): - """Return, transformed any lines that the transformer has accumulated, - and reset its internal state. - - Must be overridden by subclasses. - """ - pass - - @classmethod - def wrap(cls, func): - """Can be used by subclasses as a decorator, to return a factory that - will allow instantiation with the decorated object. - """ - @functools.wraps(func) - def transformer_factory(**kwargs): - return cls(func, **kwargs) - - return transformer_factory - -class StatelessInputTransformer(InputTransformer): - """Wrapper for a stateless input transformer implemented as a function.""" - def __init__(self, func): - self.func = func - - def __repr__(self): - return "StatelessInputTransformer(func={0!r})".format(self.func) - - def push(self, line): - """Send a line of input to the transformer, returning the - transformed input.""" - return self.func(line) - - def reset(self): - """No-op - exists for compatibility.""" - pass - -class CoroutineInputTransformer(InputTransformer): - """Wrapper for an input transformer implemented as a coroutine.""" - def __init__(self, coro, **kwargs): - # Prime it - self.coro = coro(**kwargs) - next(self.coro) - - def __repr__(self): - return "CoroutineInputTransformer(coro={0!r})".format(self.coro) - - def push(self, line): - """Send a line of input to the transformer, returning the - transformed input or None if the transformer is waiting for more - input. - """ - return self.coro.send(line) - - def reset(self): - """Return, transformed any lines that the transformer has - accumulated, and reset its internal state. 
- """ - return self.coro.send(None) - -class TokenInputTransformer(InputTransformer): - """Wrapper for a token-based input transformer. - - func should accept a list of tokens (5-tuples, see tokenize docs), and - return an iterable which can be passed to tokenize.untokenize(). - """ - def __init__(self, func): - self.func = func - self.buf = [] - self.reset_tokenizer() - - def reset_tokenizer(self): - it = iter(self.buf) - self.tokenizer = generate_tokens(it.__next__) - - def push(self, line): - self.buf.append(line + '\n') - if all(l.isspace() for l in self.buf): - return self.reset() - - tokens = [] - stop_at_NL = False - try: - for intok in self.tokenizer: - tokens.append(intok) - t = intok[0] - if t == tokenize.NEWLINE or (stop_at_NL and t == tokenize.NL): - # Stop before we try to pull a line we don't have yet - break - elif t == tokenize.ERRORTOKEN: - stop_at_NL = True - except TokenError: - # Multi-line statement - stop and try again with the next line - self.reset_tokenizer() - return None - - return self.output(tokens) - - def output(self, tokens): - self.buf.clear() - self.reset_tokenizer() - return untokenize(self.func(tokens)).rstrip('\n') - - def reset(self): - l = ''.join(self.buf) - self.buf.clear() - self.reset_tokenizer() - if l: - return l.rstrip('\n') - -class assemble_python_lines(TokenInputTransformer): - def __init__(self): - super(assemble_python_lines, self).__init__(None) - - def output(self, tokens): - return self.reset() - -@CoroutineInputTransformer.wrap -def assemble_logical_lines(): - r"""Join lines following explicit line continuations (\)""" - line = '' - while True: - line = (yield line) - if not line or line.isspace(): - continue - - parts = [] - while line is not None: - if line.endswith('\\') and (not has_comment(line)): - parts.append(line[:-1]) - line = (yield None) # Get another line - else: - parts.append(line) - break - - # Output - line = ''.join(parts) - -# Utilities -def _make_help_call(target, esc, lspace, next_input=None): - """Prepares a pinfo(2)/psearch call from a target name and the escape - (i.e. ? or ??)""" - method = 'pinfo2' if esc == '??' \ - else 'psearch' if '*' in target \ - else 'pinfo' - arg = " ".join([method, target]) - #Prepare arguments for get_ipython().run_line_magic(magic_name, magic_args) - t_magic_name, _, t_magic_arg_s = arg.partition(' ') - t_magic_name = t_magic_name.lstrip(ESC_MAGIC) - if next_input is None: - return '%sget_ipython().run_line_magic(%r, %r)' % (lspace, t_magic_name, t_magic_arg_s) - else: - return '%sget_ipython().set_next_input(%r);get_ipython().run_line_magic(%r, %r)' % \ - (lspace, next_input, t_magic_name, t_magic_arg_s) - -# These define the transformations for the different escape characters. -def _tr_system(line_info): - "Translate lines escaped with: !" - cmd = line_info.line.lstrip().lstrip(ESC_SHELL) - return '%sget_ipython().system(%r)' % (line_info.pre, cmd) - -def _tr_system2(line_info): - "Translate lines escaped with: !!" - cmd = line_info.line.lstrip()[2:] - return '%sget_ipython().getoutput(%r)' % (line_info.pre, cmd) - -def _tr_help(line_info): - "Translate lines escaped with: ?/??" 
- # A naked help line should just fire the intro help screen - if not line_info.line[1:]: - return 'get_ipython().show_usage()' - - return _make_help_call(line_info.ifun, line_info.esc, line_info.pre) - -def _tr_magic(line_info): - "Translate lines escaped with: %" - tpl = '%sget_ipython().run_line_magic(%r, %r)' - if line_info.line.startswith(ESC_MAGIC2): - return line_info.line - cmd = ' '.join([line_info.ifun, line_info.the_rest]).strip() - #Prepare arguments for get_ipython().run_line_magic(magic_name, magic_args) - t_magic_name, _, t_magic_arg_s = cmd.partition(' ') - t_magic_name = t_magic_name.lstrip(ESC_MAGIC) - return tpl % (line_info.pre, t_magic_name, t_magic_arg_s) - -def _tr_quote(line_info): - "Translate lines escaped with: ," - return '%s%s("%s")' % (line_info.pre, line_info.ifun, - '", "'.join(line_info.the_rest.split()) ) - -def _tr_quote2(line_info): - "Translate lines escaped with: ;" - return '%s%s("%s")' % (line_info.pre, line_info.ifun, - line_info.the_rest) - -def _tr_paren(line_info): - "Translate lines escaped with: /" - return '%s%s(%s)' % (line_info.pre, line_info.ifun, - ", ".join(line_info.the_rest.split())) - -tr = { ESC_SHELL : _tr_system, - ESC_SH_CAP : _tr_system2, - ESC_HELP : _tr_help, - ESC_HELP2 : _tr_help, - ESC_MAGIC : _tr_magic, - ESC_QUOTE : _tr_quote, - ESC_QUOTE2 : _tr_quote2, - ESC_PAREN : _tr_paren } - -@StatelessInputTransformer.wrap -def escaped_commands(line): - """Transform escaped commands - %magic, !system, ?help + various autocalls. - """ - if not line or line.isspace(): - return line - lineinf = LineInfo(line) - if lineinf.esc not in tr: - return line - - return tr[lineinf.esc](lineinf) - -_initial_space_re = re.compile(r'\s*') - -_help_end_re = re.compile(r"""(%{0,2} - (?!\d)[\w*]+ # Variable name - (\.(?!\d)[\w*]+)* # .etc.etc - ) - (\?\??)$ # ? or ?? - """, - re.VERBOSE) - -# Extra pseudotokens for multiline strings and data structures -_MULTILINE_STRING = object() -_MULTILINE_STRUCTURE = object() - -def _line_tokens(line): - """Helper for has_comment and ends_in_comment_or_string.""" - readline = StringIO(line).readline - toktypes = set() - try: - for t in generate_tokens(readline): - toktypes.add(t[0]) - except TokenError as e: - # There are only two cases where a TokenError is raised. - if 'multi-line string' in e.args[0]: - toktypes.add(_MULTILINE_STRING) - else: - toktypes.add(_MULTILINE_STRUCTURE) - return toktypes - -def has_comment(src): - """Indicate whether an input line has (i.e. ends in, or is) a comment. - - This uses tokenize, so it can distinguish comments from # inside strings. - - Parameters - ---------- - src : string - A single line input string. - - Returns - ------- - comment : bool - True if source has a comment. - """ - return (tokenize.COMMENT in _line_tokens(src)) - -def ends_in_comment_or_string(src): - """Indicates whether or not an input line ends in a comment or within - a multiline string. - - Parameters - ---------- - src : string - A single line input string. - - Returns - ------- - comment : bool - True if source ends in a comment or multiline string. - """ - toktypes = _line_tokens(src) - return (tokenize.COMMENT in toktypes) or (_MULTILINE_STRING in toktypes) - - -@StatelessInputTransformer.wrap -def help_end(line): - """Translate lines with ?/?? 
at the end""" - m = _help_end_re.search(line) - if m is None or ends_in_comment_or_string(line): - return line - target = m.group(1) - esc = m.group(3) - lspace = _initial_space_re.match(line).group(0) - - # If we're mid-command, put it back on the next prompt for the user. - next_input = line.rstrip('?') if line.strip() != m.group(0) else None - - return _make_help_call(target, esc, lspace, next_input) - - -@CoroutineInputTransformer.wrap -def cellmagic(end_on_blank_line=False): - """Captures & transforms cell magics. - - After a cell magic is started, this stores up any lines it gets until it is - reset (sent None). - """ - tpl = 'get_ipython().run_cell_magic(%r, %r, %r)' - cellmagic_help_re = re.compile(r'%%\w+\?') - line = '' - while True: - line = (yield line) - # consume leading empty lines - while not line: - line = (yield line) - - if not line.startswith(ESC_MAGIC2): - # This isn't a cell magic, idle waiting for reset then start over - while line is not None: - line = (yield line) - continue - - if cellmagic_help_re.match(line): - # This case will be handled by help_end - continue - - first = line - body = [] - line = (yield None) - while (line is not None) and \ - ((line.strip() != '') or not end_on_blank_line): - body.append(line) - line = (yield None) - - # Output - magic_name, _, first = first.partition(' ') - magic_name = magic_name.lstrip(ESC_MAGIC2) - line = tpl % (magic_name, first, u'\n'.join(body)) - - -def _strip_prompts(prompt_re, initial_re=None, turnoff_re=None): - """Remove matching input prompts from a block of input. - - Parameters - ---------- - prompt_re : regular expression - A regular expression matching any input prompt (including continuation) - initial_re : regular expression, optional - A regular expression matching only the initial prompt, but not continuation. - If no initial expression is given, prompt_re will be used everywhere. - Used mainly for plain Python prompts, where the continuation prompt - ``...`` is a valid Python expression in Python 3, so shouldn't be stripped. - - If initial_re and prompt_re differ, - only initial_re will be tested against the first line. - If any prompt is found on the first two lines, - prompts will be stripped from the rest of the block. - """ - if initial_re is None: - initial_re = prompt_re - line = '' - while True: - line = (yield line) - - # First line of cell - if line is None: - continue - out, n1 = initial_re.subn('', line, count=1) - if turnoff_re and not n1: - if turnoff_re.match(line): - # We're in e.g. a cell magic; disable this transformer for - # the rest of the cell. - while line is not None: - line = (yield line) - continue - - line = (yield out) - - if line is None: - continue - # check for any prompt on the second line of the cell, - # because people often copy from just after the first prompt, - # so we might not see it in the first line. - out, n2 = prompt_re.subn('', line, count=1) - line = (yield out) - - if n1 or n2: - # Found a prompt in the first two lines - check for it in - # the rest of the cell as well. - while line is not None: - line = (yield prompt_re.sub('', line, count=1)) - - else: - # Prompts not in input - wait for reset - while line is not None: - line = (yield line) - -@CoroutineInputTransformer.wrap -def classic_prompt(): - """Strip the >>>/... prompts of the Python interactive shell.""" - # FIXME: non-capturing version (?:...) usable? 
- prompt_re = re.compile(r'^(>>>|\.\.\.)( |$)') - initial_re = re.compile(r'^>>>( |$)') - # Any %magic/!system is IPython syntax, so we needn't look for >>> prompts - turnoff_re = re.compile(r'^[%!]') - return _strip_prompts(prompt_re, initial_re, turnoff_re) - -@CoroutineInputTransformer.wrap -def ipy_prompt(): - """Strip IPython's In [1]:/...: prompts.""" - # FIXME: non-capturing version (?:...) usable? - prompt_re = re.compile(r'^(In \[\d+\]: |\s*\.{3,}: ?)') - # Disable prompt stripping inside cell magics - turnoff_re = re.compile(r'^%%') - return _strip_prompts(prompt_re, turnoff_re=turnoff_re) - - -@CoroutineInputTransformer.wrap -def leading_indent(): - """Remove leading indentation. - - If the first line starts with a spaces or tabs, the same whitespace will be - removed from each following line until it is reset. - """ - space_re = re.compile(r'^[ \t]+') - line = '' - while True: - line = (yield line) - - if line is None: - continue - - m = space_re.match(line) - if m: - space = m.group(0) - while line is not None: - if line.startswith(space): - line = line[len(space):] - line = (yield line) - else: - # No leading spaces - wait for reset - while line is not None: - line = (yield line) - - -_assign_pat = \ -r'''(?P<lhs>(\s*) - ([\w\.]+) # Initial identifier - (\s*,\s* - \*?[\w\.]+)* # Further identifiers for unpacking - \s*?,? # Trailing comma - ) - \s*=\s* -''' - -assign_system_re = re.compile(r'{}!\s*(?P<cmd>.*)'.format(_assign_pat), re.VERBOSE) -assign_system_template = '%s = get_ipython().getoutput(%r)' -@StatelessInputTransformer.wrap -def assign_from_system(line): - """Transform assignment from system commands (e.g. files = !ls)""" - m = assign_system_re.match(line) - if m is None: - return line - - return assign_system_template % m.group('lhs', 'cmd') - -assign_magic_re = re.compile(r'{}%\s*(?P<cmd>.*)'.format(_assign_pat), re.VERBOSE) -assign_magic_template = '%s = get_ipython().run_line_magic(%r, %r)' -@StatelessInputTransformer.wrap -def assign_from_magic(line): - """Transform assignment from magic commands (e.g. a = %who_ls)""" - m = assign_magic_re.match(line) - if m is None: - return line - #Prepare arguments for get_ipython().run_line_magic(magic_name, magic_args) - m_lhs, m_cmd = m.group('lhs', 'cmd') - t_magic_name, _, t_magic_arg_s = m_cmd.partition(' ') - t_magic_name = t_magic_name.lstrip(ESC_MAGIC) - return assign_magic_template % (m_lhs, t_magic_name, t_magic_arg_s) +"""DEPRECATED: Input transformer classes to support IPython special syntax. + +This module was deprecated in IPython 7.0, in favour of inputtransformer2. + +This includes the machinery to recognise and transform ``%magic`` commands, +``!system`` commands, ``help?`` querying, prompt stripping, and so forth. +""" +import abc +import functools +import re +import tokenize +from tokenize import generate_tokens, untokenize, TokenError +from io import StringIO + +from IPython.core.splitinput import LineInfo + +#----------------------------------------------------------------------------- +# Globals +#----------------------------------------------------------------------------- + +# The escape sequences that define the syntax transformations IPython will +# apply to user input. These can NOT be just changed here: many regular +# expressions and other parts of the code may use their hardcoded values, and +# for all intents and purposes they constitute the 'IPython syntax', so they +# should be considered fixed. + +ESC_SHELL = '!' # Send line to underlying system shell +ESC_SH_CAP = '!!' 
# Send line to system shell and capture output +ESC_HELP = '?' # Find information about object +ESC_HELP2 = '??' # Find extra-detailed information about object +ESC_MAGIC = '%' # Call magic function +ESC_MAGIC2 = '%%' # Call cell-magic function +ESC_QUOTE = ',' # Split args on whitespace, quote each as string and call +ESC_QUOTE2 = ';' # Quote all args as a single string, call +ESC_PAREN = '/' # Call first argument with rest of line as arguments + +ESC_SEQUENCES = [ESC_SHELL, ESC_SH_CAP, ESC_HELP ,\ + ESC_HELP2, ESC_MAGIC, ESC_MAGIC2,\ + ESC_QUOTE, ESC_QUOTE2, ESC_PAREN ] + + +class InputTransformer(metaclass=abc.ABCMeta): + """Abstract base class for line-based input transformers.""" + + @abc.abstractmethod + def push(self, line): + """Send a line of input to the transformer, returning the transformed + input or None if the transformer is waiting for more input. + + Must be overridden by subclasses. + + Implementations may raise ``SyntaxError`` if the input is invalid. No + other exceptions may be raised. + """ + pass + + @abc.abstractmethod + def reset(self): + """Return, transformed any lines that the transformer has accumulated, + and reset its internal state. + + Must be overridden by subclasses. + """ + pass + + @classmethod + def wrap(cls, func): + """Can be used by subclasses as a decorator, to return a factory that + will allow instantiation with the decorated object. + """ + @functools.wraps(func) + def transformer_factory(**kwargs): + return cls(func, **kwargs) + + return transformer_factory + +class StatelessInputTransformer(InputTransformer): + """Wrapper for a stateless input transformer implemented as a function.""" + def __init__(self, func): + self.func = func + + def __repr__(self): + return "StatelessInputTransformer(func={0!r})".format(self.func) + + def push(self, line): + """Send a line of input to the transformer, returning the + transformed input.""" + return self.func(line) + + def reset(self): + """No-op - exists for compatibility.""" + pass + +class CoroutineInputTransformer(InputTransformer): + """Wrapper for an input transformer implemented as a coroutine.""" + def __init__(self, coro, **kwargs): + # Prime it + self.coro = coro(**kwargs) + next(self.coro) + + def __repr__(self): + return "CoroutineInputTransformer(coro={0!r})".format(self.coro) + + def push(self, line): + """Send a line of input to the transformer, returning the + transformed input or None if the transformer is waiting for more + input. + """ + return self.coro.send(line) + + def reset(self): + """Return, transformed any lines that the transformer has + accumulated, and reset its internal state. + """ + return self.coro.send(None) + +class TokenInputTransformer(InputTransformer): + """Wrapper for a token-based input transformer. + + func should accept a list of tokens (5-tuples, see tokenize docs), and + return an iterable which can be passed to tokenize.untokenize(). 
+ """ + def __init__(self, func): + self.func = func + self.buf = [] + self.reset_tokenizer() + + def reset_tokenizer(self): + it = iter(self.buf) + self.tokenizer = generate_tokens(it.__next__) + + def push(self, line): + self.buf.append(line + '\n') + if all(l.isspace() for l in self.buf): + return self.reset() + + tokens = [] + stop_at_NL = False + try: + for intok in self.tokenizer: + tokens.append(intok) + t = intok[0] + if t == tokenize.NEWLINE or (stop_at_NL and t == tokenize.NL): + # Stop before we try to pull a line we don't have yet + break + elif t == tokenize.ERRORTOKEN: + stop_at_NL = True + except TokenError: + # Multi-line statement - stop and try again with the next line + self.reset_tokenizer() + return None + + return self.output(tokens) + + def output(self, tokens): + self.buf.clear() + self.reset_tokenizer() + return untokenize(self.func(tokens)).rstrip('\n') + + def reset(self): + l = ''.join(self.buf) + self.buf.clear() + self.reset_tokenizer() + if l: + return l.rstrip('\n') + +class assemble_python_lines(TokenInputTransformer): + def __init__(self): + super(assemble_python_lines, self).__init__(None) + + def output(self, tokens): + return self.reset() + +@CoroutineInputTransformer.wrap +def assemble_logical_lines(): + r"""Join lines following explicit line continuations (\)""" + line = '' + while True: + line = (yield line) + if not line or line.isspace(): + continue + + parts = [] + while line is not None: + if line.endswith('\\') and (not has_comment(line)): + parts.append(line[:-1]) + line = (yield None) # Get another line + else: + parts.append(line) + break + + # Output + line = ''.join(parts) + +# Utilities +def _make_help_call(target, esc, lspace, next_input=None): + """Prepares a pinfo(2)/psearch call from a target name and the escape + (i.e. ? or ??)""" + method = 'pinfo2' if esc == '??' \ + else 'psearch' if '*' in target \ + else 'pinfo' + arg = " ".join([method, target]) + #Prepare arguments for get_ipython().run_line_magic(magic_name, magic_args) + t_magic_name, _, t_magic_arg_s = arg.partition(' ') + t_magic_name = t_magic_name.lstrip(ESC_MAGIC) + if next_input is None: + return '%sget_ipython().run_line_magic(%r, %r)' % (lspace, t_magic_name, t_magic_arg_s) + else: + return '%sget_ipython().set_next_input(%r);get_ipython().run_line_magic(%r, %r)' % \ + (lspace, next_input, t_magic_name, t_magic_arg_s) + +# These define the transformations for the different escape characters. +def _tr_system(line_info): + "Translate lines escaped with: !" + cmd = line_info.line.lstrip().lstrip(ESC_SHELL) + return '%sget_ipython().system(%r)' % (line_info.pre, cmd) + +def _tr_system2(line_info): + "Translate lines escaped with: !!" + cmd = line_info.line.lstrip()[2:] + return '%sget_ipython().getoutput(%r)' % (line_info.pre, cmd) + +def _tr_help(line_info): + "Translate lines escaped with: ?/??" 
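    # Illustrative sketch (derived from _make_help_call above): the escape and
    # target decide which introspection magic is emitted, for example
    #   _make_help_call('obj', '?', '')        -> "get_ipython().run_line_magic('pinfo', 'obj')"
    #   _make_help_call('obj', '??', '')       -> "get_ipython().run_line_magic('pinfo2', 'obj')"
    #   _make_help_call('np.*load*', '?', '')  -> "get_ipython().run_line_magic('psearch', 'np.*load*')"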
+ # A naked help line should just fire the intro help screen + if not line_info.line[1:]: + return 'get_ipython().show_usage()' + + return _make_help_call(line_info.ifun, line_info.esc, line_info.pre) + +def _tr_magic(line_info): + "Translate lines escaped with: %" + tpl = '%sget_ipython().run_line_magic(%r, %r)' + if line_info.line.startswith(ESC_MAGIC2): + return line_info.line + cmd = ' '.join([line_info.ifun, line_info.the_rest]).strip() + #Prepare arguments for get_ipython().run_line_magic(magic_name, magic_args) + t_magic_name, _, t_magic_arg_s = cmd.partition(' ') + t_magic_name = t_magic_name.lstrip(ESC_MAGIC) + return tpl % (line_info.pre, t_magic_name, t_magic_arg_s) + +def _tr_quote(line_info): + "Translate lines escaped with: ," + return '%s%s("%s")' % (line_info.pre, line_info.ifun, + '", "'.join(line_info.the_rest.split()) ) + +def _tr_quote2(line_info): + "Translate lines escaped with: ;" + return '%s%s("%s")' % (line_info.pre, line_info.ifun, + line_info.the_rest) + +def _tr_paren(line_info): + "Translate lines escaped with: /" + return '%s%s(%s)' % (line_info.pre, line_info.ifun, + ", ".join(line_info.the_rest.split())) + +tr = { ESC_SHELL : _tr_system, + ESC_SH_CAP : _tr_system2, + ESC_HELP : _tr_help, + ESC_HELP2 : _tr_help, + ESC_MAGIC : _tr_magic, + ESC_QUOTE : _tr_quote, + ESC_QUOTE2 : _tr_quote2, + ESC_PAREN : _tr_paren } + +@StatelessInputTransformer.wrap +def escaped_commands(line): + """Transform escaped commands - %magic, !system, ?help + various autocalls. + """ + if not line or line.isspace(): + return line + lineinf = LineInfo(line) + if lineinf.esc not in tr: + return line + + return tr[lineinf.esc](lineinf) + +_initial_space_re = re.compile(r'\s*') + +_help_end_re = re.compile(r"""(%{0,2} + (?!\d)[\w*]+ # Variable name + (\.(?!\d)[\w*]+)* # .etc.etc + ) + (\?\??)$ # ? or ?? + """, + re.VERBOSE) + +# Extra pseudotokens for multiline strings and data structures +_MULTILINE_STRING = object() +_MULTILINE_STRUCTURE = object() + +def _line_tokens(line): + """Helper for has_comment and ends_in_comment_or_string.""" + readline = StringIO(line).readline + toktypes = set() + try: + for t in generate_tokens(readline): + toktypes.add(t[0]) + except TokenError as e: + # There are only two cases where a TokenError is raised. + if 'multi-line string' in e.args[0]: + toktypes.add(_MULTILINE_STRING) + else: + toktypes.add(_MULTILINE_STRUCTURE) + return toktypes + +def has_comment(src): + """Indicate whether an input line has (i.e. ends in, or is) a comment. + + This uses tokenize, so it can distinguish comments from # inside strings. + + Parameters + ---------- + src : string + A single line input string. + + Returns + ------- + comment : bool + True if source has a comment. + """ + return (tokenize.COMMENT in _line_tokens(src)) + +def ends_in_comment_or_string(src): + """Indicates whether or not an input line ends in a comment or within + a multiline string. + + Parameters + ---------- + src : string + A single line input string. + + Returns + ------- + comment : bool + True if source ends in a comment or multiline string. + """ + toktypes = _line_tokens(src) + return (tokenize.COMMENT in toktypes) or (_MULTILINE_STRING in toktypes) + + +@StatelessInputTransformer.wrap +def help_end(line): + """Translate lines with ?/?? 
at the end""" + m = _help_end_re.search(line) + if m is None or ends_in_comment_or_string(line): + return line + target = m.group(1) + esc = m.group(3) + lspace = _initial_space_re.match(line).group(0) + + # If we're mid-command, put it back on the next prompt for the user. + next_input = line.rstrip('?') if line.strip() != m.group(0) else None + + return _make_help_call(target, esc, lspace, next_input) + + +@CoroutineInputTransformer.wrap +def cellmagic(end_on_blank_line=False): + """Captures & transforms cell magics. + + After a cell magic is started, this stores up any lines it gets until it is + reset (sent None). + """ + tpl = 'get_ipython().run_cell_magic(%r, %r, %r)' + cellmagic_help_re = re.compile(r'%%\w+\?') + line = '' + while True: + line = (yield line) + # consume leading empty lines + while not line: + line = (yield line) + + if not line.startswith(ESC_MAGIC2): + # This isn't a cell magic, idle waiting for reset then start over + while line is not None: + line = (yield line) + continue + + if cellmagic_help_re.match(line): + # This case will be handled by help_end + continue + + first = line + body = [] + line = (yield None) + while (line is not None) and \ + ((line.strip() != '') or not end_on_blank_line): + body.append(line) + line = (yield None) + + # Output + magic_name, _, first = first.partition(' ') + magic_name = magic_name.lstrip(ESC_MAGIC2) + line = tpl % (magic_name, first, u'\n'.join(body)) + + +def _strip_prompts(prompt_re, initial_re=None, turnoff_re=None): + """Remove matching input prompts from a block of input. + + Parameters + ---------- + prompt_re : regular expression + A regular expression matching any input prompt (including continuation) + initial_re : regular expression, optional + A regular expression matching only the initial prompt, but not continuation. + If no initial expression is given, prompt_re will be used everywhere. + Used mainly for plain Python prompts, where the continuation prompt + ``...`` is a valid Python expression in Python 3, so shouldn't be stripped. + + If initial_re and prompt_re differ, + only initial_re will be tested against the first line. + If any prompt is found on the first two lines, + prompts will be stripped from the rest of the block. + """ + if initial_re is None: + initial_re = prompt_re + line = '' + while True: + line = (yield line) + + # First line of cell + if line is None: + continue + out, n1 = initial_re.subn('', line, count=1) + if turnoff_re and not n1: + if turnoff_re.match(line): + # We're in e.g. a cell magic; disable this transformer for + # the rest of the cell. + while line is not None: + line = (yield line) + continue + + line = (yield out) + + if line is None: + continue + # check for any prompt on the second line of the cell, + # because people often copy from just after the first prompt, + # so we might not see it in the first line. + out, n2 = prompt_re.subn('', line, count=1) + line = (yield out) + + if n1 or n2: + # Found a prompt in the first two lines - check for it in + # the rest of the cell as well. + while line is not None: + line = (yield prompt_re.sub('', line, count=1)) + + else: + # Prompts not in input - wait for reset + while line is not None: + line = (yield line) + +@CoroutineInputTransformer.wrap +def classic_prompt(): + """Strip the >>>/... prompts of the Python interactive shell.""" + # FIXME: non-capturing version (?:...) usable? 
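    # Behaviour sketch (follows from _strip_prompts above): pushing the lines
    # ">>> x = 1" and "... x += 1" yields "x = 1" and "x += 1"; a cell whose
    # first line starts with "%" or "!" matches turnoff_re below, so prompt
    # stripping is switched off for the rest of that cell.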
+ prompt_re = re.compile(r'^(>>>|\.\.\.)( |$)') + initial_re = re.compile(r'^>>>( |$)') + # Any %magic/!system is IPython syntax, so we needn't look for >>> prompts + turnoff_re = re.compile(r'^[%!]') + return _strip_prompts(prompt_re, initial_re, turnoff_re) + +@CoroutineInputTransformer.wrap +def ipy_prompt(): + """Strip IPython's In [1]:/...: prompts.""" + # FIXME: non-capturing version (?:...) usable? + prompt_re = re.compile(r'^(In \[\d+\]: |\s*\.{3,}: ?)') + # Disable prompt stripping inside cell magics + turnoff_re = re.compile(r'^%%') + return _strip_prompts(prompt_re, turnoff_re=turnoff_re) + + +@CoroutineInputTransformer.wrap +def leading_indent(): + """Remove leading indentation. + + If the first line starts with a spaces or tabs, the same whitespace will be + removed from each following line until it is reset. + """ + space_re = re.compile(r'^[ \t]+') + line = '' + while True: + line = (yield line) + + if line is None: + continue + + m = space_re.match(line) + if m: + space = m.group(0) + while line is not None: + if line.startswith(space): + line = line[len(space):] + line = (yield line) + else: + # No leading spaces - wait for reset + while line is not None: + line = (yield line) + + +_assign_pat = \ +r'''(?P<lhs>(\s*) + ([\w\.]+) # Initial identifier + (\s*,\s* + \*?[\w\.]+)* # Further identifiers for unpacking + \s*?,? # Trailing comma + ) + \s*=\s* +''' + +assign_system_re = re.compile(r'{}!\s*(?P<cmd>.*)'.format(_assign_pat), re.VERBOSE) +assign_system_template = '%s = get_ipython().getoutput(%r)' +@StatelessInputTransformer.wrap +def assign_from_system(line): + """Transform assignment from system commands (e.g. files = !ls)""" + m = assign_system_re.match(line) + if m is None: + return line + + return assign_system_template % m.group('lhs', 'cmd') + +assign_magic_re = re.compile(r'{}%\s*(?P<cmd>.*)'.format(_assign_pat), re.VERBOSE) +assign_magic_template = '%s = get_ipython().run_line_magic(%r, %r)' +@StatelessInputTransformer.wrap +def assign_from_magic(line): + """Transform assignment from magic commands (e.g. a = %who_ls)""" + m = assign_magic_re.match(line) + if m is None: + return line + #Prepare arguments for get_ipython().run_line_magic(magic_name, magic_args) + m_lhs, m_cmd = m.group('lhs', 'cmd') + t_magic_name, _, t_magic_arg_s = m_cmd.partition(' ') + t_magic_name = t_magic_name.lstrip(ESC_MAGIC) + return assign_magic_template % (m_lhs, t_magic_name, t_magic_arg_s) diff --git a/contrib/python/ipython/py3/IPython/core/inputtransformer2.py b/contrib/python/ipython/py3/IPython/core/inputtransformer2.py index a0b7664e84c..5b6f4a10b35 100644 --- a/contrib/python/ipython/py3/IPython/core/inputtransformer2.py +++ b/contrib/python/ipython/py3/IPython/core/inputtransformer2.py @@ -1,750 +1,750 @@ -"""Input transformer machinery to support IPython special syntax. - -This includes the machinery to recognise and transform ``%magic`` commands, -``!system`` commands, ``help?`` querying, prompt stripping, and so forth. - -Added: IPython 7.0. Replaces inputsplitter and inputtransformer which were -deprecated in 7.0. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import ast -import sys -from codeop import CommandCompiler, Compile -import re -import tokenize -from typing import List, Tuple, Union -import warnings - -_indent_re = re.compile(r'^[ \t]+') - -def leading_empty_lines(lines): - """Remove leading empty lines - - If the leading lines are empty or contain only whitespace, they will be - removed. 
- """ - if not lines: - return lines - for i, line in enumerate(lines): - if line and not line.isspace(): - return lines[i:] - return lines - -def leading_indent(lines): - """Remove leading indentation. - - If the first line starts with a spaces or tabs, the same whitespace will be - removed from each following line in the cell. - """ - if not lines: - return lines - m = _indent_re.match(lines[0]) - if not m: - return lines - space = m.group(0) - n = len(space) - return [l[n:] if l.startswith(space) else l - for l in lines] - -class PromptStripper: - """Remove matching input prompts from a block of input. - - Parameters - ---------- - prompt_re : regular expression - A regular expression matching any input prompt (including continuation, - e.g. ``...``) - initial_re : regular expression, optional - A regular expression matching only the initial prompt, but not continuation. - If no initial expression is given, prompt_re will be used everywhere. - Used mainly for plain Python prompts (``>>>``), where the continuation prompt - ``...`` is a valid Python expression in Python 3, so shouldn't be stripped. - - Notes - ----- - - If initial_re and prompt_re differ, - only initial_re will be tested against the first line. - If any prompt is found on the first two lines, - prompts will be stripped from the rest of the block. - """ - def __init__(self, prompt_re, initial_re=None): - self.prompt_re = prompt_re - self.initial_re = initial_re or prompt_re - - def _strip(self, lines): - return [self.prompt_re.sub('', l, count=1) for l in lines] - - def __call__(self, lines): - if not lines: - return lines - if self.initial_re.match(lines[0]) or \ - (len(lines) > 1 and self.prompt_re.match(lines[1])): - return self._strip(lines) - return lines - -classic_prompt = PromptStripper( - prompt_re=re.compile(r'^(>>>|\.\.\.)( |$)'), - initial_re=re.compile(r'^>>>( |$)') -) - -ipython_prompt = PromptStripper(re.compile(r'^(In \[\d+\]: |\s*\.{3,}: ?)')) - -def cell_magic(lines): - if not lines or not lines[0].startswith('%%'): - return lines - if re.match(r'%%\w+\?', lines[0]): - # This case will be handled by help_end - return lines - magic_name, _, first_line = lines[0][2:].rstrip().partition(' ') - body = ''.join(lines[1:]) - return ['get_ipython().run_cell_magic(%r, %r, %r)\n' - % (magic_name, first_line, body)] - - -def _find_assign_op(token_line) -> Union[int, None]: - """Get the index of the first assignment in the line ('=' not inside brackets) - - Note: We don't try to support multiple special assignment (a = b = %foo) - """ - paren_level = 0 - for i, ti in enumerate(token_line): - s = ti.string - if s == '=' and paren_level == 0: - return i - if s in {'(','[','{'}: - paren_level += 1 - elif s in {')', ']', '}'}: - if paren_level > 0: - paren_level -= 1 - -def find_end_of_continued_line(lines, start_line: int): - """Find the last line of a line explicitly extended using backslashes. - - Uses 0-indexed line numbers. - """ - end_line = start_line - while lines[end_line].endswith('\\\n'): - end_line += 1 - if end_line >= len(lines): - break - return end_line - -def assemble_continued_line(lines, start: Tuple[int, int], end_line: int): - r"""Assemble a single line from multiple continued line pieces - - Continued lines are lines ending in ``\``, and the line following the last - ``\`` in the block. 
- - For example, this code continues over multiple lines:: - - if (assign_ix is not None) \ - and (len(line) >= assign_ix + 2) \ - and (line[assign_ix+1].string == '%') \ - and (line[assign_ix+2].type == tokenize.NAME): - - This statement contains four continued line pieces. - Assembling these pieces into a single line would give:: - - if (assign_ix is not None) and (len(line) >= assign_ix + 2) and (line[... - - This uses 0-indexed line numbers. *start* is (lineno, colno). - - Used to allow ``%magic`` and ``!system`` commands to be continued over - multiple lines. - """ - parts = [lines[start[0]][start[1]:]] + lines[start[0]+1:end_line+1] - return ' '.join([p.rstrip()[:-1] for p in parts[:-1]] # Strip backslash+newline - + [parts[-1].rstrip()]) # Strip newline from last line - -class TokenTransformBase: - """Base class for transformations which examine tokens. - - Special syntax should not be transformed when it occurs inside strings or - comments. This is hard to reliably avoid with regexes. The solution is to - tokenise the code as Python, and recognise the special syntax in the tokens. - - IPython's special syntax is not valid Python syntax, so tokenising may go - wrong after the special syntax starts. These classes therefore find and - transform *one* instance of special syntax at a time into regular Python - syntax. After each transformation, tokens are regenerated to find the next - piece of special syntax. - - Subclasses need to implement one class method (find) - and one regular method (transform). - - The priority attribute can select which transformation to apply if multiple - transformers match in the same place. Lower numbers have higher priority. - This allows "%magic?" to be turned into a help call rather than a magic call. - """ - # Lower numbers -> higher priority (for matches in the same location) - priority = 10 - - def sortby(self): - return self.start_line, self.start_col, self.priority - - def __init__(self, start): - self.start_line = start[0] - 1 # Shift from 1-index to 0-index - self.start_col = start[1] - - @classmethod - def find(cls, tokens_by_line): - """Find one instance of special syntax in the provided tokens. - - Tokens are grouped into logical lines for convenience, - so it is easy to e.g. look at the first token of each line. - *tokens_by_line* is a list of lists of tokenize.TokenInfo objects. - - This should return an instance of its class, pointing to the start - position it has found, or None if it found no match. - """ - raise NotImplementedError - - def transform(self, lines: List[str]): - """Transform one instance of special syntax found by ``find()`` - - Takes a list of strings representing physical lines, - returns a similar list of transformed lines. - """ - raise NotImplementedError - -class MagicAssign(TokenTransformBase): - """Transformer for assignments from magics (a = %foo)""" - @classmethod - def find(cls, tokens_by_line): - """Find the first magic assignment (a = %foo) in the cell. - """ - for line in tokens_by_line: - assign_ix = _find_assign_op(line) - if (assign_ix is not None) \ - and (len(line) >= assign_ix + 2) \ - and (line[assign_ix+1].string == '%') \ - and (line[assign_ix+2].type == tokenize.NAME): - return cls(line[assign_ix+1].start) - - def transform(self, lines: List[str]): - """Transform a magic assignment found by the ``find()`` classmethod. 
- """ - start_line, start_col = self.start_line, self.start_col - lhs = lines[start_line][:start_col] - end_line = find_end_of_continued_line(lines, start_line) - rhs = assemble_continued_line(lines, (start_line, start_col), end_line) - assert rhs.startswith('%'), rhs - magic_name, _, args = rhs[1:].partition(' ') - - lines_before = lines[:start_line] - call = "get_ipython().run_line_magic({!r}, {!r})".format(magic_name, args) - new_line = lhs + call + '\n' - lines_after = lines[end_line+1:] - - return lines_before + [new_line] + lines_after - - -class SystemAssign(TokenTransformBase): - """Transformer for assignments from system commands (a = !foo)""" - @classmethod - def find(cls, tokens_by_line): - """Find the first system assignment (a = !foo) in the cell. - """ - for line in tokens_by_line: - assign_ix = _find_assign_op(line) - if (assign_ix is not None) \ - and not line[assign_ix].line.strip().startswith('=') \ - and (len(line) >= assign_ix + 2) \ - and (line[assign_ix + 1].type == tokenize.ERRORTOKEN): - ix = assign_ix + 1 - - while ix < len(line) and line[ix].type == tokenize.ERRORTOKEN: - if line[ix].string == '!': - return cls(line[ix].start) - elif not line[ix].string.isspace(): - break - ix += 1 - - def transform(self, lines: List[str]): - """Transform a system assignment found by the ``find()`` classmethod. - """ - start_line, start_col = self.start_line, self.start_col - - lhs = lines[start_line][:start_col] - end_line = find_end_of_continued_line(lines, start_line) - rhs = assemble_continued_line(lines, (start_line, start_col), end_line) - assert rhs.startswith('!'), rhs - cmd = rhs[1:] - - lines_before = lines[:start_line] - call = "get_ipython().getoutput({!r})".format(cmd) - new_line = lhs + call + '\n' - lines_after = lines[end_line + 1:] - - return lines_before + [new_line] + lines_after - -# The escape sequences that define the syntax transformations IPython will -# apply to user input. These can NOT be just changed here: many regular -# expressions and other parts of the code may use their hardcoded values, and -# for all intents and purposes they constitute the 'IPython syntax', so they -# should be considered fixed. - -ESC_SHELL = '!' # Send line to underlying system shell -ESC_SH_CAP = '!!' # Send line to system shell and capture output -ESC_HELP = '?' # Find information about object -ESC_HELP2 = '??' # Find extra-detailed information about object -ESC_MAGIC = '%' # Call magic function -ESC_MAGIC2 = '%%' # Call cell-magic function -ESC_QUOTE = ',' # Split args on whitespace, quote each as string and call -ESC_QUOTE2 = ';' # Quote all args as a single string, call -ESC_PAREN = '/' # Call first argument with rest of line as arguments - -ESCAPE_SINGLES = {'!', '?', '%', ',', ';', '/'} -ESCAPE_DOUBLES = {'!!', '??'} # %% (cell magic) is handled separately - -def _make_help_call(target, esc, next_input=None): - """Prepares a pinfo(2)/psearch call from a target name and the escape - (i.e. ? or ??)""" - method = 'pinfo2' if esc == '??' 
\ - else 'psearch' if '*' in target \ - else 'pinfo' - arg = " ".join([method, target]) - #Prepare arguments for get_ipython().run_line_magic(magic_name, magic_args) - t_magic_name, _, t_magic_arg_s = arg.partition(' ') - t_magic_name = t_magic_name.lstrip(ESC_MAGIC) - if next_input is None: - return 'get_ipython().run_line_magic(%r, %r)' % (t_magic_name, t_magic_arg_s) - else: - return 'get_ipython().set_next_input(%r);get_ipython().run_line_magic(%r, %r)' % \ - (next_input, t_magic_name, t_magic_arg_s) - -def _tr_help(content): - """Translate lines escaped with: ? - - A naked help line should fire the intro help screen (shell.show_usage()) - """ - if not content: - return 'get_ipython().show_usage()' - - return _make_help_call(content, '?') - -def _tr_help2(content): - """Translate lines escaped with: ?? - - A naked help line should fire the intro help screen (shell.show_usage()) - """ - if not content: - return 'get_ipython().show_usage()' - - return _make_help_call(content, '??') - -def _tr_magic(content): - "Translate lines escaped with a percent sign: %" - name, _, args = content.partition(' ') - return 'get_ipython().run_line_magic(%r, %r)' % (name, args) - -def _tr_quote(content): - "Translate lines escaped with a comma: ," - name, _, args = content.partition(' ') - return '%s("%s")' % (name, '", "'.join(args.split()) ) - -def _tr_quote2(content): - "Translate lines escaped with a semicolon: ;" - name, _, args = content.partition(' ') - return '%s("%s")' % (name, args) - -def _tr_paren(content): - "Translate lines escaped with a slash: /" - name, _, args = content.partition(' ') - return '%s(%s)' % (name, ", ".join(args.split())) - -tr = { ESC_SHELL : 'get_ipython().system({!r})'.format, - ESC_SH_CAP : 'get_ipython().getoutput({!r})'.format, - ESC_HELP : _tr_help, - ESC_HELP2 : _tr_help2, - ESC_MAGIC : _tr_magic, - ESC_QUOTE : _tr_quote, - ESC_QUOTE2 : _tr_quote2, - ESC_PAREN : _tr_paren } - -class EscapedCommand(TokenTransformBase): - """Transformer for escaped commands like %foo, !foo, or /foo""" - @classmethod - def find(cls, tokens_by_line): - """Find the first escaped command (%foo, !foo, etc.) in the cell. - """ - for line in tokens_by_line: - if not line: - continue - ix = 0 - ll = len(line) - while ll > ix and line[ix].type in {tokenize.INDENT, tokenize.DEDENT}: - ix += 1 - if ix >= ll: - continue - if line[ix].string in ESCAPE_SINGLES: - return cls(line[ix].start) - - def transform(self, lines): - """Transform an escaped line found by the ``find()`` classmethod. - """ - start_line, start_col = self.start_line, self.start_col - - indent = lines[start_line][:start_col] - end_line = find_end_of_continued_line(lines, start_line) - line = assemble_continued_line(lines, (start_line, start_col), end_line) - - if len(line) > 1 and line[:2] in ESCAPE_DOUBLES: - escape, content = line[:2], line[2:] - else: - escape, content = line[:1], line[1:] - - if escape in tr: - call = tr[escape](content) - else: - call = '' - - lines_before = lines[:start_line] - new_line = indent + call + '\n' - lines_after = lines[end_line + 1:] - - return lines_before + [new_line] + lines_after - -_help_end_re = re.compile(r"""(%{0,2} - (?!\d)[\w*]+ # Variable name - (\.(?!\d)[\w*]+)* # .etc.etc - ) - (\?\??)$ # ? or ?? - """, - re.VERBOSE) - -class HelpEnd(TokenTransformBase): - """Transformer for help syntax: obj? and obj??""" - # This needs to be higher priority (lower number) than EscapedCommand so - # that inspecting magics (%foo?) works. 
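    # e.g. "%timeit?" is found by both HelpEnd and EscapedCommand at the same
    # column; the lower priority value below makes HelpEnd sort first, so the
    # line is rewritten to get_ipython().run_line_magic('pinfo', '%timeit')
    # rather than being treated as a magic call with a trailing "?".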
- priority = 5 - - def __init__(self, start, q_locn): - super().__init__(start) - self.q_line = q_locn[0] - 1 # Shift from 1-indexed to 0-indexed - self.q_col = q_locn[1] - - @classmethod - def find(cls, tokens_by_line): - """Find the first help command (foo?) in the cell. - """ - for line in tokens_by_line: - # Last token is NEWLINE; look at last but one - if len(line) > 2 and line[-2].string == '?': - # Find the first token that's not INDENT/DEDENT - ix = 0 - while line[ix].type in {tokenize.INDENT, tokenize.DEDENT}: - ix += 1 - return cls(line[ix].start, line[-2].start) - - def transform(self, lines): - """Transform a help command found by the ``find()`` classmethod. - """ - piece = ''.join(lines[self.start_line:self.q_line+1]) - indent, content = piece[:self.start_col], piece[self.start_col:] - lines_before = lines[:self.start_line] - lines_after = lines[self.q_line + 1:] - - m = _help_end_re.search(content) - if not m: - raise SyntaxError(content) - assert m is not None, content - target = m.group(1) - esc = m.group(3) - - # If we're mid-command, put it back on the next prompt for the user. - next_input = None - if (not lines_before) and (not lines_after) \ - and content.strip() != m.group(0): - next_input = content.rstrip('?\n') - - call = _make_help_call(target, esc, next_input=next_input) - new_line = indent + call + '\n' - - return lines_before + [new_line] + lines_after - -def make_tokens_by_line(lines:List[str]): - """Tokenize a series of lines and group tokens by line. - - The tokens for a multiline Python string or expression are grouped as one - line. All lines except the last lines should keep their line ending ('\\n', - '\\r\\n') for this to properly work. Use `.splitlines(keeplineending=True)` - for example when passing block of text to this function. - - """ - # NL tokens are used inside multiline expressions, but also after blank - # lines or comments. This is intentional - see https://bugs.python.org/issue17061 - # We want to group the former case together but split the latter, so we - # track parentheses level, similar to the internals of tokenize. - NEWLINE, NL = tokenize.NEWLINE, tokenize.NL - tokens_by_line = [[]] - if len(lines) > 1 and not lines[0].endswith(('\n', '\r', '\r\n', '\x0b', '\x0c')): - warnings.warn("`make_tokens_by_line` received a list of lines which do not have lineending markers ('\\n', '\\r', '\\r\\n', '\\x0b', '\\x0c'), behavior will be unspecified") - parenlev = 0 - try: - for token in tokenize.generate_tokens(iter(lines).__next__): - tokens_by_line[-1].append(token) - if (token.type == NEWLINE) \ - or ((token.type == NL) and (parenlev <= 0)): - tokens_by_line.append([]) - elif token.string in {'(', '[', '{'}: - parenlev += 1 - elif token.string in {')', ']', '}'}: - if parenlev > 0: - parenlev -= 1 - except tokenize.TokenError: - # Input ended in a multiline string or expression. That's OK for us. - pass - - - if not tokens_by_line[-1]: - tokens_by_line.pop() - - - return tokens_by_line - -def show_linewise_tokens(s: str): - """For investigation and debugging""" - if not s.endswith('\n'): - s += '\n' - lines = s.splitlines(keepends=True) - for line in make_tokens_by_line(lines): - print("Line -------") - for tokinfo in line: - print(" ", tokinfo) - -# Arbitrary limit to prevent getting stuck in infinite loops -TRANSFORM_LOOP_LIMIT = 500 - -class TransformerManager: - """Applies various transformations to a cell or code block. - - The key methods for external use are ``transform_cell()`` - and ``check_complete()``. 
- """ - def __init__(self): - self.cleanup_transforms = [ - leading_empty_lines, - leading_indent, - classic_prompt, - ipython_prompt, - ] - self.line_transforms = [ - cell_magic, - ] - self.token_transformers = [ - MagicAssign, - SystemAssign, - EscapedCommand, - HelpEnd, - ] - - def do_one_token_transform(self, lines): - """Find and run the transform earliest in the code. - - Returns (changed, lines). - - This method is called repeatedly until changed is False, indicating - that all available transformations are complete. - - The tokens following IPython special syntax might not be valid, so - the transformed code is retokenised every time to identify the next - piece of special syntax. Hopefully long code cells are mostly valid - Python, not using lots of IPython special syntax, so this shouldn't be - a performance issue. - """ - tokens_by_line = make_tokens_by_line(lines) - candidates = [] - for transformer_cls in self.token_transformers: - transformer = transformer_cls.find(tokens_by_line) - if transformer: - candidates.append(transformer) - - if not candidates: - # Nothing to transform - return False, lines - ordered_transformers = sorted(candidates, key=TokenTransformBase.sortby) - for transformer in ordered_transformers: - try: - return True, transformer.transform(lines) - except SyntaxError: - pass - return False, lines - - def do_token_transforms(self, lines): - for _ in range(TRANSFORM_LOOP_LIMIT): - changed, lines = self.do_one_token_transform(lines) - if not changed: - return lines - - raise RuntimeError("Input transformation still changing after " - "%d iterations. Aborting." % TRANSFORM_LOOP_LIMIT) - - def transform_cell(self, cell: str) -> str: - """Transforms a cell of input code""" - if not cell.endswith('\n'): - cell += '\n' # Ensure the cell has a trailing newline - lines = cell.splitlines(keepends=True) - for transform in self.cleanup_transforms + self.line_transforms: - lines = transform(lines) - - lines = self.do_token_transforms(lines) - return ''.join(lines) - - def check_complete(self, cell: str): - """Return whether a block of code is ready to execute, or should be continued - - Parameters - ---------- - source : string - Python input code, which can be multiline. - - Returns - ------- - status : str - One of 'complete', 'incomplete', or 'invalid' if source is not a - prefix of valid code. - indent_spaces : int or None - The number of spaces by which to indent the next line of code. If - status is not 'incomplete', this is None. - """ - # Remember if the lines ends in a new line. 
- ends_with_newline = False - for character in reversed(cell): - if character == '\n': - ends_with_newline = True - break - elif character.strip(): - break - else: - continue - - if not ends_with_newline: - # Append an newline for consistent tokenization - # See https://bugs.python.org/issue33899 - cell += '\n' - - lines = cell.splitlines(keepends=True) - - if not lines: - return 'complete', None - - if lines[-1].endswith('\\'): - # Explicit backslash continuation - return 'incomplete', find_last_indent(lines) - - try: - for transform in self.cleanup_transforms: - if not getattr(transform, 'has_side_effects', False): - lines = transform(lines) - except SyntaxError: - return 'invalid', None - - if lines[0].startswith('%%'): - # Special case for cell magics - completion marked by blank line - if lines[-1].strip(): - return 'incomplete', find_last_indent(lines) - else: - return 'complete', None - - try: - for transform in self.line_transforms: - if not getattr(transform, 'has_side_effects', False): - lines = transform(lines) - lines = self.do_token_transforms(lines) - except SyntaxError: - return 'invalid', None - - tokens_by_line = make_tokens_by_line(lines) - - if not tokens_by_line: - return 'incomplete', find_last_indent(lines) - - if tokens_by_line[-1][-1].type != tokenize.ENDMARKER: - # We're in a multiline string or expression - return 'incomplete', find_last_indent(lines) - - newline_types = {tokenize.NEWLINE, tokenize.COMMENT, tokenize.ENDMARKER} - - # Pop the last line which only contains DEDENTs and ENDMARKER - last_token_line = None - if {t.type for t in tokens_by_line[-1]} in [ - {tokenize.DEDENT, tokenize.ENDMARKER}, - {tokenize.ENDMARKER} - ] and len(tokens_by_line) > 1: - last_token_line = tokens_by_line.pop() - - while tokens_by_line[-1] and tokens_by_line[-1][-1].type in newline_types: - tokens_by_line[-1].pop() - - if not tokens_by_line[-1]: - return 'incomplete', find_last_indent(lines) - - if tokens_by_line[-1][-1].string == ':': - # The last line starts a block (e.g. 'if foo:') - ix = 0 - while tokens_by_line[-1][ix].type in {tokenize.INDENT, tokenize.DEDENT}: - ix += 1 - - indent = tokens_by_line[-1][ix].start[1] - return 'incomplete', indent + 4 - - if tokens_by_line[-1][0].line.endswith('\\'): - return 'incomplete', None - - # At this point, our checks think the code is complete (or invalid). 
- # We'll use codeop.compile_command to check this with the real parser - try: - with warnings.catch_warnings(): - warnings.simplefilter('error', SyntaxWarning) - res = compile_command(''.join(lines), symbol='exec') - except (SyntaxError, OverflowError, ValueError, TypeError, - MemoryError, SyntaxWarning): - return 'invalid', None - else: - if res is None: - return 'incomplete', find_last_indent(lines) - - if last_token_line and last_token_line[0].type == tokenize.DEDENT: - if ends_with_newline: - return 'complete', None - return 'incomplete', find_last_indent(lines) - - # If there's a blank line at the end, assume we're ready to execute - if not lines[-1].strip(): - return 'complete', None - - return 'complete', None - - -def find_last_indent(lines): - m = _indent_re.match(lines[-1]) - if not m: - return 0 - return len(m.group(0).replace('\t', ' '*4)) - - -class MaybeAsyncCompile(Compile): - def __init__(self, extra_flags=0): - super().__init__() - self.flags |= extra_flags - - def __call__(self, *args, **kwds): - return compile(*args, **kwds) - - -class MaybeAsyncCommandCompiler(CommandCompiler): - def __init__(self, extra_flags=0): - self.compiler = MaybeAsyncCompile(extra_flags=extra_flags) - - -if (sys.version_info.major, sys.version_info.minor) >= (3, 8): - _extra_flags = ast.PyCF_ALLOW_TOP_LEVEL_AWAIT -else: - _extra_flags = ast.PyCF_ONLY_AST - -compile_command = MaybeAsyncCommandCompiler(extra_flags=_extra_flags) +"""Input transformer machinery to support IPython special syntax. + +This includes the machinery to recognise and transform ``%magic`` commands, +``!system`` commands, ``help?`` querying, prompt stripping, and so forth. + +Added: IPython 7.0. Replaces inputsplitter and inputtransformer which were +deprecated in 7.0. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import ast +import sys +from codeop import CommandCompiler, Compile +import re +import tokenize +from typing import List, Tuple, Union +import warnings + +_indent_re = re.compile(r'^[ \t]+') + +def leading_empty_lines(lines): + """Remove leading empty lines + + If the leading lines are empty or contain only whitespace, they will be + removed. + """ + if not lines: + return lines + for i, line in enumerate(lines): + if line and not line.isspace(): + return lines[i:] + return lines + +def leading_indent(lines): + """Remove leading indentation. + + If the first line starts with a spaces or tabs, the same whitespace will be + removed from each following line in the cell. + """ + if not lines: + return lines + m = _indent_re.match(lines[0]) + if not m: + return lines + space = m.group(0) + n = len(space) + return [l[n:] if l.startswith(space) else l + for l in lines] + +class PromptStripper: + """Remove matching input prompts from a block of input. + + Parameters + ---------- + prompt_re : regular expression + A regular expression matching any input prompt (including continuation, + e.g. ``...``) + initial_re : regular expression, optional + A regular expression matching only the initial prompt, but not continuation. + If no initial expression is given, prompt_re will be used everywhere. + Used mainly for plain Python prompts (``>>>``), where the continuation prompt + ``...`` is a valid Python expression in Python 3, so shouldn't be stripped. + + Notes + ----- + + If initial_re and prompt_re differ, + only initial_re will be tested against the first line. 
+ If any prompt is found on the first two lines, + prompts will be stripped from the rest of the block. + """ + def __init__(self, prompt_re, initial_re=None): + self.prompt_re = prompt_re + self.initial_re = initial_re or prompt_re + + def _strip(self, lines): + return [self.prompt_re.sub('', l, count=1) for l in lines] + + def __call__(self, lines): + if not lines: + return lines + if self.initial_re.match(lines[0]) or \ + (len(lines) > 1 and self.prompt_re.match(lines[1])): + return self._strip(lines) + return lines + +classic_prompt = PromptStripper( + prompt_re=re.compile(r'^(>>>|\.\.\.)( |$)'), + initial_re=re.compile(r'^>>>( |$)') +) + +ipython_prompt = PromptStripper(re.compile(r'^(In \[\d+\]: |\s*\.{3,}: ?)')) + +def cell_magic(lines): + if not lines or not lines[0].startswith('%%'): + return lines + if re.match(r'%%\w+\?', lines[0]): + # This case will be handled by help_end + return lines + magic_name, _, first_line = lines[0][2:].rstrip().partition(' ') + body = ''.join(lines[1:]) + return ['get_ipython().run_cell_magic(%r, %r, %r)\n' + % (magic_name, first_line, body)] + + +def _find_assign_op(token_line) -> Union[int, None]: + """Get the index of the first assignment in the line ('=' not inside brackets) + + Note: We don't try to support multiple special assignment (a = b = %foo) + """ + paren_level = 0 + for i, ti in enumerate(token_line): + s = ti.string + if s == '=' and paren_level == 0: + return i + if s in {'(','[','{'}: + paren_level += 1 + elif s in {')', ']', '}'}: + if paren_level > 0: + paren_level -= 1 + +def find_end_of_continued_line(lines, start_line: int): + """Find the last line of a line explicitly extended using backslashes. + + Uses 0-indexed line numbers. + """ + end_line = start_line + while lines[end_line].endswith('\\\n'): + end_line += 1 + if end_line >= len(lines): + break + return end_line + +def assemble_continued_line(lines, start: Tuple[int, int], end_line: int): + r"""Assemble a single line from multiple continued line pieces + + Continued lines are lines ending in ``\``, and the line following the last + ``\`` in the block. + + For example, this code continues over multiple lines:: + + if (assign_ix is not None) \ + and (len(line) >= assign_ix + 2) \ + and (line[assign_ix+1].string == '%') \ + and (line[assign_ix+2].type == tokenize.NAME): + + This statement contains four continued line pieces. + Assembling these pieces into a single line would give:: + + if (assign_ix is not None) and (len(line) >= assign_ix + 2) and (line[... + + This uses 0-indexed line numbers. *start* is (lineno, colno). + + Used to allow ``%magic`` and ``!system`` commands to be continued over + multiple lines. + """ + parts = [lines[start[0]][start[1]:]] + lines[start[0]+1:end_line+1] + return ' '.join([p.rstrip()[:-1] for p in parts[:-1]] # Strip backslash+newline + + [parts[-1].rstrip()]) # Strip newline from last line + +class TokenTransformBase: + """Base class for transformations which examine tokens. + + Special syntax should not be transformed when it occurs inside strings or + comments. This is hard to reliably avoid with regexes. The solution is to + tokenise the code as Python, and recognise the special syntax in the tokens. + + IPython's special syntax is not valid Python syntax, so tokenising may go + wrong after the special syntax starts. These classes therefore find and + transform *one* instance of special syntax at a time into regular Python + syntax. After each transformation, tokens are regenerated to find the next + piece of special syntax. 
+ + Subclasses need to implement one class method (find) + and one regular method (transform). + + The priority attribute can select which transformation to apply if multiple + transformers match in the same place. Lower numbers have higher priority. + This allows "%magic?" to be turned into a help call rather than a magic call. + """ + # Lower numbers -> higher priority (for matches in the same location) + priority = 10 + + def sortby(self): + return self.start_line, self.start_col, self.priority + + def __init__(self, start): + self.start_line = start[0] - 1 # Shift from 1-index to 0-index + self.start_col = start[1] + + @classmethod + def find(cls, tokens_by_line): + """Find one instance of special syntax in the provided tokens. + + Tokens are grouped into logical lines for convenience, + so it is easy to e.g. look at the first token of each line. + *tokens_by_line* is a list of lists of tokenize.TokenInfo objects. + + This should return an instance of its class, pointing to the start + position it has found, or None if it found no match. + """ + raise NotImplementedError + + def transform(self, lines: List[str]): + """Transform one instance of special syntax found by ``find()`` + + Takes a list of strings representing physical lines, + returns a similar list of transformed lines. + """ + raise NotImplementedError + +class MagicAssign(TokenTransformBase): + """Transformer for assignments from magics (a = %foo)""" + @classmethod + def find(cls, tokens_by_line): + """Find the first magic assignment (a = %foo) in the cell. + """ + for line in tokens_by_line: + assign_ix = _find_assign_op(line) + if (assign_ix is not None) \ + and (len(line) >= assign_ix + 2) \ + and (line[assign_ix+1].string == '%') \ + and (line[assign_ix+2].type == tokenize.NAME): + return cls(line[assign_ix+1].start) + + def transform(self, lines: List[str]): + """Transform a magic assignment found by the ``find()`` classmethod. + """ + start_line, start_col = self.start_line, self.start_col + lhs = lines[start_line][:start_col] + end_line = find_end_of_continued_line(lines, start_line) + rhs = assemble_continued_line(lines, (start_line, start_col), end_line) + assert rhs.startswith('%'), rhs + magic_name, _, args = rhs[1:].partition(' ') + + lines_before = lines[:start_line] + call = "get_ipython().run_line_magic({!r}, {!r})".format(magic_name, args) + new_line = lhs + call + '\n' + lines_after = lines[end_line+1:] + + return lines_before + [new_line] + lines_after + + +class SystemAssign(TokenTransformBase): + """Transformer for assignments from system commands (a = !foo)""" + @classmethod + def find(cls, tokens_by_line): + """Find the first system assignment (a = !foo) in the cell. + """ + for line in tokens_by_line: + assign_ix = _find_assign_op(line) + if (assign_ix is not None) \ + and not line[assign_ix].line.strip().startswith('=') \ + and (len(line) >= assign_ix + 2) \ + and (line[assign_ix + 1].type == tokenize.ERRORTOKEN): + ix = assign_ix + 1 + + while ix < len(line) and line[ix].type == tokenize.ERRORTOKEN: + if line[ix].string == '!': + return cls(line[ix].start) + elif not line[ix].string.isspace(): + break + ix += 1 + + def transform(self, lines: List[str]): + """Transform a system assignment found by the ``find()`` classmethod. 
+ """ + start_line, start_col = self.start_line, self.start_col + + lhs = lines[start_line][:start_col] + end_line = find_end_of_continued_line(lines, start_line) + rhs = assemble_continued_line(lines, (start_line, start_col), end_line) + assert rhs.startswith('!'), rhs + cmd = rhs[1:] + + lines_before = lines[:start_line] + call = "get_ipython().getoutput({!r})".format(cmd) + new_line = lhs + call + '\n' + lines_after = lines[end_line + 1:] + + return lines_before + [new_line] + lines_after + +# The escape sequences that define the syntax transformations IPython will +# apply to user input. These can NOT be just changed here: many regular +# expressions and other parts of the code may use their hardcoded values, and +# for all intents and purposes they constitute the 'IPython syntax', so they +# should be considered fixed. + +ESC_SHELL = '!' # Send line to underlying system shell +ESC_SH_CAP = '!!' # Send line to system shell and capture output +ESC_HELP = '?' # Find information about object +ESC_HELP2 = '??' # Find extra-detailed information about object +ESC_MAGIC = '%' # Call magic function +ESC_MAGIC2 = '%%' # Call cell-magic function +ESC_QUOTE = ',' # Split args on whitespace, quote each as string and call +ESC_QUOTE2 = ';' # Quote all args as a single string, call +ESC_PAREN = '/' # Call first argument with rest of line as arguments + +ESCAPE_SINGLES = {'!', '?', '%', ',', ';', '/'} +ESCAPE_DOUBLES = {'!!', '??'} # %% (cell magic) is handled separately + +def _make_help_call(target, esc, next_input=None): + """Prepares a pinfo(2)/psearch call from a target name and the escape + (i.e. ? or ??)""" + method = 'pinfo2' if esc == '??' \ + else 'psearch' if '*' in target \ + else 'pinfo' + arg = " ".join([method, target]) + #Prepare arguments for get_ipython().run_line_magic(magic_name, magic_args) + t_magic_name, _, t_magic_arg_s = arg.partition(' ') + t_magic_name = t_magic_name.lstrip(ESC_MAGIC) + if next_input is None: + return 'get_ipython().run_line_magic(%r, %r)' % (t_magic_name, t_magic_arg_s) + else: + return 'get_ipython().set_next_input(%r);get_ipython().run_line_magic(%r, %r)' % \ + (next_input, t_magic_name, t_magic_arg_s) + +def _tr_help(content): + """Translate lines escaped with: ? + + A naked help line should fire the intro help screen (shell.show_usage()) + """ + if not content: + return 'get_ipython().show_usage()' + + return _make_help_call(content, '?') + +def _tr_help2(content): + """Translate lines escaped with: ?? 
+ + A naked help line should fire the intro help screen (shell.show_usage()) + """ + if not content: + return 'get_ipython().show_usage()' + + return _make_help_call(content, '??') + +def _tr_magic(content): + "Translate lines escaped with a percent sign: %" + name, _, args = content.partition(' ') + return 'get_ipython().run_line_magic(%r, %r)' % (name, args) + +def _tr_quote(content): + "Translate lines escaped with a comma: ," + name, _, args = content.partition(' ') + return '%s("%s")' % (name, '", "'.join(args.split()) ) + +def _tr_quote2(content): + "Translate lines escaped with a semicolon: ;" + name, _, args = content.partition(' ') + return '%s("%s")' % (name, args) + +def _tr_paren(content): + "Translate lines escaped with a slash: /" + name, _, args = content.partition(' ') + return '%s(%s)' % (name, ", ".join(args.split())) + +tr = { ESC_SHELL : 'get_ipython().system({!r})'.format, + ESC_SH_CAP : 'get_ipython().getoutput({!r})'.format, + ESC_HELP : _tr_help, + ESC_HELP2 : _tr_help2, + ESC_MAGIC : _tr_magic, + ESC_QUOTE : _tr_quote, + ESC_QUOTE2 : _tr_quote2, + ESC_PAREN : _tr_paren } + +class EscapedCommand(TokenTransformBase): + """Transformer for escaped commands like %foo, !foo, or /foo""" + @classmethod + def find(cls, tokens_by_line): + """Find the first escaped command (%foo, !foo, etc.) in the cell. + """ + for line in tokens_by_line: + if not line: + continue + ix = 0 + ll = len(line) + while ll > ix and line[ix].type in {tokenize.INDENT, tokenize.DEDENT}: + ix += 1 + if ix >= ll: + continue + if line[ix].string in ESCAPE_SINGLES: + return cls(line[ix].start) + + def transform(self, lines): + """Transform an escaped line found by the ``find()`` classmethod. + """ + start_line, start_col = self.start_line, self.start_col + + indent = lines[start_line][:start_col] + end_line = find_end_of_continued_line(lines, start_line) + line = assemble_continued_line(lines, (start_line, start_col), end_line) + + if len(line) > 1 and line[:2] in ESCAPE_DOUBLES: + escape, content = line[:2], line[2:] + else: + escape, content = line[:1], line[1:] + + if escape in tr: + call = tr[escape](content) + else: + call = '' + + lines_before = lines[:start_line] + new_line = indent + call + '\n' + lines_after = lines[end_line + 1:] + + return lines_before + [new_line] + lines_after + +_help_end_re = re.compile(r"""(%{0,2} + (?!\d)[\w*]+ # Variable name + (\.(?!\d)[\w*]+)* # .etc.etc + ) + (\?\??)$ # ? or ?? + """, + re.VERBOSE) + +class HelpEnd(TokenTransformBase): + """Transformer for help syntax: obj? and obj??""" + # This needs to be higher priority (lower number) than EscapedCommand so + # that inspecting magics (%foo?) works. + priority = 5 + + def __init__(self, start, q_locn): + super().__init__(start) + self.q_line = q_locn[0] - 1 # Shift from 1-indexed to 0-indexed + self.q_col = q_locn[1] + + @classmethod + def find(cls, tokens_by_line): + """Find the first help command (foo?) in the cell. + """ + for line in tokens_by_line: + # Last token is NEWLINE; look at last but one + if len(line) > 2 and line[-2].string == '?': + # Find the first token that's not INDENT/DEDENT + ix = 0 + while line[ix].type in {tokenize.INDENT, tokenize.DEDENT}: + ix += 1 + return cls(line[ix].start, line[-2].start) + + def transform(self, lines): + """Transform a help command found by the ``find()`` classmethod. 
+ """ + piece = ''.join(lines[self.start_line:self.q_line+1]) + indent, content = piece[:self.start_col], piece[self.start_col:] + lines_before = lines[:self.start_line] + lines_after = lines[self.q_line + 1:] + + m = _help_end_re.search(content) + if not m: + raise SyntaxError(content) + assert m is not None, content + target = m.group(1) + esc = m.group(3) + + # If we're mid-command, put it back on the next prompt for the user. + next_input = None + if (not lines_before) and (not lines_after) \ + and content.strip() != m.group(0): + next_input = content.rstrip('?\n') + + call = _make_help_call(target, esc, next_input=next_input) + new_line = indent + call + '\n' + + return lines_before + [new_line] + lines_after + +def make_tokens_by_line(lines:List[str]): + """Tokenize a series of lines and group tokens by line. + + The tokens for a multiline Python string or expression are grouped as one + line. All lines except the last lines should keep their line ending ('\\n', + '\\r\\n') for this to properly work. Use `.splitlines(keeplineending=True)` + for example when passing block of text to this function. + + """ + # NL tokens are used inside multiline expressions, but also after blank + # lines or comments. This is intentional - see https://bugs.python.org/issue17061 + # We want to group the former case together but split the latter, so we + # track parentheses level, similar to the internals of tokenize. + NEWLINE, NL = tokenize.NEWLINE, tokenize.NL + tokens_by_line = [[]] + if len(lines) > 1 and not lines[0].endswith(('\n', '\r', '\r\n', '\x0b', '\x0c')): + warnings.warn("`make_tokens_by_line` received a list of lines which do not have lineending markers ('\\n', '\\r', '\\r\\n', '\\x0b', '\\x0c'), behavior will be unspecified") + parenlev = 0 + try: + for token in tokenize.generate_tokens(iter(lines).__next__): + tokens_by_line[-1].append(token) + if (token.type == NEWLINE) \ + or ((token.type == NL) and (parenlev <= 0)): + tokens_by_line.append([]) + elif token.string in {'(', '[', '{'}: + parenlev += 1 + elif token.string in {')', ']', '}'}: + if parenlev > 0: + parenlev -= 1 + except tokenize.TokenError: + # Input ended in a multiline string or expression. That's OK for us. + pass + + + if not tokens_by_line[-1]: + tokens_by_line.pop() + + + return tokens_by_line + +def show_linewise_tokens(s: str): + """For investigation and debugging""" + if not s.endswith('\n'): + s += '\n' + lines = s.splitlines(keepends=True) + for line in make_tokens_by_line(lines): + print("Line -------") + for tokinfo in line: + print(" ", tokinfo) + +# Arbitrary limit to prevent getting stuck in infinite loops +TRANSFORM_LOOP_LIMIT = 500 + +class TransformerManager: + """Applies various transformations to a cell or code block. + + The key methods for external use are ``transform_cell()`` + and ``check_complete()``. + """ + def __init__(self): + self.cleanup_transforms = [ + leading_empty_lines, + leading_indent, + classic_prompt, + ipython_prompt, + ] + self.line_transforms = [ + cell_magic, + ] + self.token_transformers = [ + MagicAssign, + SystemAssign, + EscapedCommand, + HelpEnd, + ] + + def do_one_token_transform(self, lines): + """Find and run the transform earliest in the code. + + Returns (changed, lines). + + This method is called repeatedly until changed is False, indicating + that all available transformations are complete. + + The tokens following IPython special syntax might not be valid, so + the transformed code is retokenised every time to identify the next + piece of special syntax. 
+        Python, not using lots of IPython special syntax, so this shouldn't be
+        a performance issue.
+        """
+        tokens_by_line = make_tokens_by_line(lines)
+        candidates = []
+        for transformer_cls in self.token_transformers:
+            transformer = transformer_cls.find(tokens_by_line)
+            if transformer:
+                candidates.append(transformer)
+
+        if not candidates:
+            # Nothing to transform
+            return False, lines
+        ordered_transformers = sorted(candidates, key=TokenTransformBase.sortby)
+        for transformer in ordered_transformers:
+            try:
+                return True, transformer.transform(lines)
+            except SyntaxError:
+                pass
+        return False, lines
+
+    def do_token_transforms(self, lines):
+        for _ in range(TRANSFORM_LOOP_LIMIT):
+            changed, lines = self.do_one_token_transform(lines)
+            if not changed:
+                return lines
+
+        raise RuntimeError("Input transformation still changing after "
+                           "%d iterations. Aborting." % TRANSFORM_LOOP_LIMIT)
+
+    def transform_cell(self, cell: str) -> str:
+        """Transforms a cell of input code"""
+        if not cell.endswith('\n'):
+            cell += '\n'  # Ensure the cell has a trailing newline
+        lines = cell.splitlines(keepends=True)
+        for transform in self.cleanup_transforms + self.line_transforms:
+            lines = transform(lines)
+
+        lines = self.do_token_transforms(lines)
+        return ''.join(lines)
+
+    def check_complete(self, cell: str):
+        """Return whether a block of code is ready to execute, or should be continued
+
+        Parameters
+        ----------
+        cell : str
+            Python input code, which can be multiline.
+
+        Returns
+        -------
+        status : str
+            One of 'complete', 'incomplete', or 'invalid'. 'invalid' means the
+            cell is not a prefix of valid code.
+        indent_spaces : int or None
+            The number of spaces by which to indent the next line of code. If
+            status is not 'incomplete', this is None.
+        """
+        # Remember if the cell ends in a newline.
+ ends_with_newline = False + for character in reversed(cell): + if character == '\n': + ends_with_newline = True + break + elif character.strip(): + break + else: + continue + + if not ends_with_newline: + # Append an newline for consistent tokenization + # See https://bugs.python.org/issue33899 + cell += '\n' + + lines = cell.splitlines(keepends=True) + + if not lines: + return 'complete', None + + if lines[-1].endswith('\\'): + # Explicit backslash continuation + return 'incomplete', find_last_indent(lines) + + try: + for transform in self.cleanup_transforms: + if not getattr(transform, 'has_side_effects', False): + lines = transform(lines) + except SyntaxError: + return 'invalid', None + + if lines[0].startswith('%%'): + # Special case for cell magics - completion marked by blank line + if lines[-1].strip(): + return 'incomplete', find_last_indent(lines) + else: + return 'complete', None + + try: + for transform in self.line_transforms: + if not getattr(transform, 'has_side_effects', False): + lines = transform(lines) + lines = self.do_token_transforms(lines) + except SyntaxError: + return 'invalid', None + + tokens_by_line = make_tokens_by_line(lines) + + if not tokens_by_line: + return 'incomplete', find_last_indent(lines) + + if tokens_by_line[-1][-1].type != tokenize.ENDMARKER: + # We're in a multiline string or expression + return 'incomplete', find_last_indent(lines) + + newline_types = {tokenize.NEWLINE, tokenize.COMMENT, tokenize.ENDMARKER} + + # Pop the last line which only contains DEDENTs and ENDMARKER + last_token_line = None + if {t.type for t in tokens_by_line[-1]} in [ + {tokenize.DEDENT, tokenize.ENDMARKER}, + {tokenize.ENDMARKER} + ] and len(tokens_by_line) > 1: + last_token_line = tokens_by_line.pop() + + while tokens_by_line[-1] and tokens_by_line[-1][-1].type in newline_types: + tokens_by_line[-1].pop() + + if not tokens_by_line[-1]: + return 'incomplete', find_last_indent(lines) + + if tokens_by_line[-1][-1].string == ':': + # The last line starts a block (e.g. 'if foo:') + ix = 0 + while tokens_by_line[-1][ix].type in {tokenize.INDENT, tokenize.DEDENT}: + ix += 1 + + indent = tokens_by_line[-1][ix].start[1] + return 'incomplete', indent + 4 + + if tokens_by_line[-1][0].line.endswith('\\'): + return 'incomplete', None + + # At this point, our checks think the code is complete (or invalid). 
+ # We'll use codeop.compile_command to check this with the real parser + try: + with warnings.catch_warnings(): + warnings.simplefilter('error', SyntaxWarning) + res = compile_command(''.join(lines), symbol='exec') + except (SyntaxError, OverflowError, ValueError, TypeError, + MemoryError, SyntaxWarning): + return 'invalid', None + else: + if res is None: + return 'incomplete', find_last_indent(lines) + + if last_token_line and last_token_line[0].type == tokenize.DEDENT: + if ends_with_newline: + return 'complete', None + return 'incomplete', find_last_indent(lines) + + # If there's a blank line at the end, assume we're ready to execute + if not lines[-1].strip(): + return 'complete', None + + return 'complete', None + + +def find_last_indent(lines): + m = _indent_re.match(lines[-1]) + if not m: + return 0 + return len(m.group(0).replace('\t', ' '*4)) + + +class MaybeAsyncCompile(Compile): + def __init__(self, extra_flags=0): + super().__init__() + self.flags |= extra_flags + + def __call__(self, *args, **kwds): + return compile(*args, **kwds) + + +class MaybeAsyncCommandCompiler(CommandCompiler): + def __init__(self, extra_flags=0): + self.compiler = MaybeAsyncCompile(extra_flags=extra_flags) + + +if (sys.version_info.major, sys.version_info.minor) >= (3, 8): + _extra_flags = ast.PyCF_ALLOW_TOP_LEVEL_AWAIT +else: + _extra_flags = ast.PyCF_ONLY_AST + +compile_command = MaybeAsyncCommandCompiler(extra_flags=_extra_flags) diff --git a/contrib/python/ipython/py3/IPython/core/interactiveshell.py b/contrib/python/ipython/py3/IPython/core/interactiveshell.py index 715a3103ca6..835dc8d0a8c 100644 --- a/contrib/python/ipython/py3/IPython/core/interactiveshell.py +++ b/contrib/python/ipython/py3/IPython/core/interactiveshell.py @@ -1,3840 +1,3840 @@ -# -*- coding: utf-8 -*- -"""Main IPython class.""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> -# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu> -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. 
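For orientation, the two entry points added above can be exercised directly. The snippet below is an illustrative sketch, not part of the patch; it assumes the module is importable as IPython.core.inputtransformer2 (as in this tree), and the outputs in the comments are the behaviour expected from the translation table and check_complete() logic shown above.

    from IPython.core.inputtransformer2 import TransformerManager, make_tokens_by_line

    # make_tokens_by_line(): the bracketed assignment spans three physical
    # lines but comes back as a single logical group of tokens.
    cell = "items = [\n    1, 2,\n]\nprint(items)\n"
    for group in make_tokens_by_line(cell.splitlines(keepends=True)):
        print([tok.string for tok in group])

    # TransformerManager(): escaped syntax is rewritten to plain Python, and
    # check_complete() drives the "do I need another input line?" decision.
    tm = TransformerManager()
    print(tm.transform_cell("%time x = 1"), end="")   # get_ipython().run_line_magic('time', 'x = 1')
    print(tm.transform_cell("!ls -l"), end="")        # get_ipython().system('ls -l')
    print(tm.check_complete("for i in range(3):"))    # ('incomplete', 4)
    print(tm.check_complete("x = 1\n"))               # ('complete', None)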
-#----------------------------------------------------------------------------- - - -import abc -import ast -import atexit -import builtins as builtin_mod -import functools -import inspect -import os -import re -import runpy -import sys -import tempfile -import traceback -import types -import subprocess -import warnings -from io import open as io_open - -from pathlib import Path -from pickleshare import PickleShareDB - -from traitlets.config.configurable import SingletonConfigurable -from traitlets.utils.importstring import import_item -from IPython.core import oinspect -from IPython.core import magic -from IPython.core import page -from IPython.core import prefilter -from IPython.core import ultratb -from IPython.core.alias import Alias, AliasManager -from IPython.core.autocall import ExitAutocall -from IPython.core.builtin_trap import BuiltinTrap -from IPython.core.events import EventManager, available_events -from IPython.core.compilerop import CachingCompiler, check_linecache_ipython -from IPython.core.debugger import InterruptiblePdb -from IPython.core.display_trap import DisplayTrap -from IPython.core.displayhook import DisplayHook -from IPython.core.displaypub import DisplayPublisher -from IPython.core.error import InputRejected, UsageError -from IPython.core.extensions import ExtensionManager -from IPython.core.formatters import DisplayFormatter -from IPython.core.history import HistoryManager -from IPython.core.inputtransformer2 import ESC_MAGIC, ESC_MAGIC2 -from IPython.core.logger import Logger -from IPython.core.macro import Macro -from IPython.core.payload import PayloadManager -from IPython.core.prefilter import PrefilterManager -from IPython.core.profiledir import ProfileDir -from IPython.core.usage import default_banner -from IPython.display import display -from IPython.testing.skipdoctest import skip_doctest -from IPython.utils import PyColorize -from IPython.utils import io -from IPython.utils import py3compat -from IPython.utils import openpy -from IPython.utils.decorators import undoc -from IPython.utils.io import ask_yes_no -from IPython.utils.ipstruct import Struct -from IPython.paths import get_ipython_dir -from IPython.utils.path import get_home_dir, get_py_filename, ensure_dir_exists -from IPython.utils.process import system, getoutput -from IPython.utils.strdispatch import StrDispatch -from IPython.utils.syspathcontext import prepended_to_syspath -from IPython.utils.text import format_screen, LSString, SList, DollarFormatter -from IPython.utils.tempdir import TemporaryDirectory -from traitlets import ( - Integer, Bool, CaselessStrEnum, Enum, List, Dict, Unicode, Instance, Type, - observe, default, validate, Any -) -from warnings import warn -from logging import error -import IPython.core.hooks - -from typing import List as ListType, Tuple, Optional -from ast import AST - -# NoOpContext is deprecated, but ipykernel imports it from here. 
-# See https://github.com/ipython/ipykernel/issues/157 -# (2016, let's try to remove than in IPython 8.0) -from IPython.utils.contexts import NoOpContext - -try: - import docrepr.sphinxify as sphx - - def sphinxify(doc): - with TemporaryDirectory() as dirname: - return { - 'text/html': sphx.sphinxify(doc, dirname), - 'text/plain': doc - } -except ImportError: - sphinxify = None - - -class ProvisionalWarning(DeprecationWarning): - """ - Warning class for unstable features - """ - pass - -if sys.version_info > (3,8): - from ast import Module -else : - # mock the new API, ignore second argument - # see https://github.com/ipython/ipython/issues/11590 - from ast import Module as OriginalModule - Module = lambda nodelist, type_ignores: OriginalModule(nodelist) - -if sys.version_info > (3,6): - _assign_nodes = (ast.AugAssign, ast.AnnAssign, ast.Assign) - _single_targets_nodes = (ast.AugAssign, ast.AnnAssign) -else: - _assign_nodes = (ast.AugAssign, ast.Assign ) - _single_targets_nodes = (ast.AugAssign, ) - -#----------------------------------------------------------------------------- -# Await Helpers -#----------------------------------------------------------------------------- - -def removed_co_newlocals(function:types.FunctionType) -> types.FunctionType: - """Return a function that do not create a new local scope. - - Given a function, create a clone of this function where the co_newlocal flag - has been removed, making this function code actually run in the sourounding - scope. - - We need this in order to run asynchronous code in user level namespace. - """ - from types import CodeType, FunctionType - CO_NEWLOCALS = 0x0002 - code = function.__code__ - new_co_flags = code.co_flags & ~CO_NEWLOCALS - if sys.version_info > (3, 8, 0, 'alpha', 3): - new_code = code.replace(co_flags=new_co_flags) - else: - new_code = CodeType( - code.co_argcount, - code.co_kwonlyargcount, - code.co_nlocals, - code.co_stacksize, - new_co_flags, - code.co_code, - code.co_consts, - code.co_names, - code.co_varnames, - code.co_filename, - code.co_name, - code.co_firstlineno, - code.co_lnotab, - code.co_freevars, - code.co_cellvars - ) - return FunctionType(new_code, globals(), function.__name__, function.__defaults__) - - -# we still need to run things using the asyncio eventloop, but there is no -# async integration -from .async_helpers import (_asyncio_runner, _asyncify, _pseudo_sync_runner) -from .async_helpers import _curio_runner, _trio_runner, _should_be_async - - -def _ast_asyncify(cell:str, wrapper_name:str) -> ast.Module: - """ - Parse a cell with top-level await and modify the AST to be able to run it later. - - Parameter - --------- - - cell: str - The code cell to asyncronify - wrapper_name: str - The name of the function to be used to wrap the passed `cell`. It is - advised to **not** use a python identifier in order to not pollute the - global namespace in which the function will be ran. - - Return - ------ - - A module object AST containing **one** function named `wrapper_name`. - - The given code is wrapped in a async-def function, parsed into an AST, and - the resulting function definition AST is modified to return the last - expression. - - The last expression or await node is moved into a return statement at the - end of the function, and removed from its original location. If the last - node is not Expr or Await nothing is done. 
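The await helpers above lean on a CPython facility that, from Python 3.8 onwards, makes the AST rewriting unnecessary: compiling with ast.PyCF_ALLOW_TOP_LEVEL_AWAIT. The stand-alone sketch below illustrates that underlying mechanism only, not IPython's actual runner; the namespace and cell text are made up for the example.

    import ast, asyncio, inspect, types

    user_ns = {"asyncio": asyncio}            # stand-in for the IPython user namespace
    cell = "await asyncio.sleep(0)\nmessage = 'ran with top-level await'"

    code = compile(cell, "<cell>", "exec", flags=ast.PyCF_ALLOW_TOP_LEVEL_AWAIT)
    func = types.FunctionType(code, user_ns)  # module-level code: runs directly in user_ns
    maybe_coro = func()
    if inspect.iscoroutine(maybe_coro):       # the cell really used top-level await
        asyncio.run(maybe_coro)
    print(user_ns["message"])                 # 'ran with top-level await'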
- - The function `__code__` will need to be later modified (by - ``removed_co_newlocals``) in a subsequent step to not create new `locals()` - meaning that the local and global scope are the same, ie as if the body of - the function was at module level. - - Lastly a call to `locals()` is made just before the last expression of the - function, or just after the last assignment or statement to make sure the - global dict is updated as python function work with a local fast cache which - is updated only on `local()` calls. - """ - - from ast import Expr, Await, Return - if sys.version_info >= (3,8): - return ast.parse(cell) - tree = ast.parse(_asyncify(cell)) - - function_def = tree.body[0] - function_def.name = wrapper_name - try_block = function_def.body[0] - lastexpr = try_block.body[-1] - if isinstance(lastexpr, (Expr, Await)): - try_block.body[-1] = Return(lastexpr.value) - ast.fix_missing_locations(tree) - return tree -#----------------------------------------------------------------------------- -# Globals -#----------------------------------------------------------------------------- - -# compiled regexps for autoindent management -dedent_re = re.compile(r'^\s+raise|^\s+return|^\s+pass') - -#----------------------------------------------------------------------------- -# Utilities -#----------------------------------------------------------------------------- - -@undoc -def softspace(file, newvalue): - """Copied from code.py, to remove the dependency""" - - oldvalue = 0 - try: - oldvalue = file.softspace - except AttributeError: - pass - try: - file.softspace = newvalue - except (AttributeError, TypeError): - # "attribute-less object" or "read-only attributes" - pass - return oldvalue - -@undoc -def no_op(*a, **kw): - pass - - -class SpaceInInput(Exception): pass - - -def get_default_colors(): - "DEPRECATED" - warn('get_default_color is deprecated since IPython 5.0, and returns `Neutral` on all platforms.', - DeprecationWarning, stacklevel=2) - return 'Neutral' - - -class SeparateUnicode(Unicode): - r"""A Unicode subclass to validate separate_in, separate_out, etc. - - This is a Unicode based trait that converts '0'->'' and ``'\\n'->'\n'``. - """ - - def validate(self, obj, value): - if value == '0': value = '' - value = value.replace('\\n','\n') - return super(SeparateUnicode, self).validate(obj, value) - - -@undoc -class DummyMod(object): - """A dummy module used for IPython's interactive module when - a namespace must be assigned to the module's __dict__.""" - __spec__ = None - - -class ExecutionInfo(object): - """The arguments used for a call to :meth:`InteractiveShell.run_cell` - - Stores information about what is going to happen. - """ - raw_cell = None - store_history = False - silent = False - shell_futures = True - - def __init__(self, raw_cell, store_history, silent, shell_futures): - self.raw_cell = raw_cell - self.store_history = store_history - self.silent = silent - self.shell_futures = shell_futures - - def __repr__(self): - name = self.__class__.__qualname__ - raw_cell = ((self.raw_cell[:50] + '..') - if len(self.raw_cell) > 50 else self.raw_cell) - return '<%s object at %x, raw_cell="%s" store_history=%s silent=%s shell_futures=%s>' %\ - (name, id(self), raw_cell, self.store_history, self.silent, self.shell_futures) - - -class ExecutionResult(object): - """The result of a call to :meth:`InteractiveShell.run_cell` - - Stores information about what took place. 
- """ - execution_count = None - error_before_exec = None - error_in_exec = None - info = None - result = None - - def __init__(self, info): - self.info = info - - @property - def success(self): - return (self.error_before_exec is None) and (self.error_in_exec is None) - - def raise_error(self): - """Reraises error if `success` is `False`, otherwise does nothing""" - if self.error_before_exec is not None: - raise self.error_before_exec - if self.error_in_exec is not None: - raise self.error_in_exec - - def __repr__(self): - name = self.__class__.__qualname__ - return '<%s object at %x, execution_count=%s error_before_exec=%s error_in_exec=%s info=%s result=%s>' %\ - (name, id(self), self.execution_count, self.error_before_exec, self.error_in_exec, repr(self.info), repr(self.result)) - - -class InteractiveShell(SingletonConfigurable): - """An enhanced, interactive shell for Python.""" - - _instance = None - - ast_transformers = List([], help= - """ - A list of ast.NodeTransformer subclass instances, which will be applied - to user input before code is run. - """ - ).tag(config=True) - - autocall = Enum((0,1,2), default_value=0, help= - """ - Make IPython automatically call any callable object even if you didn't - type explicit parentheses. For example, 'str 43' becomes 'str(43)' - automatically. The value can be '0' to disable the feature, '1' for - 'smart' autocall, where it is not applied if there are no more - arguments on the line, and '2' for 'full' autocall, where all callable - objects are automatically called (even if no arguments are present). - """ - ).tag(config=True) - - autoindent = Bool(True, help= - """ - Autoindent IPython code entered interactively. - """ - ).tag(config=True) - - autoawait = Bool(True, help= - """ - Automatically run await statement in the top level repl. - """ - ).tag(config=True) - - loop_runner_map ={ - 'asyncio':(_asyncio_runner, True), - 'curio':(_curio_runner, True), - 'trio':(_trio_runner, True), - 'sync': (_pseudo_sync_runner, False) - } - - loop_runner = Any(default_value="IPython.core.interactiveshell._asyncio_runner", - allow_none=True, - help="""Select the loop runner that will be used to execute top-level asynchronous code""" - ).tag(config=True) - - @default('loop_runner') - def _default_loop_runner(self): - return import_item("IPython.core.interactiveshell._asyncio_runner") - - @validate('loop_runner') - def _import_runner(self, proposal): - if isinstance(proposal.value, str): - if proposal.value in self.loop_runner_map: - runner, autoawait = self.loop_runner_map[proposal.value] - self.autoawait = autoawait - return runner - runner = import_item(proposal.value) - if not callable(runner): - raise ValueError('loop_runner must be callable') - return runner - if not callable(proposal.value): - raise ValueError('loop_runner must be callable') - return proposal.value - - automagic = Bool(True, help= - """ - Enable magic commands to be called without the leading %. - """ - ).tag(config=True) - - banner1 = Unicode(default_banner, - help="""The part of the banner to be printed before the profile""" - ).tag(config=True) - banner2 = Unicode('', - help="""The part of the banner to be printed after the profile""" - ).tag(config=True) - - cache_size = Integer(1000, help= - """ - Set the size of the output cache. The default is 1000, you can - change it permanently in your config file. 
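The ExecutionInfo/ExecutionResult records above are what callers of run_cell() get back. A hedged usage sketch, only meaningful inside a running IPython session where get_ipython() is defined:

    ip = get_ipython()
    result = ip.run_cell("1 / 0", store_history=False)

    print(result.success)          # False
    print(result.error_in_exec)    # the ZeroDivisionError raised inside the cell
    try:
        result.raise_error()       # re-raise it for ordinary exception handling
    except ZeroDivisionError:
        print("cell raised ZeroDivisionError")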
Setting it to 0 completely - disables the caching system, and the minimum value accepted is 3 (if - you provide a value less than 3, it is reset to 0 and a warning is - issued). This limit is defined because otherwise you'll spend more - time re-flushing a too small cache than working - """ - ).tag(config=True) - color_info = Bool(True, help= - """ - Use colors for displaying information about objects. Because this - information is passed through a pager (like 'less'), and some pagers - get confused with color codes, this capability can be turned off. - """ - ).tag(config=True) - colors = CaselessStrEnum(('Neutral', 'NoColor','LightBG','Linux'), - default_value='Neutral', - help="Set the color scheme (NoColor, Neutral, Linux, or LightBG)." - ).tag(config=True) - debug = Bool(False).tag(config=True) - disable_failing_post_execute = Bool(False, - help="Don't call post-execute functions that have failed in the past." - ).tag(config=True) - display_formatter = Instance(DisplayFormatter, allow_none=True) - displayhook_class = Type(DisplayHook) - display_pub_class = Type(DisplayPublisher) - compiler_class = Type(CachingCompiler) - - sphinxify_docstring = Bool(False, help= - """ - Enables rich html representation of docstrings. (This requires the - docrepr module). - """).tag(config=True) - - @observe("sphinxify_docstring") - def _sphinxify_docstring_changed(self, change): - if change['new']: - warn("`sphinxify_docstring` is provisional since IPython 5.0 and might change in future versions." , ProvisionalWarning) - - enable_html_pager = Bool(False, help= - """ - (Provisional API) enables html representation in mime bundles sent - to pagers. - """).tag(config=True) - - @observe("enable_html_pager") - def _enable_html_pager_changed(self, change): - if change['new']: - warn("`enable_html_pager` is provisional since IPython 5.0 and might change in future versions.", ProvisionalWarning) - - data_pub_class = None - - exit_now = Bool(False) - exiter = Instance(ExitAutocall) - @default('exiter') - def _exiter_default(self): - return ExitAutocall(self) - # Monotonically increasing execution counter - execution_count = Integer(1) - filename = Unicode("<ipython console>") - ipython_dir= Unicode('').tag(config=True) # Set to get_ipython_dir() in __init__ - - # Used to transform cells before running them, and check whether code is complete - input_transformer_manager = Instance('IPython.core.inputtransformer2.TransformerManager', - ()) - - @property - def input_transformers_cleanup(self): - return self.input_transformer_manager.cleanup_transforms - - input_transformers_post = List([], - help="A list of string input transformers, to be applied after IPython's " - "own input transformations." - ) - - @property - def input_splitter(self): - """Make this available for backward compatibility (pre-7.0 release) with existing code. - - For example, ipykernel ipykernel currently uses - `shell.input_splitter.check_complete` - """ - from warnings import warn - warn("`input_splitter` is deprecated since IPython 7.0, prefer `input_transformer_manager`.", - DeprecationWarning, stacklevel=2 - ) - return self.input_transformer_manager - - logstart = Bool(False, help= - """ - Start logging to the default log file in overwrite mode. - Use `logappend` to specify a log file to **append** logs to. - """ - ).tag(config=True) - logfile = Unicode('', help= - """ - The name of the logfile to use. - """ - ).tag(config=True) - logappend = Unicode('', help= - """ - Start logging to the given file in append mode. 
- Use `logfile` to specify a log file to **overwrite** logs to. - """ - ).tag(config=True) - object_info_string_level = Enum((0,1,2), default_value=0, - ).tag(config=True) - pdb = Bool(False, help= - """ - Automatically call the pdb debugger after every exception. - """ - ).tag(config=True) - display_page = Bool(False, - help="""If True, anything that would be passed to the pager - will be displayed as regular output instead.""" - ).tag(config=True) - - # deprecated prompt traits: - - prompt_in1 = Unicode('In [\\#]: ', - help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly." - ).tag(config=True) - prompt_in2 = Unicode(' .\\D.: ', - help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly." - ).tag(config=True) - prompt_out = Unicode('Out[\\#]: ', - help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly." - ).tag(config=True) - prompts_pad_left = Bool(True, - help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly." - ).tag(config=True) - - @observe('prompt_in1', 'prompt_in2', 'prompt_out', 'prompt_pad_left') - def _prompt_trait_changed(self, change): - name = change['name'] - warn("InteractiveShell.{name} is deprecated since IPython 4.0" - " and ignored since 5.0, set TerminalInteractiveShell.prompts" - " object directly.".format(name=name)) - - # protect against weird cases where self.config may not exist: - - show_rewritten_input = Bool(True, - help="Show rewritten input, e.g. for autocall." - ).tag(config=True) - - quiet = Bool(False).tag(config=True) - - history_length = Integer(10000, - help='Total length of command history' - ).tag(config=True) - - history_load_length = Integer(1000, help= - """ - The number of saved history entries to be loaded - into the history buffer at startup. - """ - ).tag(config=True) - - ast_node_interactivity = Enum(['all', 'last', 'last_expr', 'none', 'last_expr_or_assign'], - default_value='last_expr', - help=""" - 'all', 'last', 'last_expr' or 'none', 'last_expr_or_assign' specifying - which nodes should be run interactively (displaying output from expressions). - """ - ).tag(config=True) - - # TODO: this part of prompt management should be moved to the frontends. - # Use custom TraitTypes that convert '0'->'' and '\\n'->'\n' - separate_in = SeparateUnicode('\n').tag(config=True) - separate_out = SeparateUnicode('').tag(config=True) - separate_out2 = SeparateUnicode('').tag(config=True) - wildcards_case_sensitive = Bool(True).tag(config=True) - xmode = CaselessStrEnum(('Context', 'Plain', 'Verbose', 'Minimal'), - default_value='Context', - help="Switch modes for the IPython exception handlers." 
- ).tag(config=True) - - # Subcomponents of InteractiveShell - alias_manager = Instance('IPython.core.alias.AliasManager', allow_none=True) - prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True) - builtin_trap = Instance('IPython.core.builtin_trap.BuiltinTrap', allow_none=True) - display_trap = Instance('IPython.core.display_trap.DisplayTrap', allow_none=True) - extension_manager = Instance('IPython.core.extensions.ExtensionManager', allow_none=True) - payload_manager = Instance('IPython.core.payload.PayloadManager', allow_none=True) - history_manager = Instance('IPython.core.history.HistoryAccessorBase', allow_none=True) - magics_manager = Instance('IPython.core.magic.MagicsManager', allow_none=True) - - profile_dir = Instance('IPython.core.application.ProfileDir', allow_none=True) - @property - def profile(self): - if self.profile_dir is not None: - name = os.path.basename(self.profile_dir.location) - return name.replace('profile_','') - - - # Private interface - _post_execute = Dict() - - # Tracks any GUI loop loaded for pylab - pylab_gui_select = None - - last_execution_succeeded = Bool(True, help='Did last executed command succeeded') - - last_execution_result = Instance('IPython.core.interactiveshell.ExecutionResult', help='Result of executing the last command', allow_none=True) - - def __init__(self, ipython_dir=None, profile_dir=None, - user_module=None, user_ns=None, - custom_exceptions=((), None), **kwargs): - - # This is where traits with a config_key argument are updated - # from the values on config. - super(InteractiveShell, self).__init__(**kwargs) - if 'PromptManager' in self.config: - warn('As of IPython 5.0 `PromptManager` config will have no effect' - ' and has been replaced by TerminalInteractiveShell.prompts_class') - self.configurables = [self] - - # These are relatively independent and stateless - self.init_ipython_dir(ipython_dir) - self.init_profile_dir(profile_dir) - self.init_instance_attrs() - self.init_environment() - - # Check if we're in a virtualenv, and set up sys.path. - self.init_virtualenv() - - # Create namespaces (user_ns, user_global_ns, etc.) - self.init_create_namespaces(user_module, user_ns) - # This has to be done after init_create_namespaces because it uses - # something in self.user_ns, but before init_sys_modules, which - # is the first thing to modify sys. - # TODO: When we override sys.stdout and sys.stderr before this class - # is created, we are saving the overridden ones here. Not sure if this - # is what we want to do. - self.save_sys_module_state() - self.init_sys_modules() - - # While we're trying to have each part of the code directly access what - # it needs without keeping redundant references to objects, we have too - # much legacy code that expects ip.db to exist. - self.db = PickleShareDB(os.path.join(self.profile_dir.location, 'db')) - - self.init_history() - self.init_encoding() - self.init_prefilter() - - self.init_syntax_highlighting() - self.init_hooks() - self.init_events() - self.init_pushd_popd_magic() - self.init_user_ns() - self.init_logger() - self.init_builtins() - - # The following was in post_config_initialization - self.init_inspector() - self.raw_input_original = input - self.init_completer() - # TODO: init_io() needs to happen before init_traceback handlers - # because the traceback handlers hardcode the stdout/stderr streams. - # This logic in in debugger.Pdb and should eventually be changed. 
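Most of the traits declared above carry .tag(config=True), so they can be set from a configuration file or a traitlets Config object before the shell is constructed. A minimal sketch, assuming only the standard traitlets API; the log file name is made up:

    from traitlets.config import Config

    c = Config()
    c.InteractiveShell.autocall = 1                              # 'smart' autocall
    c.InteractiveShell.cache_size = 0                            # disable the output cache
    c.InteractiveShell.colors = 'LightBG'
    c.InteractiveShell.ast_node_interactivity = 'last_expr_or_assign'
    c.InteractiveShell.logfile = 'session_log.py'                # hypothetical path

    # e.g. handed to an embedded shell:
    #   from IPython.terminal.embed import InteractiveShellEmbed
    #   InteractiveShellEmbed(config=c)()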
- self.init_io() - self.init_traceback_handlers(custom_exceptions) - self.init_prompts() - self.init_display_formatter() - self.init_display_pub() - self.init_data_pub() - self.init_displayhook() - self.init_magics() - self.init_alias() - self.init_logstart() - self.init_pdb() - self.init_extension_manager() - self.init_payload() - self.init_deprecation_warnings() - self.hooks.late_startup_hook() - self.events.trigger('shell_initialized', self) - atexit.register(self.atexit_operations) - - # The trio runner is used for running Trio in the foreground thread. It - # is different from `_trio_runner(async_fn)` in `async_helpers.py` - # which calls `trio.run()` for every cell. This runner runs all cells - # inside a single Trio event loop. If used, it is set from - # `ipykernel.kernelapp`. - self.trio_runner = None - - def get_ipython(self): - """Return the currently running IPython instance.""" - return self - - #------------------------------------------------------------------------- - # Trait changed handlers - #------------------------------------------------------------------------- - @observe('ipython_dir') - def _ipython_dir_changed(self, change): - ensure_dir_exists(change['new']) - - def set_autoindent(self,value=None): - """Set the autoindent flag. - - If called with no arguments, it acts as a toggle.""" - if value is None: - self.autoindent = not self.autoindent - else: - self.autoindent = value - - def set_trio_runner(self, tr): - self.trio_runner = tr - - #------------------------------------------------------------------------- - # init_* methods called by __init__ - #------------------------------------------------------------------------- - - def init_ipython_dir(self, ipython_dir): - if ipython_dir is not None: - self.ipython_dir = ipython_dir - return - - self.ipython_dir = get_ipython_dir() - - def init_profile_dir(self, profile_dir): - if profile_dir is not None: - self.profile_dir = profile_dir - return - self.profile_dir = ProfileDir.create_profile_dir_by_name( - self.ipython_dir, "default" - ) - - def init_instance_attrs(self): - self.more = False - - # command compiler - self.compile = self.compiler_class() - - # Make an empty namespace, which extension writers can rely on both - # existing and NEVER being used by ipython itself. This gives them a - # convenient location for storing additional information and state - # their extensions may require, without fear of collisions with other - # ipython names that may develop later. - self.meta = Struct() - - # Temporary files used for various purposes. Deleted at exit. - self.tempfiles = [] - self.tempdirs = [] - - # keep track of where we started running (mainly for crash post-mortem) - # This is not being used anywhere currently. - self.starting_dir = os.getcwd() - - # Indentation management - self.indent_current_nsp = 0 - - # Dict to track post-execution functions that have been registered - self._post_execute = {} - - def init_environment(self): - """Any changes we need to make to the user's environment.""" - pass - - def init_encoding(self): - # Get system encoding at startup time. 
Certain terminals (like Emacs - # under Win32 have it set to None, and we need to have a known valid - # encoding to use in the raw_input() method - try: - self.stdin_encoding = sys.stdin.encoding or 'ascii' - except AttributeError: - self.stdin_encoding = 'ascii' - - - @observe('colors') - def init_syntax_highlighting(self, changes=None): - # Python source parser/formatter for syntax highlighting - pyformat = PyColorize.Parser(style=self.colors, parent=self).format - self.pycolorize = lambda src: pyformat(src,'str') - - def refresh_style(self): - # No-op here, used in subclass - pass - - def init_pushd_popd_magic(self): - # for pushd/popd management - self.home_dir = get_home_dir() - - self.dir_stack = [] - - def init_logger(self): - self.logger = Logger(self.home_dir, logfname='ipython_log.py', - logmode='rotate') - - def init_logstart(self): - """Initialize logging in case it was requested at the command line. - """ - if self.logappend: - self.magic('logstart %s append' % self.logappend) - elif self.logfile: - self.magic('logstart %s' % self.logfile) - elif self.logstart: - self.magic('logstart') - - def init_deprecation_warnings(self): - """ - register default filter for deprecation warning. - - This will allow deprecation warning of function used interactively to show - warning to users, and still hide deprecation warning from libraries import. - """ - if sys.version_info < (3,7): - warnings.filterwarnings("default", category=DeprecationWarning, module=self.user_ns.get("__name__")) - - - def init_builtins(self): - # A single, static flag that we set to True. Its presence indicates - # that an IPython shell has been created, and we make no attempts at - # removing on exit or representing the existence of more than one - # IPython at a time. - builtin_mod.__dict__['__IPYTHON__'] = True - builtin_mod.__dict__['display'] = display - - self.builtin_trap = BuiltinTrap(shell=self) - - @observe('colors') - def init_inspector(self, changes=None): - # Object inspector - self.inspector = oinspect.Inspector(oinspect.InspectColors, - PyColorize.ANSICodeColors, - self.colors, - self.object_info_string_level) - - def init_io(self): - # This will just use sys.stdout and sys.stderr. If you want to - # override sys.stdout and sys.stderr themselves, you need to do that - # *before* instantiating this class, because io holds onto - # references to the underlying streams. - # io.std* are deprecated, but don't show our own deprecation warnings - # during initialization of the deprecated API. - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - io.stdout = io.IOStream(sys.stdout) - io.stderr = io.IOStream(sys.stderr) - - def init_prompts(self): - # Set system prompts, so that scripts can decide if they are running - # interactively. 
- sys.ps1 = 'In : ' - sys.ps2 = '...: ' - sys.ps3 = 'Out: ' - - def init_display_formatter(self): - self.display_formatter = DisplayFormatter(parent=self) - self.configurables.append(self.display_formatter) - - def init_display_pub(self): - self.display_pub = self.display_pub_class(parent=self, shell=self) - self.configurables.append(self.display_pub) - - def init_data_pub(self): - if not self.data_pub_class: - self.data_pub = None - return - self.data_pub = self.data_pub_class(parent=self) - self.configurables.append(self.data_pub) - - def init_displayhook(self): - # Initialize displayhook, set in/out prompts and printing system - self.displayhook = self.displayhook_class( - parent=self, - shell=self, - cache_size=self.cache_size, - ) - self.configurables.append(self.displayhook) - # This is a context manager that installs/revmoes the displayhook at - # the appropriate time. - self.display_trap = DisplayTrap(hook=self.displayhook) - - def init_virtualenv(self): - """Add the current virtualenv to sys.path so the user can import modules from it. - This isn't perfect: it doesn't use the Python interpreter with which the - virtualenv was built, and it ignores the --no-site-packages option. A - warning will appear suggesting the user installs IPython in the - virtualenv, but for many cases, it probably works well enough. - Adapted from code snippets online. - http://blog.ufsoft.org/2009/1/29/ipython-and-virtualenv - """ - if 'VIRTUAL_ENV' not in os.environ: - # Not in a virtualenv - return - elif os.environ["VIRTUAL_ENV"] == "": - warn("Virtual env path set to '', please check if this is intended.") - return - - p = Path(sys.executable) - p_venv = Path(os.environ["VIRTUAL_ENV"]) - - # fallback venv detection: - # stdlib venv may symlink sys.executable, so we can't use realpath. - # but others can symlink *to* the venv Python, so we can't just use sys.executable. - # So we just check every item in the symlink tree (generally <= 3) - paths = [p] - while p.is_symlink(): - p = Path(os.readlink(p)) - paths.append(p.resolve()) - - # In Cygwin paths like "c:\..." and '\cygdrive\c\...' are possible - if p_venv.parts[1] == "cygdrive": - drive_name = p_venv.parts[2] - p_venv = (drive_name + ":/") / Path(*p_venv.parts[3:]) - - if any(p_venv == p.parents[1] for p in paths): - # Our exe is inside or has access to the virtualenv, don't need to do anything. - return - - if sys.platform == "win32": - virtual_env = str(Path(os.environ["VIRTUAL_ENV"], "Lib", "site-packages")) - else: - virtual_env_path = Path( - os.environ["VIRTUAL_ENV"], "lib", "python{}.{}", "site-packages" - ) - p_ver = sys.version_info[:2] - - # Predict version from py[thon]-x.x in the $VIRTUAL_ENV - re_m = re.search(r"\bpy(?:thon)?([23])\.(\d+)\b", os.environ["VIRTUAL_ENV"]) - if re_m: - predicted_path = Path(str(virtual_env_path).format(*re_m.groups())) - if predicted_path.exists(): - p_ver = re_m.groups() - - virtual_env = str(virtual_env_path).format(*p_ver) - - warn( - "Attempting to work in a virtualenv. If you encounter problems, " - "please install IPython inside the virtualenv." - ) - import site - sys.path.insert(0, virtual_env) - site.addsitedir(virtual_env) - - #------------------------------------------------------------------------- - # Things related to injections into the sys module - #------------------------------------------------------------------------- - - def save_sys_module_state(self): - """Save the state of hooks in the sys module. - - This has to be called after self.user_module is created. 
- """ - self._orig_sys_module_state = {'stdin': sys.stdin, - 'stdout': sys.stdout, - 'stderr': sys.stderr, - 'excepthook': sys.excepthook} - self._orig_sys_modules_main_name = self.user_module.__name__ - self._orig_sys_modules_main_mod = sys.modules.get(self.user_module.__name__) - - def restore_sys_module_state(self): - """Restore the state of the sys module.""" - try: - for k, v in self._orig_sys_module_state.items(): - setattr(sys, k, v) - except AttributeError: - pass - # Reset what what done in self.init_sys_modules - if self._orig_sys_modules_main_mod is not None: - sys.modules[self._orig_sys_modules_main_name] = self._orig_sys_modules_main_mod - - #------------------------------------------------------------------------- - # Things related to the banner - #------------------------------------------------------------------------- - - @property - def banner(self): - banner = self.banner1 - if self.profile and self.profile != 'default': - banner += '\nIPython profile: %s\n' % self.profile - if self.banner2: - banner += '\n' + self.banner2 - return banner - - def show_banner(self, banner=None): - if banner is None: - banner = self.banner - sys.stdout.write(banner) - - #------------------------------------------------------------------------- - # Things related to hooks - #------------------------------------------------------------------------- - - def init_hooks(self): - # hooks holds pointers used for user-side customizations - self.hooks = Struct() - - self.strdispatchers = {} - - # Set all default hooks, defined in the IPython.hooks module. - hooks = IPython.core.hooks - for hook_name in hooks.__all__: - # default hooks have priority 100, i.e. low; user hooks should have - # 0-100 priority - self.set_hook(hook_name,getattr(hooks,hook_name), 100, _warn_deprecated=False) - - if self.display_page: - self.set_hook('show_in_pager', page.as_hook(page.display_page), 90) - - def set_hook(self,name,hook, priority=50, str_key=None, re_key=None, - _warn_deprecated=True): - """set_hook(name,hook) -> sets an internal IPython hook. - - IPython exposes some of its internal API as user-modifiable hooks. By - adding your function to one of these hooks, you can modify IPython's - behavior to call at runtime your own routines.""" - - # At some point in the future, this should validate the hook before it - # accepts it. Probably at least check that the hook takes the number - # of args it's supposed to. - - f = types.MethodType(hook,self) - - # check if the hook is for strdispatcher first - if str_key is not None: - sdp = self.strdispatchers.get(name, StrDispatch()) - sdp.add_s(str_key, f, priority ) - self.strdispatchers[name] = sdp - return - if re_key is not None: - sdp = self.strdispatchers.get(name, StrDispatch()) - sdp.add_re(re.compile(re_key), f, priority ) - self.strdispatchers[name] = sdp - return - - dp = getattr(self.hooks, name, None) - if name not in IPython.core.hooks.__all__: - print("Warning! Hook '%s' is not one of %s" % \ - (name, IPython.core.hooks.__all__ )) - - if _warn_deprecated and (name in IPython.core.hooks.deprecated): - alternative = IPython.core.hooks.deprecated[name] - warn("Hook {} is deprecated. 
Use {} instead.".format(name, alternative), stacklevel=2) - - if not dp: - dp = IPython.core.hooks.CommandChainDispatcher() - - try: - dp.add(f,priority) - except AttributeError: - # it was not commandchain, plain old func - replace - dp = f - - setattr(self.hooks,name, dp) - - #------------------------------------------------------------------------- - # Things related to events - #------------------------------------------------------------------------- - - def init_events(self): - self.events = EventManager(self, available_events) - - self.events.register("pre_execute", self._clear_warning_registry) - - def register_post_execute(self, func): - """DEPRECATED: Use ip.events.register('post_run_cell', func) - - Register a function for calling after code execution. - """ - warn("ip.register_post_execute is deprecated, use " - "ip.events.register('post_run_cell', func) instead.", stacklevel=2) - self.events.register('post_run_cell', func) - - def _clear_warning_registry(self): - # clear the warning registry, so that different code blocks with - # overlapping line number ranges don't cause spurious suppression of - # warnings (see gh-6611 for details) - if "__warningregistry__" in self.user_global_ns: - del self.user_global_ns["__warningregistry__"] - - #------------------------------------------------------------------------- - # Things related to the "main" module - #------------------------------------------------------------------------- - - def new_main_mod(self, filename, modname): - """Return a new 'main' module object for user code execution. - - ``filename`` should be the path of the script which will be run in the - module. Requests with the same filename will get the same module, with - its namespace cleared. - - ``modname`` should be the module name - normally either '__main__' or - the basename of the file without the extension. - - When scripts are executed via %run, we must keep a reference to their - __main__ module around so that Python doesn't - clear it, rendering references to module globals useless. - - This method keeps said reference in a private dict, keyed by the - absolute path of the script. This way, for multiple executions of the - same script we only keep one copy of the namespace (the last one), - thus preventing memory leaks from old references while allowing the - objects from the last execution to be accessible. - """ - filename = os.path.abspath(filename) - try: - main_mod = self._main_mod_cache[filename] - except KeyError: - main_mod = self._main_mod_cache[filename] = types.ModuleType( - modname, - doc="Module created for script run in IPython") - else: - main_mod.__dict__.clear() - main_mod.__name__ = modname - - main_mod.__file__ = filename - # It seems pydoc (and perhaps others) needs any module instance to - # implement a __nonzero__ method - main_mod.__nonzero__ = lambda : True - - return main_mod - - def clear_main_mod_cache(self): - """Clear the cache of main modules. - - Mainly for use by utilities like %reset. 
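As the deprecation notice on register_post_execute() above says, post-execution callbacks now go through the events API. A small hedged example of registering one, run inside an IPython session; in IPython 7.x the callback receives the ExecutionResult of the cell that just finished:

    def log_run(result):
        # `result` is the ExecutionResult for the cell that just ran
        print("cell done, success =", result.success)

    get_ipython().events.register('post_run_cell', log_run)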
- - Examples - -------- - - In [15]: import IPython - - In [16]: m = _ip.new_main_mod(IPython.__file__, 'IPython') - - In [17]: len(_ip._main_mod_cache) > 0 - Out[17]: True - - In [18]: _ip.clear_main_mod_cache() - - In [19]: len(_ip._main_mod_cache) == 0 - Out[19]: True - """ - self._main_mod_cache.clear() - - #------------------------------------------------------------------------- - # Things related to debugging - #------------------------------------------------------------------------- - - def init_pdb(self): - # Set calling of pdb on exceptions - # self.call_pdb is a property - self.call_pdb = self.pdb - - def _get_call_pdb(self): - return self._call_pdb - - def _set_call_pdb(self,val): - - if val not in (0,1,False,True): - raise ValueError('new call_pdb value must be boolean') - - # store value in instance - self._call_pdb = val - - # notify the actual exception handlers - self.InteractiveTB.call_pdb = val - - call_pdb = property(_get_call_pdb,_set_call_pdb,None, - 'Control auto-activation of pdb at exceptions') - - def debugger(self,force=False): - """Call the pdb debugger. - - Keywords: - - - force(False): by default, this routine checks the instance call_pdb - flag and does not actually invoke the debugger if the flag is false. - The 'force' option forces the debugger to activate even if the flag - is false. - """ - - if not (force or self.call_pdb): - return - - if not hasattr(sys,'last_traceback'): - error('No traceback has been produced, nothing to debug.') - return - - self.InteractiveTB.debugger(force=True) - - #------------------------------------------------------------------------- - # Things related to IPython's various namespaces - #------------------------------------------------------------------------- - default_user_namespaces = True - - def init_create_namespaces(self, user_module=None, user_ns=None): - # Create the namespace where the user will operate. user_ns is - # normally the only one used, and it is passed to the exec calls as - # the locals argument. But we do carry a user_global_ns namespace - # given as the exec 'globals' argument, This is useful in embedding - # situations where the ipython shell opens in a context where the - # distinction between locals and globals is meaningful. For - # non-embedded contexts, it is just the same object as the user_ns dict. - - # FIXME. For some strange reason, __builtins__ is showing up at user - # level as a dict instead of a module. This is a manual fix, but I - # should really track down where the problem is coming from. Alex - # Schmolck reported this problem first. - - # A useful post by Alex Martelli on this topic: - # Re: inconsistent value from __builtins__ - # Von: Alex Martelli <aleaxit@yahoo.com> - # Datum: Freitag 01 Oktober 2004 04:45:34 nachmittags/abends - # Gruppen: comp.lang.python - - # Michael Hohn <hohn@hooknose.lbl.gov> wrote: - # > >>> print type(builtin_check.get_global_binding('__builtins__')) - # > <type 'dict'> - # > >>> print type(__builtins__) - # > <type 'module'> - # > Is this difference in return value intentional? - - # Well, it's documented that '__builtins__' can be either a dictionary - # or a module, and it's been that way for a long time. Whether it's - # intentional (or sensible), I don't know. In any case, the idea is - # that if you need to access the built-in namespace directly, you - # should start with "import __builtin__" (note, no 's') which will - # definitely give you a module. Yeah, it's somewhat confusing:-(. 
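The call_pdb property defined above is the same flag the %pdb line magic toggles; a short sketch of driving it programmatically from inside a session:

    ip = get_ipython()
    ip.call_pdb = True        # drop into pdb automatically on uncaught exceptions
    # after an exception has occurred, post-mortem debugging can also be
    # forced explicitly, regardless of the flag:
    #   ip.debugger(force=True)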
- - # These routines return a properly built module and dict as needed by - # the rest of the code, and can also be used by extension writers to - # generate properly initialized namespaces. - if (user_ns is not None) or (user_module is not None): - self.default_user_namespaces = False - self.user_module, self.user_ns = self.prepare_user_module(user_module, user_ns) - - # A record of hidden variables we have added to the user namespace, so - # we can list later only variables defined in actual interactive use. - self.user_ns_hidden = {} - - # Now that FakeModule produces a real module, we've run into a nasty - # problem: after script execution (via %run), the module where the user - # code ran is deleted. Now that this object is a true module (needed - # so doctest and other tools work correctly), the Python module - # teardown mechanism runs over it, and sets to None every variable - # present in that module. Top-level references to objects from the - # script survive, because the user_ns is updated with them. However, - # calling functions defined in the script that use other things from - # the script will fail, because the function's closure had references - # to the original objects, which are now all None. So we must protect - # these modules from deletion by keeping a cache. - # - # To avoid keeping stale modules around (we only need the one from the - # last run), we use a dict keyed with the full path to the script, so - # only the last version of the module is held in the cache. Note, - # however, that we must cache the module *namespace contents* (their - # __dict__). Because if we try to cache the actual modules, old ones - # (uncached) could be destroyed while still holding references (such as - # those held by GUI objects that tend to be long-lived)> - # - # The %reset command will flush this cache. See the cache_main_mod() - # and clear_main_mod_cache() methods for details on use. - - # This is the cache used for 'main' namespaces - self._main_mod_cache = {} - - # A table holding all the namespaces IPython deals with, so that - # introspection facilities can search easily. - self.ns_table = {'user_global':self.user_module.__dict__, - 'user_local':self.user_ns, - 'builtin':builtin_mod.__dict__ - } - - @property - def user_global_ns(self): - return self.user_module.__dict__ - - def prepare_user_module(self, user_module=None, user_ns=None): - """Prepare the module and namespace in which user code will be run. - - When IPython is started normally, both parameters are None: a new module - is created automatically, and its __dict__ used as the namespace. - - If only user_module is provided, its __dict__ is used as the namespace. - If only user_ns is provided, a dummy module is created, and user_ns - becomes the global namespace. If both are provided (as they may be - when embedding), user_ns is the local namespace, and user_module - provides the global namespace. - - Parameters - ---------- - user_module : module, optional - The current user module in which IPython is being run. If None, - a clean module will be created. - user_ns : dict, optional - A namespace in which to run interactive commands. - - Returns - ------- - A tuple of user_module and user_ns, each properly initialised. 
- """ - if user_module is None and user_ns is not None: - user_ns.setdefault("__name__", "__main__") - user_module = DummyMod() - user_module.__dict__ = user_ns - - if user_module is None: - user_module = types.ModuleType("__main__", - doc="Automatically created module for IPython interactive environment") - - # We must ensure that __builtin__ (without the final 's') is always - # available and pointing to the __builtin__ *module*. For more details: - # http://mail.python.org/pipermail/python-dev/2001-April/014068.html - user_module.__dict__.setdefault('__builtin__', builtin_mod) - user_module.__dict__.setdefault('__builtins__', builtin_mod) - - if user_ns is None: - user_ns = user_module.__dict__ - - return user_module, user_ns - - def init_sys_modules(self): - # We need to insert into sys.modules something that looks like a - # module but which accesses the IPython namespace, for shelve and - # pickle to work interactively. Normally they rely on getting - # everything out of __main__, but for embedding purposes each IPython - # instance has its own private namespace, so we can't go shoving - # everything into __main__. - - # note, however, that we should only do this for non-embedded - # ipythons, which really mimic the __main__.__dict__ with their own - # namespace. Embedded instances, on the other hand, should not do - # this because they need to manage the user local/global namespaces - # only, but they live within a 'normal' __main__ (meaning, they - # shouldn't overtake the execution environment of the script they're - # embedded in). - - # This is overridden in the InteractiveShellEmbed subclass to a no-op. - main_name = self.user_module.__name__ - sys.modules[main_name] = self.user_module - - def init_user_ns(self): - """Initialize all user-visible namespaces to their minimum defaults. - - Certain history lists are also initialized here, as they effectively - act as user namespaces. - - Notes - ----- - All data structures here are only filled in, they are NOT reset by this - method. If they were not empty before, data will simply be added to - them. - """ - # This function works in two parts: first we put a few things in - # user_ns, and we sync that contents into user_ns_hidden so that these - # initial variables aren't shown by %who. After the sync, we add the - # rest of what we *do* want the user to see with %who even on a new - # session (probably nothing, so they really only see their own stuff) - - # The user dict must *always* have a __builtin__ reference to the - # Python standard __builtin__ namespace, which must be imported. - # This is so that certain operations in prompt evaluation can be - # reliably executed with builtins. Note that we can NOT use - # __builtins__ (note the 's'), because that can either be a dict or a - # module, and can even mutate at runtime, depending on the context - # (Python makes no guarantees on it). In contrast, __builtin__ is - # always a module object, though it must be explicitly imported. - - # For more details: - # http://mail.python.org/pipermail/python-dev/2001-April/014068.html - ns = {} - - # make global variables for user access to the histories - ns['_ih'] = self.history_manager.input_hist_parsed - ns['_oh'] = self.history_manager.output_hist - ns['_dh'] = self.history_manager.dir_hist - - # user aliases to input and output histories. These shouldn't show up - # in %who, as they can have very large reprs. 
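prepare_user_module() above is what lets embedding code hand the shell an existing namespace. A hedged sketch, constructing the shell directly rather than through an IPython entry point, with a made-up host namespace:

    from IPython.core.interactiveshell import InteractiveShell

    host_ns = {'answer': 42}
    shell = InteractiveShell(user_ns=host_ns)          # host-provided namespace
    shell.run_cell("doubled = answer * 2", store_history=False)
    print(host_ns['doubled'])                          # 84, visible to the host program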
- ns['In'] = self.history_manager.input_hist_parsed - ns['Out'] = self.history_manager.output_hist - - # Store myself as the public api!!! - ns['get_ipython'] = self.get_ipython - - ns['exit'] = self.exiter - ns['quit'] = self.exiter - - # Sync what we've added so far to user_ns_hidden so these aren't seen - # by %who - self.user_ns_hidden.update(ns) - - # Anything put into ns now would show up in %who. Think twice before - # putting anything here, as we really want %who to show the user their - # stuff, not our variables. - - # Finally, update the real user's namespace - self.user_ns.update(ns) - - @property - def all_ns_refs(self): - """Get a list of references to all the namespace dictionaries in which - IPython might store a user-created object. - - Note that this does not include the displayhook, which also caches - objects from the output.""" - return [self.user_ns, self.user_global_ns, self.user_ns_hidden] + \ - [m.__dict__ for m in self._main_mod_cache.values()] - - def reset(self, new_session=True, aggressive=False): - """Clear all internal namespaces, and attempt to release references to - user objects. - - If new_session is True, a new history session will be opened. - """ - # Clear histories - self.history_manager.reset(new_session) - # Reset counter used to index all histories - if new_session: - self.execution_count = 1 - - # Reset last execution result - self.last_execution_succeeded = True - self.last_execution_result = None - - # Flush cached output items - if self.displayhook.do_full_cache: - self.displayhook.flush() - - # The main execution namespaces must be cleared very carefully, - # skipping the deletion of the builtin-related keys, because doing so - # would cause errors in many object's __del__ methods. - if self.user_ns is not self.user_global_ns: - self.user_ns.clear() - ns = self.user_global_ns - drop_keys = set(ns.keys()) - drop_keys.discard('__builtin__') - drop_keys.discard('__builtins__') - drop_keys.discard('__name__') - for k in drop_keys: - del ns[k] - - self.user_ns_hidden.clear() - - # Restore the user namespaces to minimal usability - self.init_user_ns() - if aggressive and not hasattr(self, "_sys_modules_keys"): - print("Cannot restore sys.module, no snapshot") - elif aggressive: - print("culling sys module...") - current_keys = set(sys.modules.keys()) - for k in current_keys - self._sys_modules_keys: - if k.startswith("multiprocessing"): - continue - del sys.modules[k] - - # Restore the default and user aliases - self.alias_manager.clear_aliases() - self.alias_manager.init_aliases() - - # Now define aliases that only make sense on the terminal, because they - # need direct access to the console in a way that we can't emulate in - # GUI or web frontend - if os.name == 'posix': - for cmd in ('clear', 'more', 'less', 'man'): - if cmd not in self.magics_manager.magics['line']: - self.alias_manager.soft_define_alias(cmd, cmd) - - # Flush the private list of module references kept for script - # execution protection - self.clear_main_mod_cache() - - def del_var(self, varname, by_name=False): - """Delete a variable from the various namespaces, so that, as - far as possible, we're not keeping any hidden references to it. - - Parameters - ---------- - varname : str - The name of the variable to delete. - by_name : bool - If True, delete variables with the given name in each - namespace. If False (default), find the variable in the user - namespace, and delete references to it. 
- """ - if varname in ('__builtin__', '__builtins__'): - raise ValueError("Refusing to delete %s" % varname) - - ns_refs = self.all_ns_refs - - if by_name: # Delete by name - for ns in ns_refs: - try: - del ns[varname] - except KeyError: - pass - else: # Delete by object - try: - obj = self.user_ns[varname] - except KeyError: - raise NameError("name '%s' is not defined" % varname) - # Also check in output history - ns_refs.append(self.history_manager.output_hist) - for ns in ns_refs: - to_delete = [n for n, o in ns.items() if o is obj] - for name in to_delete: - del ns[name] - - # Ensure it is removed from the last execution result - if self.last_execution_result.result is obj: - self.last_execution_result = None - - # displayhook keeps extra references, but not in a dictionary - for name in ('_', '__', '___'): - if getattr(self.displayhook, name) is obj: - setattr(self.displayhook, name, None) - - def reset_selective(self, regex=None): - """Clear selective variables from internal namespaces based on a - specified regular expression. - - Parameters - ---------- - regex : string or compiled pattern, optional - A regular expression pattern that will be used in searching - variable names in the users namespaces. - """ - if regex is not None: - try: - m = re.compile(regex) - except TypeError: - raise TypeError('regex must be a string or compiled pattern') - # Search for keys in each namespace that match the given regex - # If a match is found, delete the key/value pair. - for ns in self.all_ns_refs: - for var in ns: - if m.search(var): - del ns[var] - - def push(self, variables, interactive=True): - """Inject a group of variables into the IPython user namespace. - - Parameters - ---------- - variables : dict, str or list/tuple of str - The variables to inject into the user's namespace. If a dict, a - simple update is done. If a str, the string is assumed to have - variable names separated by spaces. A list/tuple of str can also - be used to give the variable names. If just the variable names are - give (list/tuple/str) then the variable values looked up in the - callers frame. - interactive : bool - If True (default), the variables will be listed with the ``who`` - magic. - """ - vdict = None - - # We need a dict of name/value pairs to do namespace updates. - if isinstance(variables, dict): - vdict = variables - elif isinstance(variables, (str, list, tuple)): - if isinstance(variables, str): - vlist = variables.split() - else: - vlist = variables - vdict = {} - cf = sys._getframe(1) - for name in vlist: - try: - vdict[name] = eval(name, cf.f_globals, cf.f_locals) - except: - print('Could not get variable %s from %s' % - (name,cf.f_code.co_name)) - else: - raise ValueError('variables must be a dict/str/list/tuple') - - # Propagate variables to user namespace - self.user_ns.update(vdict) - - # And configure interactive visibility - user_ns_hidden = self.user_ns_hidden - if interactive: - for name in vdict: - user_ns_hidden.pop(name, None) - else: - user_ns_hidden.update(vdict) - - def drop_by_id(self, variables): - """Remove a dict of variables from the user namespace, if they are the - same as the values in the dictionary. - - This is intended for use by extensions: variables that they've added can - be taken back out if they are unloaded, without removing any that the - user has overwritten. - - Parameters - ---------- - variables : dict - A dictionary mapping object names (as strings) to the objects. 
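A short sketch of how an extension might pair push() with drop_by_id(), so that unloading removes only objects the user has not replaced; the extension hook names are IPython's standard load/unload entry points, while the injected tools object is an assumption for illustration:

from types import SimpleNamespace

_injected = {"tools": SimpleNamespace(version="0.1")}

def load_ipython_extension(ip):
    # interactive=True (the default) keeps the injected names visible to %who
    ip.push(_injected, interactive=True)

def unload_ipython_extension(ip):
    # only removed if user_ns still holds the very same objects
    ip.drop_by_id(_injected)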
- """ - for name, obj in variables.items(): - if name in self.user_ns and self.user_ns[name] is obj: - del self.user_ns[name] - self.user_ns_hidden.pop(name, None) - - #------------------------------------------------------------------------- - # Things related to object introspection - #------------------------------------------------------------------------- - - def _ofind(self, oname, namespaces=None): - """Find an object in the available namespaces. - - self._ofind(oname) -> dict with keys: found,obj,ospace,ismagic - - Has special code to detect magic functions. - """ - oname = oname.strip() - if not oname.startswith(ESC_MAGIC) and \ - not oname.startswith(ESC_MAGIC2) and \ - not all(a.isidentifier() for a in oname.split(".")): - return {'found': False} - - if namespaces is None: - # Namespaces to search in: - # Put them in a list. The order is important so that we - # find things in the same order that Python finds them. - namespaces = [ ('Interactive', self.user_ns), - ('Interactive (global)', self.user_global_ns), - ('Python builtin', builtin_mod.__dict__), - ] - - ismagic = False - isalias = False - found = False - ospace = None - parent = None - obj = None - - - # Look for the given name by splitting it in parts. If the head is - # found, then we look for all the remaining parts as members, and only - # declare success if we can find them all. - oname_parts = oname.split('.') - oname_head, oname_rest = oname_parts[0],oname_parts[1:] - for nsname,ns in namespaces: - try: - obj = ns[oname_head] - except KeyError: - continue - else: - for idx, part in enumerate(oname_rest): - try: - parent = obj - # The last part is looked up in a special way to avoid - # descriptor invocation as it may raise or have side - # effects. - if idx == len(oname_rest) - 1: - obj = self._getattr_property(obj, part) - else: - obj = getattr(obj, part) - except: - # Blanket except b/c some badly implemented objects - # allow __getattr__ to raise exceptions other than - # AttributeError, which then crashes IPython. - break - else: - # If we finish the for loop (no break), we got all members - found = True - ospace = nsname - break # namespace loop - - # Try to see if it's magic - if not found: - obj = None - if oname.startswith(ESC_MAGIC2): - oname = oname.lstrip(ESC_MAGIC2) - obj = self.find_cell_magic(oname) - elif oname.startswith(ESC_MAGIC): - oname = oname.lstrip(ESC_MAGIC) - obj = self.find_line_magic(oname) - else: - # search without prefix, so run? will find %run? - obj = self.find_line_magic(oname) - if obj is None: - obj = self.find_cell_magic(oname) - if obj is not None: - found = True - ospace = 'IPython internal' - ismagic = True - isalias = isinstance(obj, Alias) - - # Last try: special-case some literals like '', [], {}, etc: - if not found and oname_head in ["''",'""','[]','{}','()']: - obj = eval(oname_head) - found = True - ospace = 'Interactive' - - return { - 'obj':obj, - 'found':found, - 'parent':parent, - 'ismagic':ismagic, - 'isalias':isalias, - 'namespace':ospace - } - - @staticmethod - def _getattr_property(obj, attrname): - """Property-aware getattr to use in object finding. - - If attrname represents a property, return it unevaluated (in case it has - side effects or raises an error. - - """ - if not isinstance(obj, type): - try: - # `getattr(type(obj), attrname)` is not guaranteed to return - # `obj`, but does so for property: - # - # property.__get__(self, None, cls) -> self - # - # The universal alternative is to traverse the mro manually - # searching for attrname in class dicts. 
- attr = getattr(type(obj), attrname) - except AttributeError: - pass - else: - # This relies on the fact that data descriptors (with both - # __get__ & __set__ magic methods) take precedence over - # instance-level attributes: - # - # class A(object): - # @property - # def foobar(self): return 123 - # a = A() - # a.__dict__['foobar'] = 345 - # a.foobar # == 123 - # - # So, a property may be returned right away. - if isinstance(attr, property): - return attr - - # Nothing helped, fall back. - return getattr(obj, attrname) - - def _object_find(self, oname, namespaces=None): - """Find an object and return a struct with info about it.""" - return Struct(self._ofind(oname, namespaces)) - - def _inspect(self, meth, oname, namespaces=None, **kw): - """Generic interface to the inspector system. - - This function is meant to be called by pdef, pdoc & friends. - """ - info = self._object_find(oname, namespaces) - docformat = sphinxify if self.sphinxify_docstring else None - if info.found: - pmethod = getattr(self.inspector, meth) - # TODO: only apply format_screen to the plain/text repr of the mime - # bundle. - formatter = format_screen if info.ismagic else docformat - if meth == 'pdoc': - pmethod(info.obj, oname, formatter) - elif meth == 'pinfo': - pmethod( - info.obj, - oname, - formatter, - info, - enable_html_pager=self.enable_html_pager, - **kw - ) - else: - pmethod(info.obj, oname) - else: - print('Object `%s` not found.' % oname) - return 'not found' # so callers can take other action - - def object_inspect(self, oname, detail_level=0): - """Get object info about oname""" - with self.builtin_trap: - info = self._object_find(oname) - if info.found: - return self.inspector.info(info.obj, oname, info=info, - detail_level=detail_level - ) - else: - return oinspect.object_info(name=oname, found=False) - - def object_inspect_text(self, oname, detail_level=0): - """Get object info as formatted text""" - return self.object_inspect_mime(oname, detail_level)['text/plain'] - - def object_inspect_mime(self, oname, detail_level=0): - """Get object info as a mimebundle of formatted representations. - - A mimebundle is a dictionary, keyed by mime-type. - It must always have the key `'text/plain'`. - """ - with self.builtin_trap: - info = self._object_find(oname) - if info.found: - docformat = sphinxify if self.sphinxify_docstring else None - return self.inspector._get_info( - info.obj, - oname, - info=info, - detail_level=detail_level, - formatter=docformat, - ) - else: - raise KeyError(oname) - - #------------------------------------------------------------------------- - # Things related to history management - #------------------------------------------------------------------------- - - def init_history(self): - """Sets up the command history, and starts regular autosaves.""" - self.history_manager = HistoryManager(shell=self, parent=self) - self.configurables.append(self.history_manager) - - #------------------------------------------------------------------------- - # Things related to exception handling and tracebacks (not debugging) - #------------------------------------------------------------------------- - - debugger_cls = InterruptiblePdb - - def init_traceback_handlers(self, custom_exceptions): - # Syntax error handler. - self.SyntaxTB = ultratb.SyntaxTB(color_scheme='NoColor', parent=self) - - # The interactive one is initialized with an offset, meaning we always - # want to remove the topmost item in the traceback, which is our own - # internal code. 
Valid modes: ['Plain','Context','Verbose','Minimal'] - self.InteractiveTB = ultratb.AutoFormattedTB(mode = 'Plain', - color_scheme='NoColor', - tb_offset = 1, - check_cache=check_linecache_ipython, - debugger_cls=self.debugger_cls, parent=self) - - # The instance will store a pointer to the system-wide exception hook, - # so that runtime code (such as magics) can access it. This is because - # during the read-eval loop, it may get temporarily overwritten. - self.sys_excepthook = sys.excepthook - - # and add any custom exception handlers the user may have specified - self.set_custom_exc(*custom_exceptions) - - # Set the exception mode - self.InteractiveTB.set_mode(mode=self.xmode) - - def set_custom_exc(self, exc_tuple, handler): - """set_custom_exc(exc_tuple, handler) - - Set a custom exception handler, which will be called if any of the - exceptions in exc_tuple occur in the mainloop (specifically, in the - run_code() method). - - Parameters - ---------- - - exc_tuple : tuple of exception classes - A *tuple* of exception classes, for which to call the defined - handler. It is very important that you use a tuple, and NOT A - LIST here, because of the way Python's except statement works. If - you only want to trap a single exception, use a singleton tuple:: - - exc_tuple == (MyCustomException,) - - handler : callable - handler must have the following signature:: - - def my_handler(self, etype, value, tb, tb_offset=None): - ... - return structured_traceback - - Your handler must return a structured traceback (a list of strings), - or None. - - This will be made into an instance method (via types.MethodType) - of IPython itself, and it will be called if any of the exceptions - listed in the exc_tuple are caught. If the handler is None, an - internal basic one is used, which just prints basic info. - - To protect IPython from crashes, if your handler ever raises an - exception or returns an invalid result, it will be immediately - disabled. - - Notes - ----- - - WARNING: by putting in your own exception handler into IPython's main - execution loop, you run a very good chance of nasty crashes. This - facility should only be used if you really know what you are doing.""" - if not isinstance(exc_tuple, tuple): - raise TypeError("The custom exceptions must be given as a tuple.") - - def dummy_handler(self, etype, value, tb, tb_offset=None): - print('*** Simple custom exception handler ***') - print('Exception type :', etype) - print('Exception value:', value) - print('Traceback :', tb) - - def validate_stb(stb): - """validate structured traceback return type - - return type of CustomTB *should* be a list of strings, but allow - single strings or None, which are harmless. - - This function will *always* return a list of strings, - and will raise a TypeError if stb is inappropriate. - """ - msg = "CustomTB must return list of strings, not %r" % stb - if stb is None: - return [] - elif isinstance(stb, str): - return [stb] - elif not isinstance(stb, list): - raise TypeError(msg) - # it's a list - for line in stb: - # check every element - if not isinstance(line, str): - raise TypeError(msg) - return stb - - if handler is None: - wrapped = dummy_handler - else: - def wrapped(self,etype,value,tb,tb_offset=None): - """wrap CustomTB handler, to protect IPython from user code - - This makes it harder (but not impossible) for custom exception - handlers to crash IPython. 
- """ - try: - stb = handler(self,etype,value,tb,tb_offset=tb_offset) - return validate_stb(stb) - except: - # clear custom handler immediately - self.set_custom_exc((), None) - print("Custom TB Handler failed, unregistering", file=sys.stderr) - # show the exception in handler first - stb = self.InteractiveTB.structured_traceback(*sys.exc_info()) - print(self.InteractiveTB.stb2text(stb)) - print("The original exception:") - stb = self.InteractiveTB.structured_traceback( - (etype,value,tb), tb_offset=tb_offset - ) - return stb - - self.CustomTB = types.MethodType(wrapped,self) - self.custom_exceptions = exc_tuple - - def excepthook(self, etype, value, tb): - """One more defense for GUI apps that call sys.excepthook. - - GUI frameworks like wxPython trap exceptions and call - sys.excepthook themselves. I guess this is a feature that - enables them to keep running after exceptions that would - otherwise kill their mainloop. This is a bother for IPython - which expects to catch all of the program exceptions with a try: - except: statement. - - Normally, IPython sets sys.excepthook to a CrashHandler instance, so if - any app directly invokes sys.excepthook, it will look to the user like - IPython crashed. In order to work around this, we can disable the - CrashHandler and replace it with this excepthook instead, which prints a - regular traceback using our InteractiveTB. In this fashion, apps which - call sys.excepthook will generate a regular-looking exception from - IPython, and the CrashHandler will only be triggered by real IPython - crashes. - - This hook should be used sparingly, only in places which are not likely - to be true IPython errors. - """ - self.showtraceback((etype, value, tb), tb_offset=0) - - def _get_exc_info(self, exc_tuple=None): - """get exc_info from a given tuple, sys.exc_info() or sys.last_type etc. - - Ensures sys.last_type,value,traceback hold the exc_info we found, - from whichever source. - - raises ValueError if none of these contain any information - """ - if exc_tuple is None: - etype, value, tb = sys.exc_info() - else: - etype, value, tb = exc_tuple - - if etype is None: - if hasattr(sys, 'last_type'): - etype, value, tb = sys.last_type, sys.last_value, \ - sys.last_traceback - - if etype is None: - raise ValueError("No exception to find") - - # Now store the exception info in sys.last_type etc. - # WARNING: these variables are somewhat deprecated and not - # necessarily safe to use in a threaded environment, but tools - # like pdb depend on their existence, so let's set them. If we - # find problems in the field, we'll need to revisit their use. - sys.last_type = etype - sys.last_value = value - sys.last_traceback = tb - - return etype, value, tb - - def show_usage_error(self, exc): - """Show a short message for UsageErrors - - These are special exceptions that shouldn't show a traceback. - """ - print("UsageError: %s" % exc, file=sys.stderr) - - def get_exception_only(self, exc_tuple=None): - """ - Return as a string (ending with a newline) the exception that - just occurred, without any traceback. - """ - etype, value, tb = self._get_exc_info(exc_tuple) - msg = traceback.format_exception_only(etype, value) - return ''.join(msg) - - def showtraceback(self, exc_tuple=None, filename=None, tb_offset=None, - exception_only=False, running_compiled_code=False): - """Display the exception that just occurred. 
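A sketch of the set_custom_exc() usage described above; QueryError and the handler body are invented for illustration, but the signature and the tuple requirement follow the docstring:

from IPython import get_ipython

class QueryError(Exception):
    pass

def query_error_handler(shell, etype, value, tb, tb_offset=None):
    print("Query failed:", value)
    # delegate to the normal machinery so a structured traceback (list of str) is returned
    return shell.InteractiveTB.structured_traceback(etype, value, tb, tb_offset=tb_offset)

# exc_tuple must be a tuple, even for a single exception class
get_ipython().set_custom_exc((QueryError,), query_error_handler)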
- - If nothing is known about the exception, this is the method which - should be used throughout the code for presenting user tracebacks, - rather than directly invoking the InteractiveTB object. - - A specific showsyntaxerror() also exists, but this method can take - care of calling it if needed, so unless you are explicitly catching a - SyntaxError exception, don't try to analyze the stack manually and - simply call this method.""" - - try: - try: - etype, value, tb = self._get_exc_info(exc_tuple) - except ValueError: - print('No traceback available to show.', file=sys.stderr) - return - - if issubclass(etype, SyntaxError): - # Though this won't be called by syntax errors in the input - # line, there may be SyntaxError cases with imported code. - self.showsyntaxerror(filename, running_compiled_code) - elif etype is UsageError: - self.show_usage_error(value) - else: - if exception_only: - stb = ['An exception has occurred, use %tb to see ' - 'the full traceback.\n'] - stb.extend(self.InteractiveTB.get_exception_only(etype, - value)) - else: - try: - # Exception classes can customise their traceback - we - # use this in IPython.parallel for exceptions occurring - # in the engines. This should return a list of strings. - stb = value._render_traceback_() - except Exception: - stb = self.InteractiveTB.structured_traceback(etype, - value, tb, tb_offset=tb_offset) - - self._showtraceback(etype, value, stb) - if self.call_pdb: - # drop into debugger - self.debugger(force=True) - return - - # Actually show the traceback - self._showtraceback(etype, value, stb) - - except KeyboardInterrupt: - print('\n' + self.get_exception_only(), file=sys.stderr) - - def _showtraceback(self, etype, evalue, stb: str): - """Actually show a traceback. - - Subclasses may override this method to put the traceback on a different - place, like a side channel. - """ - val = self.InteractiveTB.stb2text(stb) - try: - print(val) - except UnicodeEncodeError: - print(val.encode("utf-8", "backslashreplace").decode()) - - def showsyntaxerror(self, filename=None, running_compiled_code=False): - """Display the syntax error that just occurred. - - This doesn't display a stack trace because there isn't one. - - If a filename is given, it is stuffed in the exception instead - of what was there before (because Python's parser always uses - "<string>" when reading from a string). - - If the syntax error occurred when running a compiled code (i.e. running_compile_code=True), - longer stack trace will be displayed. - """ - etype, value, last_traceback = self._get_exc_info() - - if filename and issubclass(etype, SyntaxError): - try: - value.filename = filename - except: - # Not the format we expect; leave it alone - pass - - # If the error occurred when executing compiled code, we should provide full stacktrace. - elist = traceback.extract_tb(last_traceback) if running_compiled_code else [] - stb = self.SyntaxTB.structured_traceback(etype, value, elist) - self._showtraceback(etype, value, stb) - - # This is overridden in TerminalInteractiveShell to show a message about - # the %paste magic. - def showindentationerror(self): - """Called by _run_cell when there's an IndentationError in code entered - at the prompt. 
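For code embedding IPython, the showtraceback()/get_exception_only() pair documented here is the usual way to report an error without letting it propagate; a small sketch, with ZeroDivisionError standing in for any exception:

from IPython import get_ipython

ip = get_ipython()
try:
    1 / 0
except ZeroDivisionError:
    ip.showtraceback()                        # formats the active sys.exc_info()
    print(ip.get_exception_only(), end="")    # short form, already newline-terminated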
- - This is overridden in TerminalInteractiveShell to show a message about - the %paste magic.""" - self.showsyntaxerror() - - #------------------------------------------------------------------------- - # Things related to readline - #------------------------------------------------------------------------- - - def init_readline(self): - """DEPRECATED - - Moved to terminal subclass, here only to simplify the init logic.""" - # Set a number of methods that depend on readline to be no-op - warnings.warn('`init_readline` is no-op since IPython 5.0 and is Deprecated', - DeprecationWarning, stacklevel=2) - self.set_custom_completer = no_op - - @skip_doctest - def set_next_input(self, s, replace=False): - """ Sets the 'default' input string for the next command line. - - Example:: - - In [1]: _ip.set_next_input("Hello Word") - In [2]: Hello Word_ # cursor is here - """ - self.rl_next_input = s - - def _indent_current_str(self): - """return the current level of indentation as a string""" - return self.input_splitter.get_indent_spaces() * ' ' - - #------------------------------------------------------------------------- - # Things related to text completion - #------------------------------------------------------------------------- - - def init_completer(self): - """Initialize the completion machinery. - - This creates completion machinery that can be used by client code, - either interactively in-process (typically triggered by the readline - library), programmatically (such as in test suites) or out-of-process - (typically over the network by remote frontends). - """ - from IPython.core.completer import IPCompleter - from IPython.core.completerlib import (module_completer, - magic_run_completer, cd_completer, reset_completer) - - self.Completer = IPCompleter(shell=self, - namespace=self.user_ns, - global_namespace=self.user_global_ns, - parent=self, - ) - self.configurables.append(self.Completer) - - # Add custom completers to the basic ones built into IPCompleter - sdisp = self.strdispatchers.get('complete_command', StrDispatch()) - self.strdispatchers['complete_command'] = sdisp - self.Completer.custom_completers = sdisp - - self.set_hook('complete_command', module_completer, str_key = 'import') - self.set_hook('complete_command', module_completer, str_key = 'from') - self.set_hook('complete_command', module_completer, str_key = '%aimport') - self.set_hook('complete_command', magic_run_completer, str_key = '%run') - self.set_hook('complete_command', cd_completer, str_key = '%cd') - self.set_hook('complete_command', reset_completer, str_key = '%reset') - - @skip_doctest - def complete(self, text, line=None, cursor_pos=None): - """Return the completed text and a list of completions. - - Parameters - ---------- - - text : string - A string of text to be completed on. It can be given as empty and - instead a line/position pair are given. In this case, the - completer itself will split the line like readline does. - - line : string, optional - The complete line that text is part of. - - cursor_pos : int, optional - The position of the cursor on the input line. - - Returns - ------- - text : string - The actual text that was completed. - - matches : list - A sorted list with all possible completions. - - The optional arguments allow the completion to take more context into - account, and are part of the low-level completion API. - - This is a wrapper around the completion mechanism, similar to what - readline does at the command line when the TAB key is hit. 
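A sketch of calling the completion machinery programmatically through complete(), as the docstring above describes; the sample variable x is an assumption:

from IPython import get_ipython

ip = get_ipython()
ip.user_ns["x"] = "hello"
text, matches = ip.complete("x.l")
# matches is a sorted list, e.g. ['x.ljust', 'x.lower', 'x.lstrip']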
By - exposing it as a method, it can be used by other non-readline - environments (such as GUIs) for text completion. - - Simple usage example: - - In [1]: x = 'hello' - - In [2]: _ip.complete('x.l') - Out[2]: ('x.l', ['x.ljust', 'x.lower', 'x.lstrip']) - """ - - # Inject names into __builtin__ so we can complete on the added names. - with self.builtin_trap: - return self.Completer.complete(text, line, cursor_pos) - - def set_custom_completer(self, completer, pos=0) -> None: - """Adds a new custom completer function. - - The position argument (defaults to 0) is the index in the completers - list where you want the completer to be inserted. - - `completer` should have the following signature:: - - def completion(self: Completer, text: string) -> List[str]: - raise NotImplementedError - - It will be bound to the current Completer instance and pass some text - and return a list with current completions to suggest to the user. - """ - - newcomp = types.MethodType(completer, self.Completer) - self.Completer.custom_matchers.insert(pos,newcomp) - - def set_completer_frame(self, frame=None): - """Set the frame of the completer.""" - if frame: - self.Completer.namespace = frame.f_locals - self.Completer.global_namespace = frame.f_globals - else: - self.Completer.namespace = self.user_ns - self.Completer.global_namespace = self.user_global_ns - - #------------------------------------------------------------------------- - # Things related to magics - #------------------------------------------------------------------------- - - def init_magics(self): - from IPython.core import magics as m - self.magics_manager = magic.MagicsManager(shell=self, - parent=self, - user_magics=m.UserMagics(self)) - self.configurables.append(self.magics_manager) - - # Expose as public API from the magics manager - self.register_magics = self.magics_manager.register - - self.register_magics(m.AutoMagics, m.BasicMagics, m.CodeMagics, - m.ConfigMagics, m.DisplayMagics, m.ExecutionMagics, - m.ExtensionMagics, m.HistoryMagics, m.LoggingMagics, - m.NamespaceMagics, m.OSMagics, m.PackagingMagics, - m.PylabMagics, m.ScriptMagics, - ) - self.register_magics(m.AsyncMagics) - - # Register Magic Aliases - mman = self.magics_manager - # FIXME: magic aliases should be defined by the Magics classes - # or in MagicsManager, not here - mman.register_alias('ed', 'edit') - mman.register_alias('hist', 'history') - mman.register_alias('rep', 'recall') - mman.register_alias('SVG', 'svg', 'cell') - mman.register_alias('HTML', 'html', 'cell') - mman.register_alias('file', 'writefile', 'cell') - - # FIXME: Move the color initialization to the DisplayHook, which - # should be split into a prompt manager and displayhook. We probably - # even need a centralize colors management object. - self.run_line_magic('colors', self.colors) - - # Defined here so that it's included in the documentation - @functools.wraps(magic.MagicsManager.register_function) - def register_magic_function(self, func, magic_kind='line', magic_name=None): - self.magics_manager.register_function( - func, magic_kind=magic_kind, magic_name=magic_name - ) - - def run_line_magic(self, magic_name, line, _stack_depth=1): - """Execute the given line magic. - - Parameters - ---------- - magic_name : str - Name of the desired magic function, without '%' prefix. - - line : str - The rest of the input line as a single string. - - _stack_depth : int - If run_line_magic() is called from magic() then _stack_depth=2. 
- This is added to ensure backward compatibility for use of 'get_ipython().magic()' - """ - fn = self.find_line_magic(magic_name) - if fn is None: - cm = self.find_cell_magic(magic_name) - etpl = "Line magic function `%%%s` not found%s." - extra = '' if cm is None else (' (But cell magic `%%%%%s` exists, ' - 'did you mean that instead?)' % magic_name ) - raise UsageError(etpl % (magic_name, extra)) - else: - # Note: this is the distance in the stack to the user's frame. - # This will need to be updated if the internal calling logic gets - # refactored, or else we'll be expanding the wrong variables. - - # Determine stack_depth depending on where run_line_magic() has been called - stack_depth = _stack_depth - if getattr(fn, magic.MAGIC_NO_VAR_EXPAND_ATTR, False): - # magic has opted out of var_expand - magic_arg_s = line - else: - magic_arg_s = self.var_expand(line, stack_depth) - # Put magic args in a list so we can call with f(*a) syntax - args = [magic_arg_s] - kwargs = {} - # Grab local namespace if we need it: - if getattr(fn, "needs_local_scope", False): - kwargs['local_ns'] = self.get_local_scope(stack_depth) - with self.builtin_trap: - result = fn(*args, **kwargs) - return result - - def get_local_scope(self, stack_depth): - """Get local scope at given stack depth. - - Parameters - ---------- - stack_depth : int - Depth relative to calling frame - """ - return sys._getframe(stack_depth + 1).f_locals - - def run_cell_magic(self, magic_name, line, cell): - """Execute the given cell magic. - - Parameters - ---------- - magic_name : str - Name of the desired magic function, without '%' prefix. - - line : str - The rest of the first input line as a single string. - - cell : str - The body of the cell as a (possibly multiline) string. - """ - fn = self.find_cell_magic(magic_name) - if fn is None: - lm = self.find_line_magic(magic_name) - etpl = "Cell magic `%%{0}` not found{1}." - extra = '' if lm is None else (' (But line magic `%{0}` exists, ' - 'did you mean that instead?)'.format(magic_name)) - raise UsageError(etpl.format(magic_name, extra)) - elif cell == '': - message = '%%{0} is a cell magic, but the cell body is empty.'.format(magic_name) - if self.find_line_magic(magic_name) is not None: - message += ' Did you mean the line magic %{0} (single %)?'.format(magic_name) - raise UsageError(message) - else: - # Note: this is the distance in the stack to the user's frame. - # This will need to be updated if the internal calling logic gets - # refactored, or else we'll be expanding the wrong variables. - stack_depth = 2 - if getattr(fn, magic.MAGIC_NO_VAR_EXPAND_ATTR, False): - # magic has opted out of var_expand - magic_arg_s = line - else: - magic_arg_s = self.var_expand(line, stack_depth) - kwargs = {} - if getattr(fn, "needs_local_scope", False): - kwargs['local_ns'] = self.user_ns - - with self.builtin_trap: - args = (magic_arg_s, cell) - result = fn(*args, **kwargs) - return result - - def find_line_magic(self, magic_name): - """Find and return a line magic by name. - - Returns None if the magic isn't found.""" - return self.magics_manager.magics['line'].get(magic_name) - - def find_cell_magic(self, magic_name): - """Find and return a cell magic by name. - - Returns None if the magic isn't found.""" - return self.magics_manager.magics['cell'].get(magic_name) - - def find_magic(self, magic_name, magic_kind='line'): - """Find and return a magic of the given type by name. 
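A sketch of invoking magics programmatically with run_line_magic() and run_cell_magic(), the documented replacements for the deprecated magic() helper that follows; the magic arguments and the notes.txt path are illustrative, and %%writefile really does write the file:

from IPython import get_ipython

ip = get_ipython()
ip.run_line_magic("timeit", "sum(range(100))")
ip.run_cell_magic("writefile", "notes.txt", "first line\nsecond line\n")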
- - Returns None if the magic isn't found.""" - return self.magics_manager.magics[magic_kind].get(magic_name) - - def magic(self, arg_s): - """DEPRECATED. Use run_line_magic() instead. - - Call a magic function by name. - - Input: a string containing the name of the magic function to call and - any additional arguments to be passed to the magic. - - magic('name -opt foo bar') is equivalent to typing at the ipython - prompt: - - In[1]: %name -opt foo bar - - To call a magic without arguments, simply use magic('name'). - - This provides a proper Python function to call IPython's magics in any - valid Python code you can type at the interpreter, including loops and - compound statements. - """ - # TODO: should we issue a loud deprecation warning here? - magic_name, _, magic_arg_s = arg_s.partition(' ') - magic_name = magic_name.lstrip(prefilter.ESC_MAGIC) - return self.run_line_magic(magic_name, magic_arg_s, _stack_depth=2) - - #------------------------------------------------------------------------- - # Things related to macros - #------------------------------------------------------------------------- - - def define_macro(self, name, themacro): - """Define a new macro - - Parameters - ---------- - name : str - The name of the macro. - themacro : str or Macro - The action to do upon invoking the macro. If a string, a new - Macro object is created by passing the string to it. - """ - - from IPython.core import macro - - if isinstance(themacro, str): - themacro = macro.Macro(themacro) - if not isinstance(themacro, macro.Macro): - raise ValueError('A macro must be a string or a Macro instance.') - self.user_ns[name] = themacro - - #------------------------------------------------------------------------- - # Things related to the running of system commands - #------------------------------------------------------------------------- - - def system_piped(self, cmd): - """Call the given cmd in a subprocess, piping stdout/err - - Parameters - ---------- - cmd : str - Command to execute (can not end in '&', as background processes are - not supported. Should not be a command that expects input - other than simple text. - """ - if cmd.rstrip().endswith('&'): - # this is *far* from a rigorous test - # We do not support backgrounding processes because we either use - # pexpect or pipes to read from. Users can always just call - # os.system() or use ip.system=ip.system_raw - # if they really want a background process. - raise OSError("Background processes not supported.") - - # we explicitly do NOT return the subprocess status code, because - # a non-None value would trigger :func:`sys.displayhook` calls. - # Instead, we store the exit_code in user_ns. - self.user_ns['_exit_code'] = system(self.var_expand(cmd, depth=1)) - - def system_raw(self, cmd): - """Call the given cmd in a subprocess using os.system on Windows or - subprocess.call using the system shell on other platforms. - - Parameters - ---------- - cmd : str - Command to execute. 
- """ - cmd = self.var_expand(cmd, depth=1) - # protect os.system from UNC paths on Windows, which it can't handle: - if sys.platform == 'win32': - from IPython.utils._process_win32 import AvoidUNCPath - with AvoidUNCPath() as path: - if path is not None: - cmd = '"pushd %s &&"%s' % (path, cmd) - try: - ec = os.system(cmd) - except KeyboardInterrupt: - print('\n' + self.get_exception_only(), file=sys.stderr) - ec = -2 - else: - # For posix the result of the subprocess.call() below is an exit - # code, which by convention is zero for success, positive for - # program failure. Exit codes above 128 are reserved for signals, - # and the formula for converting a signal to an exit code is usually - # signal_number+128. To more easily differentiate between exit - # codes and signals, ipython uses negative numbers. For instance - # since control-c is signal 2 but exit code 130, ipython's - # _exit_code variable will read -2. Note that some shells like - # csh and fish don't follow sh/bash conventions for exit codes. - executable = os.environ.get('SHELL', None) - try: - # Use env shell instead of default /bin/sh - ec = subprocess.call(cmd, shell=True, executable=executable) - except KeyboardInterrupt: - # intercept control-C; a long traceback is not useful here - print('\n' + self.get_exception_only(), file=sys.stderr) - ec = 130 - if ec > 128: - ec = -(ec - 128) - - # We explicitly do NOT return the subprocess status code, because - # a non-None value would trigger :func:`sys.displayhook` calls. - # Instead, we store the exit_code in user_ns. Note the semantics - # of _exit_code: for control-c, _exit_code == -signal.SIGNIT, - # but raising SystemExit(_exit_code) will give status 254! - self.user_ns['_exit_code'] = ec - - # use piped system by default, because it is better behaved - system = system_piped - - def getoutput(self, cmd, split=True, depth=0): - """Get output (possibly including stderr) from a subprocess. - - Parameters - ---------- - cmd : str - Command to execute (can not end in '&', as background processes are - not supported. - split : bool, optional - If True, split the output into an IPython SList. Otherwise, an - IPython LSString is returned. These are objects similar to normal - lists and strings, with a few convenience attributes for easier - manipulation of line-based output. You can use '?' on them for - details. - depth : int, optional - How many frames above the caller are the local variables which should - be expanded in the command string? The default (0) assumes that the - expansion variables are in the stack frame calling this function. 
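A sketch of the system()/getoutput() helpers documented here, which back the !cmd and var = !cmd syntax in a session; echo is just a portable example command:

from IPython import get_ipython

ip = get_ipython()
ip.system("echo hello")              # exit status is stored in user_ns['_exit_code']
print(ip.user_ns["_exit_code"])
lines = ip.getoutput("echo hello")   # an SList of output lines (split=True is the default)
print(lines[0])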
- """ - if cmd.rstrip().endswith('&'): - # this is *far* from a rigorous test - raise OSError("Background processes not supported.") - out = getoutput(self.var_expand(cmd, depth=depth+1)) - if split: - out = SList(out.splitlines()) - else: - out = LSString(out) - return out - - #------------------------------------------------------------------------- - # Things related to aliases - #------------------------------------------------------------------------- - - def init_alias(self): - self.alias_manager = AliasManager(shell=self, parent=self) - self.configurables.append(self.alias_manager) - - #------------------------------------------------------------------------- - # Things related to extensions - #------------------------------------------------------------------------- - - def init_extension_manager(self): - self.extension_manager = ExtensionManager(shell=self, parent=self) - self.configurables.append(self.extension_manager) - - #------------------------------------------------------------------------- - # Things related to payloads - #------------------------------------------------------------------------- - - def init_payload(self): - self.payload_manager = PayloadManager(parent=self) - self.configurables.append(self.payload_manager) - - #------------------------------------------------------------------------- - # Things related to the prefilter - #------------------------------------------------------------------------- - - def init_prefilter(self): - self.prefilter_manager = PrefilterManager(shell=self, parent=self) - self.configurables.append(self.prefilter_manager) - # Ultimately this will be refactored in the new interpreter code, but - # for now, we should expose the main prefilter method (there's legacy - # code out there that may rely on this). - self.prefilter = self.prefilter_manager.prefilter_lines - - def auto_rewrite_input(self, cmd): - """Print to the screen the rewritten form of the user's command. - - This shows visual feedback by rewriting input lines that cause - automatic calling to kick in, like:: - - /f x - - into:: - - ------> f(x) - - after the user's input prompt. This helps the user understand that the - input line was transformed automatically by IPython. - """ - if not self.show_rewritten_input: - return - - # This is overridden in TerminalInteractiveShell to use fancy prompts - print("------> " + cmd) - - #------------------------------------------------------------------------- - # Things related to extracting values/expressions from kernel and user_ns - #------------------------------------------------------------------------- - - def _user_obj_error(self): - """return simple exception dict - - for use in user_expressions - """ - - etype, evalue, tb = self._get_exc_info() - stb = self.InteractiveTB.get_exception_only(etype, evalue) - - exc_info = { - u'status' : 'error', - u'traceback' : stb, - u'ename' : etype.__name__, - u'evalue' : py3compat.safe_unicode(evalue), - } - - return exc_info - - def _format_user_obj(self, obj): - """format a user object to display dict - - for use in user_expressions - """ - - data, md = self.display_formatter.format(obj) - value = { - 'status' : 'ok', - 'data' : data, - 'metadata' : md, - } - return value - - def user_expressions(self, expressions): - """Evaluate a dict of expressions in the user's namespace. - - Parameters - ---------- - expressions : dict - A dict with string keys and string values. The expression values - should be valid Python expressions, each of which will be evaluated - in the user namespace. 
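A sketch of user_expressions(), the hook Jupyter frontends use to evaluate extra expressions after a cell runs; the total variable and the expression string are assumptions:

from IPython import get_ipython

ip = get_ipython()
ip.user_ns["total"] = 42
out = ip.user_expressions({"answer": "total * 2"})
# on success, out["answer"] looks like {'status': 'ok', 'data': {...}, 'metadata': {...}}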
- - Returns - ------- - A dict, keyed like the input expressions dict, with the rich mime-typed - display_data of each value. - """ - out = {} - user_ns = self.user_ns - global_ns = self.user_global_ns - - for key, expr in expressions.items(): - try: - value = self._format_user_obj(eval(expr, global_ns, user_ns)) - except: - value = self._user_obj_error() - out[key] = value - return out - - #------------------------------------------------------------------------- - # Things related to the running of code - #------------------------------------------------------------------------- - - def ex(self, cmd): - """Execute a normal python statement in user namespace.""" - with self.builtin_trap: - exec(cmd, self.user_global_ns, self.user_ns) - - def ev(self, expr): - """Evaluate python expression expr in user namespace. - - Returns the result of evaluation - """ - with self.builtin_trap: - return eval(expr, self.user_global_ns, self.user_ns) - - def safe_execfile(self, fname, *where, exit_ignore=False, raise_exceptions=False, shell_futures=False): - """A safe version of the builtin execfile(). - - This version will never throw an exception, but instead print - helpful error messages to the screen. This only works on pure - Python files with the .py extension. - - Parameters - ---------- - fname : string - The name of the file to be executed. - where : tuple - One or two namespaces, passed to execfile() as (globals,locals). - If only one is given, it is passed as both. - exit_ignore : bool (False) - If True, then silence SystemExit for non-zero status (it is always - silenced for zero status, as it is so common). - raise_exceptions : bool (False) - If True raise exceptions everywhere. Meant for testing. - shell_futures : bool (False) - If True, the code will share future statements with the interactive - shell. It will both be affected by previous __future__ imports, and - any __future__ imports in the code will affect the shell. If False, - __future__ imports are not shared in either direction. - - """ - fname = os.path.abspath(os.path.expanduser(fname)) - - # Make sure we can open the file - try: - with open(fname): - pass - except: - warn('Could not open file <%s> for safe execution.' % fname) - return - - # Find things also in current directory. This is needed to mimic the - # behavior of running a script from the system command line, where - # Python inserts the script's directory into sys.path - dname = os.path.dirname(fname) - - with prepended_to_syspath(dname), self.builtin_trap: - try: - glob, loc = (where + (None, ))[:2] - py3compat.execfile( - fname, glob, loc, - self.compile if shell_futures else None) - except SystemExit as status: - # If the call was made with 0 or None exit status (sys.exit(0) - # or sys.exit() ), don't bother showing a traceback, as both of - # these are considered normal by the OS: - # > python -c'import sys;sys.exit(0)'; echo $? - # 0 - # > python -c'import sys;sys.exit()'; echo $? - # 0 - # For other exit status, we show the exception unless - # explicitly silenced, but only in short form. - if status.code: - if raise_exceptions: - raise - if not exit_ignore: - self.showtraceback(exception_only=True) - except: - if raise_exceptions: - raise - # tb offset is 2 because we wrap execfile - self.showtraceback(tb_offset=2) - - def safe_execfile_ipy(self, fname, shell_futures=False, raise_exceptions=False): - """Like safe_execfile, but for .ipy or .ipynb files with IPython syntax. - - Parameters - ---------- - fname : str - The name of the file to execute. 
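A sketch of the safe execution helpers shown here: unlike bare exec(), they print a traceback instead of raising; setup_env.py and analysis.ipynb are hypothetical paths used only for illustration:

from IPython import get_ipython

ip = get_ipython()
ip.safe_execfile("setup_env.py", ip.user_ns)   # plain .py script, run in the user namespace
ip.safe_execfile_ipy("analysis.ipynb")         # .ipy / .ipynb file, IPython syntax allowed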
The filename must have a - .ipy or .ipynb extension. - shell_futures : bool (False) - If True, the code will share future statements with the interactive - shell. It will both be affected by previous __future__ imports, and - any __future__ imports in the code will affect the shell. If False, - __future__ imports are not shared in either direction. - raise_exceptions : bool (False) - If True raise exceptions everywhere. Meant for testing. - """ - fname = os.path.abspath(os.path.expanduser(fname)) - - # Make sure we can open the file - try: - with open(fname): - pass - except: - warn('Could not open file <%s> for safe execution.' % fname) - return - - # Find things also in current directory. This is needed to mimic the - # behavior of running a script from the system command line, where - # Python inserts the script's directory into sys.path - dname = os.path.dirname(fname) - - def get_cells(): - """generator for sequence of code blocks to run""" - if fname.endswith('.ipynb'): - from nbformat import read - nb = read(fname, as_version=4) - if not nb.cells: - return - for cell in nb.cells: - if cell.cell_type == 'code': - yield cell.source - else: - with open(fname) as f: - yield f.read() - - with prepended_to_syspath(dname): - try: - for cell in get_cells(): - result = self.run_cell(cell, silent=True, shell_futures=shell_futures) - if raise_exceptions: - result.raise_error() - elif not result.success: - break - except: - if raise_exceptions: - raise - self.showtraceback() - warn('Unknown failure executing file: <%s>' % fname) - - def safe_run_module(self, mod_name, where): - """A safe version of runpy.run_module(). - - This version will never throw an exception, but instead print - helpful error messages to the screen. - - `SystemExit` exceptions with status code 0 or None are ignored. - - Parameters - ---------- - mod_name : string - The name of the module to be executed. - where : dict - The globals namespace. - """ - try: - try: - where.update( - runpy.run_module(str(mod_name), run_name="__main__", - alter_sys=True) - ) - except SystemExit as status: - if status.code: - raise - except: - self.showtraceback() - warn('Unknown failure executing module: <%s>' % mod_name) - - def run_cell(self, raw_cell, store_history=False, silent=False, shell_futures=True): - """Run a complete IPython cell. - - Parameters - ---------- - raw_cell : str - The code (including IPython code such as %magic functions) to run. - store_history : bool - If True, the raw and translated cell will be stored in IPython's - history. For user code calling back into IPython's machinery, this - should be set to False. - silent : bool - If True, avoid side-effects, such as implicit displayhooks and - and logging. silent=True forces store_history=False. - shell_futures : bool - If True, the code will share future statements with the interactive - shell. It will both be affected by previous __future__ imports, and - any __future__ imports in the code will affect the shell. If False, - __future__ imports are not shared in either direction. 
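A sketch of driving the shell with run_cell(), the entry point this docstring describes; the cell source is illustrative:

from IPython import get_ipython

ip = get_ipython()
result = ip.run_cell("a = 2 + 2\na", store_history=True)
print(result.success)   # True when the cell ran without raising
print(result.result)    # value of the final expression, here 4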
- - Returns - ------- - result : :class:`ExecutionResult` - """ - result = None - try: - result = self._run_cell( - raw_cell, store_history, silent, shell_futures) - finally: - self.events.trigger('post_execute') - if not silent: - self.events.trigger('post_run_cell', result) - return result - - def _run_cell(self, raw_cell:str, store_history:bool, silent:bool, shell_futures:bool): - """Internal method to run a complete IPython cell.""" - - # we need to avoid calling self.transform_cell multiple time on the same thing - # so we need to store some results: - preprocessing_exc_tuple = None - try: - transformed_cell = self.transform_cell(raw_cell) - except Exception: - transformed_cell = raw_cell - preprocessing_exc_tuple = sys.exc_info() - - assert transformed_cell is not None - coro = self.run_cell_async( - raw_cell, - store_history=store_history, - silent=silent, - shell_futures=shell_futures, - transformed_cell=transformed_cell, - preprocessing_exc_tuple=preprocessing_exc_tuple, - ) - - # run_cell_async is async, but may not actually need an eventloop. - # when this is the case, we want to run it using the pseudo_sync_runner - # so that code can invoke eventloops (for example via the %run , and - # `%paste` magic. - if self.trio_runner: - runner = self.trio_runner - elif self.should_run_async( - raw_cell, - transformed_cell=transformed_cell, - preprocessing_exc_tuple=preprocessing_exc_tuple, - ): - runner = self.loop_runner - else: - runner = _pseudo_sync_runner - - try: - return runner(coro) - except BaseException as e: - info = ExecutionInfo(raw_cell, store_history, silent, shell_futures) - result = ExecutionResult(info) - result.error_in_exec = e - self.showtraceback(running_compiled_code=True) - return result - return - - def should_run_async( - self, raw_cell: str, *, transformed_cell=None, preprocessing_exc_tuple=None - ) -> bool: - """Return whether a cell should be run asynchronously via a coroutine runner - - Parameters - ---------- - raw_cell: str - The code to be executed - - Returns - ------- - result: bool - Whether the code needs to be run with a coroutine runner or not - - .. versionadded:: 7.0 - """ - if not self.autoawait: - return False - if preprocessing_exc_tuple is not None: - return False - assert preprocessing_exc_tuple is None - if transformed_cell is None: - warnings.warn( - "`should_run_async` will not call `transform_cell`" - " automatically in the future. Please pass the result to" - " `transformed_cell` argument and any exception that happen" - " during the" - "transform in `preprocessing_exc_tuple` in" - " IPython 7.17 and above.", - DeprecationWarning, - stacklevel=2, - ) - try: - cell = self.transform_cell(raw_cell) - except Exception: - # any exception during transform will be raised - # prior to execution - return False - else: - cell = transformed_cell - return _should_be_async(cell) - - async def run_cell_async( - self, - raw_cell: str, - store_history=False, - silent=False, - shell_futures=True, - *, - transformed_cell: Optional[str] = None, - preprocessing_exc_tuple: Optional[Any] = None - ) -> ExecutionResult: - """Run a complete IPython cell asynchronously. - - Parameters - ---------- - raw_cell : str - The code (including IPython code such as %magic functions) to run. - store_history : bool - If True, the raw and translated cell will be stored in IPython's - history. For user code calling back into IPython's machinery, this - should be set to False. - silent : bool - If True, avoid side-effects, such as implicit displayhooks and - and logging. 
silent=True forces store_history=False. - shell_futures : bool - If True, the code will share future statements with the interactive - shell. It will both be affected by previous __future__ imports, and - any __future__ imports in the code will affect the shell. If False, - __future__ imports are not shared in either direction. - transformed_cell: str - cell that was passed through transformers - preprocessing_exc_tuple: - trace if the transformation failed. - - Returns - ------- - result : :class:`ExecutionResult` - - .. versionadded:: 7.0 - """ - info = ExecutionInfo( - raw_cell, store_history, silent, shell_futures) - result = ExecutionResult(info) - - if (not raw_cell) or raw_cell.isspace(): - self.last_execution_succeeded = True - self.last_execution_result = result - return result - - if silent: - store_history = False - - if store_history: - result.execution_count = self.execution_count - - def error_before_exec(value): - if store_history: - self.execution_count += 1 - result.error_before_exec = value - self.last_execution_succeeded = False - self.last_execution_result = result - return result - - self.events.trigger('pre_execute') - if not silent: - self.events.trigger('pre_run_cell', info) - - if transformed_cell is None: - warnings.warn( - "`run_cell_async` will not call `transform_cell`" - " automatically in the future. Please pass the result to" - " `transformed_cell` argument and any exception that happen" - " during the" - "transform in `preprocessing_exc_tuple` in" - " IPython 7.17 and above.", - DeprecationWarning, - stacklevel=2, - ) - # If any of our input transformation (input_transformer_manager or - # prefilter_manager) raises an exception, we store it in this variable - # so that we can display the error after logging the input and storing - # it in the history. - try: - cell = self.transform_cell(raw_cell) - except Exception: - preprocessing_exc_tuple = sys.exc_info() - cell = raw_cell # cell has to exist so it can be stored/logged - else: - preprocessing_exc_tuple = None - else: - if preprocessing_exc_tuple is None: - cell = transformed_cell - else: - cell = raw_cell - - # Store raw and processed history - if store_history: - self.history_manager.store_inputs(self.execution_count, - cell, raw_cell) - if not silent: - self.logger.log(cell, raw_cell) - - # Display the exception if input processing failed. - if preprocessing_exc_tuple is not None: - self.showtraceback(preprocessing_exc_tuple) - if store_history: - self.execution_count += 1 - return error_before_exec(preprocessing_exc_tuple[1]) - - # Our own compiler remembers the __future__ environment. If we want to - # run code with a separate __future__ environment, use the default - # compiler - compiler = self.compile if shell_futures else self.compiler_class() - - _run_async = False - - with self.builtin_trap: - cell_name = self.compile.cache( - cell, self.execution_count, raw_code=raw_cell - ) - - with self.display_trap: - # Compile to bytecode - try: - if sys.version_info < (3,8) and self.autoawait: - if _should_be_async(cell): - # the code AST below will not be user code: we wrap it - # in an `async def`. This will likely make some AST - # transformer below miss some transform opportunity and - # introduce a small coupling to run_code (in which we - # bake some assumptions of what _ast_asyncify returns. - # they are ways around (like grafting part of the ast - # later: - # - Here, return code_ast.body[0].body[1:-1], as well - # as last expression in return statement which is - # the user code part. 
- # - Let it go through the AST transformers, and graft - # - it back after the AST transform - # But that seem unreasonable, at least while we - # do not need it. - code_ast = _ast_asyncify(cell, 'async-def-wrapper') - _run_async = True - else: - code_ast = compiler.ast_parse(cell, filename=cell_name) - else: - code_ast = compiler.ast_parse(cell, filename=cell_name) - except self.custom_exceptions as e: - etype, value, tb = sys.exc_info() - self.CustomTB(etype, value, tb) - return error_before_exec(e) - except IndentationError as e: - self.showindentationerror() - return error_before_exec(e) - except (OverflowError, SyntaxError, ValueError, TypeError, - MemoryError) as e: - self.showsyntaxerror() - return error_before_exec(e) - - # Apply AST transformations - try: - code_ast = self.transform_ast(code_ast) - except InputRejected as e: - self.showtraceback() - return error_before_exec(e) - - # Give the displayhook a reference to our ExecutionResult so it - # can fill in the output value. - self.displayhook.exec_result = result - - # Execute the user code - interactivity = "none" if silent else self.ast_node_interactivity - if _run_async: - interactivity = 'async' - - has_raised = await self.run_ast_nodes(code_ast.body, cell_name, - interactivity=interactivity, compiler=compiler, result=result) - - self.last_execution_succeeded = not has_raised - self.last_execution_result = result - - # Reset this so later displayed values do not modify the - # ExecutionResult - self.displayhook.exec_result = None - - if store_history: - # Write output to the database. Does nothing unless - # history output logging is enabled. - self.history_manager.store_output(self.execution_count) - # Each cell is a *single* input, regardless of how many lines it has - self.execution_count += 1 - - return result - - def transform_cell(self, raw_cell): - """Transform an input cell before parsing it. - - Static transformations, implemented in IPython.core.inputtransformer2, - deal with things like ``%magic`` and ``!system`` commands. - These run on all input. - Dynamic transformations, for things like unescaped magics and the exit - autocall, depend on the state of the interpreter. - These only apply to single line inputs. - - These string-based transformations are followed by AST transformations; - see :meth:`transform_ast`. - """ - # Static input transformations - cell = self.input_transformer_manager.transform_cell(raw_cell) - - if len(cell.splitlines()) == 1: - # Dynamic transformations - only applied for single line commands - with self.builtin_trap: - # use prefilter_lines to handle trailing newlines - # restore trailing newline for ast.parse - cell = self.prefilter_manager.prefilter_lines(cell) + '\n' - - lines = cell.splitlines(keepends=True) - for transform in self.input_transformers_post: - lines = transform(lines) - cell = ''.join(lines) - - return cell - - def transform_ast(self, node): - """Apply the AST transformations from self.ast_transformers - - Parameters - ---------- - node : ast.Node - The root node to be transformed. Typically called with the ast.Module - produced by parsing user input. - - Returns - ------- - An ast.Node corresponding to the node it was called with. Note that it - may also modify the passed object, so don't rely on references to the - original AST. - """ - for transformer in self.ast_transformers: - try: - node = transformer.visit(node) - except InputRejected: - # User-supplied AST transformers can reject an input by raising - # an InputRejected. 
Short-circuit in this case so that we - # don't unregister the transform. - raise - except Exception: - warn("AST transformer %r threw an error. It will be unregistered." % transformer) - self.ast_transformers.remove(transformer) - - if self.ast_transformers: - ast.fix_missing_locations(node) - return node - - async def run_ast_nodes(self, nodelist:ListType[AST], cell_name:str, interactivity='last_expr', - compiler=compile, result=None): - """Run a sequence of AST nodes. The execution mode depends on the - interactivity parameter. - - Parameters - ---------- - nodelist : list - A sequence of AST nodes to run. - cell_name : str - Will be passed to the compiler as the filename of the cell. Typically - the value returned by ip.compile.cache(cell). - interactivity : str - 'all', 'last', 'last_expr' , 'last_expr_or_assign' or 'none', - specifying which nodes should be run interactively (displaying output - from expressions). 'last_expr' will run the last node interactively - only if it is an expression (i.e. expressions in loops or other blocks - are not displayed) 'last_expr_or_assign' will run the last expression - or the last assignment. Other values for this parameter will raise a - ValueError. - - Experimental value: 'async' Will try to run top level interactive - async/await code in default runner, this will not respect the - interactivity setting and will only run the last node if it is an - expression. - - compiler : callable - A function with the same interface as the built-in compile(), to turn - the AST nodes into code objects. Default is the built-in compile(). - result : ExecutionResult, optional - An object to store exceptions that occur during execution. - - Returns - ------- - True if an exception occurred while running code, False if it finished - running. - """ - if not nodelist: - return - - if interactivity == 'last_expr_or_assign': - if isinstance(nodelist[-1], _assign_nodes): - asg = nodelist[-1] - if isinstance(asg, ast.Assign) and len(asg.targets) == 1: - target = asg.targets[0] - elif isinstance(asg, _single_targets_nodes): - target = asg.target - else: - target = None - if isinstance(target, ast.Name): - nnode = ast.Expr(ast.Name(target.id, ast.Load())) - ast.fix_missing_locations(nnode) - nodelist.append(nnode) - interactivity = 'last_expr' - - _async = False - if interactivity == 'last_expr': - if isinstance(nodelist[-1], ast.Expr): - interactivity = "last" - else: - interactivity = "none" - - if interactivity == 'none': - to_run_exec, to_run_interactive = nodelist, [] - elif interactivity == 'last': - to_run_exec, to_run_interactive = nodelist[:-1], nodelist[-1:] - elif interactivity == 'all': - to_run_exec, to_run_interactive = [], nodelist - elif interactivity == 'async': - to_run_exec, to_run_interactive = [], nodelist - _async = True - else: - raise ValueError("Interactivity was %r" % interactivity) - - try: - if _async and sys.version_info > (3,8): - raise ValueError("This branch should never happen on Python 3.8 and above, " - "please try to upgrade IPython and open a bug report with your case.") - if _async: - # If interactivity is async the semantics of run_code are - # completely different Skip usual machinery. 
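The interactivity handling in `run_ast_nodes` above boils down to splitting the top-level nodes into a batch that is merely executed and a batch whose values are displayed. A simplified standalone rendering of that split, ignoring the 'last_expr_or_assign' and experimental 'async' branches:

    import ast

    def split_nodes(nodelist, interactivity="last_expr"):
        # 'last_expr' only displays the final node when it is an expression,
        # which is why expressions inside loops or other blocks are not shown.
        if interactivity == "last_expr":
            interactivity = "last" if isinstance(nodelist[-1], ast.Expr) else "none"
        if interactivity == "none":
            return nodelist, []
        if interactivity == "last":
            return nodelist[:-1], nodelist[-1:]
        if interactivity == "all":
            return [], nodelist
        raise ValueError("Interactivity was %r" % interactivity)

    to_exec, to_display = split_nodes(ast.parse("a = 1\na + 1").body)
    # to_display holds the trailing `a + 1` expression; `a = 1` is only executed.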
- mod = Module(nodelist, []) - async_wrapper_code = compiler(mod, cell_name, 'exec') - exec(async_wrapper_code, self.user_global_ns, self.user_ns) - async_code = removed_co_newlocals(self.user_ns.pop('async-def-wrapper')).__code__ - if (await self.run_code(async_code, result, async_=True)): - return True - else: - if sys.version_info > (3, 8): - def compare(code): - is_async = (inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE) - return is_async - else: - def compare(code): - return _async - - # refactor that to just change the mod constructor. - to_run = [] - for node in to_run_exec: - to_run.append((node, 'exec')) - - for node in to_run_interactive: - to_run.append((node, 'single')) - - for node,mode in to_run: - if mode == 'exec': - mod = Module([node], []) - elif mode == 'single': - mod = ast.Interactive([node]) - with compiler.extra_flags(getattr(ast, 'PyCF_ALLOW_TOP_LEVEL_AWAIT', 0x0) if self.autoawait else 0x0): - code = compiler(mod, cell_name, mode) - asy = compare(code) - if (await self.run_code(code, result, async_=asy)): - return True - - # Flush softspace - if softspace(sys.stdout, 0): - print() - - except: - # It's possible to have exceptions raised here, typically by - # compilation of odd code (such as a naked 'return' outside a - # function) that did parse but isn't valid. Typically the exception - # is a SyntaxError, but it's safest just to catch anything and show - # the user a traceback. - - # We do only one try/except outside the loop to minimize the impact - # on runtime, and also because if any node in the node list is - # broken, we should stop execution completely. - if result: - result.error_before_exec = sys.exc_info()[1] - self.showtraceback() - return True - - return False - - def _async_exec(self, code_obj: types.CodeType, user_ns: dict): - """ - Evaluate an asynchronous code object using a code runner - - Fake asynchronous execution of code_object in a namespace via a proxy namespace. - - Returns coroutine object, which can be executed via async loop runner - - WARNING: The semantics of `async_exec` are quite different from `exec`, - in particular you can only pass a single namespace. It also return a - handle to the value of the last things returned by code_object. - """ - - return eval(code_obj, user_ns) - - async def run_code(self, code_obj, result=None, *, async_=False): - """Execute a code object. - - When an exception occurs, self.showtraceback() is called to display a - traceback. - - Parameters - ---------- - code_obj : code object - A compiled code object, to be executed - result : ExecutionResult, optional - An object to store exceptions that occur during execution. - async_ : Bool (Experimental) - Attempt to run top-level asynchronous code in a default loop. - - Returns - ------- - False : successful execution. - True : an error occurred. - """ - # special value to say that anything above is IPython and should be - # hidden. - __tracebackhide__ = "__ipython_bottom__" - # Set our own excepthook in case the user code tries to call it - # directly, so that the IPython crash handler doesn't get triggered - old_excepthook, sys.excepthook = sys.excepthook, self.excepthook - - # we save the original sys.excepthook in the instance, in case config - # code (such as magics) needs access to it. 
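On Python 3.8+ the compile loop above leans on `ast.PyCF_ALLOW_TOP_LEVEL_AWAIT`: with that flag a cell containing a top-level `await` compiles, and the resulting code object carries `CO_COROUTINE`, which is what `compare(code)` tests. A standalone sketch, assuming Python 3.8 or newer:

    import ast
    import inspect

    source = "import asyncio\nawait asyncio.sleep(0)\n"
    flags = getattr(ast, "PyCF_ALLOW_TOP_LEVEL_AWAIT", 0)

    # With the flag set, the compiled object is marked as a coroutine and must
    # be awaited by a loop runner rather than passed to a plain exec().
    code = compile(source, "<cell>", "exec", flags=flags)
    print(bool(code.co_flags & inspect.CO_COROUTINE))   # True on 3.8+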
- self.sys_excepthook = old_excepthook - outflag = True # happens in more places, so it's easier as default - try: - try: - self.hooks.pre_run_code_hook() - if async_ and sys.version_info < (3,8): - last_expr = (await self._async_exec(code_obj, self.user_ns)) - code = compile('last_expr', 'fake', "single") - exec(code, {'last_expr': last_expr}) - elif async_ : - await eval(code_obj, self.user_global_ns, self.user_ns) - else: - exec(code_obj, self.user_global_ns, self.user_ns) - finally: - # Reset our crash handler in place - sys.excepthook = old_excepthook - except SystemExit as e: - if result is not None: - result.error_in_exec = e - self.showtraceback(exception_only=True) - warn("To exit: use 'exit', 'quit', or Ctrl-D.", stacklevel=1) - except self.custom_exceptions: - etype, value, tb = sys.exc_info() - if result is not None: - result.error_in_exec = value - self.CustomTB(etype, value, tb) - except: - if result is not None: - result.error_in_exec = sys.exc_info()[1] - self.showtraceback(running_compiled_code=True) - else: - outflag = False - return outflag - - # For backwards compatibility - runcode = run_code - - def check_complete(self, code: str) -> Tuple[str, str]: - """Return whether a block of code is ready to execute, or should be continued - - Parameters - ---------- - source : string - Python input code, which can be multiline. - - Returns - ------- - status : str - One of 'complete', 'incomplete', or 'invalid' if source is not a - prefix of valid code. - indent : str - When status is 'incomplete', this is some whitespace to insert on - the next line of the prompt. - """ - status, nspaces = self.input_transformer_manager.check_complete(code) - return status, ' ' * (nspaces or 0) - - #------------------------------------------------------------------------- - # Things related to GUI support and pylab - #------------------------------------------------------------------------- - - active_eventloop = None - - def enable_gui(self, gui=None): - raise NotImplementedError('Implement enable_gui in a subclass') - - def enable_matplotlib(self, gui=None): - """Enable interactive matplotlib and inline figure support. - - This takes the following steps: - - 1. select the appropriate eventloop and matplotlib backend - 2. set up matplotlib for interactive use with that backend - 3. configure formatters for inline figure display - 4. enable the selected gui eventloop - - Parameters - ---------- - gui : optional, string - If given, dictates the choice of matplotlib GUI backend to use - (should be one of IPython's supported backends, 'qt', 'osx', 'tk', - 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by - matplotlib (as dictated by the matplotlib build-time options plus the - user's matplotlibrc configuration file). Note that not all backends - make sense in all contexts, for example a terminal ipython can't - display figures inline. - """ - from IPython.core import pylabtools as pt - from matplotlib_inline.backend_inline import configure_inline_support - gui, backend = pt.find_gui_and_backend(gui, self.pylab_gui_select) - - if gui != 'inline': - # If we have our first gui selection, store it - if self.pylab_gui_select is None: - self.pylab_gui_select = gui - # Otherwise if they are different - elif gui != self.pylab_gui_select: - print('Warning: Cannot change to a different GUI toolkit: %s.' - ' Using %s instead.' 
% (gui, self.pylab_gui_select)) - gui, backend = pt.find_gui_and_backend(self.pylab_gui_select) - - pt.activate_matplotlib(backend) - configure_inline_support(self, backend) - - # Now we must activate the gui pylab wants to use, and fix %run to take - # plot updates into account - self.enable_gui(gui) - self.magics_manager.registry['ExecutionMagics'].default_runner = \ - pt.mpl_runner(self.safe_execfile) - - return gui, backend - - def enable_pylab(self, gui=None, import_all=True, welcome_message=False): - """Activate pylab support at runtime. - - This turns on support for matplotlib, preloads into the interactive - namespace all of numpy and pylab, and configures IPython to correctly - interact with the GUI event loop. The GUI backend to be used can be - optionally selected with the optional ``gui`` argument. - - This method only adds preloading the namespace to InteractiveShell.enable_matplotlib. - - Parameters - ---------- - gui : optional, string - If given, dictates the choice of matplotlib GUI backend to use - (should be one of IPython's supported backends, 'qt', 'osx', 'tk', - 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by - matplotlib (as dictated by the matplotlib build-time options plus the - user's matplotlibrc configuration file). Note that not all backends - make sense in all contexts, for example a terminal ipython can't - display figures inline. - import_all : optional, bool, default: True - Whether to do `from numpy import *` and `from pylab import *` - in addition to module imports. - welcome_message : deprecated - This argument is ignored, no welcome message will be displayed. - """ - from IPython.core.pylabtools import import_pylab - - gui, backend = self.enable_matplotlib(gui) - - # We want to prevent the loading of pylab to pollute the user's - # namespace as shown by the %who* magics, so we execute the activation - # code in an empty namespace, and we update *both* user_ns and - # user_ns_hidden with this information. - ns = {} - import_pylab(ns, import_all) - # warn about clobbered names - ignored = {"__builtins__"} - both = set(ns).intersection(self.user_ns).difference(ignored) - clobbered = [ name for name in both if self.user_ns[name] is not ns[name] ] - self.user_ns.update(ns) - self.user_ns_hidden.update(ns) - return gui, backend, clobbered - - #------------------------------------------------------------------------- - # Utilities - #------------------------------------------------------------------------- - - def var_expand(self, cmd, depth=0, formatter=DollarFormatter()): - """Expand python variables in a string. - - The depth argument indicates how many frames above the caller should - be walked to look for the local namespace where to expand variables. - - The global namespace for expansion is always the user's interactive - namespace. - """ - ns = self.user_ns.copy() - try: - frame = sys._getframe(depth+1) - except ValueError: - # This is thrown if there aren't that many frames on the stack, - # e.g. if a script called run_line_magic() directly. - pass - else: - ns.update(frame.f_locals) - - try: - # We have to use .vformat() here, because 'self' is a valid and common - # name, and expanding **ns for .format() would make it collide with - # the 'self' argument of the method. - cmd = formatter.vformat(cmd, args=[], kwargs=ns) - except Exception: - # if formatter couldn't format, just let it go untransformed - pass - return cmd - - def mktempfile(self, data=None, prefix='ipython_edit_'): - """Make a new tempfile and return its filename. 
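`check_complete` above is what frontends call to decide between executing a block and asking for a continuation line. A hedged usage sketch, assuming `ip` is the running `InteractiveShell` (for example from `get_ipython()`):

    status, indent = ip.check_complete("for i in range(3):")
    # status == 'incomplete'; indent is the suggested whitespace for the next
    # line (typically four spaces after a block opener).
    status, indent = ip.check_complete("1 + 1")
    # status == 'complete', indent == '': the input can be executed as-is.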
- - This makes a call to tempfile.mkstemp (created in a tempfile.mkdtemp), - but it registers the created filename internally so ipython cleans it up - at exit time. - - Optional inputs: - - - data(None): if data is given, it gets written out to the temp file - immediately, and the file is closed again.""" - - dirname = tempfile.mkdtemp(prefix=prefix) - self.tempdirs.append(dirname) - - handle, filename = tempfile.mkstemp('.py', prefix, dir=dirname) - os.close(handle) # On Windows, there can only be one open handle on a file - self.tempfiles.append(filename) - - if data: - with open(filename, 'w') as tmp_file: - tmp_file.write(data) - return filename - - @undoc - def write(self,data): - """DEPRECATED: Write a string to the default output""" - warn('InteractiveShell.write() is deprecated, use sys.stdout instead', - DeprecationWarning, stacklevel=2) - sys.stdout.write(data) - - @undoc - def write_err(self,data): - """DEPRECATED: Write a string to the default error output""" - warn('InteractiveShell.write_err() is deprecated, use sys.stderr instead', - DeprecationWarning, stacklevel=2) - sys.stderr.write(data) - - def ask_yes_no(self, prompt, default=None, interrupt=None): - if self.quiet: - return True - return ask_yes_no(prompt,default,interrupt) - - def show_usage(self): - """Show a usage message""" - page.page(IPython.core.usage.interactive_usage) - - def extract_input_lines(self, range_str, raw=False): - """Return as a string a set of input history slices. - - Parameters - ---------- - range_str : string - The set of slices is given as a string, like "~5/6-~4/2 4:8 9", - since this function is for use by magic functions which get their - arguments as strings. The number before the / is the session - number: ~n goes n back from the current session. - - raw : bool, optional - By default, the processed input is used. If this is true, the raw - input history is used instead. - - Notes - ----- - - Slices can be described with two notations: - - * ``N:M`` -> standard python form, means including items N...(M-1). - * ``N-M`` -> include items N..M (closed endpoint). - """ - lines = self.history_manager.get_range_by_str(range_str, raw=raw) - return "\n".join(x for _, _, x in lines) - - def find_user_code(self, target, raw=True, py_only=False, skip_encoding_cookie=True, search_ns=False): - """Get a code string from history, file, url, or a string or macro. - - This is mainly used by magic functions. - - Parameters - ---------- - - target : str - - A string specifying code to retrieve. This will be tried respectively - as: ranges of input history (see %history for syntax), url, - corresponding .py file, filename, or an expression evaluating to a - string or Macro in the user namespace. - - raw : bool - If true (default), retrieve raw history. Has no effect on the other - retrieval mechanisms. - - py_only : bool (default False) - Only try to fetch python code, do not try alternative methods to decode file - if unicode fails. - - Returns - ------- - A string of code. - - ValueError is raised if nothing is found, and TypeError if it evaluates - to an object of another type. In each case, .args[0] is a printable - message. 
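The expansion performed by `var_expand` above can be reproduced with `DollarFormatter` directly; a small standalone sketch (the names `pattern` and `cmd` are illustrative):

    from IPython.utils.text import DollarFormatter

    ns = {"pattern": "*.py"}
    fmt = DollarFormatter()
    # vformat (rather than format) is used so that a user variable named 'self'
    # cannot collide with the method's own argument, as noted in var_expand above.
    cmd = fmt.vformat("ls -l $pattern", args=[], kwargs=ns)
    print(cmd)   # ls -l *.py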
- """ - code = self.extract_input_lines(target, raw=raw) # Grab history - if code: - return code - try: - if target.startswith(('http://', 'https://')): - return openpy.read_py_url(target, skip_encoding_cookie=skip_encoding_cookie) - except UnicodeDecodeError: - if not py_only : - # Deferred import - from urllib.request import urlopen - response = urlopen(target) - return response.read().decode('latin1') - raise ValueError(("'%s' seem to be unreadable.") % target) - - potential_target = [target] - try : - potential_target.insert(0,get_py_filename(target)) - except IOError: - pass - - for tgt in potential_target : - if os.path.isfile(tgt): # Read file - try : - return openpy.read_py_file(tgt, skip_encoding_cookie=skip_encoding_cookie) - except UnicodeDecodeError : - if not py_only : - with io_open(tgt,'r', encoding='latin1') as f : - return f.read() - raise ValueError(("'%s' seem to be unreadable.") % target) - elif os.path.isdir(os.path.expanduser(tgt)): - raise ValueError("'%s' is a directory, not a regular file." % target) - - if search_ns: - # Inspect namespace to load object source - object_info = self.object_inspect(target, detail_level=1) - if object_info['found'] and object_info['source']: - return object_info['source'] - - try: # User namespace - codeobj = eval(target, self.user_ns) - except Exception: - raise ValueError(("'%s' was not found in history, as a file, url, " - "nor in the user namespace.") % target) - - if isinstance(codeobj, str): - return codeobj - elif isinstance(codeobj, Macro): - return codeobj.value - - raise TypeError("%s is neither a string nor a macro." % target, - codeobj) - - #------------------------------------------------------------------------- - # Things related to IPython exiting - #------------------------------------------------------------------------- - def atexit_operations(self): - """This will be executed at the time of exit. - - Cleanup operations and saving of persistent data that is done - unconditionally by IPython should be performed here. - - For things that may depend on startup flags or platform specifics (such - as having readline or not), register a separate atexit function in the - code that has the appropriate information, rather than trying to - clutter - """ - # Close the history session (this stores the end time and line count) - # this must be *before* the tempfile cleanup, in case of temporary - # history db - self.history_manager.end_session() - - # Cleanup all tempfiles and folders left around - for tfile in self.tempfiles: - try: - os.unlink(tfile) - except OSError: - pass - - for tdir in self.tempdirs: - try: - os.rmdir(tdir) - except OSError: - pass - - # Clear all user namespaces to release all references cleanly. - self.reset(new_session=False) - - # Run user hooks - self.hooks.shutdown_hook() - - def cleanup(self): - self.restore_sys_module_state() - - - # Overridden in terminal subclass to change prompts - def switch_doctest_mode(self, mode): - pass - - -class InteractiveShellABC(metaclass=abc.ABCMeta): - """An abstract base class for InteractiveShell.""" - -InteractiveShellABC.register(InteractiveShell) +# -*- coding: utf-8 -*- +"""Main IPython class.""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> +# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu> +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. 
The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + + +import abc +import ast +import atexit +import builtins as builtin_mod +import functools +import inspect +import os +import re +import runpy +import sys +import tempfile +import traceback +import types +import subprocess +import warnings +from io import open as io_open + +from pathlib import Path +from pickleshare import PickleShareDB + +from traitlets.config.configurable import SingletonConfigurable +from traitlets.utils.importstring import import_item +from IPython.core import oinspect +from IPython.core import magic +from IPython.core import page +from IPython.core import prefilter +from IPython.core import ultratb +from IPython.core.alias import Alias, AliasManager +from IPython.core.autocall import ExitAutocall +from IPython.core.builtin_trap import BuiltinTrap +from IPython.core.events import EventManager, available_events +from IPython.core.compilerop import CachingCompiler, check_linecache_ipython +from IPython.core.debugger import InterruptiblePdb +from IPython.core.display_trap import DisplayTrap +from IPython.core.displayhook import DisplayHook +from IPython.core.displaypub import DisplayPublisher +from IPython.core.error import InputRejected, UsageError +from IPython.core.extensions import ExtensionManager +from IPython.core.formatters import DisplayFormatter +from IPython.core.history import HistoryManager +from IPython.core.inputtransformer2 import ESC_MAGIC, ESC_MAGIC2 +from IPython.core.logger import Logger +from IPython.core.macro import Macro +from IPython.core.payload import PayloadManager +from IPython.core.prefilter import PrefilterManager +from IPython.core.profiledir import ProfileDir +from IPython.core.usage import default_banner +from IPython.display import display +from IPython.testing.skipdoctest import skip_doctest +from IPython.utils import PyColorize +from IPython.utils import io +from IPython.utils import py3compat +from IPython.utils import openpy +from IPython.utils.decorators import undoc +from IPython.utils.io import ask_yes_no +from IPython.utils.ipstruct import Struct +from IPython.paths import get_ipython_dir +from IPython.utils.path import get_home_dir, get_py_filename, ensure_dir_exists +from IPython.utils.process import system, getoutput +from IPython.utils.strdispatch import StrDispatch +from IPython.utils.syspathcontext import prepended_to_syspath +from IPython.utils.text import format_screen, LSString, SList, DollarFormatter +from IPython.utils.tempdir import TemporaryDirectory +from traitlets import ( + Integer, Bool, CaselessStrEnum, Enum, List, Dict, Unicode, Instance, Type, + observe, default, validate, Any +) +from warnings import warn +from logging import error +import IPython.core.hooks + +from typing import List as ListType, Tuple, Optional +from ast import AST + +# NoOpContext is deprecated, but ipykernel imports it from here. 
+# See https://github.com/ipython/ipykernel/issues/157 +# (2016, let's try to remove than in IPython 8.0) +from IPython.utils.contexts import NoOpContext + +try: + import docrepr.sphinxify as sphx + + def sphinxify(doc): + with TemporaryDirectory() as dirname: + return { + 'text/html': sphx.sphinxify(doc, dirname), + 'text/plain': doc + } +except ImportError: + sphinxify = None + + +class ProvisionalWarning(DeprecationWarning): + """ + Warning class for unstable features + """ + pass + +if sys.version_info > (3,8): + from ast import Module +else : + # mock the new API, ignore second argument + # see https://github.com/ipython/ipython/issues/11590 + from ast import Module as OriginalModule + Module = lambda nodelist, type_ignores: OriginalModule(nodelist) + +if sys.version_info > (3,6): + _assign_nodes = (ast.AugAssign, ast.AnnAssign, ast.Assign) + _single_targets_nodes = (ast.AugAssign, ast.AnnAssign) +else: + _assign_nodes = (ast.AugAssign, ast.Assign ) + _single_targets_nodes = (ast.AugAssign, ) + +#----------------------------------------------------------------------------- +# Await Helpers +#----------------------------------------------------------------------------- + +def removed_co_newlocals(function:types.FunctionType) -> types.FunctionType: + """Return a function that do not create a new local scope. + + Given a function, create a clone of this function where the co_newlocal flag + has been removed, making this function code actually run in the sourounding + scope. + + We need this in order to run asynchronous code in user level namespace. + """ + from types import CodeType, FunctionType + CO_NEWLOCALS = 0x0002 + code = function.__code__ + new_co_flags = code.co_flags & ~CO_NEWLOCALS + if sys.version_info > (3, 8, 0, 'alpha', 3): + new_code = code.replace(co_flags=new_co_flags) + else: + new_code = CodeType( + code.co_argcount, + code.co_kwonlyargcount, + code.co_nlocals, + code.co_stacksize, + new_co_flags, + code.co_code, + code.co_consts, + code.co_names, + code.co_varnames, + code.co_filename, + code.co_name, + code.co_firstlineno, + code.co_lnotab, + code.co_freevars, + code.co_cellvars + ) + return FunctionType(new_code, globals(), function.__name__, function.__defaults__) + + +# we still need to run things using the asyncio eventloop, but there is no +# async integration +from .async_helpers import (_asyncio_runner, _asyncify, _pseudo_sync_runner) +from .async_helpers import _curio_runner, _trio_runner, _should_be_async + + +def _ast_asyncify(cell:str, wrapper_name:str) -> ast.Module: + """ + Parse a cell with top-level await and modify the AST to be able to run it later. + + Parameter + --------- + + cell: str + The code cell to asyncronify + wrapper_name: str + The name of the function to be used to wrap the passed `cell`. It is + advised to **not** use a python identifier in order to not pollute the + global namespace in which the function will be ran. + + Return + ------ + + A module object AST containing **one** function named `wrapper_name`. + + The given code is wrapped in a async-def function, parsed into an AST, and + the resulting function definition AST is modified to return the last + expression. + + The last expression or await node is moved into a return statement at the + end of the function, and removed from its original location. If the last + node is not Expr or Await nothing is done. 
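The `Module` shim above exists because `ast.Module` gained a required `type_ignores` argument in Python 3.8. A tiny illustration of the signature difference it hides:

    import ast
    import sys

    if sys.version_info >= (3, 8):
        empty = ast.Module([], [])   # body, type_ignores
    else:
        empty = ast.Module([])       # body only
    # With the shim, callers can always write Module(nodelist, []) on either version.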
+ + The function `__code__` will need to be later modified (by + ``removed_co_newlocals``) in a subsequent step to not create new `locals()` + meaning that the local and global scope are the same, ie as if the body of + the function was at module level. + + Lastly a call to `locals()` is made just before the last expression of the + function, or just after the last assignment or statement to make sure the + global dict is updated as python function work with a local fast cache which + is updated only on `local()` calls. + """ + + from ast import Expr, Await, Return + if sys.version_info >= (3,8): + return ast.parse(cell) + tree = ast.parse(_asyncify(cell)) + + function_def = tree.body[0] + function_def.name = wrapper_name + try_block = function_def.body[0] + lastexpr = try_block.body[-1] + if isinstance(lastexpr, (Expr, Await)): + try_block.body[-1] = Return(lastexpr.value) + ast.fix_missing_locations(tree) + return tree +#----------------------------------------------------------------------------- +# Globals +#----------------------------------------------------------------------------- + +# compiled regexps for autoindent management +dedent_re = re.compile(r'^\s+raise|^\s+return|^\s+pass') + +#----------------------------------------------------------------------------- +# Utilities +#----------------------------------------------------------------------------- + +@undoc +def softspace(file, newvalue): + """Copied from code.py, to remove the dependency""" + + oldvalue = 0 + try: + oldvalue = file.softspace + except AttributeError: + pass + try: + file.softspace = newvalue + except (AttributeError, TypeError): + # "attribute-less object" or "read-only attributes" + pass + return oldvalue + +@undoc +def no_op(*a, **kw): + pass + + +class SpaceInInput(Exception): pass + + +def get_default_colors(): + "DEPRECATED" + warn('get_default_color is deprecated since IPython 5.0, and returns `Neutral` on all platforms.', + DeprecationWarning, stacklevel=2) + return 'Neutral' + + +class SeparateUnicode(Unicode): + r"""A Unicode subclass to validate separate_in, separate_out, etc. + + This is a Unicode based trait that converts '0'->'' and ``'\\n'->'\n'``. + """ + + def validate(self, obj, value): + if value == '0': value = '' + value = value.replace('\\n','\n') + return super(SeparateUnicode, self).validate(obj, value) + + +@undoc +class DummyMod(object): + """A dummy module used for IPython's interactive module when + a namespace must be assigned to the module's __dict__.""" + __spec__ = None + + +class ExecutionInfo(object): + """The arguments used for a call to :meth:`InteractiveShell.run_cell` + + Stores information about what is going to happen. + """ + raw_cell = None + store_history = False + silent = False + shell_futures = True + + def __init__(self, raw_cell, store_history, silent, shell_futures): + self.raw_cell = raw_cell + self.store_history = store_history + self.silent = silent + self.shell_futures = shell_futures + + def __repr__(self): + name = self.__class__.__qualname__ + raw_cell = ((self.raw_cell[:50] + '..') + if len(self.raw_cell) > 50 else self.raw_cell) + return '<%s object at %x, raw_cell="%s" store_history=%s silent=%s shell_futures=%s>' %\ + (name, id(self), raw_cell, self.store_history, self.silent, self.shell_futures) + + +class ExecutionResult(object): + """The result of a call to :meth:`InteractiveShell.run_cell` + + Stores information about what took place. 
+ """ + execution_count = None + error_before_exec = None + error_in_exec = None + info = None + result = None + + def __init__(self, info): + self.info = info + + @property + def success(self): + return (self.error_before_exec is None) and (self.error_in_exec is None) + + def raise_error(self): + """Reraises error if `success` is `False`, otherwise does nothing""" + if self.error_before_exec is not None: + raise self.error_before_exec + if self.error_in_exec is not None: + raise self.error_in_exec + + def __repr__(self): + name = self.__class__.__qualname__ + return '<%s object at %x, execution_count=%s error_before_exec=%s error_in_exec=%s info=%s result=%s>' %\ + (name, id(self), self.execution_count, self.error_before_exec, self.error_in_exec, repr(self.info), repr(self.result)) + + +class InteractiveShell(SingletonConfigurable): + """An enhanced, interactive shell for Python.""" + + _instance = None + + ast_transformers = List([], help= + """ + A list of ast.NodeTransformer subclass instances, which will be applied + to user input before code is run. + """ + ).tag(config=True) + + autocall = Enum((0,1,2), default_value=0, help= + """ + Make IPython automatically call any callable object even if you didn't + type explicit parentheses. For example, 'str 43' becomes 'str(43)' + automatically. The value can be '0' to disable the feature, '1' for + 'smart' autocall, where it is not applied if there are no more + arguments on the line, and '2' for 'full' autocall, where all callable + objects are automatically called (even if no arguments are present). + """ + ).tag(config=True) + + autoindent = Bool(True, help= + """ + Autoindent IPython code entered interactively. + """ + ).tag(config=True) + + autoawait = Bool(True, help= + """ + Automatically run await statement in the top level repl. + """ + ).tag(config=True) + + loop_runner_map ={ + 'asyncio':(_asyncio_runner, True), + 'curio':(_curio_runner, True), + 'trio':(_trio_runner, True), + 'sync': (_pseudo_sync_runner, False) + } + + loop_runner = Any(default_value="IPython.core.interactiveshell._asyncio_runner", + allow_none=True, + help="""Select the loop runner that will be used to execute top-level asynchronous code""" + ).tag(config=True) + + @default('loop_runner') + def _default_loop_runner(self): + return import_item("IPython.core.interactiveshell._asyncio_runner") + + @validate('loop_runner') + def _import_runner(self, proposal): + if isinstance(proposal.value, str): + if proposal.value in self.loop_runner_map: + runner, autoawait = self.loop_runner_map[proposal.value] + self.autoawait = autoawait + return runner + runner = import_item(proposal.value) + if not callable(runner): + raise ValueError('loop_runner must be callable') + return runner + if not callable(proposal.value): + raise ValueError('loop_runner must be callable') + return proposal.value + + automagic = Bool(True, help= + """ + Enable magic commands to be called without the leading %. + """ + ).tag(config=True) + + banner1 = Unicode(default_banner, + help="""The part of the banner to be printed before the profile""" + ).tag(config=True) + banner2 = Unicode('', + help="""The part of the banner to be printed after the profile""" + ).tag(config=True) + + cache_size = Integer(1000, help= + """ + Set the size of the output cache. The default is 1000, you can + change it permanently in your config file. 
Setting it to 0 completely + disables the caching system, and the minimum value accepted is 3 (if + you provide a value less than 3, it is reset to 0 and a warning is + issued). This limit is defined because otherwise you'll spend more + time re-flushing a too small cache than working + """ + ).tag(config=True) + color_info = Bool(True, help= + """ + Use colors for displaying information about objects. Because this + information is passed through a pager (like 'less'), and some pagers + get confused with color codes, this capability can be turned off. + """ + ).tag(config=True) + colors = CaselessStrEnum(('Neutral', 'NoColor','LightBG','Linux'), + default_value='Neutral', + help="Set the color scheme (NoColor, Neutral, Linux, or LightBG)." + ).tag(config=True) + debug = Bool(False).tag(config=True) + disable_failing_post_execute = Bool(False, + help="Don't call post-execute functions that have failed in the past." + ).tag(config=True) + display_formatter = Instance(DisplayFormatter, allow_none=True) + displayhook_class = Type(DisplayHook) + display_pub_class = Type(DisplayPublisher) + compiler_class = Type(CachingCompiler) + + sphinxify_docstring = Bool(False, help= + """ + Enables rich html representation of docstrings. (This requires the + docrepr module). + """).tag(config=True) + + @observe("sphinxify_docstring") + def _sphinxify_docstring_changed(self, change): + if change['new']: + warn("`sphinxify_docstring` is provisional since IPython 5.0 and might change in future versions." , ProvisionalWarning) + + enable_html_pager = Bool(False, help= + """ + (Provisional API) enables html representation in mime bundles sent + to pagers. + """).tag(config=True) + + @observe("enable_html_pager") + def _enable_html_pager_changed(self, change): + if change['new']: + warn("`enable_html_pager` is provisional since IPython 5.0 and might change in future versions.", ProvisionalWarning) + + data_pub_class = None + + exit_now = Bool(False) + exiter = Instance(ExitAutocall) + @default('exiter') + def _exiter_default(self): + return ExitAutocall(self) + # Monotonically increasing execution counter + execution_count = Integer(1) + filename = Unicode("<ipython console>") + ipython_dir= Unicode('').tag(config=True) # Set to get_ipython_dir() in __init__ + + # Used to transform cells before running them, and check whether code is complete + input_transformer_manager = Instance('IPython.core.inputtransformer2.TransformerManager', + ()) + + @property + def input_transformers_cleanup(self): + return self.input_transformer_manager.cleanup_transforms + + input_transformers_post = List([], + help="A list of string input transformers, to be applied after IPython's " + "own input transformations." + ) + + @property + def input_splitter(self): + """Make this available for backward compatibility (pre-7.0 release) with existing code. + + For example, ipykernel ipykernel currently uses + `shell.input_splitter.check_complete` + """ + from warnings import warn + warn("`input_splitter` is deprecated since IPython 7.0, prefer `input_transformer_manager`.", + DeprecationWarning, stacklevel=2 + ) + return self.input_transformer_manager + + logstart = Bool(False, help= + """ + Start logging to the default log file in overwrite mode. + Use `logappend` to specify a log file to **append** logs to. + """ + ).tag(config=True) + logfile = Unicode('', help= + """ + The name of the logfile to use. + """ + ).tag(config=True) + logappend = Unicode('', help= + """ + Start logging to the given file in append mode. 
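The traits tagged with `config=True` above (for example `cache_size`, `colors` and `autoawait`) are the knobs a profile's `ipython_config.py` sets. A minimal, illustrative sketch with arbitrary values:

    # ipython_config.py
    c = get_config()   # injected by the traitlets config loader when this file runs

    c.InteractiveShell.cache_size = 500
    c.InteractiveShell.colors = "Linux"
    c.InteractiveShell.autoawait = False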
+ Use `logfile` to specify a log file to **overwrite** logs to. + """ + ).tag(config=True) + object_info_string_level = Enum((0,1,2), default_value=0, + ).tag(config=True) + pdb = Bool(False, help= + """ + Automatically call the pdb debugger after every exception. + """ + ).tag(config=True) + display_page = Bool(False, + help="""If True, anything that would be passed to the pager + will be displayed as regular output instead.""" + ).tag(config=True) + + # deprecated prompt traits: + + prompt_in1 = Unicode('In [\\#]: ', + help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly." + ).tag(config=True) + prompt_in2 = Unicode(' .\\D.: ', + help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly." + ).tag(config=True) + prompt_out = Unicode('Out[\\#]: ', + help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly." + ).tag(config=True) + prompts_pad_left = Bool(True, + help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly." + ).tag(config=True) + + @observe('prompt_in1', 'prompt_in2', 'prompt_out', 'prompt_pad_left') + def _prompt_trait_changed(self, change): + name = change['name'] + warn("InteractiveShell.{name} is deprecated since IPython 4.0" + " and ignored since 5.0, set TerminalInteractiveShell.prompts" + " object directly.".format(name=name)) + + # protect against weird cases where self.config may not exist: + + show_rewritten_input = Bool(True, + help="Show rewritten input, e.g. for autocall." + ).tag(config=True) + + quiet = Bool(False).tag(config=True) + + history_length = Integer(10000, + help='Total length of command history' + ).tag(config=True) + + history_load_length = Integer(1000, help= + """ + The number of saved history entries to be loaded + into the history buffer at startup. + """ + ).tag(config=True) + + ast_node_interactivity = Enum(['all', 'last', 'last_expr', 'none', 'last_expr_or_assign'], + default_value='last_expr', + help=""" + 'all', 'last', 'last_expr' or 'none', 'last_expr_or_assign' specifying + which nodes should be run interactively (displaying output from expressions). + """ + ).tag(config=True) + + # TODO: this part of prompt management should be moved to the frontends. + # Use custom TraitTypes that convert '0'->'' and '\\n'->'\n' + separate_in = SeparateUnicode('\n').tag(config=True) + separate_out = SeparateUnicode('').tag(config=True) + separate_out2 = SeparateUnicode('').tag(config=True) + wildcards_case_sensitive = Bool(True).tag(config=True) + xmode = CaselessStrEnum(('Context', 'Plain', 'Verbose', 'Minimal'), + default_value='Context', + help="Switch modes for the IPython exception handlers." 
+ ).tag(config=True) + + # Subcomponents of InteractiveShell + alias_manager = Instance('IPython.core.alias.AliasManager', allow_none=True) + prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True) + builtin_trap = Instance('IPython.core.builtin_trap.BuiltinTrap', allow_none=True) + display_trap = Instance('IPython.core.display_trap.DisplayTrap', allow_none=True) + extension_manager = Instance('IPython.core.extensions.ExtensionManager', allow_none=True) + payload_manager = Instance('IPython.core.payload.PayloadManager', allow_none=True) + history_manager = Instance('IPython.core.history.HistoryAccessorBase', allow_none=True) + magics_manager = Instance('IPython.core.magic.MagicsManager', allow_none=True) + + profile_dir = Instance('IPython.core.application.ProfileDir', allow_none=True) + @property + def profile(self): + if self.profile_dir is not None: + name = os.path.basename(self.profile_dir.location) + return name.replace('profile_','') + + + # Private interface + _post_execute = Dict() + + # Tracks any GUI loop loaded for pylab + pylab_gui_select = None + + last_execution_succeeded = Bool(True, help='Did last executed command succeeded') + + last_execution_result = Instance('IPython.core.interactiveshell.ExecutionResult', help='Result of executing the last command', allow_none=True) + + def __init__(self, ipython_dir=None, profile_dir=None, + user_module=None, user_ns=None, + custom_exceptions=((), None), **kwargs): + + # This is where traits with a config_key argument are updated + # from the values on config. + super(InteractiveShell, self).__init__(**kwargs) + if 'PromptManager' in self.config: + warn('As of IPython 5.0 `PromptManager` config will have no effect' + ' and has been replaced by TerminalInteractiveShell.prompts_class') + self.configurables = [self] + + # These are relatively independent and stateless + self.init_ipython_dir(ipython_dir) + self.init_profile_dir(profile_dir) + self.init_instance_attrs() + self.init_environment() + + # Check if we're in a virtualenv, and set up sys.path. + self.init_virtualenv() + + # Create namespaces (user_ns, user_global_ns, etc.) + self.init_create_namespaces(user_module, user_ns) + # This has to be done after init_create_namespaces because it uses + # something in self.user_ns, but before init_sys_modules, which + # is the first thing to modify sys. + # TODO: When we override sys.stdout and sys.stderr before this class + # is created, we are saving the overridden ones here. Not sure if this + # is what we want to do. + self.save_sys_module_state() + self.init_sys_modules() + + # While we're trying to have each part of the code directly access what + # it needs without keeping redundant references to objects, we have too + # much legacy code that expects ip.db to exist. + self.db = PickleShareDB(os.path.join(self.profile_dir.location, 'db')) + + self.init_history() + self.init_encoding() + self.init_prefilter() + + self.init_syntax_highlighting() + self.init_hooks() + self.init_events() + self.init_pushd_popd_magic() + self.init_user_ns() + self.init_logger() + self.init_builtins() + + # The following was in post_config_initialization + self.init_inspector() + self.raw_input_original = input + self.init_completer() + # TODO: init_io() needs to happen before init_traceback handlers + # because the traceback handlers hardcode the stdout/stderr streams. + # This logic in in debugger.Pdb and should eventually be changed. 
+ self.init_io() + self.init_traceback_handlers(custom_exceptions) + self.init_prompts() + self.init_display_formatter() + self.init_display_pub() + self.init_data_pub() + self.init_displayhook() + self.init_magics() + self.init_alias() + self.init_logstart() + self.init_pdb() + self.init_extension_manager() + self.init_payload() + self.init_deprecation_warnings() + self.hooks.late_startup_hook() + self.events.trigger('shell_initialized', self) + atexit.register(self.atexit_operations) + + # The trio runner is used for running Trio in the foreground thread. It + # is different from `_trio_runner(async_fn)` in `async_helpers.py` + # which calls `trio.run()` for every cell. This runner runs all cells + # inside a single Trio event loop. If used, it is set from + # `ipykernel.kernelapp`. + self.trio_runner = None + + def get_ipython(self): + """Return the currently running IPython instance.""" + return self + + #------------------------------------------------------------------------- + # Trait changed handlers + #------------------------------------------------------------------------- + @observe('ipython_dir') + def _ipython_dir_changed(self, change): + ensure_dir_exists(change['new']) + + def set_autoindent(self,value=None): + """Set the autoindent flag. + + If called with no arguments, it acts as a toggle.""" + if value is None: + self.autoindent = not self.autoindent + else: + self.autoindent = value + + def set_trio_runner(self, tr): + self.trio_runner = tr + + #------------------------------------------------------------------------- + # init_* methods called by __init__ + #------------------------------------------------------------------------- + + def init_ipython_dir(self, ipython_dir): + if ipython_dir is not None: + self.ipython_dir = ipython_dir + return + + self.ipython_dir = get_ipython_dir() + + def init_profile_dir(self, profile_dir): + if profile_dir is not None: + self.profile_dir = profile_dir + return + self.profile_dir = ProfileDir.create_profile_dir_by_name( + self.ipython_dir, "default" + ) + + def init_instance_attrs(self): + self.more = False + + # command compiler + self.compile = self.compiler_class() + + # Make an empty namespace, which extension writers can rely on both + # existing and NEVER being used by ipython itself. This gives them a + # convenient location for storing additional information and state + # their extensions may require, without fear of collisions with other + # ipython names that may develop later. + self.meta = Struct() + + # Temporary files used for various purposes. Deleted at exit. + self.tempfiles = [] + self.tempdirs = [] + + # keep track of where we started running (mainly for crash post-mortem) + # This is not being used anywhere currently. + self.starting_dir = os.getcwd() + + # Indentation management + self.indent_current_nsp = 0 + + # Dict to track post-execution functions that have been registered + self._post_execute = {} + + def init_environment(self): + """Any changes we need to make to the user's environment.""" + pass + + def init_encoding(self): + # Get system encoding at startup time. 
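Since `InteractiveShell` is a `SingletonConfigurable`, embedding code normally asks for the shared instance rather than calling the constructor shown above directly. A hedged sketch (the `answer` variable is illustrative):

    from IPython.core.interactiveshell import InteractiveShell

    shell = InteractiveShell.instance(user_ns={"answer": 42})  # creates or reuses the singleton
    result = shell.run_cell("answer * 2")
    assert result.success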
Certain terminals (like Emacs + # under Win32 have it set to None, and we need to have a known valid + # encoding to use in the raw_input() method + try: + self.stdin_encoding = sys.stdin.encoding or 'ascii' + except AttributeError: + self.stdin_encoding = 'ascii' + + + @observe('colors') + def init_syntax_highlighting(self, changes=None): + # Python source parser/formatter for syntax highlighting + pyformat = PyColorize.Parser(style=self.colors, parent=self).format + self.pycolorize = lambda src: pyformat(src,'str') + + def refresh_style(self): + # No-op here, used in subclass + pass + + def init_pushd_popd_magic(self): + # for pushd/popd management + self.home_dir = get_home_dir() + + self.dir_stack = [] + + def init_logger(self): + self.logger = Logger(self.home_dir, logfname='ipython_log.py', + logmode='rotate') + + def init_logstart(self): + """Initialize logging in case it was requested at the command line. + """ + if self.logappend: + self.magic('logstart %s append' % self.logappend) + elif self.logfile: + self.magic('logstart %s' % self.logfile) + elif self.logstart: + self.magic('logstart') + + def init_deprecation_warnings(self): + """ + register default filter for deprecation warning. + + This will allow deprecation warning of function used interactively to show + warning to users, and still hide deprecation warning from libraries import. + """ + if sys.version_info < (3,7): + warnings.filterwarnings("default", category=DeprecationWarning, module=self.user_ns.get("__name__")) + + + def init_builtins(self): + # A single, static flag that we set to True. Its presence indicates + # that an IPython shell has been created, and we make no attempts at + # removing on exit or representing the existence of more than one + # IPython at a time. + builtin_mod.__dict__['__IPYTHON__'] = True + builtin_mod.__dict__['display'] = display + + self.builtin_trap = BuiltinTrap(shell=self) + + @observe('colors') + def init_inspector(self, changes=None): + # Object inspector + self.inspector = oinspect.Inspector(oinspect.InspectColors, + PyColorize.ANSICodeColors, + self.colors, + self.object_info_string_level) + + def init_io(self): + # This will just use sys.stdout and sys.stderr. If you want to + # override sys.stdout and sys.stderr themselves, you need to do that + # *before* instantiating this class, because io holds onto + # references to the underlying streams. + # io.std* are deprecated, but don't show our own deprecation warnings + # during initialization of the deprecated API. + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + io.stdout = io.IOStream(sys.stdout) + io.stderr = io.IOStream(sys.stderr) + + def init_prompts(self): + # Set system prompts, so that scripts can decide if they are running + # interactively. 
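Because `init_builtins` above publishes a `__IPYTHON__` flag, library code can detect an IPython session without importing IPython:

    import builtins

    if getattr(builtins, "__IPYTHON__", False):
        print("running inside IPython")
    else:
        print("plain Python interpreter")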
+ sys.ps1 = 'In : ' + sys.ps2 = '...: ' + sys.ps3 = 'Out: ' + + def init_display_formatter(self): + self.display_formatter = DisplayFormatter(parent=self) + self.configurables.append(self.display_formatter) + + def init_display_pub(self): + self.display_pub = self.display_pub_class(parent=self, shell=self) + self.configurables.append(self.display_pub) + + def init_data_pub(self): + if not self.data_pub_class: + self.data_pub = None + return + self.data_pub = self.data_pub_class(parent=self) + self.configurables.append(self.data_pub) + + def init_displayhook(self): + # Initialize displayhook, set in/out prompts and printing system + self.displayhook = self.displayhook_class( + parent=self, + shell=self, + cache_size=self.cache_size, + ) + self.configurables.append(self.displayhook) + # This is a context manager that installs/revmoes the displayhook at + # the appropriate time. + self.display_trap = DisplayTrap(hook=self.displayhook) + + def init_virtualenv(self): + """Add the current virtualenv to sys.path so the user can import modules from it. + This isn't perfect: it doesn't use the Python interpreter with which the + virtualenv was built, and it ignores the --no-site-packages option. A + warning will appear suggesting the user installs IPython in the + virtualenv, but for many cases, it probably works well enough. + Adapted from code snippets online. + http://blog.ufsoft.org/2009/1/29/ipython-and-virtualenv + """ + if 'VIRTUAL_ENV' not in os.environ: + # Not in a virtualenv + return + elif os.environ["VIRTUAL_ENV"] == "": + warn("Virtual env path set to '', please check if this is intended.") + return + + p = Path(sys.executable) + p_venv = Path(os.environ["VIRTUAL_ENV"]) + + # fallback venv detection: + # stdlib venv may symlink sys.executable, so we can't use realpath. + # but others can symlink *to* the venv Python, so we can't just use sys.executable. + # So we just check every item in the symlink tree (generally <= 3) + paths = [p] + while p.is_symlink(): + p = Path(os.readlink(p)) + paths.append(p.resolve()) + + # In Cygwin paths like "c:\..." and '\cygdrive\c\...' are possible + if p_venv.parts[1] == "cygdrive": + drive_name = p_venv.parts[2] + p_venv = (drive_name + ":/") / Path(*p_venv.parts[3:]) + + if any(p_venv == p.parents[1] for p in paths): + # Our exe is inside or has access to the virtualenv, don't need to do anything. + return + + if sys.platform == "win32": + virtual_env = str(Path(os.environ["VIRTUAL_ENV"], "Lib", "site-packages")) + else: + virtual_env_path = Path( + os.environ["VIRTUAL_ENV"], "lib", "python{}.{}", "site-packages" + ) + p_ver = sys.version_info[:2] + + # Predict version from py[thon]-x.x in the $VIRTUAL_ENV + re_m = re.search(r"\bpy(?:thon)?([23])\.(\d+)\b", os.environ["VIRTUAL_ENV"]) + if re_m: + predicted_path = Path(str(virtual_env_path).format(*re_m.groups())) + if predicted_path.exists(): + p_ver = re_m.groups() + + virtual_env = str(virtual_env_path).format(*p_ver) + + warn( + "Attempting to work in a virtualenv. If you encounter problems, " + "please install IPython inside the virtualenv." + ) + import site + sys.path.insert(0, virtual_env) + site.addsitedir(virtual_env) + + #------------------------------------------------------------------------- + # Things related to injections into the sys module + #------------------------------------------------------------------------- + + def save_sys_module_state(self): + """Save the state of hooks in the sys module. + + This has to be called after self.user_module is created. 
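`init_prompts` above assigns `sys.ps1` and `sys.ps2`, so the standard "is there an interactive prompt?" check also works under IPython:

    import sys

    def running_interactively() -> bool:
        # sys.ps1 only exists while an interactive prompt (including IPython) is active.
        return hasattr(sys, "ps1")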
+ """ + self._orig_sys_module_state = {'stdin': sys.stdin, + 'stdout': sys.stdout, + 'stderr': sys.stderr, + 'excepthook': sys.excepthook} + self._orig_sys_modules_main_name = self.user_module.__name__ + self._orig_sys_modules_main_mod = sys.modules.get(self.user_module.__name__) + + def restore_sys_module_state(self): + """Restore the state of the sys module.""" + try: + for k, v in self._orig_sys_module_state.items(): + setattr(sys, k, v) + except AttributeError: + pass + # Reset what what done in self.init_sys_modules + if self._orig_sys_modules_main_mod is not None: + sys.modules[self._orig_sys_modules_main_name] = self._orig_sys_modules_main_mod + + #------------------------------------------------------------------------- + # Things related to the banner + #------------------------------------------------------------------------- + + @property + def banner(self): + banner = self.banner1 + if self.profile and self.profile != 'default': + banner += '\nIPython profile: %s\n' % self.profile + if self.banner2: + banner += '\n' + self.banner2 + return banner + + def show_banner(self, banner=None): + if banner is None: + banner = self.banner + sys.stdout.write(banner) + + #------------------------------------------------------------------------- + # Things related to hooks + #------------------------------------------------------------------------- + + def init_hooks(self): + # hooks holds pointers used for user-side customizations + self.hooks = Struct() + + self.strdispatchers = {} + + # Set all default hooks, defined in the IPython.hooks module. + hooks = IPython.core.hooks + for hook_name in hooks.__all__: + # default hooks have priority 100, i.e. low; user hooks should have + # 0-100 priority + self.set_hook(hook_name,getattr(hooks,hook_name), 100, _warn_deprecated=False) + + if self.display_page: + self.set_hook('show_in_pager', page.as_hook(page.display_page), 90) + + def set_hook(self,name,hook, priority=50, str_key=None, re_key=None, + _warn_deprecated=True): + """set_hook(name,hook) -> sets an internal IPython hook. + + IPython exposes some of its internal API as user-modifiable hooks. By + adding your function to one of these hooks, you can modify IPython's + behavior to call at runtime your own routines.""" + + # At some point in the future, this should validate the hook before it + # accepts it. Probably at least check that the hook takes the number + # of args it's supposed to. + + f = types.MethodType(hook,self) + + # check if the hook is for strdispatcher first + if str_key is not None: + sdp = self.strdispatchers.get(name, StrDispatch()) + sdp.add_s(str_key, f, priority ) + self.strdispatchers[name] = sdp + return + if re_key is not None: + sdp = self.strdispatchers.get(name, StrDispatch()) + sdp.add_re(re.compile(re_key), f, priority ) + self.strdispatchers[name] = sdp + return + + dp = getattr(self.hooks, name, None) + if name not in IPython.core.hooks.__all__: + print("Warning! Hook '%s' is not one of %s" % \ + (name, IPython.core.hooks.__all__ )) + + if _warn_deprecated and (name in IPython.core.hooks.deprecated): + alternative = IPython.core.hooks.deprecated[name] + warn("Hook {} is deprecated. 
Use {} instead.".format(name, alternative), stacklevel=2) + + if not dp: + dp = IPython.core.hooks.CommandChainDispatcher() + + try: + dp.add(f,priority) + except AttributeError: + # it was not commandchain, plain old func - replace + dp = f + + setattr(self.hooks,name, dp) + + #------------------------------------------------------------------------- + # Things related to events + #------------------------------------------------------------------------- + + def init_events(self): + self.events = EventManager(self, available_events) + + self.events.register("pre_execute", self._clear_warning_registry) + + def register_post_execute(self, func): + """DEPRECATED: Use ip.events.register('post_run_cell', func) + + Register a function for calling after code execution. + """ + warn("ip.register_post_execute is deprecated, use " + "ip.events.register('post_run_cell', func) instead.", stacklevel=2) + self.events.register('post_run_cell', func) + + def _clear_warning_registry(self): + # clear the warning registry, so that different code blocks with + # overlapping line number ranges don't cause spurious suppression of + # warnings (see gh-6611 for details) + if "__warningregistry__" in self.user_global_ns: + del self.user_global_ns["__warningregistry__"] + + #------------------------------------------------------------------------- + # Things related to the "main" module + #------------------------------------------------------------------------- + + def new_main_mod(self, filename, modname): + """Return a new 'main' module object for user code execution. + + ``filename`` should be the path of the script which will be run in the + module. Requests with the same filename will get the same module, with + its namespace cleared. + + ``modname`` should be the module name - normally either '__main__' or + the basename of the file without the extension. + + When scripts are executed via %run, we must keep a reference to their + __main__ module around so that Python doesn't + clear it, rendering references to module globals useless. + + This method keeps said reference in a private dict, keyed by the + absolute path of the script. This way, for multiple executions of the + same script we only keep one copy of the namespace (the last one), + thus preventing memory leaks from old references while allowing the + objects from the last execution to be accessible. + """ + filename = os.path.abspath(filename) + try: + main_mod = self._main_mod_cache[filename] + except KeyError: + main_mod = self._main_mod_cache[filename] = types.ModuleType( + modname, + doc="Module created for script run in IPython") + else: + main_mod.__dict__.clear() + main_mod.__name__ = modname + + main_mod.__file__ = filename + # It seems pydoc (and perhaps others) needs any module instance to + # implement a __nonzero__ method + main_mod.__nonzero__ = lambda : True + + return main_mod + + def clear_main_mod_cache(self): + """Clear the cache of main modules. + + Mainly for use by utilities like %reset. 
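The deprecation message above points to the events API; a hedged sketch of the recommended replacement, assuming `ip` is the running shell:

    def report(result):
        # As of IPython 7, post_run_cell callbacks receive the ExecutionResult
        # of the finished cell.
        print("cell ok:", result.success)

    ip.events.register("post_run_cell", report)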
+ + Examples + -------- + + In [15]: import IPython + + In [16]: m = _ip.new_main_mod(IPython.__file__, 'IPython') + + In [17]: len(_ip._main_mod_cache) > 0 + Out[17]: True + + In [18]: _ip.clear_main_mod_cache() + + In [19]: len(_ip._main_mod_cache) == 0 + Out[19]: True + """ + self._main_mod_cache.clear() + + #------------------------------------------------------------------------- + # Things related to debugging + #------------------------------------------------------------------------- + + def init_pdb(self): + # Set calling of pdb on exceptions + # self.call_pdb is a property + self.call_pdb = self.pdb + + def _get_call_pdb(self): + return self._call_pdb + + def _set_call_pdb(self,val): + + if val not in (0,1,False,True): + raise ValueError('new call_pdb value must be boolean') + + # store value in instance + self._call_pdb = val + + # notify the actual exception handlers + self.InteractiveTB.call_pdb = val + + call_pdb = property(_get_call_pdb,_set_call_pdb,None, + 'Control auto-activation of pdb at exceptions') + + def debugger(self,force=False): + """Call the pdb debugger. + + Keywords: + + - force(False): by default, this routine checks the instance call_pdb + flag and does not actually invoke the debugger if the flag is false. + The 'force' option forces the debugger to activate even if the flag + is false. + """ + + if not (force or self.call_pdb): + return + + if not hasattr(sys,'last_traceback'): + error('No traceback has been produced, nothing to debug.') + return + + self.InteractiveTB.debugger(force=True) + + #------------------------------------------------------------------------- + # Things related to IPython's various namespaces + #------------------------------------------------------------------------- + default_user_namespaces = True + + def init_create_namespaces(self, user_module=None, user_ns=None): + # Create the namespace where the user will operate. user_ns is + # normally the only one used, and it is passed to the exec calls as + # the locals argument. But we do carry a user_global_ns namespace + # given as the exec 'globals' argument, This is useful in embedding + # situations where the ipython shell opens in a context where the + # distinction between locals and globals is meaningful. For + # non-embedded contexts, it is just the same object as the user_ns dict. + + # FIXME. For some strange reason, __builtins__ is showing up at user + # level as a dict instead of a module. This is a manual fix, but I + # should really track down where the problem is coming from. Alex + # Schmolck reported this problem first. + + # A useful post by Alex Martelli on this topic: + # Re: inconsistent value from __builtins__ + # Von: Alex Martelli <aleaxit@yahoo.com> + # Datum: Freitag 01 Oktober 2004 04:45:34 nachmittags/abends + # Gruppen: comp.lang.python + + # Michael Hohn <hohn@hooknose.lbl.gov> wrote: + # > >>> print type(builtin_check.get_global_binding('__builtins__')) + # > <type 'dict'> + # > >>> print type(__builtins__) + # > <type 'module'> + # > Is this difference in return value intentional? + + # Well, it's documented that '__builtins__' can be either a dictionary + # or a module, and it's been that way for a long time. Whether it's + # intentional (or sensible), I don't know. In any case, the idea is + # that if you need to access the built-in namespace directly, you + # should start with "import __builtin__" (note, no 's') which will + # definitely give you a module. Yeah, it's somewhat confusing:-(. 
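The call_pdb property and debugger() shown above can also be toggled at runtime; a small hedged sketch::

    ip = get_ipython()
    ip.call_pdb = True        # auto-activate pdb on uncaught exceptions; non-boolean values raise ValueError
    # later, once some exception has produced sys.last_traceback:
    ip.debugger(force=True)   # enter pdb even if call_pdb is False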
+ + # These routines return a properly built module and dict as needed by + # the rest of the code, and can also be used by extension writers to + # generate properly initialized namespaces. + if (user_ns is not None) or (user_module is not None): + self.default_user_namespaces = False + self.user_module, self.user_ns = self.prepare_user_module(user_module, user_ns) + + # A record of hidden variables we have added to the user namespace, so + # we can list later only variables defined in actual interactive use. + self.user_ns_hidden = {} + + # Now that FakeModule produces a real module, we've run into a nasty + # problem: after script execution (via %run), the module where the user + # code ran is deleted. Now that this object is a true module (needed + # so doctest and other tools work correctly), the Python module + # teardown mechanism runs over it, and sets to None every variable + # present in that module. Top-level references to objects from the + # script survive, because the user_ns is updated with them. However, + # calling functions defined in the script that use other things from + # the script will fail, because the function's closure had references + # to the original objects, which are now all None. So we must protect + # these modules from deletion by keeping a cache. + # + # To avoid keeping stale modules around (we only need the one from the + # last run), we use a dict keyed with the full path to the script, so + # only the last version of the module is held in the cache. Note, + # however, that we must cache the module *namespace contents* (their + # __dict__). Because if we try to cache the actual modules, old ones + # (uncached) could be destroyed while still holding references (such as + # those held by GUI objects that tend to be long-lived)> + # + # The %reset command will flush this cache. See the cache_main_mod() + # and clear_main_mod_cache() methods for details on use. + + # This is the cache used for 'main' namespaces + self._main_mod_cache = {} + + # A table holding all the namespaces IPython deals with, so that + # introspection facilities can search easily. + self.ns_table = {'user_global':self.user_module.__dict__, + 'user_local':self.user_ns, + 'builtin':builtin_mod.__dict__ + } + + @property + def user_global_ns(self): + return self.user_module.__dict__ + + def prepare_user_module(self, user_module=None, user_ns=None): + """Prepare the module and namespace in which user code will be run. + + When IPython is started normally, both parameters are None: a new module + is created automatically, and its __dict__ used as the namespace. + + If only user_module is provided, its __dict__ is used as the namespace. + If only user_ns is provided, a dummy module is created, and user_ns + becomes the global namespace. If both are provided (as they may be + when embedding), user_ns is the local namespace, and user_module + provides the global namespace. + + Parameters + ---------- + user_module : module, optional + The current user module in which IPython is being run. If None, + a clean module will be created. + user_ns : dict, optional + A namespace in which to run interactive commands. + + Returns + ------- + A tuple of user_module and user_ns, each properly initialised. 
+ """ + if user_module is None and user_ns is not None: + user_ns.setdefault("__name__", "__main__") + user_module = DummyMod() + user_module.__dict__ = user_ns + + if user_module is None: + user_module = types.ModuleType("__main__", + doc="Automatically created module for IPython interactive environment") + + # We must ensure that __builtin__ (without the final 's') is always + # available and pointing to the __builtin__ *module*. For more details: + # http://mail.python.org/pipermail/python-dev/2001-April/014068.html + user_module.__dict__.setdefault('__builtin__', builtin_mod) + user_module.__dict__.setdefault('__builtins__', builtin_mod) + + if user_ns is None: + user_ns = user_module.__dict__ + + return user_module, user_ns + + def init_sys_modules(self): + # We need to insert into sys.modules something that looks like a + # module but which accesses the IPython namespace, for shelve and + # pickle to work interactively. Normally they rely on getting + # everything out of __main__, but for embedding purposes each IPython + # instance has its own private namespace, so we can't go shoving + # everything into __main__. + + # note, however, that we should only do this for non-embedded + # ipythons, which really mimic the __main__.__dict__ with their own + # namespace. Embedded instances, on the other hand, should not do + # this because they need to manage the user local/global namespaces + # only, but they live within a 'normal' __main__ (meaning, they + # shouldn't overtake the execution environment of the script they're + # embedded in). + + # This is overridden in the InteractiveShellEmbed subclass to a no-op. + main_name = self.user_module.__name__ + sys.modules[main_name] = self.user_module + + def init_user_ns(self): + """Initialize all user-visible namespaces to their minimum defaults. + + Certain history lists are also initialized here, as they effectively + act as user namespaces. + + Notes + ----- + All data structures here are only filled in, they are NOT reset by this + method. If they were not empty before, data will simply be added to + them. + """ + # This function works in two parts: first we put a few things in + # user_ns, and we sync that contents into user_ns_hidden so that these + # initial variables aren't shown by %who. After the sync, we add the + # rest of what we *do* want the user to see with %who even on a new + # session (probably nothing, so they really only see their own stuff) + + # The user dict must *always* have a __builtin__ reference to the + # Python standard __builtin__ namespace, which must be imported. + # This is so that certain operations in prompt evaluation can be + # reliably executed with builtins. Note that we can NOT use + # __builtins__ (note the 's'), because that can either be a dict or a + # module, and can even mutate at runtime, depending on the context + # (Python makes no guarantees on it). In contrast, __builtin__ is + # always a module object, though it must be explicitly imported. + + # For more details: + # http://mail.python.org/pipermail/python-dev/2001-April/014068.html + ns = {} + + # make global variables for user access to the histories + ns['_ih'] = self.history_manager.input_hist_parsed + ns['_oh'] = self.history_manager.output_hist + ns['_dh'] = self.history_manager.dir_hist + + # user aliases to input and output histories. These shouldn't show up + # in %who, as they can have very large reprs. 
+ ns['In'] = self.history_manager.input_hist_parsed + ns['Out'] = self.history_manager.output_hist + + # Store myself as the public api!!! + ns['get_ipython'] = self.get_ipython + + ns['exit'] = self.exiter + ns['quit'] = self.exiter + + # Sync what we've added so far to user_ns_hidden so these aren't seen + # by %who + self.user_ns_hidden.update(ns) + + # Anything put into ns now would show up in %who. Think twice before + # putting anything here, as we really want %who to show the user their + # stuff, not our variables. + + # Finally, update the real user's namespace + self.user_ns.update(ns) + + @property + def all_ns_refs(self): + """Get a list of references to all the namespace dictionaries in which + IPython might store a user-created object. + + Note that this does not include the displayhook, which also caches + objects from the output.""" + return [self.user_ns, self.user_global_ns, self.user_ns_hidden] + \ + [m.__dict__ for m in self._main_mod_cache.values()] + + def reset(self, new_session=True, aggressive=False): + """Clear all internal namespaces, and attempt to release references to + user objects. + + If new_session is True, a new history session will be opened. + """ + # Clear histories + self.history_manager.reset(new_session) + # Reset counter used to index all histories + if new_session: + self.execution_count = 1 + + # Reset last execution result + self.last_execution_succeeded = True + self.last_execution_result = None + + # Flush cached output items + if self.displayhook.do_full_cache: + self.displayhook.flush() + + # The main execution namespaces must be cleared very carefully, + # skipping the deletion of the builtin-related keys, because doing so + # would cause errors in many object's __del__ methods. + if self.user_ns is not self.user_global_ns: + self.user_ns.clear() + ns = self.user_global_ns + drop_keys = set(ns.keys()) + drop_keys.discard('__builtin__') + drop_keys.discard('__builtins__') + drop_keys.discard('__name__') + for k in drop_keys: + del ns[k] + + self.user_ns_hidden.clear() + + # Restore the user namespaces to minimal usability + self.init_user_ns() + if aggressive and not hasattr(self, "_sys_modules_keys"): + print("Cannot restore sys.module, no snapshot") + elif aggressive: + print("culling sys module...") + current_keys = set(sys.modules.keys()) + for k in current_keys - self._sys_modules_keys: + if k.startswith("multiprocessing"): + continue + del sys.modules[k] + + # Restore the default and user aliases + self.alias_manager.clear_aliases() + self.alias_manager.init_aliases() + + # Now define aliases that only make sense on the terminal, because they + # need direct access to the console in a way that we can't emulate in + # GUI or web frontend + if os.name == 'posix': + for cmd in ('clear', 'more', 'less', 'man'): + if cmd not in self.magics_manager.magics['line']: + self.alias_manager.soft_define_alias(cmd, cmd) + + # Flush the private list of module references kept for script + # execution protection + self.clear_main_mod_cache() + + def del_var(self, varname, by_name=False): + """Delete a variable from the various namespaces, so that, as + far as possible, we're not keeping any hidden references to it. + + Parameters + ---------- + varname : str + The name of the variable to delete. + by_name : bool + If True, delete variables with the given name in each + namespace. If False (default), find the variable in the user + namespace, and delete references to it. 
+ """ + if varname in ('__builtin__', '__builtins__'): + raise ValueError("Refusing to delete %s" % varname) + + ns_refs = self.all_ns_refs + + if by_name: # Delete by name + for ns in ns_refs: + try: + del ns[varname] + except KeyError: + pass + else: # Delete by object + try: + obj = self.user_ns[varname] + except KeyError: + raise NameError("name '%s' is not defined" % varname) + # Also check in output history + ns_refs.append(self.history_manager.output_hist) + for ns in ns_refs: + to_delete = [n for n, o in ns.items() if o is obj] + for name in to_delete: + del ns[name] + + # Ensure it is removed from the last execution result + if self.last_execution_result.result is obj: + self.last_execution_result = None + + # displayhook keeps extra references, but not in a dictionary + for name in ('_', '__', '___'): + if getattr(self.displayhook, name) is obj: + setattr(self.displayhook, name, None) + + def reset_selective(self, regex=None): + """Clear selective variables from internal namespaces based on a + specified regular expression. + + Parameters + ---------- + regex : string or compiled pattern, optional + A regular expression pattern that will be used in searching + variable names in the users namespaces. + """ + if regex is not None: + try: + m = re.compile(regex) + except TypeError: + raise TypeError('regex must be a string or compiled pattern') + # Search for keys in each namespace that match the given regex + # If a match is found, delete the key/value pair. + for ns in self.all_ns_refs: + for var in ns: + if m.search(var): + del ns[var] + + def push(self, variables, interactive=True): + """Inject a group of variables into the IPython user namespace. + + Parameters + ---------- + variables : dict, str or list/tuple of str + The variables to inject into the user's namespace. If a dict, a + simple update is done. If a str, the string is assumed to have + variable names separated by spaces. A list/tuple of str can also + be used to give the variable names. If just the variable names are + give (list/tuple/str) then the variable values looked up in the + callers frame. + interactive : bool + If True (default), the variables will be listed with the ``who`` + magic. + """ + vdict = None + + # We need a dict of name/value pairs to do namespace updates. + if isinstance(variables, dict): + vdict = variables + elif isinstance(variables, (str, list, tuple)): + if isinstance(variables, str): + vlist = variables.split() + else: + vlist = variables + vdict = {} + cf = sys._getframe(1) + for name in vlist: + try: + vdict[name] = eval(name, cf.f_globals, cf.f_locals) + except: + print('Could not get variable %s from %s' % + (name,cf.f_code.co_name)) + else: + raise ValueError('variables must be a dict/str/list/tuple') + + # Propagate variables to user namespace + self.user_ns.update(vdict) + + # And configure interactive visibility + user_ns_hidden = self.user_ns_hidden + if interactive: + for name in vdict: + user_ns_hidden.pop(name, None) + else: + user_ns_hidden.update(vdict) + + def drop_by_id(self, variables): + """Remove a dict of variables from the user namespace, if they are the + same as the values in the dictionary. + + This is intended for use by extensions: variables that they've added can + be taken back out if they are unloaded, without removing any that the + user has overwritten. + + Parameters + ---------- + variables : dict + A dictionary mapping object names (as strings) to the objects. 
+ """ + for name, obj in variables.items(): + if name in self.user_ns and self.user_ns[name] is obj: + del self.user_ns[name] + self.user_ns_hidden.pop(name, None) + + #------------------------------------------------------------------------- + # Things related to object introspection + #------------------------------------------------------------------------- + + def _ofind(self, oname, namespaces=None): + """Find an object in the available namespaces. + + self._ofind(oname) -> dict with keys: found,obj,ospace,ismagic + + Has special code to detect magic functions. + """ + oname = oname.strip() + if not oname.startswith(ESC_MAGIC) and \ + not oname.startswith(ESC_MAGIC2) and \ + not all(a.isidentifier() for a in oname.split(".")): + return {'found': False} + + if namespaces is None: + # Namespaces to search in: + # Put them in a list. The order is important so that we + # find things in the same order that Python finds them. + namespaces = [ ('Interactive', self.user_ns), + ('Interactive (global)', self.user_global_ns), + ('Python builtin', builtin_mod.__dict__), + ] + + ismagic = False + isalias = False + found = False + ospace = None + parent = None + obj = None + + + # Look for the given name by splitting it in parts. If the head is + # found, then we look for all the remaining parts as members, and only + # declare success if we can find them all. + oname_parts = oname.split('.') + oname_head, oname_rest = oname_parts[0],oname_parts[1:] + for nsname,ns in namespaces: + try: + obj = ns[oname_head] + except KeyError: + continue + else: + for idx, part in enumerate(oname_rest): + try: + parent = obj + # The last part is looked up in a special way to avoid + # descriptor invocation as it may raise or have side + # effects. + if idx == len(oname_rest) - 1: + obj = self._getattr_property(obj, part) + else: + obj = getattr(obj, part) + except: + # Blanket except b/c some badly implemented objects + # allow __getattr__ to raise exceptions other than + # AttributeError, which then crashes IPython. + break + else: + # If we finish the for loop (no break), we got all members + found = True + ospace = nsname + break # namespace loop + + # Try to see if it's magic + if not found: + obj = None + if oname.startswith(ESC_MAGIC2): + oname = oname.lstrip(ESC_MAGIC2) + obj = self.find_cell_magic(oname) + elif oname.startswith(ESC_MAGIC): + oname = oname.lstrip(ESC_MAGIC) + obj = self.find_line_magic(oname) + else: + # search without prefix, so run? will find %run? + obj = self.find_line_magic(oname) + if obj is None: + obj = self.find_cell_magic(oname) + if obj is not None: + found = True + ospace = 'IPython internal' + ismagic = True + isalias = isinstance(obj, Alias) + + # Last try: special-case some literals like '', [], {}, etc: + if not found and oname_head in ["''",'""','[]','{}','()']: + obj = eval(oname_head) + found = True + ospace = 'Interactive' + + return { + 'obj':obj, + 'found':found, + 'parent':parent, + 'ismagic':ismagic, + 'isalias':isalias, + 'namespace':ospace + } + + @staticmethod + def _getattr_property(obj, attrname): + """Property-aware getattr to use in object finding. + + If attrname represents a property, return it unevaluated (in case it has + side effects or raises an error. + + """ + if not isinstance(obj, type): + try: + # `getattr(type(obj), attrname)` is not guaranteed to return + # `obj`, but does so for property: + # + # property.__get__(self, None, cls) -> self + # + # The universal alternative is to traverse the mro manually + # searching for attrname in class dicts. 
+ attr = getattr(type(obj), attrname) + except AttributeError: + pass + else: + # This relies on the fact that data descriptors (with both + # __get__ & __set__ magic methods) take precedence over + # instance-level attributes: + # + # class A(object): + # @property + # def foobar(self): return 123 + # a = A() + # a.__dict__['foobar'] = 345 + # a.foobar # == 123 + # + # So, a property may be returned right away. + if isinstance(attr, property): + return attr + + # Nothing helped, fall back. + return getattr(obj, attrname) + + def _object_find(self, oname, namespaces=None): + """Find an object and return a struct with info about it.""" + return Struct(self._ofind(oname, namespaces)) + + def _inspect(self, meth, oname, namespaces=None, **kw): + """Generic interface to the inspector system. + + This function is meant to be called by pdef, pdoc & friends. + """ + info = self._object_find(oname, namespaces) + docformat = sphinxify if self.sphinxify_docstring else None + if info.found: + pmethod = getattr(self.inspector, meth) + # TODO: only apply format_screen to the plain/text repr of the mime + # bundle. + formatter = format_screen if info.ismagic else docformat + if meth == 'pdoc': + pmethod(info.obj, oname, formatter) + elif meth == 'pinfo': + pmethod( + info.obj, + oname, + formatter, + info, + enable_html_pager=self.enable_html_pager, + **kw + ) + else: + pmethod(info.obj, oname) + else: + print('Object `%s` not found.' % oname) + return 'not found' # so callers can take other action + + def object_inspect(self, oname, detail_level=0): + """Get object info about oname""" + with self.builtin_trap: + info = self._object_find(oname) + if info.found: + return self.inspector.info(info.obj, oname, info=info, + detail_level=detail_level + ) + else: + return oinspect.object_info(name=oname, found=False) + + def object_inspect_text(self, oname, detail_level=0): + """Get object info as formatted text""" + return self.object_inspect_mime(oname, detail_level)['text/plain'] + + def object_inspect_mime(self, oname, detail_level=0): + """Get object info as a mimebundle of formatted representations. + + A mimebundle is a dictionary, keyed by mime-type. + It must always have the key `'text/plain'`. + """ + with self.builtin_trap: + info = self._object_find(oname) + if info.found: + docformat = sphinxify if self.sphinxify_docstring else None + return self.inspector._get_info( + info.obj, + oname, + info=info, + detail_level=detail_level, + formatter=docformat, + ) + else: + raise KeyError(oname) + + #------------------------------------------------------------------------- + # Things related to history management + #------------------------------------------------------------------------- + + def init_history(self): + """Sets up the command history, and starts regular autosaves.""" + self.history_manager = HistoryManager(shell=self, parent=self) + self.configurables.append(self.history_manager) + + #------------------------------------------------------------------------- + # Things related to exception handling and tracebacks (not debugging) + #------------------------------------------------------------------------- + + debugger_cls = InterruptiblePdb + + def init_traceback_handlers(self, custom_exceptions): + # Syntax error handler. + self.SyntaxTB = ultratb.SyntaxTB(color_scheme='NoColor', parent=self) + + # The interactive one is initialized with an offset, meaning we always + # want to remove the topmost item in the traceback, which is our own + # internal code. 
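The _ofind() machinery feeds the public inspection helpers defined above; a hedged usage sketch of those wrappers::

    ip = get_ipython()
    info = ip.object_inspect('len')              # plain dict produced by oinspect; has a 'found' key
    print(info['found'])
    bundle = ip.object_inspect_mime('len')       # mimebundle; always contains 'text/plain'
    print(bundle['text/plain'].splitlines()[0])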
Valid modes: ['Plain','Context','Verbose','Minimal'] + self.InteractiveTB = ultratb.AutoFormattedTB(mode = 'Plain', + color_scheme='NoColor', + tb_offset = 1, + check_cache=check_linecache_ipython, + debugger_cls=self.debugger_cls, parent=self) + + # The instance will store a pointer to the system-wide exception hook, + # so that runtime code (such as magics) can access it. This is because + # during the read-eval loop, it may get temporarily overwritten. + self.sys_excepthook = sys.excepthook + + # and add any custom exception handlers the user may have specified + self.set_custom_exc(*custom_exceptions) + + # Set the exception mode + self.InteractiveTB.set_mode(mode=self.xmode) + + def set_custom_exc(self, exc_tuple, handler): + """set_custom_exc(exc_tuple, handler) + + Set a custom exception handler, which will be called if any of the + exceptions in exc_tuple occur in the mainloop (specifically, in the + run_code() method). + + Parameters + ---------- + + exc_tuple : tuple of exception classes + A *tuple* of exception classes, for which to call the defined + handler. It is very important that you use a tuple, and NOT A + LIST here, because of the way Python's except statement works. If + you only want to trap a single exception, use a singleton tuple:: + + exc_tuple == (MyCustomException,) + + handler : callable + handler must have the following signature:: + + def my_handler(self, etype, value, tb, tb_offset=None): + ... + return structured_traceback + + Your handler must return a structured traceback (a list of strings), + or None. + + This will be made into an instance method (via types.MethodType) + of IPython itself, and it will be called if any of the exceptions + listed in the exc_tuple are caught. If the handler is None, an + internal basic one is used, which just prints basic info. + + To protect IPython from crashes, if your handler ever raises an + exception or returns an invalid result, it will be immediately + disabled. + + Notes + ----- + + WARNING: by putting in your own exception handler into IPython's main + execution loop, you run a very good chance of nasty crashes. This + facility should only be used if you really know what you are doing.""" + if not isinstance(exc_tuple, tuple): + raise TypeError("The custom exceptions must be given as a tuple.") + + def dummy_handler(self, etype, value, tb, tb_offset=None): + print('*** Simple custom exception handler ***') + print('Exception type :', etype) + print('Exception value:', value) + print('Traceback :', tb) + + def validate_stb(stb): + """validate structured traceback return type + + return type of CustomTB *should* be a list of strings, but allow + single strings or None, which are harmless. + + This function will *always* return a list of strings, + and will raise a TypeError if stb is inappropriate. + """ + msg = "CustomTB must return list of strings, not %r" % stb + if stb is None: + return [] + elif isinstance(stb, str): + return [stb] + elif not isinstance(stb, list): + raise TypeError(msg) + # it's a list + for line in stb: + # check every element + if not isinstance(line, str): + raise TypeError(msg) + return stb + + if handler is None: + wrapped = dummy_handler + else: + def wrapped(self,etype,value,tb,tb_offset=None): + """wrap CustomTB handler, to protect IPython from user code + + This makes it harder (but not impossible) for custom exception + handlers to crash IPython. 
+ """ + try: + stb = handler(self,etype,value,tb,tb_offset=tb_offset) + return validate_stb(stb) + except: + # clear custom handler immediately + self.set_custom_exc((), None) + print("Custom TB Handler failed, unregistering", file=sys.stderr) + # show the exception in handler first + stb = self.InteractiveTB.structured_traceback(*sys.exc_info()) + print(self.InteractiveTB.stb2text(stb)) + print("The original exception:") + stb = self.InteractiveTB.structured_traceback( + (etype,value,tb), tb_offset=tb_offset + ) + return stb + + self.CustomTB = types.MethodType(wrapped,self) + self.custom_exceptions = exc_tuple + + def excepthook(self, etype, value, tb): + """One more defense for GUI apps that call sys.excepthook. + + GUI frameworks like wxPython trap exceptions and call + sys.excepthook themselves. I guess this is a feature that + enables them to keep running after exceptions that would + otherwise kill their mainloop. This is a bother for IPython + which expects to catch all of the program exceptions with a try: + except: statement. + + Normally, IPython sets sys.excepthook to a CrashHandler instance, so if + any app directly invokes sys.excepthook, it will look to the user like + IPython crashed. In order to work around this, we can disable the + CrashHandler and replace it with this excepthook instead, which prints a + regular traceback using our InteractiveTB. In this fashion, apps which + call sys.excepthook will generate a regular-looking exception from + IPython, and the CrashHandler will only be triggered by real IPython + crashes. + + This hook should be used sparingly, only in places which are not likely + to be true IPython errors. + """ + self.showtraceback((etype, value, tb), tb_offset=0) + + def _get_exc_info(self, exc_tuple=None): + """get exc_info from a given tuple, sys.exc_info() or sys.last_type etc. + + Ensures sys.last_type,value,traceback hold the exc_info we found, + from whichever source. + + raises ValueError if none of these contain any information + """ + if exc_tuple is None: + etype, value, tb = sys.exc_info() + else: + etype, value, tb = exc_tuple + + if etype is None: + if hasattr(sys, 'last_type'): + etype, value, tb = sys.last_type, sys.last_value, \ + sys.last_traceback + + if etype is None: + raise ValueError("No exception to find") + + # Now store the exception info in sys.last_type etc. + # WARNING: these variables are somewhat deprecated and not + # necessarily safe to use in a threaded environment, but tools + # like pdb depend on their existence, so let's set them. If we + # find problems in the field, we'll need to revisit their use. + sys.last_type = etype + sys.last_value = value + sys.last_traceback = tb + + return etype, value, tb + + def show_usage_error(self, exc): + """Show a short message for UsageErrors + + These are special exceptions that shouldn't show a traceback. + """ + print("UsageError: %s" % exc, file=sys.stderr) + + def get_exception_only(self, exc_tuple=None): + """ + Return as a string (ending with a newline) the exception that + just occurred, without any traceback. + """ + etype, value, tb = self._get_exc_info(exc_tuple) + msg = traceback.format_exception_only(etype, value) + return ''.join(msg) + + def showtraceback(self, exc_tuple=None, filename=None, tb_offset=None, + exception_only=False, running_compiled_code=False): + """Display the exception that just occurred. 
+ + If nothing is known about the exception, this is the method which + should be used throughout the code for presenting user tracebacks, + rather than directly invoking the InteractiveTB object. + + A specific showsyntaxerror() also exists, but this method can take + care of calling it if needed, so unless you are explicitly catching a + SyntaxError exception, don't try to analyze the stack manually and + simply call this method.""" + + try: + try: + etype, value, tb = self._get_exc_info(exc_tuple) + except ValueError: + print('No traceback available to show.', file=sys.stderr) + return + + if issubclass(etype, SyntaxError): + # Though this won't be called by syntax errors in the input + # line, there may be SyntaxError cases with imported code. + self.showsyntaxerror(filename, running_compiled_code) + elif etype is UsageError: + self.show_usage_error(value) + else: + if exception_only: + stb = ['An exception has occurred, use %tb to see ' + 'the full traceback.\n'] + stb.extend(self.InteractiveTB.get_exception_only(etype, + value)) + else: + try: + # Exception classes can customise their traceback - we + # use this in IPython.parallel for exceptions occurring + # in the engines. This should return a list of strings. + stb = value._render_traceback_() + except Exception: + stb = self.InteractiveTB.structured_traceback(etype, + value, tb, tb_offset=tb_offset) + + self._showtraceback(etype, value, stb) + if self.call_pdb: + # drop into debugger + self.debugger(force=True) + return + + # Actually show the traceback + self._showtraceback(etype, value, stb) + + except KeyboardInterrupt: + print('\n' + self.get_exception_only(), file=sys.stderr) + + def _showtraceback(self, etype, evalue, stb: str): + """Actually show a traceback. + + Subclasses may override this method to put the traceback on a different + place, like a side channel. + """ + val = self.InteractiveTB.stb2text(stb) + try: + print(val) + except UnicodeEncodeError: + print(val.encode("utf-8", "backslashreplace").decode()) + + def showsyntaxerror(self, filename=None, running_compiled_code=False): + """Display the syntax error that just occurred. + + This doesn't display a stack trace because there isn't one. + + If a filename is given, it is stuffed in the exception instead + of what was there before (because Python's parser always uses + "<string>" when reading from a string). + + If the syntax error occurred when running a compiled code (i.e. running_compile_code=True), + longer stack trace will be displayed. + """ + etype, value, last_traceback = self._get_exc_info() + + if filename and issubclass(etype, SyntaxError): + try: + value.filename = filename + except: + # Not the format we expect; leave it alone + pass + + # If the error occurred when executing compiled code, we should provide full stacktrace. + elist = traceback.extract_tb(last_traceback) if running_compiled_code else [] + stb = self.SyntaxTB.structured_traceback(etype, value, elist) + self._showtraceback(etype, value, stb) + + # This is overridden in TerminalInteractiveShell to show a message about + # the %paste magic. + def showindentationerror(self): + """Called by _run_cell when there's an IndentationError in code entered + at the prompt. 
+ + This is overridden in TerminalInteractiveShell to show a message about + the %paste magic.""" + self.showsyntaxerror() + + #------------------------------------------------------------------------- + # Things related to readline + #------------------------------------------------------------------------- + + def init_readline(self): + """DEPRECATED + + Moved to terminal subclass, here only to simplify the init logic.""" + # Set a number of methods that depend on readline to be no-op + warnings.warn('`init_readline` is no-op since IPython 5.0 and is Deprecated', + DeprecationWarning, stacklevel=2) + self.set_custom_completer = no_op + + @skip_doctest + def set_next_input(self, s, replace=False): + """ Sets the 'default' input string for the next command line. + + Example:: + + In [1]: _ip.set_next_input("Hello Word") + In [2]: Hello Word_ # cursor is here + """ + self.rl_next_input = s + + def _indent_current_str(self): + """return the current level of indentation as a string""" + return self.input_splitter.get_indent_spaces() * ' ' + + #------------------------------------------------------------------------- + # Things related to text completion + #------------------------------------------------------------------------- + + def init_completer(self): + """Initialize the completion machinery. + + This creates completion machinery that can be used by client code, + either interactively in-process (typically triggered by the readline + library), programmatically (such as in test suites) or out-of-process + (typically over the network by remote frontends). + """ + from IPython.core.completer import IPCompleter + from IPython.core.completerlib import (module_completer, + magic_run_completer, cd_completer, reset_completer) + + self.Completer = IPCompleter(shell=self, + namespace=self.user_ns, + global_namespace=self.user_global_ns, + parent=self, + ) + self.configurables.append(self.Completer) + + # Add custom completers to the basic ones built into IPCompleter + sdisp = self.strdispatchers.get('complete_command', StrDispatch()) + self.strdispatchers['complete_command'] = sdisp + self.Completer.custom_completers = sdisp + + self.set_hook('complete_command', module_completer, str_key = 'import') + self.set_hook('complete_command', module_completer, str_key = 'from') + self.set_hook('complete_command', module_completer, str_key = '%aimport') + self.set_hook('complete_command', magic_run_completer, str_key = '%run') + self.set_hook('complete_command', cd_completer, str_key = '%cd') + self.set_hook('complete_command', reset_completer, str_key = '%reset') + + @skip_doctest + def complete(self, text, line=None, cursor_pos=None): + """Return the completed text and a list of completions. + + Parameters + ---------- + + text : string + A string of text to be completed on. It can be given as empty and + instead a line/position pair are given. In this case, the + completer itself will split the line like readline does. + + line : string, optional + The complete line that text is part of. + + cursor_pos : int, optional + The position of the cursor on the input line. + + Returns + ------- + text : string + The actual text that was completed. + + matches : list + A sorted list with all possible completions. + + The optional arguments allow the completion to take more context into + account, and are part of the low-level completion API. + + This is a wrapper around the completion mechanism, similar to what + readline does at the command line when the TAB key is hit. 
By + exposing it as a method, it can be used by other non-readline + environments (such as GUIs) for text completion. + + Simple usage example: + + In [1]: x = 'hello' + + In [2]: _ip.complete('x.l') + Out[2]: ('x.l', ['x.ljust', 'x.lower', 'x.lstrip']) + """ + + # Inject names into __builtin__ so we can complete on the added names. + with self.builtin_trap: + return self.Completer.complete(text, line, cursor_pos) + + def set_custom_completer(self, completer, pos=0) -> None: + """Adds a new custom completer function. + + The position argument (defaults to 0) is the index in the completers + list where you want the completer to be inserted. + + `completer` should have the following signature:: + + def completion(self: Completer, text: string) -> List[str]: + raise NotImplementedError + + It will be bound to the current Completer instance and pass some text + and return a list with current completions to suggest to the user. + """ + + newcomp = types.MethodType(completer, self.Completer) + self.Completer.custom_matchers.insert(pos,newcomp) + + def set_completer_frame(self, frame=None): + """Set the frame of the completer.""" + if frame: + self.Completer.namespace = frame.f_locals + self.Completer.global_namespace = frame.f_globals + else: + self.Completer.namespace = self.user_ns + self.Completer.global_namespace = self.user_global_ns + + #------------------------------------------------------------------------- + # Things related to magics + #------------------------------------------------------------------------- + + def init_magics(self): + from IPython.core import magics as m + self.magics_manager = magic.MagicsManager(shell=self, + parent=self, + user_magics=m.UserMagics(self)) + self.configurables.append(self.magics_manager) + + # Expose as public API from the magics manager + self.register_magics = self.magics_manager.register + + self.register_magics(m.AutoMagics, m.BasicMagics, m.CodeMagics, + m.ConfigMagics, m.DisplayMagics, m.ExecutionMagics, + m.ExtensionMagics, m.HistoryMagics, m.LoggingMagics, + m.NamespaceMagics, m.OSMagics, m.PackagingMagics, + m.PylabMagics, m.ScriptMagics, + ) + self.register_magics(m.AsyncMagics) + + # Register Magic Aliases + mman = self.magics_manager + # FIXME: magic aliases should be defined by the Magics classes + # or in MagicsManager, not here + mman.register_alias('ed', 'edit') + mman.register_alias('hist', 'history') + mman.register_alias('rep', 'recall') + mman.register_alias('SVG', 'svg', 'cell') + mman.register_alias('HTML', 'html', 'cell') + mman.register_alias('file', 'writefile', 'cell') + + # FIXME: Move the color initialization to the DisplayHook, which + # should be split into a prompt manager and displayhook. We probably + # even need a centralize colors management object. + self.run_line_magic('colors', self.colors) + + # Defined here so that it's included in the documentation + @functools.wraps(magic.MagicsManager.register_function) + def register_magic_function(self, func, magic_kind='line', magic_name=None): + self.magics_manager.register_function( + func, magic_kind=magic_kind, magic_name=magic_name + ) + + def run_line_magic(self, magic_name, line, _stack_depth=1): + """Execute the given line magic. + + Parameters + ---------- + magic_name : str + Name of the desired magic function, without '%' prefix. + + line : str + The rest of the input line as a single string. + + _stack_depth : int + If run_line_magic() is called from magic() then _stack_depth=2. 
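set_custom_completer() above binds a plain function into the completion machinery; a hedged sketch with a made-up word list::

    ip = get_ipython()

    def color_completer(completer, text):
        # bound as a method of ip.Completer; returns candidate completions for `text`
        return [w for w in ('red', 'green', 'blue') if w.startswith(text)]

    ip.set_custom_completer(color_completer)
    print(ip.complete('gr'))   # 'green' should now appear among the matches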
+ This is added to ensure backward compatibility for use of 'get_ipython().magic()' + """ + fn = self.find_line_magic(magic_name) + if fn is None: + cm = self.find_cell_magic(magic_name) + etpl = "Line magic function `%%%s` not found%s." + extra = '' if cm is None else (' (But cell magic `%%%%%s` exists, ' + 'did you mean that instead?)' % magic_name ) + raise UsageError(etpl % (magic_name, extra)) + else: + # Note: this is the distance in the stack to the user's frame. + # This will need to be updated if the internal calling logic gets + # refactored, or else we'll be expanding the wrong variables. + + # Determine stack_depth depending on where run_line_magic() has been called + stack_depth = _stack_depth + if getattr(fn, magic.MAGIC_NO_VAR_EXPAND_ATTR, False): + # magic has opted out of var_expand + magic_arg_s = line + else: + magic_arg_s = self.var_expand(line, stack_depth) + # Put magic args in a list so we can call with f(*a) syntax + args = [magic_arg_s] + kwargs = {} + # Grab local namespace if we need it: + if getattr(fn, "needs_local_scope", False): + kwargs['local_ns'] = self.get_local_scope(stack_depth) + with self.builtin_trap: + result = fn(*args, **kwargs) + return result + + def get_local_scope(self, stack_depth): + """Get local scope at given stack depth. + + Parameters + ---------- + stack_depth : int + Depth relative to calling frame + """ + return sys._getframe(stack_depth + 1).f_locals + + def run_cell_magic(self, magic_name, line, cell): + """Execute the given cell magic. + + Parameters + ---------- + magic_name : str + Name of the desired magic function, without '%' prefix. + + line : str + The rest of the first input line as a single string. + + cell : str + The body of the cell as a (possibly multiline) string. + """ + fn = self.find_cell_magic(magic_name) + if fn is None: + lm = self.find_line_magic(magic_name) + etpl = "Cell magic `%%{0}` not found{1}." + extra = '' if lm is None else (' (But line magic `%{0}` exists, ' + 'did you mean that instead?)'.format(magic_name)) + raise UsageError(etpl.format(magic_name, extra)) + elif cell == '': + message = '%%{0} is a cell magic, but the cell body is empty.'.format(magic_name) + if self.find_line_magic(magic_name) is not None: + message += ' Did you mean the line magic %{0} (single %)?'.format(magic_name) + raise UsageError(message) + else: + # Note: this is the distance in the stack to the user's frame. + # This will need to be updated if the internal calling logic gets + # refactored, or else we'll be expanding the wrong variables. + stack_depth = 2 + if getattr(fn, magic.MAGIC_NO_VAR_EXPAND_ATTR, False): + # magic has opted out of var_expand + magic_arg_s = line + else: + magic_arg_s = self.var_expand(line, stack_depth) + kwargs = {} + if getattr(fn, "needs_local_scope", False): + kwargs['local_ns'] = self.user_ns + + with self.builtin_trap: + args = (magic_arg_s, cell) + result = fn(*args, **kwargs) + return result + + def find_line_magic(self, magic_name): + """Find and return a line magic by name. + + Returns None if the magic isn't found.""" + return self.magics_manager.magics['line'].get(magic_name) + + def find_cell_magic(self, magic_name): + """Find and return a cell magic by name. + + Returns None if the magic isn't found.""" + return self.magics_manager.magics['cell'].get(magic_name) + + def find_magic(self, magic_name, magic_kind='line'): + """Find and return a magic of the given type by name. 
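run_line_magic() and run_cell_magic() above can be called directly instead of using % / %% syntax; a small sketch (the file name is a placeholder)::

    ip = get_ipython()
    ip.run_line_magic('pwd', '')                            # same as typing %pwd
    ip.run_line_magic('timeit', '-n 10 sum(range(100))')
    ip.run_cell_magic('writefile', 'demo.txt', 'hello\n')   # same as a %%writefile cell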
+ + Returns None if the magic isn't found.""" + return self.magics_manager.magics[magic_kind].get(magic_name) + + def magic(self, arg_s): + """DEPRECATED. Use run_line_magic() instead. + + Call a magic function by name. + + Input: a string containing the name of the magic function to call and + any additional arguments to be passed to the magic. + + magic('name -opt foo bar') is equivalent to typing at the ipython + prompt: + + In[1]: %name -opt foo bar + + To call a magic without arguments, simply use magic('name'). + + This provides a proper Python function to call IPython's magics in any + valid Python code you can type at the interpreter, including loops and + compound statements. + """ + # TODO: should we issue a loud deprecation warning here? + magic_name, _, magic_arg_s = arg_s.partition(' ') + magic_name = magic_name.lstrip(prefilter.ESC_MAGIC) + return self.run_line_magic(magic_name, magic_arg_s, _stack_depth=2) + + #------------------------------------------------------------------------- + # Things related to macros + #------------------------------------------------------------------------- + + def define_macro(self, name, themacro): + """Define a new macro + + Parameters + ---------- + name : str + The name of the macro. + themacro : str or Macro + The action to do upon invoking the macro. If a string, a new + Macro object is created by passing the string to it. + """ + + from IPython.core import macro + + if isinstance(themacro, str): + themacro = macro.Macro(themacro) + if not isinstance(themacro, macro.Macro): + raise ValueError('A macro must be a string or a Macro instance.') + self.user_ns[name] = themacro + + #------------------------------------------------------------------------- + # Things related to the running of system commands + #------------------------------------------------------------------------- + + def system_piped(self, cmd): + """Call the given cmd in a subprocess, piping stdout/err + + Parameters + ---------- + cmd : str + Command to execute (can not end in '&', as background processes are + not supported. Should not be a command that expects input + other than simple text. + """ + if cmd.rstrip().endswith('&'): + # this is *far* from a rigorous test + # We do not support backgrounding processes because we either use + # pexpect or pipes to read from. Users can always just call + # os.system() or use ip.system=ip.system_raw + # if they really want a background process. + raise OSError("Background processes not supported.") + + # we explicitly do NOT return the subprocess status code, because + # a non-None value would trigger :func:`sys.displayhook` calls. + # Instead, we store the exit_code in user_ns. + self.user_ns['_exit_code'] = system(self.var_expand(cmd, depth=1)) + + def system_raw(self, cmd): + """Call the given cmd in a subprocess using os.system on Windows or + subprocess.call using the system shell on other platforms. + + Parameters + ---------- + cmd : str + Command to execute. 
+ """ + cmd = self.var_expand(cmd, depth=1) + # protect os.system from UNC paths on Windows, which it can't handle: + if sys.platform == 'win32': + from IPython.utils._process_win32 import AvoidUNCPath + with AvoidUNCPath() as path: + if path is not None: + cmd = '"pushd %s &&"%s' % (path, cmd) + try: + ec = os.system(cmd) + except KeyboardInterrupt: + print('\n' + self.get_exception_only(), file=sys.stderr) + ec = -2 + else: + # For posix the result of the subprocess.call() below is an exit + # code, which by convention is zero for success, positive for + # program failure. Exit codes above 128 are reserved for signals, + # and the formula for converting a signal to an exit code is usually + # signal_number+128. To more easily differentiate between exit + # codes and signals, ipython uses negative numbers. For instance + # since control-c is signal 2 but exit code 130, ipython's + # _exit_code variable will read -2. Note that some shells like + # csh and fish don't follow sh/bash conventions for exit codes. + executable = os.environ.get('SHELL', None) + try: + # Use env shell instead of default /bin/sh + ec = subprocess.call(cmd, shell=True, executable=executable) + except KeyboardInterrupt: + # intercept control-C; a long traceback is not useful here + print('\n' + self.get_exception_only(), file=sys.stderr) + ec = 130 + if ec > 128: + ec = -(ec - 128) + + # We explicitly do NOT return the subprocess status code, because + # a non-None value would trigger :func:`sys.displayhook` calls. + # Instead, we store the exit_code in user_ns. Note the semantics + # of _exit_code: for control-c, _exit_code == -signal.SIGNIT, + # but raising SystemExit(_exit_code) will give status 254! + self.user_ns['_exit_code'] = ec + + # use piped system by default, because it is better behaved + system = system_piped + + def getoutput(self, cmd, split=True, depth=0): + """Get output (possibly including stderr) from a subprocess. + + Parameters + ---------- + cmd : str + Command to execute (can not end in '&', as background processes are + not supported. + split : bool, optional + If True, split the output into an IPython SList. Otherwise, an + IPython LSString is returned. These are objects similar to normal + lists and strings, with a few convenience attributes for easier + manipulation of line-based output. You can use '?' on them for + details. + depth : int, optional + How many frames above the caller are the local variables which should + be expanded in the command string? The default (0) assumes that the + expansion variables are in the stack frame calling this function. 
+ """ + if cmd.rstrip().endswith('&'): + # this is *far* from a rigorous test + raise OSError("Background processes not supported.") + out = getoutput(self.var_expand(cmd, depth=depth+1)) + if split: + out = SList(out.splitlines()) + else: + out = LSString(out) + return out + + #------------------------------------------------------------------------- + # Things related to aliases + #------------------------------------------------------------------------- + + def init_alias(self): + self.alias_manager = AliasManager(shell=self, parent=self) + self.configurables.append(self.alias_manager) + + #------------------------------------------------------------------------- + # Things related to extensions + #------------------------------------------------------------------------- + + def init_extension_manager(self): + self.extension_manager = ExtensionManager(shell=self, parent=self) + self.configurables.append(self.extension_manager) + + #------------------------------------------------------------------------- + # Things related to payloads + #------------------------------------------------------------------------- + + def init_payload(self): + self.payload_manager = PayloadManager(parent=self) + self.configurables.append(self.payload_manager) + + #------------------------------------------------------------------------- + # Things related to the prefilter + #------------------------------------------------------------------------- + + def init_prefilter(self): + self.prefilter_manager = PrefilterManager(shell=self, parent=self) + self.configurables.append(self.prefilter_manager) + # Ultimately this will be refactored in the new interpreter code, but + # for now, we should expose the main prefilter method (there's legacy + # code out there that may rely on this). + self.prefilter = self.prefilter_manager.prefilter_lines + + def auto_rewrite_input(self, cmd): + """Print to the screen the rewritten form of the user's command. + + This shows visual feedback by rewriting input lines that cause + automatic calling to kick in, like:: + + /f x + + into:: + + ------> f(x) + + after the user's input prompt. This helps the user understand that the + input line was transformed automatically by IPython. + """ + if not self.show_rewritten_input: + return + + # This is overridden in TerminalInteractiveShell to use fancy prompts + print("------> " + cmd) + + #------------------------------------------------------------------------- + # Things related to extracting values/expressions from kernel and user_ns + #------------------------------------------------------------------------- + + def _user_obj_error(self): + """return simple exception dict + + for use in user_expressions + """ + + etype, evalue, tb = self._get_exc_info() + stb = self.InteractiveTB.get_exception_only(etype, evalue) + + exc_info = { + u'status' : 'error', + u'traceback' : stb, + u'ename' : etype.__name__, + u'evalue' : py3compat.safe_unicode(evalue), + } + + return exc_info + + def _format_user_obj(self, obj): + """format a user object to display dict + + for use in user_expressions + """ + + data, md = self.display_formatter.format(obj) + value = { + 'status' : 'ok', + 'data' : data, + 'metadata' : md, + } + return value + + def user_expressions(self, expressions): + """Evaluate a dict of expressions in the user's namespace. + + Parameters + ---------- + expressions : dict + A dict with string keys and string values. The expression values + should be valid Python expressions, each of which will be evaluated + in the user namespace. 
+ + Returns + ------- + A dict, keyed like the input expressions dict, with the rich mime-typed + display_data of each value. + """ + out = {} + user_ns = self.user_ns + global_ns = self.user_global_ns + + for key, expr in expressions.items(): + try: + value = self._format_user_obj(eval(expr, global_ns, user_ns)) + except: + value = self._user_obj_error() + out[key] = value + return out + + #------------------------------------------------------------------------- + # Things related to the running of code + #------------------------------------------------------------------------- + + def ex(self, cmd): + """Execute a normal python statement in user namespace.""" + with self.builtin_trap: + exec(cmd, self.user_global_ns, self.user_ns) + + def ev(self, expr): + """Evaluate python expression expr in user namespace. + + Returns the result of evaluation + """ + with self.builtin_trap: + return eval(expr, self.user_global_ns, self.user_ns) + + def safe_execfile(self, fname, *where, exit_ignore=False, raise_exceptions=False, shell_futures=False): + """A safe version of the builtin execfile(). + + This version will never throw an exception, but instead print + helpful error messages to the screen. This only works on pure + Python files with the .py extension. + + Parameters + ---------- + fname : string + The name of the file to be executed. + where : tuple + One or two namespaces, passed to execfile() as (globals,locals). + If only one is given, it is passed as both. + exit_ignore : bool (False) + If True, then silence SystemExit for non-zero status (it is always + silenced for zero status, as it is so common). + raise_exceptions : bool (False) + If True raise exceptions everywhere. Meant for testing. + shell_futures : bool (False) + If True, the code will share future statements with the interactive + shell. It will both be affected by previous __future__ imports, and + any __future__ imports in the code will affect the shell. If False, + __future__ imports are not shared in either direction. + + """ + fname = os.path.abspath(os.path.expanduser(fname)) + + # Make sure we can open the file + try: + with open(fname): + pass + except: + warn('Could not open file <%s> for safe execution.' % fname) + return + + # Find things also in current directory. This is needed to mimic the + # behavior of running a script from the system command line, where + # Python inserts the script's directory into sys.path + dname = os.path.dirname(fname) + + with prepended_to_syspath(dname), self.builtin_trap: + try: + glob, loc = (where + (None, ))[:2] + py3compat.execfile( + fname, glob, loc, + self.compile if shell_futures else None) + except SystemExit as status: + # If the call was made with 0 or None exit status (sys.exit(0) + # or sys.exit() ), don't bother showing a traceback, as both of + # these are considered normal by the OS: + # > python -c'import sys;sys.exit(0)'; echo $? + # 0 + # > python -c'import sys;sys.exit()'; echo $? + # 0 + # For other exit status, we show the exception unless + # explicitly silenced, but only in short form. + if status.code: + if raise_exceptions: + raise + if not exit_ignore: + self.showtraceback(exception_only=True) + except: + if raise_exceptions: + raise + # tb offset is 2 because we wrap execfile + self.showtraceback(tb_offset=2) + + def safe_execfile_ipy(self, fname, shell_futures=False, raise_exceptions=False): + """Like safe_execfile, but for .ipy or .ipynb files with IPython syntax. + + Parameters + ---------- + fname : str + The name of the file to execute. 
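ex(), ev() and user_expressions() above all evaluate in the user namespace; a minimal sketch::

    ip = get_ipython()
    ip.ex('counter = 10')                              # statement, no return value
    print(ip.ev('counter * 2'))                        # 20
    out = ip.user_expressions({'ok': 'counter + 1', 'bad': '1/0'})
    print(out['ok']['status'], out['bad']['ename'])    # ok ZeroDivisionError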
The filename must have a + .ipy or .ipynb extension. + shell_futures : bool (False) + If True, the code will share future statements with the interactive + shell. It will both be affected by previous __future__ imports, and + any __future__ imports in the code will affect the shell. If False, + __future__ imports are not shared in either direction. + raise_exceptions : bool (False) + If True raise exceptions everywhere. Meant for testing. + """ + fname = os.path.abspath(os.path.expanduser(fname)) + + # Make sure we can open the file + try: + with open(fname): + pass + except: + warn('Could not open file <%s> for safe execution.' % fname) + return + + # Find things also in current directory. This is needed to mimic the + # behavior of running a script from the system command line, where + # Python inserts the script's directory into sys.path + dname = os.path.dirname(fname) + + def get_cells(): + """generator for sequence of code blocks to run""" + if fname.endswith('.ipynb'): + from nbformat import read + nb = read(fname, as_version=4) + if not nb.cells: + return + for cell in nb.cells: + if cell.cell_type == 'code': + yield cell.source + else: + with open(fname) as f: + yield f.read() + + with prepended_to_syspath(dname): + try: + for cell in get_cells(): + result = self.run_cell(cell, silent=True, shell_futures=shell_futures) + if raise_exceptions: + result.raise_error() + elif not result.success: + break + except: + if raise_exceptions: + raise + self.showtraceback() + warn('Unknown failure executing file: <%s>' % fname) + + def safe_run_module(self, mod_name, where): + """A safe version of runpy.run_module(). + + This version will never throw an exception, but instead print + helpful error messages to the screen. + + `SystemExit` exceptions with status code 0 or None are ignored. + + Parameters + ---------- + mod_name : string + The name of the module to be executed. + where : dict + The globals namespace. + """ + try: + try: + where.update( + runpy.run_module(str(mod_name), run_name="__main__", + alter_sys=True) + ) + except SystemExit as status: + if status.code: + raise + except: + self.showtraceback() + warn('Unknown failure executing module: <%s>' % mod_name) + + def run_cell(self, raw_cell, store_history=False, silent=False, shell_futures=True): + """Run a complete IPython cell. + + Parameters + ---------- + raw_cell : str + The code (including IPython code such as %magic functions) to run. + store_history : bool + If True, the raw and translated cell will be stored in IPython's + history. For user code calling back into IPython's machinery, this + should be set to False. + silent : bool + If True, avoid side-effects, such as implicit displayhooks and + and logging. silent=True forces store_history=False. + shell_futures : bool + If True, the code will share future statements with the interactive + shell. It will both be affected by previous __future__ imports, and + any __future__ imports in the code will affect the shell. If False, + __future__ imports are not shared in either direction. 
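Rough usage sketch (assumptions: a shell instance is available as in the sketch above; the attributes used are those of ExecutionResult as referenced in this file):

    from IPython.core.interactiveshell import InteractiveShell

    shell = InteractiveShell.instance()

    ok = shell.run_cell("y = 2 ** 10", store_history=True)
    print(ok.success, ok.error_in_exec)          # True None
    print(shell.user_ns["y"])                    # 1024

    bad = shell.run_cell("1 / 0", silent=True)   # silent: no displayhook/events, never stored
    print(bad.success)                           # False
    print(type(bad.error_in_exec).__name__)      # ZeroDivisionError
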
+ + Returns + ------- + result : :class:`ExecutionResult` + """ + result = None + try: + result = self._run_cell( + raw_cell, store_history, silent, shell_futures) + finally: + self.events.trigger('post_execute') + if not silent: + self.events.trigger('post_run_cell', result) + return result + + def _run_cell(self, raw_cell:str, store_history:bool, silent:bool, shell_futures:bool): + """Internal method to run a complete IPython cell.""" + + # we need to avoid calling self.transform_cell multiple time on the same thing + # so we need to store some results: + preprocessing_exc_tuple = None + try: + transformed_cell = self.transform_cell(raw_cell) + except Exception: + transformed_cell = raw_cell + preprocessing_exc_tuple = sys.exc_info() + + assert transformed_cell is not None + coro = self.run_cell_async( + raw_cell, + store_history=store_history, + silent=silent, + shell_futures=shell_futures, + transformed_cell=transformed_cell, + preprocessing_exc_tuple=preprocessing_exc_tuple, + ) + + # run_cell_async is async, but may not actually need an eventloop. + # when this is the case, we want to run it using the pseudo_sync_runner + # so that code can invoke eventloops (for example via the %run , and + # `%paste` magic. + if self.trio_runner: + runner = self.trio_runner + elif self.should_run_async( + raw_cell, + transformed_cell=transformed_cell, + preprocessing_exc_tuple=preprocessing_exc_tuple, + ): + runner = self.loop_runner + else: + runner = _pseudo_sync_runner + + try: + return runner(coro) + except BaseException as e: + info = ExecutionInfo(raw_cell, store_history, silent, shell_futures) + result = ExecutionResult(info) + result.error_in_exec = e + self.showtraceback(running_compiled_code=True) + return result + return + + def should_run_async( + self, raw_cell: str, *, transformed_cell=None, preprocessing_exc_tuple=None + ) -> bool: + """Return whether a cell should be run asynchronously via a coroutine runner + + Parameters + ---------- + raw_cell: str + The code to be executed + + Returns + ------- + result: bool + Whether the code needs to be run with a coroutine runner or not + + .. versionadded:: 7.0 + """ + if not self.autoawait: + return False + if preprocessing_exc_tuple is not None: + return False + assert preprocessing_exc_tuple is None + if transformed_cell is None: + warnings.warn( + "`should_run_async` will not call `transform_cell`" + " automatically in the future. Please pass the result to" + " `transformed_cell` argument and any exception that happen" + " during the" + "transform in `preprocessing_exc_tuple` in" + " IPython 7.17 and above.", + DeprecationWarning, + stacklevel=2, + ) + try: + cell = self.transform_cell(raw_cell) + except Exception: + # any exception during transform will be raised + # prior to execution + return False + else: + cell = transformed_cell + return _should_be_async(cell) + + async def run_cell_async( + self, + raw_cell: str, + store_history=False, + silent=False, + shell_futures=True, + *, + transformed_cell: Optional[str] = None, + preprocessing_exc_tuple: Optional[Any] = None + ) -> ExecutionResult: + """Run a complete IPython cell asynchronously. + + Parameters + ---------- + raw_cell : str + The code (including IPython code such as %magic functions) to run. + store_history : bool + If True, the raw and translated cell will be stored in IPython's + history. For user code calling back into IPython's machinery, this + should be set to False. + silent : bool + If True, avoid side-effects, such as implicit displayhooks and + and logging. 
silent=True forces store_history=False. + shell_futures : bool + If True, the code will share future statements with the interactive + shell. It will both be affected by previous __future__ imports, and + any __future__ imports in the code will affect the shell. If False, + __future__ imports are not shared in either direction. + transformed_cell: str + cell that was passed through transformers + preprocessing_exc_tuple: + trace if the transformation failed. + + Returns + ------- + result : :class:`ExecutionResult` + + .. versionadded:: 7.0 + """ + info = ExecutionInfo( + raw_cell, store_history, silent, shell_futures) + result = ExecutionResult(info) + + if (not raw_cell) or raw_cell.isspace(): + self.last_execution_succeeded = True + self.last_execution_result = result + return result + + if silent: + store_history = False + + if store_history: + result.execution_count = self.execution_count + + def error_before_exec(value): + if store_history: + self.execution_count += 1 + result.error_before_exec = value + self.last_execution_succeeded = False + self.last_execution_result = result + return result + + self.events.trigger('pre_execute') + if not silent: + self.events.trigger('pre_run_cell', info) + + if transformed_cell is None: + warnings.warn( + "`run_cell_async` will not call `transform_cell`" + " automatically in the future. Please pass the result to" + " `transformed_cell` argument and any exception that happen" + " during the" + "transform in `preprocessing_exc_tuple` in" + " IPython 7.17 and above.", + DeprecationWarning, + stacklevel=2, + ) + # If any of our input transformation (input_transformer_manager or + # prefilter_manager) raises an exception, we store it in this variable + # so that we can display the error after logging the input and storing + # it in the history. + try: + cell = self.transform_cell(raw_cell) + except Exception: + preprocessing_exc_tuple = sys.exc_info() + cell = raw_cell # cell has to exist so it can be stored/logged + else: + preprocessing_exc_tuple = None + else: + if preprocessing_exc_tuple is None: + cell = transformed_cell + else: + cell = raw_cell + + # Store raw and processed history + if store_history: + self.history_manager.store_inputs(self.execution_count, + cell, raw_cell) + if not silent: + self.logger.log(cell, raw_cell) + + # Display the exception if input processing failed. + if preprocessing_exc_tuple is not None: + self.showtraceback(preprocessing_exc_tuple) + if store_history: + self.execution_count += 1 + return error_before_exec(preprocessing_exc_tuple[1]) + + # Our own compiler remembers the __future__ environment. If we want to + # run code with a separate __future__ environment, use the default + # compiler + compiler = self.compile if shell_futures else self.compiler_class() + + _run_async = False + + with self.builtin_trap: + cell_name = self.compile.cache( + cell, self.execution_count, raw_code=raw_cell + ) + + with self.display_trap: + # Compile to bytecode + try: + if sys.version_info < (3,8) and self.autoawait: + if _should_be_async(cell): + # the code AST below will not be user code: we wrap it + # in an `async def`. This will likely make some AST + # transformer below miss some transform opportunity and + # introduce a small coupling to run_code (in which we + # bake some assumptions of what _ast_asyncify returns. + # they are ways around (like grafting part of the ast + # later: + # - Here, return code_ast.body[0].body[1:-1], as well + # as last expression in return statement which is + # the user code part. 
+ # - Let it go through the AST transformers, and graft + # - it back after the AST transform + # But that seem unreasonable, at least while we + # do not need it. + code_ast = _ast_asyncify(cell, 'async-def-wrapper') + _run_async = True + else: + code_ast = compiler.ast_parse(cell, filename=cell_name) + else: + code_ast = compiler.ast_parse(cell, filename=cell_name) + except self.custom_exceptions as e: + etype, value, tb = sys.exc_info() + self.CustomTB(etype, value, tb) + return error_before_exec(e) + except IndentationError as e: + self.showindentationerror() + return error_before_exec(e) + except (OverflowError, SyntaxError, ValueError, TypeError, + MemoryError) as e: + self.showsyntaxerror() + return error_before_exec(e) + + # Apply AST transformations + try: + code_ast = self.transform_ast(code_ast) + except InputRejected as e: + self.showtraceback() + return error_before_exec(e) + + # Give the displayhook a reference to our ExecutionResult so it + # can fill in the output value. + self.displayhook.exec_result = result + + # Execute the user code + interactivity = "none" if silent else self.ast_node_interactivity + if _run_async: + interactivity = 'async' + + has_raised = await self.run_ast_nodes(code_ast.body, cell_name, + interactivity=interactivity, compiler=compiler, result=result) + + self.last_execution_succeeded = not has_raised + self.last_execution_result = result + + # Reset this so later displayed values do not modify the + # ExecutionResult + self.displayhook.exec_result = None + + if store_history: + # Write output to the database. Does nothing unless + # history output logging is enabled. + self.history_manager.store_output(self.execution_count) + # Each cell is a *single* input, regardless of how many lines it has + self.execution_count += 1 + + return result + + def transform_cell(self, raw_cell): + """Transform an input cell before parsing it. + + Static transformations, implemented in IPython.core.inputtransformer2, + deal with things like ``%magic`` and ``!system`` commands. + These run on all input. + Dynamic transformations, for things like unescaped magics and the exit + autocall, depend on the state of the interpreter. + These only apply to single line inputs. + + These string-based transformations are followed by AST transformations; + see :meth:`transform_ast`. + """ + # Static input transformations + cell = self.input_transformer_manager.transform_cell(raw_cell) + + if len(cell.splitlines()) == 1: + # Dynamic transformations - only applied for single line commands + with self.builtin_trap: + # use prefilter_lines to handle trailing newlines + # restore trailing newline for ast.parse + cell = self.prefilter_manager.prefilter_lines(cell) + '\n' + + lines = cell.splitlines(keepends=True) + for transform in self.input_transformers_post: + lines = transform(lines) + cell = ''.join(lines) + + return cell + + def transform_ast(self, node): + """Apply the AST transformations from self.ast_transformers + + Parameters + ---------- + node : ast.Node + The root node to be transformed. Typically called with the ast.Module + produced by parsing user input. + + Returns + ------- + An ast.Node corresponding to the node it was called with. Note that it + may also modify the passed object, so don't rely on references to the + original AST. + """ + for transformer in self.ast_transformers: + try: + node = transformer.visit(node) + except InputRejected: + # User-supplied AST transformers can reject an input by raising + # an InputRejected. 
Short-circuit in this case so that we + # don't unregister the transform. + raise + except Exception: + warn("AST transformer %r threw an error. It will be unregistered." % transformer) + self.ast_transformers.remove(transformer) + + if self.ast_transformers: + ast.fix_missing_locations(node) + return node + + async def run_ast_nodes(self, nodelist:ListType[AST], cell_name:str, interactivity='last_expr', + compiler=compile, result=None): + """Run a sequence of AST nodes. The execution mode depends on the + interactivity parameter. + + Parameters + ---------- + nodelist : list + A sequence of AST nodes to run. + cell_name : str + Will be passed to the compiler as the filename of the cell. Typically + the value returned by ip.compile.cache(cell). + interactivity : str + 'all', 'last', 'last_expr' , 'last_expr_or_assign' or 'none', + specifying which nodes should be run interactively (displaying output + from expressions). 'last_expr' will run the last node interactively + only if it is an expression (i.e. expressions in loops or other blocks + are not displayed) 'last_expr_or_assign' will run the last expression + or the last assignment. Other values for this parameter will raise a + ValueError. + + Experimental value: 'async' Will try to run top level interactive + async/await code in default runner, this will not respect the + interactivity setting and will only run the last node if it is an + expression. + + compiler : callable + A function with the same interface as the built-in compile(), to turn + the AST nodes into code objects. Default is the built-in compile(). + result : ExecutionResult, optional + An object to store exceptions that occur during execution. + + Returns + ------- + True if an exception occurred while running code, False if it finished + running. + """ + if not nodelist: + return + + if interactivity == 'last_expr_or_assign': + if isinstance(nodelist[-1], _assign_nodes): + asg = nodelist[-1] + if isinstance(asg, ast.Assign) and len(asg.targets) == 1: + target = asg.targets[0] + elif isinstance(asg, _single_targets_nodes): + target = asg.target + else: + target = None + if isinstance(target, ast.Name): + nnode = ast.Expr(ast.Name(target.id, ast.Load())) + ast.fix_missing_locations(nnode) + nodelist.append(nnode) + interactivity = 'last_expr' + + _async = False + if interactivity == 'last_expr': + if isinstance(nodelist[-1], ast.Expr): + interactivity = "last" + else: + interactivity = "none" + + if interactivity == 'none': + to_run_exec, to_run_interactive = nodelist, [] + elif interactivity == 'last': + to_run_exec, to_run_interactive = nodelist[:-1], nodelist[-1:] + elif interactivity == 'all': + to_run_exec, to_run_interactive = [], nodelist + elif interactivity == 'async': + to_run_exec, to_run_interactive = [], nodelist + _async = True + else: + raise ValueError("Interactivity was %r" % interactivity) + + try: + if _async and sys.version_info > (3,8): + raise ValueError("This branch should never happen on Python 3.8 and above, " + "please try to upgrade IPython and open a bug report with your case.") + if _async: + # If interactivity is async the semantics of run_code are + # completely different Skip usual machinery. 
+ mod = Module(nodelist, []) + async_wrapper_code = compiler(mod, cell_name, 'exec') + exec(async_wrapper_code, self.user_global_ns, self.user_ns) + async_code = removed_co_newlocals(self.user_ns.pop('async-def-wrapper')).__code__ + if (await self.run_code(async_code, result, async_=True)): + return True + else: + if sys.version_info > (3, 8): + def compare(code): + is_async = (inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE) + return is_async + else: + def compare(code): + return _async + + # refactor that to just change the mod constructor. + to_run = [] + for node in to_run_exec: + to_run.append((node, 'exec')) + + for node in to_run_interactive: + to_run.append((node, 'single')) + + for node,mode in to_run: + if mode == 'exec': + mod = Module([node], []) + elif mode == 'single': + mod = ast.Interactive([node]) + with compiler.extra_flags(getattr(ast, 'PyCF_ALLOW_TOP_LEVEL_AWAIT', 0x0) if self.autoawait else 0x0): + code = compiler(mod, cell_name, mode) + asy = compare(code) + if (await self.run_code(code, result, async_=asy)): + return True + + # Flush softspace + if softspace(sys.stdout, 0): + print() + + except: + # It's possible to have exceptions raised here, typically by + # compilation of odd code (such as a naked 'return' outside a + # function) that did parse but isn't valid. Typically the exception + # is a SyntaxError, but it's safest just to catch anything and show + # the user a traceback. + + # We do only one try/except outside the loop to minimize the impact + # on runtime, and also because if any node in the node list is + # broken, we should stop execution completely. + if result: + result.error_before_exec = sys.exc_info()[1] + self.showtraceback() + return True + + return False + + def _async_exec(self, code_obj: types.CodeType, user_ns: dict): + """ + Evaluate an asynchronous code object using a code runner + + Fake asynchronous execution of code_object in a namespace via a proxy namespace. + + Returns coroutine object, which can be executed via async loop runner + + WARNING: The semantics of `async_exec` are quite different from `exec`, + in particular you can only pass a single namespace. It also return a + handle to the value of the last things returned by code_object. + """ + + return eval(code_obj, user_ns) + + async def run_code(self, code_obj, result=None, *, async_=False): + """Execute a code object. + + When an exception occurs, self.showtraceback() is called to display a + traceback. + + Parameters + ---------- + code_obj : code object + A compiled code object, to be executed + result : ExecutionResult, optional + An object to store exceptions that occur during execution. + async_ : Bool (Experimental) + Attempt to run top-level asynchronous code in a default loop. + + Returns + ------- + False : successful execution. + True : an error occurred. + """ + # special value to say that anything above is IPython and should be + # hidden. + __tracebackhide__ = "__ipython_bottom__" + # Set our own excepthook in case the user code tries to call it + # directly, so that the IPython crash handler doesn't get triggered + old_excepthook, sys.excepthook = sys.excepthook, self.excepthook + + # we save the original sys.excepthook in the instance, in case config + # code (such as magics) needs access to it. 
+ self.sys_excepthook = old_excepthook + outflag = True # happens in more places, so it's easier as default + try: + try: + self.hooks.pre_run_code_hook() + if async_ and sys.version_info < (3,8): + last_expr = (await self._async_exec(code_obj, self.user_ns)) + code = compile('last_expr', 'fake', "single") + exec(code, {'last_expr': last_expr}) + elif async_ : + await eval(code_obj, self.user_global_ns, self.user_ns) + else: + exec(code_obj, self.user_global_ns, self.user_ns) + finally: + # Reset our crash handler in place + sys.excepthook = old_excepthook + except SystemExit as e: + if result is not None: + result.error_in_exec = e + self.showtraceback(exception_only=True) + warn("To exit: use 'exit', 'quit', or Ctrl-D.", stacklevel=1) + except self.custom_exceptions: + etype, value, tb = sys.exc_info() + if result is not None: + result.error_in_exec = value + self.CustomTB(etype, value, tb) + except: + if result is not None: + result.error_in_exec = sys.exc_info()[1] + self.showtraceback(running_compiled_code=True) + else: + outflag = False + return outflag + + # For backwards compatibility + runcode = run_code + + def check_complete(self, code: str) -> Tuple[str, str]: + """Return whether a block of code is ready to execute, or should be continued + + Parameters + ---------- + source : string + Python input code, which can be multiline. + + Returns + ------- + status : str + One of 'complete', 'incomplete', or 'invalid' if source is not a + prefix of valid code. + indent : str + When status is 'incomplete', this is some whitespace to insert on + the next line of the prompt. + """ + status, nspaces = self.input_transformer_manager.check_complete(code) + return status, ' ' * (nspaces or 0) + + #------------------------------------------------------------------------- + # Things related to GUI support and pylab + #------------------------------------------------------------------------- + + active_eventloop = None + + def enable_gui(self, gui=None): + raise NotImplementedError('Implement enable_gui in a subclass') + + def enable_matplotlib(self, gui=None): + """Enable interactive matplotlib and inline figure support. + + This takes the following steps: + + 1. select the appropriate eventloop and matplotlib backend + 2. set up matplotlib for interactive use with that backend + 3. configure formatters for inline figure display + 4. enable the selected gui eventloop + + Parameters + ---------- + gui : optional, string + If given, dictates the choice of matplotlib GUI backend to use + (should be one of IPython's supported backends, 'qt', 'osx', 'tk', + 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by + matplotlib (as dictated by the matplotlib build-time options plus the + user's matplotlibrc configuration file). Note that not all backends + make sense in all contexts, for example a terminal ipython can't + display figures inline. + """ + from IPython.core import pylabtools as pt + from matplotlib_inline.backend_inline import configure_inline_support + gui, backend = pt.find_gui_and_backend(gui, self.pylab_gui_select) + + if gui != 'inline': + # If we have our first gui selection, store it + if self.pylab_gui_select is None: + self.pylab_gui_select = gui + # Otherwise if they are different + elif gui != self.pylab_gui_select: + print('Warning: Cannot change to a different GUI toolkit: %s.' + ' Using %s instead.' 
% (gui, self.pylab_gui_select)) + gui, backend = pt.find_gui_and_backend(self.pylab_gui_select) + + pt.activate_matplotlib(backend) + configure_inline_support(self, backend) + + # Now we must activate the gui pylab wants to use, and fix %run to take + # plot updates into account + self.enable_gui(gui) + self.magics_manager.registry['ExecutionMagics'].default_runner = \ + pt.mpl_runner(self.safe_execfile) + + return gui, backend + + def enable_pylab(self, gui=None, import_all=True, welcome_message=False): + """Activate pylab support at runtime. + + This turns on support for matplotlib, preloads into the interactive + namespace all of numpy and pylab, and configures IPython to correctly + interact with the GUI event loop. The GUI backend to be used can be + optionally selected with the optional ``gui`` argument. + + This method only adds preloading the namespace to InteractiveShell.enable_matplotlib. + + Parameters + ---------- + gui : optional, string + If given, dictates the choice of matplotlib GUI backend to use + (should be one of IPython's supported backends, 'qt', 'osx', 'tk', + 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by + matplotlib (as dictated by the matplotlib build-time options plus the + user's matplotlibrc configuration file). Note that not all backends + make sense in all contexts, for example a terminal ipython can't + display figures inline. + import_all : optional, bool, default: True + Whether to do `from numpy import *` and `from pylab import *` + in addition to module imports. + welcome_message : deprecated + This argument is ignored, no welcome message will be displayed. + """ + from IPython.core.pylabtools import import_pylab + + gui, backend = self.enable_matplotlib(gui) + + # We want to prevent the loading of pylab to pollute the user's + # namespace as shown by the %who* magics, so we execute the activation + # code in an empty namespace, and we update *both* user_ns and + # user_ns_hidden with this information. + ns = {} + import_pylab(ns, import_all) + # warn about clobbered names + ignored = {"__builtins__"} + both = set(ns).intersection(self.user_ns).difference(ignored) + clobbered = [ name for name in both if self.user_ns[name] is not ns[name] ] + self.user_ns.update(ns) + self.user_ns_hidden.update(ns) + return gui, backend, clobbered + + #------------------------------------------------------------------------- + # Utilities + #------------------------------------------------------------------------- + + def var_expand(self, cmd, depth=0, formatter=DollarFormatter()): + """Expand python variables in a string. + + The depth argument indicates how many frames above the caller should + be walked to look for the local namespace where to expand variables. + + The global namespace for expansion is always the user's interactive + namespace. + """ + ns = self.user_ns.copy() + try: + frame = sys._getframe(depth+1) + except ValueError: + # This is thrown if there aren't that many frames on the stack, + # e.g. if a script called run_line_magic() directly. + pass + else: + ns.update(frame.f_locals) + + try: + # We have to use .vformat() here, because 'self' is a valid and common + # name, and expanding **ns for .format() would make it collide with + # the 'self' argument of the method. + cmd = formatter.vformat(cmd, args=[], kwargs=ns) + except Exception: + # if formatter couldn't format, just let it go untransformed + pass + return cmd + + def mktempfile(self, data=None, prefix='ipython_edit_'): + """Make a new tempfile and return its filename. 
+ + This makes a call to tempfile.mkstemp (created in a tempfile.mkdtemp), + but it registers the created filename internally so ipython cleans it up + at exit time. + + Optional inputs: + + - data(None): if data is given, it gets written out to the temp file + immediately, and the file is closed again.""" + + dirname = tempfile.mkdtemp(prefix=prefix) + self.tempdirs.append(dirname) + + handle, filename = tempfile.mkstemp('.py', prefix, dir=dirname) + os.close(handle) # On Windows, there can only be one open handle on a file + self.tempfiles.append(filename) + + if data: + with open(filename, 'w') as tmp_file: + tmp_file.write(data) + return filename + + @undoc + def write(self,data): + """DEPRECATED: Write a string to the default output""" + warn('InteractiveShell.write() is deprecated, use sys.stdout instead', + DeprecationWarning, stacklevel=2) + sys.stdout.write(data) + + @undoc + def write_err(self,data): + """DEPRECATED: Write a string to the default error output""" + warn('InteractiveShell.write_err() is deprecated, use sys.stderr instead', + DeprecationWarning, stacklevel=2) + sys.stderr.write(data) + + def ask_yes_no(self, prompt, default=None, interrupt=None): + if self.quiet: + return True + return ask_yes_no(prompt,default,interrupt) + + def show_usage(self): + """Show a usage message""" + page.page(IPython.core.usage.interactive_usage) + + def extract_input_lines(self, range_str, raw=False): + """Return as a string a set of input history slices. + + Parameters + ---------- + range_str : string + The set of slices is given as a string, like "~5/6-~4/2 4:8 9", + since this function is for use by magic functions which get their + arguments as strings. The number before the / is the session + number: ~n goes n back from the current session. + + raw : bool, optional + By default, the processed input is used. If this is true, the raw + input history is used instead. + + Notes + ----- + + Slices can be described with two notations: + + * ``N:M`` -> standard python form, means including items N...(M-1). + * ``N-M`` -> include items N..M (closed endpoint). + """ + lines = self.history_manager.get_range_by_str(range_str, raw=raw) + return "\n".join(x for _, _, x in lines) + + def find_user_code(self, target, raw=True, py_only=False, skip_encoding_cookie=True, search_ns=False): + """Get a code string from history, file, url, or a string or macro. + + This is mainly used by magic functions. + + Parameters + ---------- + + target : str + + A string specifying code to retrieve. This will be tried respectively + as: ranges of input history (see %history for syntax), url, + corresponding .py file, filename, or an expression evaluating to a + string or Macro in the user namespace. + + raw : bool + If true (default), retrieve raw history. Has no effect on the other + retrieval mechanisms. + + py_only : bool (default False) + Only try to fetch python code, do not try alternative methods to decode file + if unicode fails. + + Returns + ------- + A string of code. + + ValueError is raised if nothing is found, and TypeError if it evaluates + to an object of another type. In each case, .args[0] is a printable + message. 
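Hypothetical example of the user-namespace fallback described above (the names snippet and m are placeholders chosen for illustration):

    from IPython.core.interactiveshell import InteractiveShell
    from IPython.core.macro import Macro

    shell = InteractiveShell.instance()

    shell.user_ns["snippet"] = "print('hello')"
    print(shell.find_user_code("snippet"))       # returns the string itself

    shell.user_ns["m"] = Macro("a = 1\nb = 2\n")
    print(shell.find_user_code("m"))             # returns the macro's source (its .value)
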
+ """ + code = self.extract_input_lines(target, raw=raw) # Grab history + if code: + return code + try: + if target.startswith(('http://', 'https://')): + return openpy.read_py_url(target, skip_encoding_cookie=skip_encoding_cookie) + except UnicodeDecodeError: + if not py_only : + # Deferred import + from urllib.request import urlopen + response = urlopen(target) + return response.read().decode('latin1') + raise ValueError(("'%s' seem to be unreadable.") % target) + + potential_target = [target] + try : + potential_target.insert(0,get_py_filename(target)) + except IOError: + pass + + for tgt in potential_target : + if os.path.isfile(tgt): # Read file + try : + return openpy.read_py_file(tgt, skip_encoding_cookie=skip_encoding_cookie) + except UnicodeDecodeError : + if not py_only : + with io_open(tgt,'r', encoding='latin1') as f : + return f.read() + raise ValueError(("'%s' seem to be unreadable.") % target) + elif os.path.isdir(os.path.expanduser(tgt)): + raise ValueError("'%s' is a directory, not a regular file." % target) + + if search_ns: + # Inspect namespace to load object source + object_info = self.object_inspect(target, detail_level=1) + if object_info['found'] and object_info['source']: + return object_info['source'] + + try: # User namespace + codeobj = eval(target, self.user_ns) + except Exception: + raise ValueError(("'%s' was not found in history, as a file, url, " + "nor in the user namespace.") % target) + + if isinstance(codeobj, str): + return codeobj + elif isinstance(codeobj, Macro): + return codeobj.value + + raise TypeError("%s is neither a string nor a macro." % target, + codeobj) + + #------------------------------------------------------------------------- + # Things related to IPython exiting + #------------------------------------------------------------------------- + def atexit_operations(self): + """This will be executed at the time of exit. + + Cleanup operations and saving of persistent data that is done + unconditionally by IPython should be performed here. + + For things that may depend on startup flags or platform specifics (such + as having readline or not), register a separate atexit function in the + code that has the appropriate information, rather than trying to + clutter + """ + # Close the history session (this stores the end time and line count) + # this must be *before* the tempfile cleanup, in case of temporary + # history db + self.history_manager.end_session() + + # Cleanup all tempfiles and folders left around + for tfile in self.tempfiles: + try: + os.unlink(tfile) + except OSError: + pass + + for tdir in self.tempdirs: + try: + os.rmdir(tdir) + except OSError: + pass + + # Clear all user namespaces to release all references cleanly. + self.reset(new_session=False) + + # Run user hooks + self.hooks.shutdown_hook() + + def cleanup(self): + self.restore_sys_module_state() + + + # Overridden in terminal subclass to change prompts + def switch_doctest_mode(self, mode): + pass + + +class InteractiveShellABC(metaclass=abc.ABCMeta): + """An abstract base class for InteractiveShell.""" + +InteractiveShellABC.register(InteractiveShell) diff --git a/contrib/python/ipython/py3/IPython/core/latex_symbols.py b/contrib/python/ipython/py3/IPython/core/latex_symbols.py index e9d9fb58e23..164d917beb6 100644 --- a/contrib/python/ipython/py3/IPython/core/latex_symbols.py +++ b/contrib/python/ipython/py3/IPython/core/latex_symbols.py @@ -1,1301 +1,1301 @@ -# encoding: utf-8 - -# DO NOT EDIT THIS FILE BY HAND. 
- -# To update this file, run the script /tools/gen_latex_symbols.py using Python 3 - -# This file is autogenerated from the file: -# https://raw.githubusercontent.com/JuliaLang/julia/master/base/latex_symbols.jl -# This original list is filtered to remove any unicode characters that are not valid -# Python identifiers. - -latex_symbols = { - - "\\euler" : "ℯ", - "\\^a" : "ᵃ", - "\\^b" : "ᵇ", - "\\^c" : "ᶜ", - "\\^d" : "ᵈ", - "\\^e" : "ᵉ", - "\\^f" : "ᶠ", - "\\^g" : "ᵍ", - "\\^h" : "ʰ", - "\\^i" : "ⁱ", - "\\^j" : "ʲ", - "\\^k" : "ᵏ", - "\\^l" : "ˡ", - "\\^m" : "ᵐ", - "\\^n" : "ⁿ", - "\\^o" : "ᵒ", - "\\^p" : "ᵖ", - "\\^r" : "ʳ", - "\\^s" : "ˢ", - "\\^t" : "ᵗ", - "\\^u" : "ᵘ", - "\\^v" : "ᵛ", - "\\^w" : "ʷ", - "\\^x" : "ˣ", - "\\^y" : "ʸ", - "\\^z" : "ᶻ", - "\\^A" : "ᴬ", - "\\^B" : "ᴮ", - "\\^D" : "ᴰ", - "\\^E" : "ᴱ", - "\\^G" : "ᴳ", - "\\^H" : "ᴴ", - "\\^I" : "ᴵ", - "\\^J" : "ᴶ", - "\\^K" : "ᴷ", - "\\^L" : "ᴸ", - "\\^M" : "ᴹ", - "\\^N" : "ᴺ", - "\\^O" : "ᴼ", - "\\^P" : "ᴾ", - "\\^R" : "ᴿ", - "\\^T" : "ᵀ", - "\\^U" : "ᵁ", - "\\^V" : "ⱽ", - "\\^W" : "ᵂ", - "\\^alpha" : "ᵅ", - "\\^beta" : "ᵝ", - "\\^gamma" : "ᵞ", - "\\^delta" : "ᵟ", - "\\^epsilon" : "ᵋ", - "\\^theta" : "ᶿ", - "\\^iota" : "ᶥ", - "\\^phi" : "ᵠ", - "\\^chi" : "ᵡ", - "\\^Phi" : "ᶲ", - "\\_a" : "ₐ", - "\\_e" : "ₑ", - "\\_h" : "ₕ", - "\\_i" : "ᵢ", - "\\_j" : "ⱼ", - "\\_k" : "ₖ", - "\\_l" : "ₗ", - "\\_m" : "ₘ", - "\\_n" : "ₙ", - "\\_o" : "ₒ", - "\\_p" : "ₚ", - "\\_r" : "ᵣ", - "\\_s" : "ₛ", - "\\_t" : "ₜ", - "\\_u" : "ᵤ", - "\\_v" : "ᵥ", - "\\_x" : "ₓ", - "\\_schwa" : "ₔ", - "\\_beta" : "ᵦ", - "\\_gamma" : "ᵧ", - "\\_rho" : "ᵨ", - "\\_phi" : "ᵩ", - "\\_chi" : "ᵪ", - "\\hbar" : "ħ", - "\\sout" : "̶", - "\\ordfeminine" : "ª", - "\\cdotp" : "·", - "\\ordmasculine" : "º", - "\\AA" : "Å", - "\\AE" : "Æ", - "\\DH" : "Ð", - "\\O" : "Ø", - "\\TH" : "Þ", - "\\ss" : "ß", - "\\aa" : "å", - "\\ae" : "æ", - "\\eth" : "ð", - "\\dh" : "ð", - "\\o" : "ø", - "\\th" : "þ", - "\\DJ" : "Đ", - "\\dj" : "đ", - "\\imath" : "ı", - "\\jmath" : "ȷ", - "\\L" : "Ł", - "\\l" : "ł", - "\\NG" : "Ŋ", - "\\ng" : "ŋ", - "\\OE" : "Œ", - "\\oe" : "œ", - "\\hvlig" : "ƕ", - "\\nrleg" : "ƞ", - "\\doublepipe" : "ǂ", - "\\trna" : "ɐ", - "\\trnsa" : "ɒ", - "\\openo" : "ɔ", - "\\rtld" : "ɖ", - "\\schwa" : "ə", - "\\varepsilon" : "ε", - "\\pgamma" : "ɣ", - "\\pbgam" : "ɤ", - "\\trnh" : "ɥ", - "\\btdl" : "ɬ", - "\\rtll" : "ɭ", - "\\trnm" : "ɯ", - "\\trnmlr" : "ɰ", - "\\ltlmr" : "ɱ", - "\\ltln" : "ɲ", - "\\rtln" : "ɳ", - "\\clomeg" : "ɷ", - "\\ltphi" : "ɸ", - "\\trnr" : "ɹ", - "\\trnrl" : "ɺ", - "\\rttrnr" : "ɻ", - "\\rl" : "ɼ", - "\\rtlr" : "ɽ", - "\\fhr" : "ɾ", - "\\rtls" : "ʂ", - "\\esh" : "ʃ", - "\\trnt" : "ʇ", - "\\rtlt" : "ʈ", - "\\pupsil" : "ʊ", - "\\pscrv" : "ʋ", - "\\invv" : "ʌ", - "\\invw" : "ʍ", - "\\trny" : "ʎ", - "\\rtlz" : "ʐ", - "\\yogh" : "ʒ", - "\\glst" : "ʔ", - "\\reglst" : "ʕ", - "\\inglst" : "ʖ", - "\\turnk" : "ʞ", - "\\dyogh" : "ʤ", - "\\tesh" : "ʧ", - "\\rasp" : "ʼ", - "\\verts" : "ˈ", - "\\verti" : "ˌ", - "\\lmrk" : "ː", - "\\hlmrk" : "ˑ", - "\\grave" : "̀", - "\\acute" : "́", - "\\hat" : "̂", - "\\tilde" : "̃", - "\\bar" : "̄", - "\\breve" : "̆", - "\\dot" : "̇", - "\\ddot" : "̈", - "\\ocirc" : "̊", - "\\H" : "̋", - "\\check" : "̌", - "\\palh" : "̡", - "\\rh" : "̢", - "\\c" : "̧", - "\\k" : "̨", - "\\sbbrg" : "̪", - "\\strike" : "̶", - "\\Alpha" : "Α", - "\\Beta" : "Β", - "\\Gamma" : "Γ", - "\\Delta" : "Δ", - "\\Epsilon" : "Ε", - "\\Zeta" : "Ζ", - "\\Eta" : "Η", - "\\Theta" : "Θ", - "\\Iota" : "Ι", - "\\Kappa" : "Κ", - "\\Lambda" : "Λ", - "\\Xi" : "Ξ", - 
"\\Pi" : "Π", - "\\Rho" : "Ρ", - "\\Sigma" : "Σ", - "\\Tau" : "Τ", - "\\Upsilon" : "Υ", - "\\Phi" : "Φ", - "\\Chi" : "Χ", - "\\Psi" : "Ψ", - "\\Omega" : "Ω", - "\\alpha" : "α", - "\\beta" : "β", - "\\gamma" : "γ", - "\\delta" : "δ", - "\\zeta" : "ζ", - "\\eta" : "η", - "\\theta" : "θ", - "\\iota" : "ι", - "\\kappa" : "κ", - "\\lambda" : "λ", - "\\mu" : "μ", - "\\nu" : "ν", - "\\xi" : "ξ", - "\\pi" : "π", - "\\rho" : "ρ", - "\\varsigma" : "ς", - "\\sigma" : "σ", - "\\tau" : "τ", - "\\upsilon" : "υ", - "\\varphi" : "φ", - "\\chi" : "χ", - "\\psi" : "ψ", - "\\omega" : "ω", - "\\vartheta" : "ϑ", - "\\phi" : "ϕ", - "\\varpi" : "ϖ", - "\\Stigma" : "Ϛ", - "\\Digamma" : "Ϝ", - "\\digamma" : "ϝ", - "\\Koppa" : "Ϟ", - "\\Sampi" : "Ϡ", - "\\varkappa" : "ϰ", - "\\varrho" : "ϱ", - "\\varTheta" : "ϴ", - "\\epsilon" : "ϵ", - "\\dddot" : "⃛", - "\\ddddot" : "⃜", - "\\hslash" : "ℏ", - "\\Im" : "ℑ", - "\\ell" : "ℓ", - "\\wp" : "℘", - "\\Re" : "ℜ", - "\\aleph" : "ℵ", - "\\beth" : "ℶ", - "\\gimel" : "ℷ", - "\\daleth" : "ℸ", - "\\bbPi" : "ℿ", - "\\Zbar" : "Ƶ", - "\\overbar" : "̅", - "\\ovhook" : "̉", - "\\candra" : "̐", - "\\oturnedcomma" : "̒", - "\\ocommatopright" : "̕", - "\\droang" : "̚", - "\\wideutilde" : "̰", - "\\not" : "̸", - "\\upMu" : "Μ", - "\\upNu" : "Ν", - "\\upOmicron" : "Ο", - "\\upepsilon" : "ε", - "\\upomicron" : "ο", - "\\upvarbeta" : "ϐ", - "\\upoldKoppa" : "Ϙ", - "\\upoldkoppa" : "ϙ", - "\\upstigma" : "ϛ", - "\\upkoppa" : "ϟ", - "\\upsampi" : "ϡ", - "\\tieconcat" : "⁀", - "\\leftharpoonaccent" : "⃐", - "\\rightharpoonaccent" : "⃑", - "\\vertoverlay" : "⃒", - "\\overleftarrow" : "⃖", - "\\vec" : "⃗", - "\\overleftrightarrow" : "⃡", - "\\annuity" : "⃧", - "\\threeunderdot" : "⃨", - "\\widebridgeabove" : "⃩", - "\\bbC" : "ℂ", - "\\eulermascheroni" : "ℇ", - "\\scrg" : "ℊ", - "\\scrH" : "ℋ", - "\\frakH" : "ℌ", - "\\bbH" : "ℍ", - "\\planck" : "ℎ", - "\\scrI" : "ℐ", - "\\scrL" : "ℒ", - "\\bbN" : "ℕ", - "\\bbP" : "ℙ", - "\\bbQ" : "ℚ", - "\\scrR" : "ℛ", - "\\bbR" : "ℝ", - "\\bbZ" : "ℤ", - "\\frakZ" : "ℨ", - "\\Angstrom" : "Å", - "\\scrB" : "ℬ", - "\\frakC" : "ℭ", - "\\scre" : "ℯ", - "\\scrE" : "ℰ", - "\\scrF" : "ℱ", - "\\Finv" : "Ⅎ", - "\\scrM" : "ℳ", - "\\scro" : "ℴ", - "\\bbgamma" : "ℽ", - "\\bbGamma" : "ℾ", - "\\bbiD" : "ⅅ", - "\\bbid" : "ⅆ", - "\\bbie" : "ⅇ", - "\\bbii" : "ⅈ", - "\\bbij" : "ⅉ", - "\\bfA" : "𝐀", - "\\bfB" : "𝐁", - "\\bfC" : "𝐂", - "\\bfD" : "𝐃", - "\\bfE" : "𝐄", - "\\bfF" : "𝐅", - "\\bfG" : "𝐆", - "\\bfH" : "𝐇", - "\\bfI" : "𝐈", - "\\bfJ" : "𝐉", - "\\bfK" : "𝐊", - "\\bfL" : "𝐋", - "\\bfM" : "𝐌", - "\\bfN" : "𝐍", - "\\bfO" : "𝐎", - "\\bfP" : "𝐏", - "\\bfQ" : "𝐐", - "\\bfR" : "𝐑", - "\\bfS" : "𝐒", - "\\bfT" : "𝐓", - "\\bfU" : "𝐔", - "\\bfV" : "𝐕", - "\\bfW" : "𝐖", - "\\bfX" : "𝐗", - "\\bfY" : "𝐘", - "\\bfZ" : "𝐙", - "\\bfa" : "𝐚", - "\\bfb" : "𝐛", - "\\bfc" : "𝐜", - "\\bfd" : "𝐝", - "\\bfe" : "𝐞", - "\\bff" : "𝐟", - "\\bfg" : "𝐠", - "\\bfh" : "𝐡", - "\\bfi" : "𝐢", - "\\bfj" : "𝐣", - "\\bfk" : "𝐤", - "\\bfl" : "𝐥", - "\\bfm" : "𝐦", - "\\bfn" : "𝐧", - "\\bfo" : "𝐨", - "\\bfp" : "𝐩", - "\\bfq" : "𝐪", - "\\bfr" : "𝐫", - "\\bfs" : "𝐬", - "\\bft" : "𝐭", - "\\bfu" : "𝐮", - "\\bfv" : "𝐯", - "\\bfw" : "𝐰", - "\\bfx" : "𝐱", - "\\bfy" : "𝐲", - "\\bfz" : "𝐳", - "\\itA" : "𝐴", - "\\itB" : "𝐵", - "\\itC" : "𝐶", - "\\itD" : "𝐷", - "\\itE" : "𝐸", - "\\itF" : "𝐹", - "\\itG" : "𝐺", - "\\itH" : "𝐻", - "\\itI" : "𝐼", - "\\itJ" : "𝐽", - "\\itK" : "𝐾", - "\\itL" : "𝐿", - "\\itM" : "𝑀", - "\\itN" : "𝑁", - "\\itO" : "𝑂", - "\\itP" : "𝑃", - "\\itQ" : "𝑄", - "\\itR" : "𝑅", - "\\itS" : "𝑆", - "\\itT" : "𝑇", 
- "\\itU" : "𝑈", - "\\itV" : "𝑉", - "\\itW" : "𝑊", - "\\itX" : "𝑋", - "\\itY" : "𝑌", - "\\itZ" : "𝑍", - "\\ita" : "𝑎", - "\\itb" : "𝑏", - "\\itc" : "𝑐", - "\\itd" : "𝑑", - "\\ite" : "𝑒", - "\\itf" : "𝑓", - "\\itg" : "𝑔", - "\\iti" : "𝑖", - "\\itj" : "𝑗", - "\\itk" : "𝑘", - "\\itl" : "𝑙", - "\\itm" : "𝑚", - "\\itn" : "𝑛", - "\\ito" : "𝑜", - "\\itp" : "𝑝", - "\\itq" : "𝑞", - "\\itr" : "𝑟", - "\\its" : "𝑠", - "\\itt" : "𝑡", - "\\itu" : "𝑢", - "\\itv" : "𝑣", - "\\itw" : "𝑤", - "\\itx" : "𝑥", - "\\ity" : "𝑦", - "\\itz" : "𝑧", - "\\biA" : "𝑨", - "\\biB" : "𝑩", - "\\biC" : "𝑪", - "\\biD" : "𝑫", - "\\biE" : "𝑬", - "\\biF" : "𝑭", - "\\biG" : "𝑮", - "\\biH" : "𝑯", - "\\biI" : "𝑰", - "\\biJ" : "𝑱", - "\\biK" : "𝑲", - "\\biL" : "𝑳", - "\\biM" : "𝑴", - "\\biN" : "𝑵", - "\\biO" : "𝑶", - "\\biP" : "𝑷", - "\\biQ" : "𝑸", - "\\biR" : "𝑹", - "\\biS" : "𝑺", - "\\biT" : "𝑻", - "\\biU" : "𝑼", - "\\biV" : "𝑽", - "\\biW" : "𝑾", - "\\biX" : "𝑿", - "\\biY" : "𝒀", - "\\biZ" : "𝒁", - "\\bia" : "𝒂", - "\\bib" : "𝒃", - "\\bic" : "𝒄", - "\\bid" : "𝒅", - "\\bie" : "𝒆", - "\\bif" : "𝒇", - "\\big" : "𝒈", - "\\bih" : "𝒉", - "\\bii" : "𝒊", - "\\bij" : "𝒋", - "\\bik" : "𝒌", - "\\bil" : "𝒍", - "\\bim" : "𝒎", - "\\bin" : "𝒏", - "\\bio" : "𝒐", - "\\bip" : "𝒑", - "\\biq" : "𝒒", - "\\bir" : "𝒓", - "\\bis" : "𝒔", - "\\bit" : "𝒕", - "\\biu" : "𝒖", - "\\biv" : "𝒗", - "\\biw" : "𝒘", - "\\bix" : "𝒙", - "\\biy" : "𝒚", - "\\biz" : "𝒛", - "\\scrA" : "𝒜", - "\\scrC" : "𝒞", - "\\scrD" : "𝒟", - "\\scrG" : "𝒢", - "\\scrJ" : "𝒥", - "\\scrK" : "𝒦", - "\\scrN" : "𝒩", - "\\scrO" : "𝒪", - "\\scrP" : "𝒫", - "\\scrQ" : "𝒬", - "\\scrS" : "𝒮", - "\\scrT" : "𝒯", - "\\scrU" : "𝒰", - "\\scrV" : "𝒱", - "\\scrW" : "𝒲", - "\\scrX" : "𝒳", - "\\scrY" : "𝒴", - "\\scrZ" : "𝒵", - "\\scra" : "𝒶", - "\\scrb" : "𝒷", - "\\scrc" : "𝒸", - "\\scrd" : "𝒹", - "\\scrf" : "𝒻", - "\\scrh" : "𝒽", - "\\scri" : "𝒾", - "\\scrj" : "𝒿", - "\\scrk" : "𝓀", - "\\scrm" : "𝓂", - "\\scrn" : "𝓃", - "\\scrp" : "𝓅", - "\\scrq" : "𝓆", - "\\scrr" : "𝓇", - "\\scrs" : "𝓈", - "\\scrt" : "𝓉", - "\\scru" : "𝓊", - "\\scrv" : "𝓋", - "\\scrw" : "𝓌", - "\\scrx" : "𝓍", - "\\scry" : "𝓎", - "\\scrz" : "𝓏", - "\\bscrA" : "𝓐", - "\\bscrB" : "𝓑", - "\\bscrC" : "𝓒", - "\\bscrD" : "𝓓", - "\\bscrE" : "𝓔", - "\\bscrF" : "𝓕", - "\\bscrG" : "𝓖", - "\\bscrH" : "𝓗", - "\\bscrI" : "𝓘", - "\\bscrJ" : "𝓙", - "\\bscrK" : "𝓚", - "\\bscrL" : "𝓛", - "\\bscrM" : "𝓜", - "\\bscrN" : "𝓝", - "\\bscrO" : "𝓞", - "\\bscrP" : "𝓟", - "\\bscrQ" : "𝓠", - "\\bscrR" : "𝓡", - "\\bscrS" : "𝓢", - "\\bscrT" : "𝓣", - "\\bscrU" : "𝓤", - "\\bscrV" : "𝓥", - "\\bscrW" : "𝓦", - "\\bscrX" : "𝓧", - "\\bscrY" : "𝓨", - "\\bscrZ" : "𝓩", - "\\bscra" : "𝓪", - "\\bscrb" : "𝓫", - "\\bscrc" : "𝓬", - "\\bscrd" : "𝓭", - "\\bscre" : "𝓮", - "\\bscrf" : "𝓯", - "\\bscrg" : "𝓰", - "\\bscrh" : "𝓱", - "\\bscri" : "𝓲", - "\\bscrj" : "𝓳", - "\\bscrk" : "𝓴", - "\\bscrl" : "𝓵", - "\\bscrm" : "𝓶", - "\\bscrn" : "𝓷", - "\\bscro" : "𝓸", - "\\bscrp" : "𝓹", - "\\bscrq" : "𝓺", - "\\bscrr" : "𝓻", - "\\bscrs" : "𝓼", - "\\bscrt" : "𝓽", - "\\bscru" : "𝓾", - "\\bscrv" : "𝓿", - "\\bscrw" : "𝔀", - "\\bscrx" : "𝔁", - "\\bscry" : "𝔂", - "\\bscrz" : "𝔃", - "\\frakA" : "𝔄", - "\\frakB" : "𝔅", - "\\frakD" : "𝔇", - "\\frakE" : "𝔈", - "\\frakF" : "𝔉", - "\\frakG" : "𝔊", - "\\frakJ" : "𝔍", - "\\frakK" : "𝔎", - "\\frakL" : "𝔏", - "\\frakM" : "𝔐", - "\\frakN" : "𝔑", - "\\frakO" : "𝔒", - "\\frakP" : "𝔓", - "\\frakQ" : "𝔔", - "\\frakS" : "𝔖", - "\\frakT" : "𝔗", - "\\frakU" : "𝔘", - "\\frakV" : "𝔙", - "\\frakW" : "𝔚", - "\\frakX" : "𝔛", - "\\frakY" : "𝔜", - "\\fraka" : "𝔞", - "\\frakb" : 
"𝔟", - "\\frakc" : "𝔠", - "\\frakd" : "𝔡", - "\\frake" : "𝔢", - "\\frakf" : "𝔣", - "\\frakg" : "𝔤", - "\\frakh" : "𝔥", - "\\fraki" : "𝔦", - "\\frakj" : "𝔧", - "\\frakk" : "𝔨", - "\\frakl" : "𝔩", - "\\frakm" : "𝔪", - "\\frakn" : "𝔫", - "\\frako" : "𝔬", - "\\frakp" : "𝔭", - "\\frakq" : "𝔮", - "\\frakr" : "𝔯", - "\\fraks" : "𝔰", - "\\frakt" : "𝔱", - "\\fraku" : "𝔲", - "\\frakv" : "𝔳", - "\\frakw" : "𝔴", - "\\frakx" : "𝔵", - "\\fraky" : "𝔶", - "\\frakz" : "𝔷", - "\\bbA" : "𝔸", - "\\bbB" : "𝔹", - "\\bbD" : "𝔻", - "\\bbE" : "𝔼", - "\\bbF" : "𝔽", - "\\bbG" : "𝔾", - "\\bbI" : "𝕀", - "\\bbJ" : "𝕁", - "\\bbK" : "𝕂", - "\\bbL" : "𝕃", - "\\bbM" : "𝕄", - "\\bbO" : "𝕆", - "\\bbS" : "𝕊", - "\\bbT" : "𝕋", - "\\bbU" : "𝕌", - "\\bbV" : "𝕍", - "\\bbW" : "𝕎", - "\\bbX" : "𝕏", - "\\bbY" : "𝕐", - "\\bba" : "𝕒", - "\\bbb" : "𝕓", - "\\bbc" : "𝕔", - "\\bbd" : "𝕕", - "\\bbe" : "𝕖", - "\\bbf" : "𝕗", - "\\bbg" : "𝕘", - "\\bbh" : "𝕙", - "\\bbi" : "𝕚", - "\\bbj" : "𝕛", - "\\bbk" : "𝕜", - "\\bbl" : "𝕝", - "\\bbm" : "𝕞", - "\\bbn" : "𝕟", - "\\bbo" : "𝕠", - "\\bbp" : "𝕡", - "\\bbq" : "𝕢", - "\\bbr" : "𝕣", - "\\bbs" : "𝕤", - "\\bbt" : "𝕥", - "\\bbu" : "𝕦", - "\\bbv" : "𝕧", - "\\bbw" : "𝕨", - "\\bbx" : "𝕩", - "\\bby" : "𝕪", - "\\bbz" : "𝕫", - "\\bfrakA" : "𝕬", - "\\bfrakB" : "𝕭", - "\\bfrakC" : "𝕮", - "\\bfrakD" : "𝕯", - "\\bfrakE" : "𝕰", - "\\bfrakF" : "𝕱", - "\\bfrakG" : "𝕲", - "\\bfrakH" : "𝕳", - "\\bfrakI" : "𝕴", - "\\bfrakJ" : "𝕵", - "\\bfrakK" : "𝕶", - "\\bfrakL" : "𝕷", - "\\bfrakM" : "𝕸", - "\\bfrakN" : "𝕹", - "\\bfrakO" : "𝕺", - "\\bfrakP" : "𝕻", - "\\bfrakQ" : "𝕼", - "\\bfrakR" : "𝕽", - "\\bfrakS" : "𝕾", - "\\bfrakT" : "𝕿", - "\\bfrakU" : "𝖀", - "\\bfrakV" : "𝖁", - "\\bfrakW" : "𝖂", - "\\bfrakX" : "𝖃", - "\\bfrakY" : "𝖄", - "\\bfrakZ" : "𝖅", - "\\bfraka" : "𝖆", - "\\bfrakb" : "𝖇", - "\\bfrakc" : "𝖈", - "\\bfrakd" : "𝖉", - "\\bfrake" : "𝖊", - "\\bfrakf" : "𝖋", - "\\bfrakg" : "𝖌", - "\\bfrakh" : "𝖍", - "\\bfraki" : "𝖎", - "\\bfrakj" : "𝖏", - "\\bfrakk" : "𝖐", - "\\bfrakl" : "𝖑", - "\\bfrakm" : "𝖒", - "\\bfrakn" : "𝖓", - "\\bfrako" : "𝖔", - "\\bfrakp" : "𝖕", - "\\bfrakq" : "𝖖", - "\\bfrakr" : "𝖗", - "\\bfraks" : "𝖘", - "\\bfrakt" : "𝖙", - "\\bfraku" : "𝖚", - "\\bfrakv" : "𝖛", - "\\bfrakw" : "𝖜", - "\\bfrakx" : "𝖝", - "\\bfraky" : "𝖞", - "\\bfrakz" : "𝖟", - "\\sansA" : "𝖠", - "\\sansB" : "𝖡", - "\\sansC" : "𝖢", - "\\sansD" : "𝖣", - "\\sansE" : "𝖤", - "\\sansF" : "𝖥", - "\\sansG" : "𝖦", - "\\sansH" : "𝖧", - "\\sansI" : "𝖨", - "\\sansJ" : "𝖩", - "\\sansK" : "𝖪", - "\\sansL" : "𝖫", - "\\sansM" : "𝖬", - "\\sansN" : "𝖭", - "\\sansO" : "𝖮", - "\\sansP" : "𝖯", - "\\sansQ" : "𝖰", - "\\sansR" : "𝖱", - "\\sansS" : "𝖲", - "\\sansT" : "𝖳", - "\\sansU" : "𝖴", - "\\sansV" : "𝖵", - "\\sansW" : "𝖶", - "\\sansX" : "𝖷", - "\\sansY" : "𝖸", - "\\sansZ" : "𝖹", - "\\sansa" : "𝖺", - "\\sansb" : "𝖻", - "\\sansc" : "𝖼", - "\\sansd" : "𝖽", - "\\sanse" : "𝖾", - "\\sansf" : "𝖿", - "\\sansg" : "𝗀", - "\\sansh" : "𝗁", - "\\sansi" : "𝗂", - "\\sansj" : "𝗃", - "\\sansk" : "𝗄", - "\\sansl" : "𝗅", - "\\sansm" : "𝗆", - "\\sansn" : "𝗇", - "\\sanso" : "𝗈", - "\\sansp" : "𝗉", - "\\sansq" : "𝗊", - "\\sansr" : "𝗋", - "\\sanss" : "𝗌", - "\\sanst" : "𝗍", - "\\sansu" : "𝗎", - "\\sansv" : "𝗏", - "\\sansw" : "𝗐", - "\\sansx" : "𝗑", - "\\sansy" : "𝗒", - "\\sansz" : "𝗓", - "\\bsansA" : "𝗔", - "\\bsansB" : "𝗕", - "\\bsansC" : "𝗖", - "\\bsansD" : "𝗗", - "\\bsansE" : "𝗘", - "\\bsansF" : "𝗙", - "\\bsansG" : "𝗚", - "\\bsansH" : "𝗛", - "\\bsansI" : "𝗜", - "\\bsansJ" : "𝗝", - "\\bsansK" : "𝗞", - "\\bsansL" : "𝗟", - "\\bsansM" : "𝗠", - "\\bsansN" : "𝗡", - "\\bsansO" : "𝗢", 
- "\\bsansP" : "𝗣", - "\\bsansQ" : "𝗤", - "\\bsansR" : "𝗥", - "\\bsansS" : "𝗦", - "\\bsansT" : "𝗧", - "\\bsansU" : "𝗨", - "\\bsansV" : "𝗩", - "\\bsansW" : "𝗪", - "\\bsansX" : "𝗫", - "\\bsansY" : "𝗬", - "\\bsansZ" : "𝗭", - "\\bsansa" : "𝗮", - "\\bsansb" : "𝗯", - "\\bsansc" : "𝗰", - "\\bsansd" : "𝗱", - "\\bsanse" : "𝗲", - "\\bsansf" : "𝗳", - "\\bsansg" : "𝗴", - "\\bsansh" : "𝗵", - "\\bsansi" : "𝗶", - "\\bsansj" : "𝗷", - "\\bsansk" : "𝗸", - "\\bsansl" : "𝗹", - "\\bsansm" : "𝗺", - "\\bsansn" : "𝗻", - "\\bsanso" : "𝗼", - "\\bsansp" : "𝗽", - "\\bsansq" : "𝗾", - "\\bsansr" : "𝗿", - "\\bsanss" : "𝘀", - "\\bsanst" : "𝘁", - "\\bsansu" : "𝘂", - "\\bsansv" : "𝘃", - "\\bsansw" : "𝘄", - "\\bsansx" : "𝘅", - "\\bsansy" : "𝘆", - "\\bsansz" : "𝘇", - "\\isansA" : "𝘈", - "\\isansB" : "𝘉", - "\\isansC" : "𝘊", - "\\isansD" : "𝘋", - "\\isansE" : "𝘌", - "\\isansF" : "𝘍", - "\\isansG" : "𝘎", - "\\isansH" : "𝘏", - "\\isansI" : "𝘐", - "\\isansJ" : "𝘑", - "\\isansK" : "𝘒", - "\\isansL" : "𝘓", - "\\isansM" : "𝘔", - "\\isansN" : "𝘕", - "\\isansO" : "𝘖", - "\\isansP" : "𝘗", - "\\isansQ" : "𝘘", - "\\isansR" : "𝘙", - "\\isansS" : "𝘚", - "\\isansT" : "𝘛", - "\\isansU" : "𝘜", - "\\isansV" : "𝘝", - "\\isansW" : "𝘞", - "\\isansX" : "𝘟", - "\\isansY" : "𝘠", - "\\isansZ" : "𝘡", - "\\isansa" : "𝘢", - "\\isansb" : "𝘣", - "\\isansc" : "𝘤", - "\\isansd" : "𝘥", - "\\isanse" : "𝘦", - "\\isansf" : "𝘧", - "\\isansg" : "𝘨", - "\\isansh" : "𝘩", - "\\isansi" : "𝘪", - "\\isansj" : "𝘫", - "\\isansk" : "𝘬", - "\\isansl" : "𝘭", - "\\isansm" : "𝘮", - "\\isansn" : "𝘯", - "\\isanso" : "𝘰", - "\\isansp" : "𝘱", - "\\isansq" : "𝘲", - "\\isansr" : "𝘳", - "\\isanss" : "𝘴", - "\\isanst" : "𝘵", - "\\isansu" : "𝘶", - "\\isansv" : "𝘷", - "\\isansw" : "𝘸", - "\\isansx" : "𝘹", - "\\isansy" : "𝘺", - "\\isansz" : "𝘻", - "\\bisansA" : "𝘼", - "\\bisansB" : "𝘽", - "\\bisansC" : "𝘾", - "\\bisansD" : "𝘿", - "\\bisansE" : "𝙀", - "\\bisansF" : "𝙁", - "\\bisansG" : "𝙂", - "\\bisansH" : "𝙃", - "\\bisansI" : "𝙄", - "\\bisansJ" : "𝙅", - "\\bisansK" : "𝙆", - "\\bisansL" : "𝙇", - "\\bisansM" : "𝙈", - "\\bisansN" : "𝙉", - "\\bisansO" : "𝙊", - "\\bisansP" : "𝙋", - "\\bisansQ" : "𝙌", - "\\bisansR" : "𝙍", - "\\bisansS" : "𝙎", - "\\bisansT" : "𝙏", - "\\bisansU" : "𝙐", - "\\bisansV" : "𝙑", - "\\bisansW" : "𝙒", - "\\bisansX" : "𝙓", - "\\bisansY" : "𝙔", - "\\bisansZ" : "𝙕", - "\\bisansa" : "𝙖", - "\\bisansb" : "𝙗", - "\\bisansc" : "𝙘", - "\\bisansd" : "𝙙", - "\\bisanse" : "𝙚", - "\\bisansf" : "𝙛", - "\\bisansg" : "𝙜", - "\\bisansh" : "𝙝", - "\\bisansi" : "𝙞", - "\\bisansj" : "𝙟", - "\\bisansk" : "𝙠", - "\\bisansl" : "𝙡", - "\\bisansm" : "𝙢", - "\\bisansn" : "𝙣", - "\\bisanso" : "𝙤", - "\\bisansp" : "𝙥", - "\\bisansq" : "𝙦", - "\\bisansr" : "𝙧", - "\\bisanss" : "𝙨", - "\\bisanst" : "𝙩", - "\\bisansu" : "𝙪", - "\\bisansv" : "𝙫", - "\\bisansw" : "𝙬", - "\\bisansx" : "𝙭", - "\\bisansy" : "𝙮", - "\\bisansz" : "𝙯", - "\\ttA" : "𝙰", - "\\ttB" : "𝙱", - "\\ttC" : "𝙲", - "\\ttD" : "𝙳", - "\\ttE" : "𝙴", - "\\ttF" : "𝙵", - "\\ttG" : "𝙶", - "\\ttH" : "𝙷", - "\\ttI" : "𝙸", - "\\ttJ" : "𝙹", - "\\ttK" : "𝙺", - "\\ttL" : "𝙻", - "\\ttM" : "𝙼", - "\\ttN" : "𝙽", - "\\ttO" : "𝙾", - "\\ttP" : "𝙿", - "\\ttQ" : "𝚀", - "\\ttR" : "𝚁", - "\\ttS" : "𝚂", - "\\ttT" : "𝚃", - "\\ttU" : "𝚄", - "\\ttV" : "𝚅", - "\\ttW" : "𝚆", - "\\ttX" : "𝚇", - "\\ttY" : "𝚈", - "\\ttZ" : "𝚉", - "\\tta" : "𝚊", - "\\ttb" : "𝚋", - "\\ttc" : "𝚌", - "\\ttd" : "𝚍", - "\\tte" : "𝚎", - "\\ttf" : "𝚏", - "\\ttg" : "𝚐", - "\\tth" : "𝚑", - "\\tti" : "𝚒", - "\\ttj" : "𝚓", - "\\ttk" : "𝚔", - "\\ttl" : "𝚕", - "\\ttm" : "𝚖", - "\\ttn" : "𝚗", - 
"\\tto" : "𝚘", - "\\ttp" : "𝚙", - "\\ttq" : "𝚚", - "\\ttr" : "𝚛", - "\\tts" : "𝚜", - "\\ttt" : "𝚝", - "\\ttu" : "𝚞", - "\\ttv" : "𝚟", - "\\ttw" : "𝚠", - "\\ttx" : "𝚡", - "\\tty" : "𝚢", - "\\ttz" : "𝚣", - "\\bfAlpha" : "𝚨", - "\\bfBeta" : "𝚩", - "\\bfGamma" : "𝚪", - "\\bfDelta" : "𝚫", - "\\bfEpsilon" : "𝚬", - "\\bfZeta" : "𝚭", - "\\bfEta" : "𝚮", - "\\bfTheta" : "𝚯", - "\\bfIota" : "𝚰", - "\\bfKappa" : "𝚱", - "\\bfLambda" : "𝚲", - "\\bfMu" : "𝚳", - "\\bfNu" : "𝚴", - "\\bfXi" : "𝚵", - "\\bfOmicron" : "𝚶", - "\\bfPi" : "𝚷", - "\\bfRho" : "𝚸", - "\\bfvarTheta" : "𝚹", - "\\bfSigma" : "𝚺", - "\\bfTau" : "𝚻", - "\\bfUpsilon" : "𝚼", - "\\bfPhi" : "𝚽", - "\\bfChi" : "𝚾", - "\\bfPsi" : "𝚿", - "\\bfOmega" : "𝛀", - "\\bfalpha" : "𝛂", - "\\bfbeta" : "𝛃", - "\\bfgamma" : "𝛄", - "\\bfdelta" : "𝛅", - "\\bfepsilon" : "𝛆", - "\\bfzeta" : "𝛇", - "\\bfeta" : "𝛈", - "\\bftheta" : "𝛉", - "\\bfiota" : "𝛊", - "\\bfkappa" : "𝛋", - "\\bflambda" : "𝛌", - "\\bfmu" : "𝛍", - "\\bfnu" : "𝛎", - "\\bfxi" : "𝛏", - "\\bfomicron" : "𝛐", - "\\bfpi" : "𝛑", - "\\bfrho" : "𝛒", - "\\bfvarsigma" : "𝛓", - "\\bfsigma" : "𝛔", - "\\bftau" : "𝛕", - "\\bfupsilon" : "𝛖", - "\\bfvarphi" : "𝛗", - "\\bfchi" : "𝛘", - "\\bfpsi" : "𝛙", - "\\bfomega" : "𝛚", - "\\bfvarepsilon" : "𝛜", - "\\bfvartheta" : "𝛝", - "\\bfvarkappa" : "𝛞", - "\\bfphi" : "𝛟", - "\\bfvarrho" : "𝛠", - "\\bfvarpi" : "𝛡", - "\\itAlpha" : "𝛢", - "\\itBeta" : "𝛣", - "\\itGamma" : "𝛤", - "\\itDelta" : "𝛥", - "\\itEpsilon" : "𝛦", - "\\itZeta" : "𝛧", - "\\itEta" : "𝛨", - "\\itTheta" : "𝛩", - "\\itIota" : "𝛪", - "\\itKappa" : "𝛫", - "\\itLambda" : "𝛬", - "\\itMu" : "𝛭", - "\\itNu" : "𝛮", - "\\itXi" : "𝛯", - "\\itOmicron" : "𝛰", - "\\itPi" : "𝛱", - "\\itRho" : "𝛲", - "\\itvarTheta" : "𝛳", - "\\itSigma" : "𝛴", - "\\itTau" : "𝛵", - "\\itUpsilon" : "𝛶", - "\\itPhi" : "𝛷", - "\\itChi" : "𝛸", - "\\itPsi" : "𝛹", - "\\itOmega" : "𝛺", - "\\italpha" : "𝛼", - "\\itbeta" : "𝛽", - "\\itgamma" : "𝛾", - "\\itdelta" : "𝛿", - "\\itepsilon" : "𝜀", - "\\itzeta" : "𝜁", - "\\iteta" : "𝜂", - "\\ittheta" : "𝜃", - "\\itiota" : "𝜄", - "\\itkappa" : "𝜅", - "\\itlambda" : "𝜆", - "\\itmu" : "𝜇", - "\\itnu" : "𝜈", - "\\itxi" : "𝜉", - "\\itomicron" : "𝜊", - "\\itpi" : "𝜋", - "\\itrho" : "𝜌", - "\\itvarsigma" : "𝜍", - "\\itsigma" : "𝜎", - "\\ittau" : "𝜏", - "\\itupsilon" : "𝜐", - "\\itphi" : "𝜑", - "\\itchi" : "𝜒", - "\\itpsi" : "𝜓", - "\\itomega" : "𝜔", - "\\itvarepsilon" : "𝜖", - "\\itvartheta" : "𝜗", - "\\itvarkappa" : "𝜘", - "\\itvarphi" : "𝜙", - "\\itvarrho" : "𝜚", - "\\itvarpi" : "𝜛", - "\\biAlpha" : "𝜜", - "\\biBeta" : "𝜝", - "\\biGamma" : "𝜞", - "\\biDelta" : "𝜟", - "\\biEpsilon" : "𝜠", - "\\biZeta" : "𝜡", - "\\biEta" : "𝜢", - "\\biTheta" : "𝜣", - "\\biIota" : "𝜤", - "\\biKappa" : "𝜥", - "\\biLambda" : "𝜦", - "\\biMu" : "𝜧", - "\\biNu" : "𝜨", - "\\biXi" : "𝜩", - "\\biOmicron" : "𝜪", - "\\biPi" : "𝜫", - "\\biRho" : "𝜬", - "\\bivarTheta" : "𝜭", - "\\biSigma" : "𝜮", - "\\biTau" : "𝜯", - "\\biUpsilon" : "𝜰", - "\\biPhi" : "𝜱", - "\\biChi" : "𝜲", - "\\biPsi" : "𝜳", - "\\biOmega" : "𝜴", - "\\bialpha" : "𝜶", - "\\bibeta" : "𝜷", - "\\bigamma" : "𝜸", - "\\bidelta" : "𝜹", - "\\biepsilon" : "𝜺", - "\\bizeta" : "𝜻", - "\\bieta" : "𝜼", - "\\bitheta" : "𝜽", - "\\biiota" : "𝜾", - "\\bikappa" : "𝜿", - "\\bilambda" : "𝝀", - "\\bimu" : "𝝁", - "\\binu" : "𝝂", - "\\bixi" : "𝝃", - "\\biomicron" : "𝝄", - "\\bipi" : "𝝅", - "\\birho" : "𝝆", - "\\bivarsigma" : "𝝇", - "\\bisigma" : "𝝈", - "\\bitau" : "𝝉", - "\\biupsilon" : "𝝊", - "\\biphi" : "𝝋", - "\\bichi" : "𝝌", - "\\bipsi" : "𝝍", - "\\biomega" : "𝝎", - "\\bivarepsilon" : "𝝐", 
- "\\bivartheta" : "𝝑", - "\\bivarkappa" : "𝝒", - "\\bivarphi" : "𝝓", - "\\bivarrho" : "𝝔", - "\\bivarpi" : "𝝕", - "\\bsansAlpha" : "𝝖", - "\\bsansBeta" : "𝝗", - "\\bsansGamma" : "𝝘", - "\\bsansDelta" : "𝝙", - "\\bsansEpsilon" : "𝝚", - "\\bsansZeta" : "𝝛", - "\\bsansEta" : "𝝜", - "\\bsansTheta" : "𝝝", - "\\bsansIota" : "𝝞", - "\\bsansKappa" : "𝝟", - "\\bsansLambda" : "𝝠", - "\\bsansMu" : "𝝡", - "\\bsansNu" : "𝝢", - "\\bsansXi" : "𝝣", - "\\bsansOmicron" : "𝝤", - "\\bsansPi" : "𝝥", - "\\bsansRho" : "𝝦", - "\\bsansvarTheta" : "𝝧", - "\\bsansSigma" : "𝝨", - "\\bsansTau" : "𝝩", - "\\bsansUpsilon" : "𝝪", - "\\bsansPhi" : "𝝫", - "\\bsansChi" : "𝝬", - "\\bsansPsi" : "𝝭", - "\\bsansOmega" : "𝝮", - "\\bsansalpha" : "𝝰", - "\\bsansbeta" : "𝝱", - "\\bsansgamma" : "𝝲", - "\\bsansdelta" : "𝝳", - "\\bsansepsilon" : "𝝴", - "\\bsanszeta" : "𝝵", - "\\bsanseta" : "𝝶", - "\\bsanstheta" : "𝝷", - "\\bsansiota" : "𝝸", - "\\bsanskappa" : "𝝹", - "\\bsanslambda" : "𝝺", - "\\bsansmu" : "𝝻", - "\\bsansnu" : "𝝼", - "\\bsansxi" : "𝝽", - "\\bsansomicron" : "𝝾", - "\\bsanspi" : "𝝿", - "\\bsansrho" : "𝞀", - "\\bsansvarsigma" : "𝞁", - "\\bsanssigma" : "𝞂", - "\\bsanstau" : "𝞃", - "\\bsansupsilon" : "𝞄", - "\\bsansphi" : "𝞅", - "\\bsanschi" : "𝞆", - "\\bsanspsi" : "𝞇", - "\\bsansomega" : "𝞈", - "\\bsansvarepsilon" : "𝞊", - "\\bsansvartheta" : "𝞋", - "\\bsansvarkappa" : "𝞌", - "\\bsansvarphi" : "𝞍", - "\\bsansvarrho" : "𝞎", - "\\bsansvarpi" : "𝞏", - "\\bisansAlpha" : "𝞐", - "\\bisansBeta" : "𝞑", - "\\bisansGamma" : "𝞒", - "\\bisansDelta" : "𝞓", - "\\bisansEpsilon" : "𝞔", - "\\bisansZeta" : "𝞕", - "\\bisansEta" : "𝞖", - "\\bisansTheta" : "𝞗", - "\\bisansIota" : "𝞘", - "\\bisansKappa" : "𝞙", - "\\bisansLambda" : "𝞚", - "\\bisansMu" : "𝞛", - "\\bisansNu" : "𝞜", - "\\bisansXi" : "𝞝", - "\\bisansOmicron" : "𝞞", - "\\bisansPi" : "𝞟", - "\\bisansRho" : "𝞠", - "\\bisansvarTheta" : "𝞡", - "\\bisansSigma" : "𝞢", - "\\bisansTau" : "𝞣", - "\\bisansUpsilon" : "𝞤", - "\\bisansPhi" : "𝞥", - "\\bisansChi" : "𝞦", - "\\bisansPsi" : "𝞧", - "\\bisansOmega" : "𝞨", - "\\bisansalpha" : "𝞪", - "\\bisansbeta" : "𝞫", - "\\bisansgamma" : "𝞬", - "\\bisansdelta" : "𝞭", - "\\bisansepsilon" : "𝞮", - "\\bisanszeta" : "𝞯", - "\\bisanseta" : "𝞰", - "\\bisanstheta" : "𝞱", - "\\bisansiota" : "𝞲", - "\\bisanskappa" : "𝞳", - "\\bisanslambda" : "𝞴", - "\\bisansmu" : "𝞵", - "\\bisansnu" : "𝞶", - "\\bisansxi" : "𝞷", - "\\bisansomicron" : "𝞸", - "\\bisanspi" : "𝞹", - "\\bisansrho" : "𝞺", - "\\bisansvarsigma" : "𝞻", - "\\bisanssigma" : "𝞼", - "\\bisanstau" : "𝞽", - "\\bisansupsilon" : "𝞾", - "\\bisansphi" : "𝞿", - "\\bisanschi" : "𝟀", - "\\bisanspsi" : "𝟁", - "\\bisansomega" : "𝟂", - "\\bisansvarepsilon" : "𝟄", - "\\bisansvartheta" : "𝟅", - "\\bisansvarkappa" : "𝟆", - "\\bisansvarphi" : "𝟇", - "\\bisansvarrho" : "𝟈", - "\\bisansvarpi" : "𝟉", - "\\bfzero" : "𝟎", - "\\bfone" : "𝟏", - "\\bftwo" : "𝟐", - "\\bfthree" : "𝟑", - "\\bffour" : "𝟒", - "\\bffive" : "𝟓", - "\\bfsix" : "𝟔", - "\\bfseven" : "𝟕", - "\\bfeight" : "𝟖", - "\\bfnine" : "𝟗", - "\\bbzero" : "𝟘", - "\\bbone" : "𝟙", - "\\bbtwo" : "𝟚", - "\\bbthree" : "𝟛", - "\\bbfour" : "𝟜", - "\\bbfive" : "𝟝", - "\\bbsix" : "𝟞", - "\\bbseven" : "𝟟", - "\\bbeight" : "𝟠", - "\\bbnine" : "𝟡", - "\\sanszero" : "𝟢", - "\\sansone" : "𝟣", - "\\sanstwo" : "𝟤", - "\\sansthree" : "𝟥", - "\\sansfour" : "𝟦", - "\\sansfive" : "𝟧", - "\\sanssix" : "𝟨", - "\\sansseven" : "𝟩", - "\\sanseight" : "𝟪", - "\\sansnine" : "𝟫", - "\\bsanszero" : "𝟬", - "\\bsansone" : "𝟭", - "\\bsanstwo" : "𝟮", - "\\bsansthree" : "𝟯", - "\\bsansfour" : "𝟰", - 
"\\bsansfive" : "𝟱", - "\\bsanssix" : "𝟲", - "\\bsansseven" : "𝟳", - "\\bsanseight" : "𝟴", - "\\bsansnine" : "𝟵", - "\\ttzero" : "𝟶", - "\\ttone" : "𝟷", - "\\tttwo" : "𝟸", - "\\ttthree" : "𝟹", - "\\ttfour" : "𝟺", - "\\ttfive" : "𝟻", - "\\ttsix" : "𝟼", - "\\ttseven" : "𝟽", - "\\tteight" : "𝟾", - "\\ttnine" : "𝟿", - "\\underbar" : "̲", - "\\underleftrightarrow" : "͍", -} - - -reverse_latex_symbol = { v:k for k,v in latex_symbols.items()} +# encoding: utf-8 + +# DO NOT EDIT THIS FILE BY HAND. + +# To update this file, run the script /tools/gen_latex_symbols.py using Python 3 + +# This file is autogenerated from the file: +# https://raw.githubusercontent.com/JuliaLang/julia/master/base/latex_symbols.jl +# This original list is filtered to remove any unicode characters that are not valid +# Python identifiers. + +latex_symbols = { + + "\\euler" : "ℯ", + "\\^a" : "ᵃ", + "\\^b" : "ᵇ", + "\\^c" : "ᶜ", + "\\^d" : "ᵈ", + "\\^e" : "ᵉ", + "\\^f" : "ᶠ", + "\\^g" : "ᵍ", + "\\^h" : "ʰ", + "\\^i" : "ⁱ", + "\\^j" : "ʲ", + "\\^k" : "ᵏ", + "\\^l" : "ˡ", + "\\^m" : "ᵐ", + "\\^n" : "ⁿ", + "\\^o" : "ᵒ", + "\\^p" : "ᵖ", + "\\^r" : "ʳ", + "\\^s" : "ˢ", + "\\^t" : "ᵗ", + "\\^u" : "ᵘ", + "\\^v" : "ᵛ", + "\\^w" : "ʷ", + "\\^x" : "ˣ", + "\\^y" : "ʸ", + "\\^z" : "ᶻ", + "\\^A" : "ᴬ", + "\\^B" : "ᴮ", + "\\^D" : "ᴰ", + "\\^E" : "ᴱ", + "\\^G" : "ᴳ", + "\\^H" : "ᴴ", + "\\^I" : "ᴵ", + "\\^J" : "ᴶ", + "\\^K" : "ᴷ", + "\\^L" : "ᴸ", + "\\^M" : "ᴹ", + "\\^N" : "ᴺ", + "\\^O" : "ᴼ", + "\\^P" : "ᴾ", + "\\^R" : "ᴿ", + "\\^T" : "ᵀ", + "\\^U" : "ᵁ", + "\\^V" : "ⱽ", + "\\^W" : "ᵂ", + "\\^alpha" : "ᵅ", + "\\^beta" : "ᵝ", + "\\^gamma" : "ᵞ", + "\\^delta" : "ᵟ", + "\\^epsilon" : "ᵋ", + "\\^theta" : "ᶿ", + "\\^iota" : "ᶥ", + "\\^phi" : "ᵠ", + "\\^chi" : "ᵡ", + "\\^Phi" : "ᶲ", + "\\_a" : "ₐ", + "\\_e" : "ₑ", + "\\_h" : "ₕ", + "\\_i" : "ᵢ", + "\\_j" : "ⱼ", + "\\_k" : "ₖ", + "\\_l" : "ₗ", + "\\_m" : "ₘ", + "\\_n" : "ₙ", + "\\_o" : "ₒ", + "\\_p" : "ₚ", + "\\_r" : "ᵣ", + "\\_s" : "ₛ", + "\\_t" : "ₜ", + "\\_u" : "ᵤ", + "\\_v" : "ᵥ", + "\\_x" : "ₓ", + "\\_schwa" : "ₔ", + "\\_beta" : "ᵦ", + "\\_gamma" : "ᵧ", + "\\_rho" : "ᵨ", + "\\_phi" : "ᵩ", + "\\_chi" : "ᵪ", + "\\hbar" : "ħ", + "\\sout" : "̶", + "\\ordfeminine" : "ª", + "\\cdotp" : "·", + "\\ordmasculine" : "º", + "\\AA" : "Å", + "\\AE" : "Æ", + "\\DH" : "Ð", + "\\O" : "Ø", + "\\TH" : "Þ", + "\\ss" : "ß", + "\\aa" : "å", + "\\ae" : "æ", + "\\eth" : "ð", + "\\dh" : "ð", + "\\o" : "ø", + "\\th" : "þ", + "\\DJ" : "Đ", + "\\dj" : "đ", + "\\imath" : "ı", + "\\jmath" : "ȷ", + "\\L" : "Ł", + "\\l" : "ł", + "\\NG" : "Ŋ", + "\\ng" : "ŋ", + "\\OE" : "Œ", + "\\oe" : "œ", + "\\hvlig" : "ƕ", + "\\nrleg" : "ƞ", + "\\doublepipe" : "ǂ", + "\\trna" : "ɐ", + "\\trnsa" : "ɒ", + "\\openo" : "ɔ", + "\\rtld" : "ɖ", + "\\schwa" : "ə", + "\\varepsilon" : "ε", + "\\pgamma" : "ɣ", + "\\pbgam" : "ɤ", + "\\trnh" : "ɥ", + "\\btdl" : "ɬ", + "\\rtll" : "ɭ", + "\\trnm" : "ɯ", + "\\trnmlr" : "ɰ", + "\\ltlmr" : "ɱ", + "\\ltln" : "ɲ", + "\\rtln" : "ɳ", + "\\clomeg" : "ɷ", + "\\ltphi" : "ɸ", + "\\trnr" : "ɹ", + "\\trnrl" : "ɺ", + "\\rttrnr" : "ɻ", + "\\rl" : "ɼ", + "\\rtlr" : "ɽ", + "\\fhr" : "ɾ", + "\\rtls" : "ʂ", + "\\esh" : "ʃ", + "\\trnt" : "ʇ", + "\\rtlt" : "ʈ", + "\\pupsil" : "ʊ", + "\\pscrv" : "ʋ", + "\\invv" : "ʌ", + "\\invw" : "ʍ", + "\\trny" : "ʎ", + "\\rtlz" : "ʐ", + "\\yogh" : "ʒ", + "\\glst" : "ʔ", + "\\reglst" : "ʕ", + "\\inglst" : "ʖ", + "\\turnk" : "ʞ", + "\\dyogh" : "ʤ", + "\\tesh" : "ʧ", + "\\rasp" : "ʼ", + "\\verts" : "ˈ", + "\\verti" : "ˌ", + "\\lmrk" : "ː", + "\\hlmrk" : "ˑ", + "\\grave" : "̀", + "\\acute" : 
"́", + "\\hat" : "̂", + "\\tilde" : "̃", + "\\bar" : "̄", + "\\breve" : "̆", + "\\dot" : "̇", + "\\ddot" : "̈", + "\\ocirc" : "̊", + "\\H" : "̋", + "\\check" : "̌", + "\\palh" : "̡", + "\\rh" : "̢", + "\\c" : "̧", + "\\k" : "̨", + "\\sbbrg" : "̪", + "\\strike" : "̶", + "\\Alpha" : "Α", + "\\Beta" : "Β", + "\\Gamma" : "Γ", + "\\Delta" : "Δ", + "\\Epsilon" : "Ε", + "\\Zeta" : "Ζ", + "\\Eta" : "Η", + "\\Theta" : "Θ", + "\\Iota" : "Ι", + "\\Kappa" : "Κ", + "\\Lambda" : "Λ", + "\\Xi" : "Ξ", + "\\Pi" : "Π", + "\\Rho" : "Ρ", + "\\Sigma" : "Σ", + "\\Tau" : "Τ", + "\\Upsilon" : "Υ", + "\\Phi" : "Φ", + "\\Chi" : "Χ", + "\\Psi" : "Ψ", + "\\Omega" : "Ω", + "\\alpha" : "α", + "\\beta" : "β", + "\\gamma" : "γ", + "\\delta" : "δ", + "\\zeta" : "ζ", + "\\eta" : "η", + "\\theta" : "θ", + "\\iota" : "ι", + "\\kappa" : "κ", + "\\lambda" : "λ", + "\\mu" : "μ", + "\\nu" : "ν", + "\\xi" : "ξ", + "\\pi" : "π", + "\\rho" : "ρ", + "\\varsigma" : "ς", + "\\sigma" : "σ", + "\\tau" : "τ", + "\\upsilon" : "υ", + "\\varphi" : "φ", + "\\chi" : "χ", + "\\psi" : "ψ", + "\\omega" : "ω", + "\\vartheta" : "ϑ", + "\\phi" : "ϕ", + "\\varpi" : "ϖ", + "\\Stigma" : "Ϛ", + "\\Digamma" : "Ϝ", + "\\digamma" : "ϝ", + "\\Koppa" : "Ϟ", + "\\Sampi" : "Ϡ", + "\\varkappa" : "ϰ", + "\\varrho" : "ϱ", + "\\varTheta" : "ϴ", + "\\epsilon" : "ϵ", + "\\dddot" : "⃛", + "\\ddddot" : "⃜", + "\\hslash" : "ℏ", + "\\Im" : "ℑ", + "\\ell" : "ℓ", + "\\wp" : "℘", + "\\Re" : "ℜ", + "\\aleph" : "ℵ", + "\\beth" : "ℶ", + "\\gimel" : "ℷ", + "\\daleth" : "ℸ", + "\\bbPi" : "ℿ", + "\\Zbar" : "Ƶ", + "\\overbar" : "̅", + "\\ovhook" : "̉", + "\\candra" : "̐", + "\\oturnedcomma" : "̒", + "\\ocommatopright" : "̕", + "\\droang" : "̚", + "\\wideutilde" : "̰", + "\\not" : "̸", + "\\upMu" : "Μ", + "\\upNu" : "Ν", + "\\upOmicron" : "Ο", + "\\upepsilon" : "ε", + "\\upomicron" : "ο", + "\\upvarbeta" : "ϐ", + "\\upoldKoppa" : "Ϙ", + "\\upoldkoppa" : "ϙ", + "\\upstigma" : "ϛ", + "\\upkoppa" : "ϟ", + "\\upsampi" : "ϡ", + "\\tieconcat" : "⁀", + "\\leftharpoonaccent" : "⃐", + "\\rightharpoonaccent" : "⃑", + "\\vertoverlay" : "⃒", + "\\overleftarrow" : "⃖", + "\\vec" : "⃗", + "\\overleftrightarrow" : "⃡", + "\\annuity" : "⃧", + "\\threeunderdot" : "⃨", + "\\widebridgeabove" : "⃩", + "\\bbC" : "ℂ", + "\\eulermascheroni" : "ℇ", + "\\scrg" : "ℊ", + "\\scrH" : "ℋ", + "\\frakH" : "ℌ", + "\\bbH" : "ℍ", + "\\planck" : "ℎ", + "\\scrI" : "ℐ", + "\\scrL" : "ℒ", + "\\bbN" : "ℕ", + "\\bbP" : "ℙ", + "\\bbQ" : "ℚ", + "\\scrR" : "ℛ", + "\\bbR" : "ℝ", + "\\bbZ" : "ℤ", + "\\frakZ" : "ℨ", + "\\Angstrom" : "Å", + "\\scrB" : "ℬ", + "\\frakC" : "ℭ", + "\\scre" : "ℯ", + "\\scrE" : "ℰ", + "\\scrF" : "ℱ", + "\\Finv" : "Ⅎ", + "\\scrM" : "ℳ", + "\\scro" : "ℴ", + "\\bbgamma" : "ℽ", + "\\bbGamma" : "ℾ", + "\\bbiD" : "ⅅ", + "\\bbid" : "ⅆ", + "\\bbie" : "ⅇ", + "\\bbii" : "ⅈ", + "\\bbij" : "ⅉ", + "\\bfA" : "𝐀", + "\\bfB" : "𝐁", + "\\bfC" : "𝐂", + "\\bfD" : "𝐃", + "\\bfE" : "𝐄", + "\\bfF" : "𝐅", + "\\bfG" : "𝐆", + "\\bfH" : "𝐇", + "\\bfI" : "𝐈", + "\\bfJ" : "𝐉", + "\\bfK" : "𝐊", + "\\bfL" : "𝐋", + "\\bfM" : "𝐌", + "\\bfN" : "𝐍", + "\\bfO" : "𝐎", + "\\bfP" : "𝐏", + "\\bfQ" : "𝐐", + "\\bfR" : "𝐑", + "\\bfS" : "𝐒", + "\\bfT" : "𝐓", + "\\bfU" : "𝐔", + "\\bfV" : "𝐕", + "\\bfW" : "𝐖", + "\\bfX" : "𝐗", + "\\bfY" : "𝐘", + "\\bfZ" : "𝐙", + "\\bfa" : "𝐚", + "\\bfb" : "𝐛", + "\\bfc" : "𝐜", + "\\bfd" : "𝐝", + "\\bfe" : "𝐞", + "\\bff" : "𝐟", + "\\bfg" : "𝐠", + "\\bfh" : "𝐡", + "\\bfi" : "𝐢", + "\\bfj" : "𝐣", + "\\bfk" : "𝐤", + "\\bfl" : "𝐥", + "\\bfm" : "𝐦", + "\\bfn" : "𝐧", + "\\bfo" : "𝐨", + "\\bfp" : "𝐩", + "\\bfq" : "𝐪", 
+ "\\bfr" : "𝐫", + "\\bfs" : "𝐬", + "\\bft" : "𝐭", + "\\bfu" : "𝐮", + "\\bfv" : "𝐯", + "\\bfw" : "𝐰", + "\\bfx" : "𝐱", + "\\bfy" : "𝐲", + "\\bfz" : "𝐳", + "\\itA" : "𝐴", + "\\itB" : "𝐵", + "\\itC" : "𝐶", + "\\itD" : "𝐷", + "\\itE" : "𝐸", + "\\itF" : "𝐹", + "\\itG" : "𝐺", + "\\itH" : "𝐻", + "\\itI" : "𝐼", + "\\itJ" : "𝐽", + "\\itK" : "𝐾", + "\\itL" : "𝐿", + "\\itM" : "𝑀", + "\\itN" : "𝑁", + "\\itO" : "𝑂", + "\\itP" : "𝑃", + "\\itQ" : "𝑄", + "\\itR" : "𝑅", + "\\itS" : "𝑆", + "\\itT" : "𝑇", + "\\itU" : "𝑈", + "\\itV" : "𝑉", + "\\itW" : "𝑊", + "\\itX" : "𝑋", + "\\itY" : "𝑌", + "\\itZ" : "𝑍", + "\\ita" : "𝑎", + "\\itb" : "𝑏", + "\\itc" : "𝑐", + "\\itd" : "𝑑", + "\\ite" : "𝑒", + "\\itf" : "𝑓", + "\\itg" : "𝑔", + "\\iti" : "𝑖", + "\\itj" : "𝑗", + "\\itk" : "𝑘", + "\\itl" : "𝑙", + "\\itm" : "𝑚", + "\\itn" : "𝑛", + "\\ito" : "𝑜", + "\\itp" : "𝑝", + "\\itq" : "𝑞", + "\\itr" : "𝑟", + "\\its" : "𝑠", + "\\itt" : "𝑡", + "\\itu" : "𝑢", + "\\itv" : "𝑣", + "\\itw" : "𝑤", + "\\itx" : "𝑥", + "\\ity" : "𝑦", + "\\itz" : "𝑧", + "\\biA" : "𝑨", + "\\biB" : "𝑩", + "\\biC" : "𝑪", + "\\biD" : "𝑫", + "\\biE" : "𝑬", + "\\biF" : "𝑭", + "\\biG" : "𝑮", + "\\biH" : "𝑯", + "\\biI" : "𝑰", + "\\biJ" : "𝑱", + "\\biK" : "𝑲", + "\\biL" : "𝑳", + "\\biM" : "𝑴", + "\\biN" : "𝑵", + "\\biO" : "𝑶", + "\\biP" : "𝑷", + "\\biQ" : "𝑸", + "\\biR" : "𝑹", + "\\biS" : "𝑺", + "\\biT" : "𝑻", + "\\biU" : "𝑼", + "\\biV" : "𝑽", + "\\biW" : "𝑾", + "\\biX" : "𝑿", + "\\biY" : "𝒀", + "\\biZ" : "𝒁", + "\\bia" : "𝒂", + "\\bib" : "𝒃", + "\\bic" : "𝒄", + "\\bid" : "𝒅", + "\\bie" : "𝒆", + "\\bif" : "𝒇", + "\\big" : "𝒈", + "\\bih" : "𝒉", + "\\bii" : "𝒊", + "\\bij" : "𝒋", + "\\bik" : "𝒌", + "\\bil" : "𝒍", + "\\bim" : "𝒎", + "\\bin" : "𝒏", + "\\bio" : "𝒐", + "\\bip" : "𝒑", + "\\biq" : "𝒒", + "\\bir" : "𝒓", + "\\bis" : "𝒔", + "\\bit" : "𝒕", + "\\biu" : "𝒖", + "\\biv" : "𝒗", + "\\biw" : "𝒘", + "\\bix" : "𝒙", + "\\biy" : "𝒚", + "\\biz" : "𝒛", + "\\scrA" : "𝒜", + "\\scrC" : "𝒞", + "\\scrD" : "𝒟", + "\\scrG" : "𝒢", + "\\scrJ" : "𝒥", + "\\scrK" : "𝒦", + "\\scrN" : "𝒩", + "\\scrO" : "𝒪", + "\\scrP" : "𝒫", + "\\scrQ" : "𝒬", + "\\scrS" : "𝒮", + "\\scrT" : "𝒯", + "\\scrU" : "𝒰", + "\\scrV" : "𝒱", + "\\scrW" : "𝒲", + "\\scrX" : "𝒳", + "\\scrY" : "𝒴", + "\\scrZ" : "𝒵", + "\\scra" : "𝒶", + "\\scrb" : "𝒷", + "\\scrc" : "𝒸", + "\\scrd" : "𝒹", + "\\scrf" : "𝒻", + "\\scrh" : "𝒽", + "\\scri" : "𝒾", + "\\scrj" : "𝒿", + "\\scrk" : "𝓀", + "\\scrm" : "𝓂", + "\\scrn" : "𝓃", + "\\scrp" : "𝓅", + "\\scrq" : "𝓆", + "\\scrr" : "𝓇", + "\\scrs" : "𝓈", + "\\scrt" : "𝓉", + "\\scru" : "𝓊", + "\\scrv" : "𝓋", + "\\scrw" : "𝓌", + "\\scrx" : "𝓍", + "\\scry" : "𝓎", + "\\scrz" : "𝓏", + "\\bscrA" : "𝓐", + "\\bscrB" : "𝓑", + "\\bscrC" : "𝓒", + "\\bscrD" : "𝓓", + "\\bscrE" : "𝓔", + "\\bscrF" : "𝓕", + "\\bscrG" : "𝓖", + "\\bscrH" : "𝓗", + "\\bscrI" : "𝓘", + "\\bscrJ" : "𝓙", + "\\bscrK" : "𝓚", + "\\bscrL" : "𝓛", + "\\bscrM" : "𝓜", + "\\bscrN" : "𝓝", + "\\bscrO" : "𝓞", + "\\bscrP" : "𝓟", + "\\bscrQ" : "𝓠", + "\\bscrR" : "𝓡", + "\\bscrS" : "𝓢", + "\\bscrT" : "𝓣", + "\\bscrU" : "𝓤", + "\\bscrV" : "𝓥", + "\\bscrW" : "𝓦", + "\\bscrX" : "𝓧", + "\\bscrY" : "𝓨", + "\\bscrZ" : "𝓩", + "\\bscra" : "𝓪", + "\\bscrb" : "𝓫", + "\\bscrc" : "𝓬", + "\\bscrd" : "𝓭", + "\\bscre" : "𝓮", + "\\bscrf" : "𝓯", + "\\bscrg" : "𝓰", + "\\bscrh" : "𝓱", + "\\bscri" : "𝓲", + "\\bscrj" : "𝓳", + "\\bscrk" : "𝓴", + "\\bscrl" : "𝓵", + "\\bscrm" : "𝓶", + "\\bscrn" : "𝓷", + "\\bscro" : "𝓸", + "\\bscrp" : "𝓹", + "\\bscrq" : "𝓺", + "\\bscrr" : "𝓻", + "\\bscrs" : "𝓼", + "\\bscrt" : "𝓽", + "\\bscru" : "𝓾", + "\\bscrv" : "𝓿", + "\\bscrw" : "𝔀", 
+ "\\bscrx" : "𝔁", + "\\bscry" : "𝔂", + "\\bscrz" : "𝔃", + "\\frakA" : "𝔄", + "\\frakB" : "𝔅", + "\\frakD" : "𝔇", + "\\frakE" : "𝔈", + "\\frakF" : "𝔉", + "\\frakG" : "𝔊", + "\\frakJ" : "𝔍", + "\\frakK" : "𝔎", + "\\frakL" : "𝔏", + "\\frakM" : "𝔐", + "\\frakN" : "𝔑", + "\\frakO" : "𝔒", + "\\frakP" : "𝔓", + "\\frakQ" : "𝔔", + "\\frakS" : "𝔖", + "\\frakT" : "𝔗", + "\\frakU" : "𝔘", + "\\frakV" : "𝔙", + "\\frakW" : "𝔚", + "\\frakX" : "𝔛", + "\\frakY" : "𝔜", + "\\fraka" : "𝔞", + "\\frakb" : "𝔟", + "\\frakc" : "𝔠", + "\\frakd" : "𝔡", + "\\frake" : "𝔢", + "\\frakf" : "𝔣", + "\\frakg" : "𝔤", + "\\frakh" : "𝔥", + "\\fraki" : "𝔦", + "\\frakj" : "𝔧", + "\\frakk" : "𝔨", + "\\frakl" : "𝔩", + "\\frakm" : "𝔪", + "\\frakn" : "𝔫", + "\\frako" : "𝔬", + "\\frakp" : "𝔭", + "\\frakq" : "𝔮", + "\\frakr" : "𝔯", + "\\fraks" : "𝔰", + "\\frakt" : "𝔱", + "\\fraku" : "𝔲", + "\\frakv" : "𝔳", + "\\frakw" : "𝔴", + "\\frakx" : "𝔵", + "\\fraky" : "𝔶", + "\\frakz" : "𝔷", + "\\bbA" : "𝔸", + "\\bbB" : "𝔹", + "\\bbD" : "𝔻", + "\\bbE" : "𝔼", + "\\bbF" : "𝔽", + "\\bbG" : "𝔾", + "\\bbI" : "𝕀", + "\\bbJ" : "𝕁", + "\\bbK" : "𝕂", + "\\bbL" : "𝕃", + "\\bbM" : "𝕄", + "\\bbO" : "𝕆", + "\\bbS" : "𝕊", + "\\bbT" : "𝕋", + "\\bbU" : "𝕌", + "\\bbV" : "𝕍", + "\\bbW" : "𝕎", + "\\bbX" : "𝕏", + "\\bbY" : "𝕐", + "\\bba" : "𝕒", + "\\bbb" : "𝕓", + "\\bbc" : "𝕔", + "\\bbd" : "𝕕", + "\\bbe" : "𝕖", + "\\bbf" : "𝕗", + "\\bbg" : "𝕘", + "\\bbh" : "𝕙", + "\\bbi" : "𝕚", + "\\bbj" : "𝕛", + "\\bbk" : "𝕜", + "\\bbl" : "𝕝", + "\\bbm" : "𝕞", + "\\bbn" : "𝕟", + "\\bbo" : "𝕠", + "\\bbp" : "𝕡", + "\\bbq" : "𝕢", + "\\bbr" : "𝕣", + "\\bbs" : "𝕤", + "\\bbt" : "𝕥", + "\\bbu" : "𝕦", + "\\bbv" : "𝕧", + "\\bbw" : "𝕨", + "\\bbx" : "𝕩", + "\\bby" : "𝕪", + "\\bbz" : "𝕫", + "\\bfrakA" : "𝕬", + "\\bfrakB" : "𝕭", + "\\bfrakC" : "𝕮", + "\\bfrakD" : "𝕯", + "\\bfrakE" : "𝕰", + "\\bfrakF" : "𝕱", + "\\bfrakG" : "𝕲", + "\\bfrakH" : "𝕳", + "\\bfrakI" : "𝕴", + "\\bfrakJ" : "𝕵", + "\\bfrakK" : "𝕶", + "\\bfrakL" : "𝕷", + "\\bfrakM" : "𝕸", + "\\bfrakN" : "𝕹", + "\\bfrakO" : "𝕺", + "\\bfrakP" : "𝕻", + "\\bfrakQ" : "𝕼", + "\\bfrakR" : "𝕽", + "\\bfrakS" : "𝕾", + "\\bfrakT" : "𝕿", + "\\bfrakU" : "𝖀", + "\\bfrakV" : "𝖁", + "\\bfrakW" : "𝖂", + "\\bfrakX" : "𝖃", + "\\bfrakY" : "𝖄", + "\\bfrakZ" : "𝖅", + "\\bfraka" : "𝖆", + "\\bfrakb" : "𝖇", + "\\bfrakc" : "𝖈", + "\\bfrakd" : "𝖉", + "\\bfrake" : "𝖊", + "\\bfrakf" : "𝖋", + "\\bfrakg" : "𝖌", + "\\bfrakh" : "𝖍", + "\\bfraki" : "𝖎", + "\\bfrakj" : "𝖏", + "\\bfrakk" : "𝖐", + "\\bfrakl" : "𝖑", + "\\bfrakm" : "𝖒", + "\\bfrakn" : "𝖓", + "\\bfrako" : "𝖔", + "\\bfrakp" : "𝖕", + "\\bfrakq" : "𝖖", + "\\bfrakr" : "𝖗", + "\\bfraks" : "𝖘", + "\\bfrakt" : "𝖙", + "\\bfraku" : "𝖚", + "\\bfrakv" : "𝖛", + "\\bfrakw" : "𝖜", + "\\bfrakx" : "𝖝", + "\\bfraky" : "𝖞", + "\\bfrakz" : "𝖟", + "\\sansA" : "𝖠", + "\\sansB" : "𝖡", + "\\sansC" : "𝖢", + "\\sansD" : "𝖣", + "\\sansE" : "𝖤", + "\\sansF" : "𝖥", + "\\sansG" : "𝖦", + "\\sansH" : "𝖧", + "\\sansI" : "𝖨", + "\\sansJ" : "𝖩", + "\\sansK" : "𝖪", + "\\sansL" : "𝖫", + "\\sansM" : "𝖬", + "\\sansN" : "𝖭", + "\\sansO" : "𝖮", + "\\sansP" : "𝖯", + "\\sansQ" : "𝖰", + "\\sansR" : "𝖱", + "\\sansS" : "𝖲", + "\\sansT" : "𝖳", + "\\sansU" : "𝖴", + "\\sansV" : "𝖵", + "\\sansW" : "𝖶", + "\\sansX" : "𝖷", + "\\sansY" : "𝖸", + "\\sansZ" : "𝖹", + "\\sansa" : "𝖺", + "\\sansb" : "𝖻", + "\\sansc" : "𝖼", + "\\sansd" : "𝖽", + "\\sanse" : "𝖾", + "\\sansf" : "𝖿", + "\\sansg" : "𝗀", + "\\sansh" : "𝗁", + "\\sansi" : "𝗂", + "\\sansj" : "𝗃", + "\\sansk" : "𝗄", + "\\sansl" : "𝗅", + "\\sansm" : "𝗆", + "\\sansn" : "𝗇", + "\\sanso" : "𝗈", + "\\sansp" : "𝗉", + 
"\\sansq" : "𝗊", + "\\sansr" : "𝗋", + "\\sanss" : "𝗌", + "\\sanst" : "𝗍", + "\\sansu" : "𝗎", + "\\sansv" : "𝗏", + "\\sansw" : "𝗐", + "\\sansx" : "𝗑", + "\\sansy" : "𝗒", + "\\sansz" : "𝗓", + "\\bsansA" : "𝗔", + "\\bsansB" : "𝗕", + "\\bsansC" : "𝗖", + "\\bsansD" : "𝗗", + "\\bsansE" : "𝗘", + "\\bsansF" : "𝗙", + "\\bsansG" : "𝗚", + "\\bsansH" : "𝗛", + "\\bsansI" : "𝗜", + "\\bsansJ" : "𝗝", + "\\bsansK" : "𝗞", + "\\bsansL" : "𝗟", + "\\bsansM" : "𝗠", + "\\bsansN" : "𝗡", + "\\bsansO" : "𝗢", + "\\bsansP" : "𝗣", + "\\bsansQ" : "𝗤", + "\\bsansR" : "𝗥", + "\\bsansS" : "𝗦", + "\\bsansT" : "𝗧", + "\\bsansU" : "𝗨", + "\\bsansV" : "𝗩", + "\\bsansW" : "𝗪", + "\\bsansX" : "𝗫", + "\\bsansY" : "𝗬", + "\\bsansZ" : "𝗭", + "\\bsansa" : "𝗮", + "\\bsansb" : "𝗯", + "\\bsansc" : "𝗰", + "\\bsansd" : "𝗱", + "\\bsanse" : "𝗲", + "\\bsansf" : "𝗳", + "\\bsansg" : "𝗴", + "\\bsansh" : "𝗵", + "\\bsansi" : "𝗶", + "\\bsansj" : "𝗷", + "\\bsansk" : "𝗸", + "\\bsansl" : "𝗹", + "\\bsansm" : "𝗺", + "\\bsansn" : "𝗻", + "\\bsanso" : "𝗼", + "\\bsansp" : "𝗽", + "\\bsansq" : "𝗾", + "\\bsansr" : "𝗿", + "\\bsanss" : "𝘀", + "\\bsanst" : "𝘁", + "\\bsansu" : "𝘂", + "\\bsansv" : "𝘃", + "\\bsansw" : "𝘄", + "\\bsansx" : "𝘅", + "\\bsansy" : "𝘆", + "\\bsansz" : "𝘇", + "\\isansA" : "𝘈", + "\\isansB" : "𝘉", + "\\isansC" : "𝘊", + "\\isansD" : "𝘋", + "\\isansE" : "𝘌", + "\\isansF" : "𝘍", + "\\isansG" : "𝘎", + "\\isansH" : "𝘏", + "\\isansI" : "𝘐", + "\\isansJ" : "𝘑", + "\\isansK" : "𝘒", + "\\isansL" : "𝘓", + "\\isansM" : "𝘔", + "\\isansN" : "𝘕", + "\\isansO" : "𝘖", + "\\isansP" : "𝘗", + "\\isansQ" : "𝘘", + "\\isansR" : "𝘙", + "\\isansS" : "𝘚", + "\\isansT" : "𝘛", + "\\isansU" : "𝘜", + "\\isansV" : "𝘝", + "\\isansW" : "𝘞", + "\\isansX" : "𝘟", + "\\isansY" : "𝘠", + "\\isansZ" : "𝘡", + "\\isansa" : "𝘢", + "\\isansb" : "𝘣", + "\\isansc" : "𝘤", + "\\isansd" : "𝘥", + "\\isanse" : "𝘦", + "\\isansf" : "𝘧", + "\\isansg" : "𝘨", + "\\isansh" : "𝘩", + "\\isansi" : "𝘪", + "\\isansj" : "𝘫", + "\\isansk" : "𝘬", + "\\isansl" : "𝘭", + "\\isansm" : "𝘮", + "\\isansn" : "𝘯", + "\\isanso" : "𝘰", + "\\isansp" : "𝘱", + "\\isansq" : "𝘲", + "\\isansr" : "𝘳", + "\\isanss" : "𝘴", + "\\isanst" : "𝘵", + "\\isansu" : "𝘶", + "\\isansv" : "𝘷", + "\\isansw" : "𝘸", + "\\isansx" : "𝘹", + "\\isansy" : "𝘺", + "\\isansz" : "𝘻", + "\\bisansA" : "𝘼", + "\\bisansB" : "𝘽", + "\\bisansC" : "𝘾", + "\\bisansD" : "𝘿", + "\\bisansE" : "𝙀", + "\\bisansF" : "𝙁", + "\\bisansG" : "𝙂", + "\\bisansH" : "𝙃", + "\\bisansI" : "𝙄", + "\\bisansJ" : "𝙅", + "\\bisansK" : "𝙆", + "\\bisansL" : "𝙇", + "\\bisansM" : "𝙈", + "\\bisansN" : "𝙉", + "\\bisansO" : "𝙊", + "\\bisansP" : "𝙋", + "\\bisansQ" : "𝙌", + "\\bisansR" : "𝙍", + "\\bisansS" : "𝙎", + "\\bisansT" : "𝙏", + "\\bisansU" : "𝙐", + "\\bisansV" : "𝙑", + "\\bisansW" : "𝙒", + "\\bisansX" : "𝙓", + "\\bisansY" : "𝙔", + "\\bisansZ" : "𝙕", + "\\bisansa" : "𝙖", + "\\bisansb" : "𝙗", + "\\bisansc" : "𝙘", + "\\bisansd" : "𝙙", + "\\bisanse" : "𝙚", + "\\bisansf" : "𝙛", + "\\bisansg" : "𝙜", + "\\bisansh" : "𝙝", + "\\bisansi" : "𝙞", + "\\bisansj" : "𝙟", + "\\bisansk" : "𝙠", + "\\bisansl" : "𝙡", + "\\bisansm" : "𝙢", + "\\bisansn" : "𝙣", + "\\bisanso" : "𝙤", + "\\bisansp" : "𝙥", + "\\bisansq" : "𝙦", + "\\bisansr" : "𝙧", + "\\bisanss" : "𝙨", + "\\bisanst" : "𝙩", + "\\bisansu" : "𝙪", + "\\bisansv" : "𝙫", + "\\bisansw" : "𝙬", + "\\bisansx" : "𝙭", + "\\bisansy" : "𝙮", + "\\bisansz" : "𝙯", + "\\ttA" : "𝙰", + "\\ttB" : "𝙱", + "\\ttC" : "𝙲", + "\\ttD" : "𝙳", + "\\ttE" : "𝙴", + "\\ttF" : "𝙵", + "\\ttG" : "𝙶", + "\\ttH" : "𝙷", + "\\ttI" : "𝙸", + "\\ttJ" : "𝙹", + "\\ttK" : "𝙺", + 
"\\ttL" : "𝙻", + "\\ttM" : "𝙼", + "\\ttN" : "𝙽", + "\\ttO" : "𝙾", + "\\ttP" : "𝙿", + "\\ttQ" : "𝚀", + "\\ttR" : "𝚁", + "\\ttS" : "𝚂", + "\\ttT" : "𝚃", + "\\ttU" : "𝚄", + "\\ttV" : "𝚅", + "\\ttW" : "𝚆", + "\\ttX" : "𝚇", + "\\ttY" : "𝚈", + "\\ttZ" : "𝚉", + "\\tta" : "𝚊", + "\\ttb" : "𝚋", + "\\ttc" : "𝚌", + "\\ttd" : "𝚍", + "\\tte" : "𝚎", + "\\ttf" : "𝚏", + "\\ttg" : "𝚐", + "\\tth" : "𝚑", + "\\tti" : "𝚒", + "\\ttj" : "𝚓", + "\\ttk" : "𝚔", + "\\ttl" : "𝚕", + "\\ttm" : "𝚖", + "\\ttn" : "𝚗", + "\\tto" : "𝚘", + "\\ttp" : "𝚙", + "\\ttq" : "𝚚", + "\\ttr" : "𝚛", + "\\tts" : "𝚜", + "\\ttt" : "𝚝", + "\\ttu" : "𝚞", + "\\ttv" : "𝚟", + "\\ttw" : "𝚠", + "\\ttx" : "𝚡", + "\\tty" : "𝚢", + "\\ttz" : "𝚣", + "\\bfAlpha" : "𝚨", + "\\bfBeta" : "𝚩", + "\\bfGamma" : "𝚪", + "\\bfDelta" : "𝚫", + "\\bfEpsilon" : "𝚬", + "\\bfZeta" : "𝚭", + "\\bfEta" : "𝚮", + "\\bfTheta" : "𝚯", + "\\bfIota" : "𝚰", + "\\bfKappa" : "𝚱", + "\\bfLambda" : "𝚲", + "\\bfMu" : "𝚳", + "\\bfNu" : "𝚴", + "\\bfXi" : "𝚵", + "\\bfOmicron" : "𝚶", + "\\bfPi" : "𝚷", + "\\bfRho" : "𝚸", + "\\bfvarTheta" : "𝚹", + "\\bfSigma" : "𝚺", + "\\bfTau" : "𝚻", + "\\bfUpsilon" : "𝚼", + "\\bfPhi" : "𝚽", + "\\bfChi" : "𝚾", + "\\bfPsi" : "𝚿", + "\\bfOmega" : "𝛀", + "\\bfalpha" : "𝛂", + "\\bfbeta" : "𝛃", + "\\bfgamma" : "𝛄", + "\\bfdelta" : "𝛅", + "\\bfepsilon" : "𝛆", + "\\bfzeta" : "𝛇", + "\\bfeta" : "𝛈", + "\\bftheta" : "𝛉", + "\\bfiota" : "𝛊", + "\\bfkappa" : "𝛋", + "\\bflambda" : "𝛌", + "\\bfmu" : "𝛍", + "\\bfnu" : "𝛎", + "\\bfxi" : "𝛏", + "\\bfomicron" : "𝛐", + "\\bfpi" : "𝛑", + "\\bfrho" : "𝛒", + "\\bfvarsigma" : "𝛓", + "\\bfsigma" : "𝛔", + "\\bftau" : "𝛕", + "\\bfupsilon" : "𝛖", + "\\bfvarphi" : "𝛗", + "\\bfchi" : "𝛘", + "\\bfpsi" : "𝛙", + "\\bfomega" : "𝛚", + "\\bfvarepsilon" : "𝛜", + "\\bfvartheta" : "𝛝", + "\\bfvarkappa" : "𝛞", + "\\bfphi" : "𝛟", + "\\bfvarrho" : "𝛠", + "\\bfvarpi" : "𝛡", + "\\itAlpha" : "𝛢", + "\\itBeta" : "𝛣", + "\\itGamma" : "𝛤", + "\\itDelta" : "𝛥", + "\\itEpsilon" : "𝛦", + "\\itZeta" : "𝛧", + "\\itEta" : "𝛨", + "\\itTheta" : "𝛩", + "\\itIota" : "𝛪", + "\\itKappa" : "𝛫", + "\\itLambda" : "𝛬", + "\\itMu" : "𝛭", + "\\itNu" : "𝛮", + "\\itXi" : "𝛯", + "\\itOmicron" : "𝛰", + "\\itPi" : "𝛱", + "\\itRho" : "𝛲", + "\\itvarTheta" : "𝛳", + "\\itSigma" : "𝛴", + "\\itTau" : "𝛵", + "\\itUpsilon" : "𝛶", + "\\itPhi" : "𝛷", + "\\itChi" : "𝛸", + "\\itPsi" : "𝛹", + "\\itOmega" : "𝛺", + "\\italpha" : "𝛼", + "\\itbeta" : "𝛽", + "\\itgamma" : "𝛾", + "\\itdelta" : "𝛿", + "\\itepsilon" : "𝜀", + "\\itzeta" : "𝜁", + "\\iteta" : "𝜂", + "\\ittheta" : "𝜃", + "\\itiota" : "𝜄", + "\\itkappa" : "𝜅", + "\\itlambda" : "𝜆", + "\\itmu" : "𝜇", + "\\itnu" : "𝜈", + "\\itxi" : "𝜉", + "\\itomicron" : "𝜊", + "\\itpi" : "𝜋", + "\\itrho" : "𝜌", + "\\itvarsigma" : "𝜍", + "\\itsigma" : "𝜎", + "\\ittau" : "𝜏", + "\\itupsilon" : "𝜐", + "\\itphi" : "𝜑", + "\\itchi" : "𝜒", + "\\itpsi" : "𝜓", + "\\itomega" : "𝜔", + "\\itvarepsilon" : "𝜖", + "\\itvartheta" : "𝜗", + "\\itvarkappa" : "𝜘", + "\\itvarphi" : "𝜙", + "\\itvarrho" : "𝜚", + "\\itvarpi" : "𝜛", + "\\biAlpha" : "𝜜", + "\\biBeta" : "𝜝", + "\\biGamma" : "𝜞", + "\\biDelta" : "𝜟", + "\\biEpsilon" : "𝜠", + "\\biZeta" : "𝜡", + "\\biEta" : "𝜢", + "\\biTheta" : "𝜣", + "\\biIota" : "𝜤", + "\\biKappa" : "𝜥", + "\\biLambda" : "𝜦", + "\\biMu" : "𝜧", + "\\biNu" : "𝜨", + "\\biXi" : "𝜩", + "\\biOmicron" : "𝜪", + "\\biPi" : "𝜫", + "\\biRho" : "𝜬", + "\\bivarTheta" : "𝜭", + "\\biSigma" : "𝜮", + "\\biTau" : "𝜯", + "\\biUpsilon" : "𝜰", + "\\biPhi" : "𝜱", + "\\biChi" : "𝜲", + "\\biPsi" : "𝜳", + "\\biOmega" : "𝜴", + "\\bialpha" : "𝜶", + "\\bibeta" : "𝜷", 
+ "\\bigamma" : "𝜸", + "\\bidelta" : "𝜹", + "\\biepsilon" : "𝜺", + "\\bizeta" : "𝜻", + "\\bieta" : "𝜼", + "\\bitheta" : "𝜽", + "\\biiota" : "𝜾", + "\\bikappa" : "𝜿", + "\\bilambda" : "𝝀", + "\\bimu" : "𝝁", + "\\binu" : "𝝂", + "\\bixi" : "𝝃", + "\\biomicron" : "𝝄", + "\\bipi" : "𝝅", + "\\birho" : "𝝆", + "\\bivarsigma" : "𝝇", + "\\bisigma" : "𝝈", + "\\bitau" : "𝝉", + "\\biupsilon" : "𝝊", + "\\biphi" : "𝝋", + "\\bichi" : "𝝌", + "\\bipsi" : "𝝍", + "\\biomega" : "𝝎", + "\\bivarepsilon" : "𝝐", + "\\bivartheta" : "𝝑", + "\\bivarkappa" : "𝝒", + "\\bivarphi" : "𝝓", + "\\bivarrho" : "𝝔", + "\\bivarpi" : "𝝕", + "\\bsansAlpha" : "𝝖", + "\\bsansBeta" : "𝝗", + "\\bsansGamma" : "𝝘", + "\\bsansDelta" : "𝝙", + "\\bsansEpsilon" : "𝝚", + "\\bsansZeta" : "𝝛", + "\\bsansEta" : "𝝜", + "\\bsansTheta" : "𝝝", + "\\bsansIota" : "𝝞", + "\\bsansKappa" : "𝝟", + "\\bsansLambda" : "𝝠", + "\\bsansMu" : "𝝡", + "\\bsansNu" : "𝝢", + "\\bsansXi" : "𝝣", + "\\bsansOmicron" : "𝝤", + "\\bsansPi" : "𝝥", + "\\bsansRho" : "𝝦", + "\\bsansvarTheta" : "𝝧", + "\\bsansSigma" : "𝝨", + "\\bsansTau" : "𝝩", + "\\bsansUpsilon" : "𝝪", + "\\bsansPhi" : "𝝫", + "\\bsansChi" : "𝝬", + "\\bsansPsi" : "𝝭", + "\\bsansOmega" : "𝝮", + "\\bsansalpha" : "𝝰", + "\\bsansbeta" : "𝝱", + "\\bsansgamma" : "𝝲", + "\\bsansdelta" : "𝝳", + "\\bsansepsilon" : "𝝴", + "\\bsanszeta" : "𝝵", + "\\bsanseta" : "𝝶", + "\\bsanstheta" : "𝝷", + "\\bsansiota" : "𝝸", + "\\bsanskappa" : "𝝹", + "\\bsanslambda" : "𝝺", + "\\bsansmu" : "𝝻", + "\\bsansnu" : "𝝼", + "\\bsansxi" : "𝝽", + "\\bsansomicron" : "𝝾", + "\\bsanspi" : "𝝿", + "\\bsansrho" : "𝞀", + "\\bsansvarsigma" : "𝞁", + "\\bsanssigma" : "𝞂", + "\\bsanstau" : "𝞃", + "\\bsansupsilon" : "𝞄", + "\\bsansphi" : "𝞅", + "\\bsanschi" : "𝞆", + "\\bsanspsi" : "𝞇", + "\\bsansomega" : "𝞈", + "\\bsansvarepsilon" : "𝞊", + "\\bsansvartheta" : "𝞋", + "\\bsansvarkappa" : "𝞌", + "\\bsansvarphi" : "𝞍", + "\\bsansvarrho" : "𝞎", + "\\bsansvarpi" : "𝞏", + "\\bisansAlpha" : "𝞐", + "\\bisansBeta" : "𝞑", + "\\bisansGamma" : "𝞒", + "\\bisansDelta" : "𝞓", + "\\bisansEpsilon" : "𝞔", + "\\bisansZeta" : "𝞕", + "\\bisansEta" : "𝞖", + "\\bisansTheta" : "𝞗", + "\\bisansIota" : "𝞘", + "\\bisansKappa" : "𝞙", + "\\bisansLambda" : "𝞚", + "\\bisansMu" : "𝞛", + "\\bisansNu" : "𝞜", + "\\bisansXi" : "𝞝", + "\\bisansOmicron" : "𝞞", + "\\bisansPi" : "𝞟", + "\\bisansRho" : "𝞠", + "\\bisansvarTheta" : "𝞡", + "\\bisansSigma" : "𝞢", + "\\bisansTau" : "𝞣", + "\\bisansUpsilon" : "𝞤", + "\\bisansPhi" : "𝞥", + "\\bisansChi" : "𝞦", + "\\bisansPsi" : "𝞧", + "\\bisansOmega" : "𝞨", + "\\bisansalpha" : "𝞪", + "\\bisansbeta" : "𝞫", + "\\bisansgamma" : "𝞬", + "\\bisansdelta" : "𝞭", + "\\bisansepsilon" : "𝞮", + "\\bisanszeta" : "𝞯", + "\\bisanseta" : "𝞰", + "\\bisanstheta" : "𝞱", + "\\bisansiota" : "𝞲", + "\\bisanskappa" : "𝞳", + "\\bisanslambda" : "𝞴", + "\\bisansmu" : "𝞵", + "\\bisansnu" : "𝞶", + "\\bisansxi" : "𝞷", + "\\bisansomicron" : "𝞸", + "\\bisanspi" : "𝞹", + "\\bisansrho" : "𝞺", + "\\bisansvarsigma" : "𝞻", + "\\bisanssigma" : "𝞼", + "\\bisanstau" : "𝞽", + "\\bisansupsilon" : "𝞾", + "\\bisansphi" : "𝞿", + "\\bisanschi" : "𝟀", + "\\bisanspsi" : "𝟁", + "\\bisansomega" : "𝟂", + "\\bisansvarepsilon" : "𝟄", + "\\bisansvartheta" : "𝟅", + "\\bisansvarkappa" : "𝟆", + "\\bisansvarphi" : "𝟇", + "\\bisansvarrho" : "𝟈", + "\\bisansvarpi" : "𝟉", + "\\bfzero" : "𝟎", + "\\bfone" : "𝟏", + "\\bftwo" : "𝟐", + "\\bfthree" : "𝟑", + "\\bffour" : "𝟒", + "\\bffive" : "𝟓", + "\\bfsix" : "𝟔", + "\\bfseven" : "𝟕", + "\\bfeight" : "𝟖", + "\\bfnine" : "𝟗", + "\\bbzero" : "𝟘", + "\\bbone" : "𝟙", + 
"\\bbtwo" : "𝟚", + "\\bbthree" : "𝟛", + "\\bbfour" : "𝟜", + "\\bbfive" : "𝟝", + "\\bbsix" : "𝟞", + "\\bbseven" : "𝟟", + "\\bbeight" : "𝟠", + "\\bbnine" : "𝟡", + "\\sanszero" : "𝟢", + "\\sansone" : "𝟣", + "\\sanstwo" : "𝟤", + "\\sansthree" : "𝟥", + "\\sansfour" : "𝟦", + "\\sansfive" : "𝟧", + "\\sanssix" : "𝟨", + "\\sansseven" : "𝟩", + "\\sanseight" : "𝟪", + "\\sansnine" : "𝟫", + "\\bsanszero" : "𝟬", + "\\bsansone" : "𝟭", + "\\bsanstwo" : "𝟮", + "\\bsansthree" : "𝟯", + "\\bsansfour" : "𝟰", + "\\bsansfive" : "𝟱", + "\\bsanssix" : "𝟲", + "\\bsansseven" : "𝟳", + "\\bsanseight" : "𝟴", + "\\bsansnine" : "𝟵", + "\\ttzero" : "𝟶", + "\\ttone" : "𝟷", + "\\tttwo" : "𝟸", + "\\ttthree" : "𝟹", + "\\ttfour" : "𝟺", + "\\ttfive" : "𝟻", + "\\ttsix" : "𝟼", + "\\ttseven" : "𝟽", + "\\tteight" : "𝟾", + "\\ttnine" : "𝟿", + "\\underbar" : "̲", + "\\underleftrightarrow" : "͍", +} + + +reverse_latex_symbol = { v:k for k,v in latex_symbols.items()} diff --git a/contrib/python/ipython/py3/IPython/core/logger.py b/contrib/python/ipython/py3/IPython/core/logger.py index 4a52bdc8b9b..e3cb233cfa4 100644 --- a/contrib/python/ipython/py3/IPython/core/logger.py +++ b/contrib/python/ipython/py3/IPython/core/logger.py @@ -1,218 +1,218 @@ -"""Logger class for IPython's logging facilities. -""" - -#***************************************************************************** -# Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and -# Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu> -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#***************************************************************************** - -#**************************************************************************** -# Modules and globals - -# Python standard modules -import glob -import io -import os -import time - - -#**************************************************************************** -# FIXME: This class isn't a mixin anymore, but it still needs attributes from -# ipython and does input cache management. Finish cleanup later... - -class Logger(object): - """A Logfile class with different policies for file creation""" - - def __init__(self, home_dir, logfname='Logger.log', loghead=u'', - logmode='over'): - - # this is the full ipython instance, we need some attributes from it - # which won't exist until later. What a mess, clean up later... - self.home_dir = home_dir - - self.logfname = logfname - self.loghead = loghead - self.logmode = logmode - self.logfile = None - - # Whether to log raw or processed input - self.log_raw_input = False - - # whether to also log output - self.log_output = False - - # whether to put timestamps before each log entry - self.timestamp = False - - # activity control flags - self.log_active = False - - # logmode is a validated property - def _set_mode(self,mode): - if mode not in ['append','backup','global','over','rotate']: - raise ValueError('invalid log mode %s given' % mode) - self._logmode = mode - - def _get_mode(self): - return self._logmode - - logmode = property(_get_mode,_set_mode) - - def logstart(self, logfname=None, loghead=None, logmode=None, - log_output=False, timestamp=False, log_raw_input=False): - """Generate a new log-file with a default header. 
- - Raises RuntimeError if the log has already been started""" - - if self.logfile is not None: - raise RuntimeError('Log file is already active: %s' % - self.logfname) - - # The parameters can override constructor defaults - if logfname is not None: self.logfname = logfname - if loghead is not None: self.loghead = loghead - if logmode is not None: self.logmode = logmode - - # Parameters not part of the constructor - self.timestamp = timestamp - self.log_output = log_output - self.log_raw_input = log_raw_input - - # init depending on the log mode requested - isfile = os.path.isfile - logmode = self.logmode - - if logmode == 'append': - self.logfile = io.open(self.logfname, 'a', encoding='utf-8') - - elif logmode == 'backup': - if isfile(self.logfname): - backup_logname = self.logfname+'~' - # Manually remove any old backup, since os.rename may fail - # under Windows. - if isfile(backup_logname): - os.remove(backup_logname) - os.rename(self.logfname,backup_logname) - self.logfile = io.open(self.logfname, 'w', encoding='utf-8') - - elif logmode == 'global': - self.logfname = os.path.join(self.home_dir,self.logfname) - self.logfile = io.open(self.logfname, 'a', encoding='utf-8') - - elif logmode == 'over': - if isfile(self.logfname): - os.remove(self.logfname) - self.logfile = io.open(self.logfname,'w', encoding='utf-8') - - elif logmode == 'rotate': - if isfile(self.logfname): - if isfile(self.logfname+'.001~'): - old = glob.glob(self.logfname+'.*~') - old.sort() - old.reverse() - for f in old: - root, ext = os.path.splitext(f) - num = int(ext[1:-1])+1 - os.rename(f, root+'.'+repr(num).zfill(3)+'~') - os.rename(self.logfname, self.logfname+'.001~') - self.logfile = io.open(self.logfname, 'w', encoding='utf-8') - - if logmode != 'append': - self.logfile.write(self.loghead) - - self.logfile.flush() - self.log_active = True - - def switch_log(self,val): - """Switch logging on/off. val should be ONLY a boolean.""" - - if val not in [False,True,0,1]: - raise ValueError('Call switch_log ONLY with a boolean argument, ' - 'not with: %s' % val) - - label = {0:'OFF',1:'ON',False:'OFF',True:'ON'} - - if self.logfile is None: - print(""" -Logging hasn't been started yet (use logstart for that). - -%logon/%logoff are for temporarily starting and stopping logging for a logfile -which already exists. But you must first start the logging process with -%logstart (optionally giving a logfile name).""") - - else: - if self.log_active == val: - print('Logging is already',label[val]) - else: - print('Switching logging',label[val]) - self.log_active = not self.log_active - self.log_active_out = self.log_active - - def logstate(self): - """Print a status message about the logger.""" - if self.logfile is None: - print('Logging has not been activated.') - else: - state = self.log_active and 'active' or 'temporarily suspended' - print('Filename :', self.logfname) - print('Mode :', self.logmode) - print('Output logging :', self.log_output) - print('Raw input log :', self.log_raw_input) - print('Timestamping :', self.timestamp) - print('State :', state) - - def log(self, line_mod, line_ori): - """Write the sources to a log. - - Inputs: - - - line_mod: possibly modified input, such as the transformations made - by input prefilters or input handlers of various kinds. This should - always be valid Python. - - - line_ori: unmodified input line from the user. This is not - necessarily valid Python. - """ - - # Write the log line, but decide which one according to the - # log_raw_input flag, set when the log is started. 
- if self.log_raw_input: - self.log_write(line_ori) - else: - self.log_write(line_mod) - - def log_write(self, data, kind='input'): - """Write data to the log file, if active""" - - #print 'data: %r' % data # dbg - if self.log_active and data: - write = self.logfile.write - if kind=='input': - if self.timestamp: - write(time.strftime('# %a, %d %b %Y %H:%M:%S\n', time.localtime())) - write(data) - elif kind=='output' and self.log_output: - odata = u'\n'.join([u'#[Out]# %s' % s - for s in data.splitlines()]) - write(u'%s\n' % odata) - self.logfile.flush() - - def logstop(self): - """Fully stop logging and close log file. - - In order to start logging again, a new logstart() call needs to be - made, possibly (though not necessarily) with a new filename, mode and - other options.""" - - if self.logfile is not None: - self.logfile.close() - self.logfile = None - else: - print("Logging hadn't been started.") - self.log_active = False - - # For backwards compatibility, in case anyone was using this. - close_log = logstop +"""Logger class for IPython's logging facilities. +""" + +#***************************************************************************** +# Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and +# Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu> +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#***************************************************************************** + +#**************************************************************************** +# Modules and globals + +# Python standard modules +import glob +import io +import os +import time + + +#**************************************************************************** +# FIXME: This class isn't a mixin anymore, but it still needs attributes from +# ipython and does input cache management. Finish cleanup later... + +class Logger(object): + """A Logfile class with different policies for file creation""" + + def __init__(self, home_dir, logfname='Logger.log', loghead=u'', + logmode='over'): + + # this is the full ipython instance, we need some attributes from it + # which won't exist until later. What a mess, clean up later... + self.home_dir = home_dir + + self.logfname = logfname + self.loghead = loghead + self.logmode = logmode + self.logfile = None + + # Whether to log raw or processed input + self.log_raw_input = False + + # whether to also log output + self.log_output = False + + # whether to put timestamps before each log entry + self.timestamp = False + + # activity control flags + self.log_active = False + + # logmode is a validated property + def _set_mode(self,mode): + if mode not in ['append','backup','global','over','rotate']: + raise ValueError('invalid log mode %s given' % mode) + self._logmode = mode + + def _get_mode(self): + return self._logmode + + logmode = property(_get_mode,_set_mode) + + def logstart(self, logfname=None, loghead=None, logmode=None, + log_output=False, timestamp=False, log_raw_input=False): + """Generate a new log-file with a default header. 
+ + Raises RuntimeError if the log has already been started""" + + if self.logfile is not None: + raise RuntimeError('Log file is already active: %s' % + self.logfname) + + # The parameters can override constructor defaults + if logfname is not None: self.logfname = logfname + if loghead is not None: self.loghead = loghead + if logmode is not None: self.logmode = logmode + + # Parameters not part of the constructor + self.timestamp = timestamp + self.log_output = log_output + self.log_raw_input = log_raw_input + + # init depending on the log mode requested + isfile = os.path.isfile + logmode = self.logmode + + if logmode == 'append': + self.logfile = io.open(self.logfname, 'a', encoding='utf-8') + + elif logmode == 'backup': + if isfile(self.logfname): + backup_logname = self.logfname+'~' + # Manually remove any old backup, since os.rename may fail + # under Windows. + if isfile(backup_logname): + os.remove(backup_logname) + os.rename(self.logfname,backup_logname) + self.logfile = io.open(self.logfname, 'w', encoding='utf-8') + + elif logmode == 'global': + self.logfname = os.path.join(self.home_dir,self.logfname) + self.logfile = io.open(self.logfname, 'a', encoding='utf-8') + + elif logmode == 'over': + if isfile(self.logfname): + os.remove(self.logfname) + self.logfile = io.open(self.logfname,'w', encoding='utf-8') + + elif logmode == 'rotate': + if isfile(self.logfname): + if isfile(self.logfname+'.001~'): + old = glob.glob(self.logfname+'.*~') + old.sort() + old.reverse() + for f in old: + root, ext = os.path.splitext(f) + num = int(ext[1:-1])+1 + os.rename(f, root+'.'+repr(num).zfill(3)+'~') + os.rename(self.logfname, self.logfname+'.001~') + self.logfile = io.open(self.logfname, 'w', encoding='utf-8') + + if logmode != 'append': + self.logfile.write(self.loghead) + + self.logfile.flush() + self.log_active = True + + def switch_log(self,val): + """Switch logging on/off. val should be ONLY a boolean.""" + + if val not in [False,True,0,1]: + raise ValueError('Call switch_log ONLY with a boolean argument, ' + 'not with: %s' % val) + + label = {0:'OFF',1:'ON',False:'OFF',True:'ON'} + + if self.logfile is None: + print(""" +Logging hasn't been started yet (use logstart for that). + +%logon/%logoff are for temporarily starting and stopping logging for a logfile +which already exists. But you must first start the logging process with +%logstart (optionally giving a logfile name).""") + + else: + if self.log_active == val: + print('Logging is already',label[val]) + else: + print('Switching logging',label[val]) + self.log_active = not self.log_active + self.log_active_out = self.log_active + + def logstate(self): + """Print a status message about the logger.""" + if self.logfile is None: + print('Logging has not been activated.') + else: + state = self.log_active and 'active' or 'temporarily suspended' + print('Filename :', self.logfname) + print('Mode :', self.logmode) + print('Output logging :', self.log_output) + print('Raw input log :', self.log_raw_input) + print('Timestamping :', self.timestamp) + print('State :', state) + + def log(self, line_mod, line_ori): + """Write the sources to a log. + + Inputs: + + - line_mod: possibly modified input, such as the transformations made + by input prefilters or input handlers of various kinds. This should + always be valid Python. + + - line_ori: unmodified input line from the user. This is not + necessarily valid Python. + """ + + # Write the log line, but decide which one according to the + # log_raw_input flag, set when the log is started. 
+ if self.log_raw_input: + self.log_write(line_ori) + else: + self.log_write(line_mod) + + def log_write(self, data, kind='input'): + """Write data to the log file, if active""" + + #print 'data: %r' % data # dbg + if self.log_active and data: + write = self.logfile.write + if kind=='input': + if self.timestamp: + write(time.strftime('# %a, %d %b %Y %H:%M:%S\n', time.localtime())) + write(data) + elif kind=='output' and self.log_output: + odata = u'\n'.join([u'#[Out]# %s' % s + for s in data.splitlines()]) + write(u'%s\n' % odata) + self.logfile.flush() + + def logstop(self): + """Fully stop logging and close log file. + + In order to start logging again, a new logstart() call needs to be + made, possibly (though not necessarily) with a new filename, mode and + other options.""" + + if self.logfile is not None: + self.logfile.close() + self.logfile = None + else: + print("Logging hadn't been started.") + self.log_active = False + + # For backwards compatibility, in case anyone was using this. + close_log = logstop diff --git a/contrib/python/ipython/py3/IPython/core/macro.py b/contrib/python/ipython/py3/IPython/core/macro.py index 1fc2df2db40..ce86898cac8 100644 --- a/contrib/python/ipython/py3/IPython/core/macro.py +++ b/contrib/python/ipython/py3/IPython/core/macro.py @@ -1,53 +1,53 @@ -"""Support for interactive macros in IPython""" - -#***************************************************************************** -# Copyright (C) 2001-2005 Fernando Perez <fperez@colorado.edu> -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#***************************************************************************** - -import re - -from IPython.utils.encoding import DEFAULT_ENCODING - -coding_declaration = re.compile(r"#\s*coding[:=]\s*([-\w.]+)") - -class Macro(object): - """Simple class to store the value of macros as strings. - - Macro is just a callable that executes a string of IPython - input when called. - """ - - def __init__(self,code): - """store the macro value, as a single string which can be executed""" - lines = [] - enc = None - for line in code.splitlines(): - coding_match = coding_declaration.match(line) - if coding_match: - enc = coding_match.group(1) - else: - lines.append(line) - code = "\n".join(lines) - if isinstance(code, bytes): - code = code.decode(enc or DEFAULT_ENCODING) - self.value = code + '\n' - - def __str__(self): - return self.value - - def __repr__(self): - return 'IPython.macro.Macro(%s)' % repr(self.value) - - def __getstate__(self): - """ needed for safe pickling via %store """ - return {'value': self.value} - - def __add__(self, other): - if isinstance(other, Macro): - return Macro(self.value + other.value) - elif isinstance(other, str): - return Macro(self.value + other) - raise TypeError +"""Support for interactive macros in IPython""" + +#***************************************************************************** +# Copyright (C) 2001-2005 Fernando Perez <fperez@colorado.edu> +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#***************************************************************************** + +import re + +from IPython.utils.encoding import DEFAULT_ENCODING + +coding_declaration = re.compile(r"#\s*coding[:=]\s*([-\w.]+)") + +class Macro(object): + """Simple class to store the value of macros as strings. 
+ + Macro is just a callable that executes a string of IPython + input when called. + """ + + def __init__(self,code): + """store the macro value, as a single string which can be executed""" + lines = [] + enc = None + for line in code.splitlines(): + coding_match = coding_declaration.match(line) + if coding_match: + enc = coding_match.group(1) + else: + lines.append(line) + code = "\n".join(lines) + if isinstance(code, bytes): + code = code.decode(enc or DEFAULT_ENCODING) + self.value = code + '\n' + + def __str__(self): + return self.value + + def __repr__(self): + return 'IPython.macro.Macro(%s)' % repr(self.value) + + def __getstate__(self): + """ needed for safe pickling via %store """ + return {'value': self.value} + + def __add__(self, other): + if isinstance(other, Macro): + return Macro(self.value + other.value) + elif isinstance(other, str): + return Macro(self.value + other) + raise TypeError diff --git a/contrib/python/ipython/py3/IPython/core/magic.py b/contrib/python/ipython/py3/IPython/core/magic.py index 71fcc460860..bc51677f083 100644 --- a/contrib/python/ipython/py3/IPython/core/magic.py +++ b/contrib/python/ipython/py3/IPython/core/magic.py @@ -1,703 +1,703 @@ -# encoding: utf-8 -"""Magic functions for InteractiveShell. -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and -# Copyright (C) 2001 Fernando Perez <fperez@colorado.edu> -# Copyright (C) 2008 The IPython Development Team - -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -import os -import re -import sys -from getopt import getopt, GetoptError - -from traitlets.config.configurable import Configurable -from . import oinspect -from .error import UsageError -from .inputtransformer2 import ESC_MAGIC, ESC_MAGIC2 -from decorator import decorator -from ..utils.ipstruct import Struct -from ..utils.process import arg_split -from ..utils.text import dedent -from traitlets import Bool, Dict, Instance, observe -from logging import error - -#----------------------------------------------------------------------------- -# Globals -#----------------------------------------------------------------------------- - -# A dict we'll use for each class that has magics, used as temporary storage to -# pass information between the @line/cell_magic method decorators and the -# @magics_class class decorator, because the method decorators have no -# access to the class when they run. See for more details: -# http://stackoverflow.com/questions/2366713/can-a-python-decorator-of-an-instance-method-access-the-class - -magics = dict(line={}, cell={}) - -magic_kinds = ('line', 'cell') -magic_spec = ('line', 'cell', 'line_cell') -magic_escapes = dict(line=ESC_MAGIC, cell=ESC_MAGIC2) - -#----------------------------------------------------------------------------- -# Utility classes and functions -#----------------------------------------------------------------------------- - -class Bunch: pass - - -def on_off(tag): - """Return an ON/OFF string for a 1/0 input. Simple utility function.""" - return ['OFF','ON'][tag] - - -def compress_dhist(dh): - """Compress a directory history into a new one with at most 20 entries. - - Return a new list made from the first and last 10 elements of dhist after - removal of duplicates. 
- """ - head, tail = dh[:-10], dh[-10:] - - newhead = [] - done = set() - for h in head: - if h in done: - continue - newhead.append(h) - done.add(h) - - return newhead + tail - - -def needs_local_scope(func): - """Decorator to mark magic functions which need to local scope to run.""" - func.needs_local_scope = True - return func - -#----------------------------------------------------------------------------- -# Class and method decorators for registering magics -#----------------------------------------------------------------------------- - -def magics_class(cls): - """Class decorator for all subclasses of the main Magics class. - - Any class that subclasses Magics *must* also apply this decorator, to - ensure that all the methods that have been decorated as line/cell magics - get correctly registered in the class instance. This is necessary because - when method decorators run, the class does not exist yet, so they - temporarily store their information into a module global. Application of - this class decorator copies that global data to the class instance and - clears the global. - - Obviously, this mechanism is not thread-safe, which means that the - *creation* of subclasses of Magic should only be done in a single-thread - context. Instantiation of the classes has no restrictions. Given that - these classes are typically created at IPython startup time and before user - application code becomes active, in practice this should not pose any - problems. - """ - cls.registered = True - cls.magics = dict(line = magics['line'], - cell = magics['cell']) - magics['line'] = {} - magics['cell'] = {} - return cls - - -def record_magic(dct, magic_kind, magic_name, func): - """Utility function to store a function as a magic of a specific kind. - - Parameters - ---------- - dct : dict - A dictionary with 'line' and 'cell' subdicts. - - magic_kind : str - Kind of magic to be stored. - - magic_name : str - Key to store the magic as. - - func : function - Callable object to store. - """ - if magic_kind == 'line_cell': - dct['line'][magic_name] = dct['cell'][magic_name] = func - else: - dct[magic_kind][magic_name] = func - - -def validate_type(magic_kind): - """Ensure that the given magic_kind is valid. - - Check that the given magic_kind is one of the accepted spec types (stored - in the global `magic_spec`), raise ValueError otherwise. - """ - if magic_kind not in magic_spec: - raise ValueError('magic_kind must be one of %s, %s given' % - magic_kinds, magic_kind) - - -# The docstrings for the decorator below will be fairly similar for the two -# types (method and function), so we generate them here once and reuse the -# templates below. -_docstring_template = \ -"""Decorate the given {0} as {1} magic. - -The decorator can be used with or without arguments, as follows. - -i) without arguments: it will create a {1} magic named as the {0} being -decorated:: - - @deco - def foo(...) - -will create a {1} magic named `foo`. - -ii) with one string argument: which will be used as the actual name of the -resulting magic:: - - @deco('bar') - def foo(...) - -will create a {1} magic named `bar`. - -To register a class magic use ``Interactiveshell.register_magic(class or instance)``. -""" - -# These two are decorator factories. While they are conceptually very similar, -# there are enough differences in the details that it's simpler to have them -# written as completely standalone functions rather than trying to share code -# and make a single one with convoluted logic. 
- -def _method_magic_marker(magic_kind): - """Decorator factory for methods in Magics subclasses. - """ - - validate_type(magic_kind) - - # This is a closure to capture the magic_kind. We could also use a class, - # but it's overkill for just that one bit of state. - def magic_deco(arg): - call = lambda f, *a, **k: f(*a, **k) - - if callable(arg): - # "Naked" decorator call (just @foo, no args) - func = arg - name = func.__name__ - retval = decorator(call, func) - record_magic(magics, magic_kind, name, name) - elif isinstance(arg, str): - # Decorator called with arguments (@foo('bar')) - name = arg - def mark(func, *a, **kw): - record_magic(magics, magic_kind, name, func.__name__) - return decorator(call, func) - retval = mark - else: - raise TypeError("Decorator can only be called with " - "string or function") - return retval - - # Ensure the resulting decorator has a usable docstring - magic_deco.__doc__ = _docstring_template.format('method', magic_kind) - return magic_deco - - -def _function_magic_marker(magic_kind): - """Decorator factory for standalone functions. - """ - validate_type(magic_kind) - - # This is a closure to capture the magic_kind. We could also use a class, - # but it's overkill for just that one bit of state. - def magic_deco(arg): - call = lambda f, *a, **k: f(*a, **k) - - # Find get_ipython() in the caller's namespace - caller = sys._getframe(1) - for ns in ['f_locals', 'f_globals', 'f_builtins']: - get_ipython = getattr(caller, ns).get('get_ipython') - if get_ipython is not None: - break - else: - raise NameError('Decorator can only run in context where ' - '`get_ipython` exists') - - ip = get_ipython() - - if callable(arg): - # "Naked" decorator call (just @foo, no args) - func = arg - name = func.__name__ - ip.register_magic_function(func, magic_kind, name) - retval = decorator(call, func) - elif isinstance(arg, str): - # Decorator called with arguments (@foo('bar')) - name = arg - def mark(func, *a, **kw): - ip.register_magic_function(func, magic_kind, name) - return decorator(call, func) - retval = mark - else: - raise TypeError("Decorator can only be called with " - "string or function") - return retval - - # Ensure the resulting decorator has a usable docstring - ds = _docstring_template.format('function', magic_kind) - - ds += dedent(""" - Note: this decorator can only be used in a context where IPython is already - active, so that the `get_ipython()` call succeeds. You can therefore use - it in your startup files loaded after IPython initializes, but *not* in the - IPython configuration file itself, which is executed before IPython is - fully up and running. Any file located in the `startup` subdirectory of - your configuration profile will be OK in this sense. - """) - - magic_deco.__doc__ = ds - return magic_deco - - -MAGIC_NO_VAR_EXPAND_ATTR = '_ipython_magic_no_var_expand' - - -def no_var_expand(magic_func): - """Mark a magic function as not needing variable expansion - - By default, IPython interprets `{a}` or `$a` in the line passed to magics - as variables that should be interpolated from the interactive namespace - before passing the line to the magic function. - This is not always desirable, e.g. when the magic executes Python code - (%timeit, %time, etc.). - Decorate magics with `@no_var_expand` to opt-out of variable expansion. - - .. 
versionadded:: 7.3 - """ - setattr(magic_func, MAGIC_NO_VAR_EXPAND_ATTR, True) - return magic_func - - -# Create the actual decorators for public use - -# These three are used to decorate methods in class definitions -line_magic = _method_magic_marker('line') -cell_magic = _method_magic_marker('cell') -line_cell_magic = _method_magic_marker('line_cell') - -# These three decorate standalone functions and perform the decoration -# immediately. They can only run where get_ipython() works -register_line_magic = _function_magic_marker('line') -register_cell_magic = _function_magic_marker('cell') -register_line_cell_magic = _function_magic_marker('line_cell') - -#----------------------------------------------------------------------------- -# Core Magic classes -#----------------------------------------------------------------------------- - -class MagicsManager(Configurable): - """Object that handles all magic-related functionality for IPython. - """ - # Non-configurable class attributes - - # A two-level dict, first keyed by magic type, then by magic function, and - # holding the actual callable object as value. This is the dict used for - # magic function dispatch - magics = Dict() - - # A registry of the original objects that we've been given holding magics. - registry = Dict() - - shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) - - auto_magic = Bool(True, help= - "Automatically call line magics without requiring explicit % prefix" - ).tag(config=True) - @observe('auto_magic') - def _auto_magic_changed(self, change): - self.shell.automagic = change['new'] - - _auto_status = [ - 'Automagic is OFF, % prefix IS needed for line magics.', - 'Automagic is ON, % prefix IS NOT needed for line magics.'] - - user_magics = Instance('IPython.core.magics.UserMagics', allow_none=True) - - def __init__(self, shell=None, config=None, user_magics=None, **traits): - - super(MagicsManager, self).__init__(shell=shell, config=config, - user_magics=user_magics, **traits) - self.magics = dict(line={}, cell={}) - # Let's add the user_magics to the registry for uniformity, so *all* - # registered magic containers can be found there. - self.registry[user_magics.__class__.__name__] = user_magics - - def auto_status(self): - """Return descriptive string with automagic status.""" - return self._auto_status[self.auto_magic] - - def lsmagic(self): - """Return a dict of currently available magic functions. - - The return dict has the keys 'line' and 'cell', corresponding to the - two types of magics we support. Each value is a list of names. - """ - return self.magics - - def lsmagic_docs(self, brief=False, missing=''): - """Return dict of documentation of magic functions. - - The return dict has the keys 'line' and 'cell', corresponding to the - two types of magics we support. Each value is a dict keyed by magic - name whose value is the function docstring. If a docstring is - unavailable, the value of `missing` is used instead. - - If brief is True, only the first line of each docstring will be returned. - """ - docs = {} - for m_type in self.magics: - m_docs = {} - for m_name, m_func in self.magics[m_type].items(): - if m_func.__doc__: - if brief: - m_docs[m_name] = m_func.__doc__.split('\n', 1)[0] - else: - m_docs[m_name] = m_func.__doc__.rstrip() - else: - m_docs[m_name] = missing - docs[m_type] = m_docs - return docs - - def register(self, *magic_objects): - """Register one or more instances of Magics. 
- - Take one or more classes or instances of classes that subclass the main - `core.Magic` class, and register them with IPython to use the magic - functions they provide. The registration process will then ensure that - any methods that have decorated to provide line and/or cell magics will - be recognized with the `%x`/`%%x` syntax as a line/cell magic - respectively. - - If classes are given, they will be instantiated with the default - constructor. If your classes need a custom constructor, you should - instanitate them first and pass the instance. - - The provided arguments can be an arbitrary mix of classes and instances. - - Parameters - ---------- - magic_objects : one or more classes or instances - """ - # Start by validating them to ensure they have all had their magic - # methods registered at the instance level - for m in magic_objects: - if not m.registered: - raise ValueError("Class of magics %r was constructed without " - "the @register_magics class decorator") - if isinstance(m, type): - # If we're given an uninstantiated class - m = m(shell=self.shell) - - # Now that we have an instance, we can register it and update the - # table of callables - self.registry[m.__class__.__name__] = m - for mtype in magic_kinds: - self.magics[mtype].update(m.magics[mtype]) - - def register_function(self, func, magic_kind='line', magic_name=None): - """Expose a standalone function as magic function for IPython. - - This will create an IPython magic (line, cell or both) from a - standalone function. The functions should have the following - signatures: - - * For line magics: `def f(line)` - * For cell magics: `def f(line, cell)` - * For a function that does both: `def f(line, cell=None)` - - In the latter case, the function will be called with `cell==None` when - invoked as `%f`, and with cell as a string when invoked as `%%f`. - - Parameters - ---------- - func : callable - Function to be registered as a magic. - - magic_kind : str - Kind of magic, one of 'line', 'cell' or 'line_cell' - - magic_name : optional str - If given, the name the magic will have in the IPython namespace. By - default, the name of the function itself is used. - """ - - # Create the new method in the user_magics and register it in the - # global table - validate_type(magic_kind) - magic_name = func.__name__ if magic_name is None else magic_name - setattr(self.user_magics, magic_name, func) - record_magic(self.magics, magic_kind, magic_name, func) - - def register_alias(self, alias_name, magic_name, magic_kind='line', magic_params=None): - """Register an alias to a magic function. - - The alias is an instance of :class:`MagicAlias`, which holds the - name and kind of the magic it should call. Binding is done at - call time, so if the underlying magic function is changed the alias - will call the new function. - - Parameters - ---------- - alias_name : str - The name of the magic to be registered. - - magic_name : str - The name of an existing magic. - - magic_kind : str - Kind of magic, one of 'line' or 'cell' - """ - - # `validate_type` is too permissive, as it allows 'line_cell' - # which we do not handle. - if magic_kind not in magic_kinds: - raise ValueError('magic_kind must be one of %s, %s given' % - magic_kinds, magic_kind) - - alias = MagicAlias(self.shell, magic_name, magic_kind, magic_params) - setattr(self.user_magics, alias_name, alias) - record_magic(self.magics, magic_kind, alias_name, alias) - -# Key base class that provides the central functionality for magics. 
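# [Editor's illustration -- not part of the commit diff.] A hedged sketch of the
# MagicsManager registration paths shown above, assuming an active IPython session;
# the names greet and hi are hypothetical.
from IPython import get_ipython

ip = get_ipython()               # the running InteractiveShell (None outside IPython)

# register() accepts Magics subclasses or instances, e.g. the MyMagics class
# sketched earlier:
#   ip.magics_manager.register(MyMagics)

# register_function() exposes a standalone function as a line magic:
def greet(line):
    print("greetings,", line)

ip.magics_manager.register_function(greet, magic_kind="line", magic_name="greet")

# register_alias() binds a new name that resolves the target magic at call time:
ip.magics_manager.register_alias("hi", "greet", magic_kind="line")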
- - -class Magics(Configurable): - """Base class for implementing magic functions. - - Shell functions which can be reached as %function_name. All magic - functions should accept a string, which they can parse for their own - needs. This can make some functions easier to type, eg `%cd ../` - vs. `%cd("../")` - - Classes providing magic functions need to subclass this class, and they - MUST: - - - Use the method decorators `@line_magic` and `@cell_magic` to decorate - individual methods as magic functions, AND - - - Use the class decorator `@magics_class` to ensure that the magic - methods are properly registered at the instance level upon instance - initialization. - - See :mod:`magic_functions` for examples of actual implementation classes. - """ - # Dict holding all command-line options for each magic. - options_table = None - # Dict for the mapping of magic names to methods, set by class decorator - magics = None - # Flag to check that the class decorator was properly applied - registered = False - # Instance of IPython shell - shell = None - - def __init__(self, shell=None, **kwargs): - if not(self.__class__.registered): - raise ValueError('Magics subclass without registration - ' - 'did you forget to apply @magics_class?') - if shell is not None: - if hasattr(shell, 'configurables'): - shell.configurables.append(self) - if hasattr(shell, 'config'): - kwargs.setdefault('parent', shell) - - self.shell = shell - self.options_table = {} - # The method decorators are run when the instance doesn't exist yet, so - # they can only record the names of the methods they are supposed to - # grab. Only now, that the instance exists, can we create the proper - # mapping to bound methods. So we read the info off the original names - # table and replace each method name by the actual bound method. - # But we mustn't clobber the *class* mapping, in case of multiple instances. - class_magics = self.magics - self.magics = {} - for mtype in magic_kinds: - tab = self.magics[mtype] = {} - cls_tab = class_magics[mtype] - for magic_name, meth_name in cls_tab.items(): - if isinstance(meth_name, str): - # it's a method name, grab it - tab[magic_name] = getattr(self, meth_name) - else: - # it's the real thing - tab[magic_name] = meth_name - # Configurable **needs** to be initiated at the end or the config - # magics get screwed up. - super(Magics, self).__init__(**kwargs) - - def arg_err(self,func): - """Print docstring if incorrect arguments were passed""" - print('Error in arguments:') - print(oinspect.getdoc(func)) - - def format_latex(self, strng): - """Format a string for latex inclusion.""" - - # Characters that need to be escaped for latex: - escape_re = re.compile(r'(%|_|\$|#|&)',re.MULTILINE) - # Magic command names as headers: - cmd_name_re = re.compile(r'^(%s.*?):' % ESC_MAGIC, - re.MULTILINE) - # Magic commands - cmd_re = re.compile(r'(?P<cmd>%s.+?\b)(?!\}\}:)' % ESC_MAGIC, - re.MULTILINE) - # Paragraph continue - par_re = re.compile(r'\\$',re.MULTILINE) - - # The "\n" symbol - newline_re = re.compile(r'\\n') - - # Now build the string for output: - #strng = cmd_name_re.sub(r'\n\\texttt{\\textsl{\\large \1}}:',strng) - strng = cmd_name_re.sub(r'\n\\bigskip\n\\texttt{\\textbf{ \1}}:', - strng) - strng = cmd_re.sub(r'\\texttt{\g<cmd>}',strng) - strng = par_re.sub(r'\\\\',strng) - strng = escape_re.sub(r'\\\1',strng) - strng = newline_re.sub(r'\\textbackslash{}n',strng) - return strng - - def parse_options(self, arg_str, opt_str, *long_opts, **kw): - """Parse options passed to an argument string. 
- - The interface is similar to that of :func:`getopt.getopt`, but it - returns a :class:`~IPython.utils.struct.Struct` with the options as keys - and the stripped argument string still as a string. - - arg_str is quoted as a true sys.argv vector by using shlex.split. - This allows us to easily expand variables, glob files, quote - arguments, etc. - - Parameters - ---------- - - arg_str : str - The arguments to parse. - - opt_str : str - The options specification. - - mode : str, default 'string' - If given as 'list', the argument string is returned as a list (split - on whitespace) instead of a string. - - list_all : bool, default False - Put all option values in lists. Normally only options - appearing more than once are put in a list. - - posix : bool, default True - Whether to split the input line in POSIX mode or not, as per the - conventions outlined in the :mod:`shlex` module from the standard - library. - """ - - # inject default options at the beginning of the input line - caller = sys._getframe(1).f_code.co_name - arg_str = '%s %s' % (self.options_table.get(caller,''),arg_str) - - mode = kw.get('mode','string') - if mode not in ['string','list']: - raise ValueError('incorrect mode given: %s' % mode) - # Get options - list_all = kw.get('list_all',0) - posix = kw.get('posix', os.name == 'posix') - strict = kw.get('strict', True) - - # Check if we have more than one argument to warrant extra processing: - odict = {} # Dictionary with options - args = arg_str.split() - if len(args) >= 1: - # If the list of inputs only has 0 or 1 thing in it, there's no - # need to look for options - argv = arg_split(arg_str, posix, strict) - # Do regular option processing - try: - opts,args = getopt(argv, opt_str, long_opts) - except GetoptError as e: - raise UsageError('%s ( allowed: "%s" %s)' % (e.msg,opt_str, - " ".join(long_opts))) - for o,a in opts: - if o.startswith('--'): - o = o[2:] - else: - o = o[1:] - try: - odict[o].append(a) - except AttributeError: - odict[o] = [odict[o],a] - except KeyError: - if list_all: - odict[o] = [a] - else: - odict[o] = a - - # Prepare opts,args for return - opts = Struct(odict) - if mode == 'string': - args = ' '.join(args) - - return opts,args - - def default_option(self, fn, optstr): - """Make an entry in the options_table for fn, with value optstr""" - - if fn not in self.lsmagic(): - error("%s is not a magic function" % fn) - self.options_table[fn] = optstr - - -class MagicAlias(object): - """An alias to another magic function. - - An alias is determined by its magic name and magic kind. Lookup - is done at call time, so if the underlying magic changes the alias - will call the new function. - - Use the :meth:`MagicsManager.register_alias` method or the - `%alias_magic` magic function to create and register a new alias. - """ - def __init__(self, shell, magic_name, magic_kind, magic_params=None): - self.shell = shell - self.magic_name = magic_name - self.magic_params = magic_params - self.magic_kind = magic_kind - - self.pretty_target = '%s%s' % (magic_escapes[self.magic_kind], self.magic_name) - self.__doc__ = "Alias for `%s`." % self.pretty_target - - self._in_call = False - - def __call__(self, *args, **kwargs): - """Call the magic alias.""" - fn = self.shell.find_magic(self.magic_name, self.magic_kind) - if fn is None: - raise UsageError("Magic `%s` not found." % self.pretty_target) - - # Protect against infinite recursion. 
- if self._in_call: - raise UsageError("Infinite recursion detected; " - "magic aliases cannot call themselves.") - self._in_call = True - try: - if self.magic_params: - args_list = list(args) - args_list[0] = self.magic_params + " " + args[0] - args = tuple(args_list) - return fn(*args, **kwargs) - finally: - self._in_call = False +# encoding: utf-8 +"""Magic functions for InteractiveShell. +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and +# Copyright (C) 2001 Fernando Perez <fperez@colorado.edu> +# Copyright (C) 2008 The IPython Development Team + +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +import os +import re +import sys +from getopt import getopt, GetoptError + +from traitlets.config.configurable import Configurable +from . import oinspect +from .error import UsageError +from .inputtransformer2 import ESC_MAGIC, ESC_MAGIC2 +from decorator import decorator +from ..utils.ipstruct import Struct +from ..utils.process import arg_split +from ..utils.text import dedent +from traitlets import Bool, Dict, Instance, observe +from logging import error + +#----------------------------------------------------------------------------- +# Globals +#----------------------------------------------------------------------------- + +# A dict we'll use for each class that has magics, used as temporary storage to +# pass information between the @line/cell_magic method decorators and the +# @magics_class class decorator, because the method decorators have no +# access to the class when they run. See for more details: +# http://stackoverflow.com/questions/2366713/can-a-python-decorator-of-an-instance-method-access-the-class + +magics = dict(line={}, cell={}) + +magic_kinds = ('line', 'cell') +magic_spec = ('line', 'cell', 'line_cell') +magic_escapes = dict(line=ESC_MAGIC, cell=ESC_MAGIC2) + +#----------------------------------------------------------------------------- +# Utility classes and functions +#----------------------------------------------------------------------------- + +class Bunch: pass + + +def on_off(tag): + """Return an ON/OFF string for a 1/0 input. Simple utility function.""" + return ['OFF','ON'][tag] + + +def compress_dhist(dh): + """Compress a directory history into a new one with at most 20 entries. + + Return a new list made from the first and last 10 elements of dhist after + removal of duplicates. + """ + head, tail = dh[:-10], dh[-10:] + + newhead = [] + done = set() + for h in head: + if h in done: + continue + newhead.append(h) + done.add(h) + + return newhead + tail + + +def needs_local_scope(func): + """Decorator to mark magic functions which need to local scope to run.""" + func.needs_local_scope = True + return func + +#----------------------------------------------------------------------------- +# Class and method decorators for registering magics +#----------------------------------------------------------------------------- + +def magics_class(cls): + """Class decorator for all subclasses of the main Magics class. + + Any class that subclasses Magics *must* also apply this decorator, to + ensure that all the methods that have been decorated as line/cell magics + get correctly registered in the class instance. 
This is necessary because + when method decorators run, the class does not exist yet, so they + temporarily store their information into a module global. Application of + this class decorator copies that global data to the class instance and + clears the global. + + Obviously, this mechanism is not thread-safe, which means that the + *creation* of subclasses of Magic should only be done in a single-thread + context. Instantiation of the classes has no restrictions. Given that + these classes are typically created at IPython startup time and before user + application code becomes active, in practice this should not pose any + problems. + """ + cls.registered = True + cls.magics = dict(line = magics['line'], + cell = magics['cell']) + magics['line'] = {} + magics['cell'] = {} + return cls + + +def record_magic(dct, magic_kind, magic_name, func): + """Utility function to store a function as a magic of a specific kind. + + Parameters + ---------- + dct : dict + A dictionary with 'line' and 'cell' subdicts. + + magic_kind : str + Kind of magic to be stored. + + magic_name : str + Key to store the magic as. + + func : function + Callable object to store. + """ + if magic_kind == 'line_cell': + dct['line'][magic_name] = dct['cell'][magic_name] = func + else: + dct[magic_kind][magic_name] = func + + +def validate_type(magic_kind): + """Ensure that the given magic_kind is valid. + + Check that the given magic_kind is one of the accepted spec types (stored + in the global `magic_spec`), raise ValueError otherwise. + """ + if magic_kind not in magic_spec: + raise ValueError('magic_kind must be one of %s, %s given' % + magic_kinds, magic_kind) + + +# The docstrings for the decorator below will be fairly similar for the two +# types (method and function), so we generate them here once and reuse the +# templates below. +_docstring_template = \ +"""Decorate the given {0} as {1} magic. + +The decorator can be used with or without arguments, as follows. + +i) without arguments: it will create a {1} magic named as the {0} being +decorated:: + + @deco + def foo(...) + +will create a {1} magic named `foo`. + +ii) with one string argument: which will be used as the actual name of the +resulting magic:: + + @deco('bar') + def foo(...) + +will create a {1} magic named `bar`. + +To register a class magic use ``Interactiveshell.register_magic(class or instance)``. +""" + +# These two are decorator factories. While they are conceptually very similar, +# there are enough differences in the details that it's simpler to have them +# written as completely standalone functions rather than trying to share code +# and make a single one with convoluted logic. + +def _method_magic_marker(magic_kind): + """Decorator factory for methods in Magics subclasses. + """ + + validate_type(magic_kind) + + # This is a closure to capture the magic_kind. We could also use a class, + # but it's overkill for just that one bit of state. 
+ def magic_deco(arg): + call = lambda f, *a, **k: f(*a, **k) + + if callable(arg): + # "Naked" decorator call (just @foo, no args) + func = arg + name = func.__name__ + retval = decorator(call, func) + record_magic(magics, magic_kind, name, name) + elif isinstance(arg, str): + # Decorator called with arguments (@foo('bar')) + name = arg + def mark(func, *a, **kw): + record_magic(magics, magic_kind, name, func.__name__) + return decorator(call, func) + retval = mark + else: + raise TypeError("Decorator can only be called with " + "string or function") + return retval + + # Ensure the resulting decorator has a usable docstring + magic_deco.__doc__ = _docstring_template.format('method', magic_kind) + return magic_deco + + +def _function_magic_marker(magic_kind): + """Decorator factory for standalone functions. + """ + validate_type(magic_kind) + + # This is a closure to capture the magic_kind. We could also use a class, + # but it's overkill for just that one bit of state. + def magic_deco(arg): + call = lambda f, *a, **k: f(*a, **k) + + # Find get_ipython() in the caller's namespace + caller = sys._getframe(1) + for ns in ['f_locals', 'f_globals', 'f_builtins']: + get_ipython = getattr(caller, ns).get('get_ipython') + if get_ipython is not None: + break + else: + raise NameError('Decorator can only run in context where ' + '`get_ipython` exists') + + ip = get_ipython() + + if callable(arg): + # "Naked" decorator call (just @foo, no args) + func = arg + name = func.__name__ + ip.register_magic_function(func, magic_kind, name) + retval = decorator(call, func) + elif isinstance(arg, str): + # Decorator called with arguments (@foo('bar')) + name = arg + def mark(func, *a, **kw): + ip.register_magic_function(func, magic_kind, name) + return decorator(call, func) + retval = mark + else: + raise TypeError("Decorator can only be called with " + "string or function") + return retval + + # Ensure the resulting decorator has a usable docstring + ds = _docstring_template.format('function', magic_kind) + + ds += dedent(""" + Note: this decorator can only be used in a context where IPython is already + active, so that the `get_ipython()` call succeeds. You can therefore use + it in your startup files loaded after IPython initializes, but *not* in the + IPython configuration file itself, which is executed before IPython is + fully up and running. Any file located in the `startup` subdirectory of + your configuration profile will be OK in this sense. + """) + + magic_deco.__doc__ = ds + return magic_deco + + +MAGIC_NO_VAR_EXPAND_ATTR = '_ipython_magic_no_var_expand' + + +def no_var_expand(magic_func): + """Mark a magic function as not needing variable expansion + + By default, IPython interprets `{a}` or `$a` in the line passed to magics + as variables that should be interpolated from the interactive namespace + before passing the line to the magic function. + This is not always desirable, e.g. when the magic executes Python code + (%timeit, %time, etc.). + Decorate magics with `@no_var_expand` to opt-out of variable expansion. + + .. versionadded:: 7.3 + """ + setattr(magic_func, MAGIC_NO_VAR_EXPAND_ATTR, True) + return magic_func + + +# Create the actual decorators for public use + +# These three are used to decorate methods in class definitions +line_magic = _method_magic_marker('line') +cell_magic = _method_magic_marker('cell') +line_cell_magic = _method_magic_marker('line_cell') + +# These three decorate standalone functions and perform the decoration +# immediately. 
They can only run where get_ipython() works +register_line_magic = _function_magic_marker('line') +register_cell_magic = _function_magic_marker('cell') +register_line_cell_magic = _function_magic_marker('line_cell') + +#----------------------------------------------------------------------------- +# Core Magic classes +#----------------------------------------------------------------------------- + +class MagicsManager(Configurable): + """Object that handles all magic-related functionality for IPython. + """ + # Non-configurable class attributes + + # A two-level dict, first keyed by magic type, then by magic function, and + # holding the actual callable object as value. This is the dict used for + # magic function dispatch + magics = Dict() + + # A registry of the original objects that we've been given holding magics. + registry = Dict() + + shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) + + auto_magic = Bool(True, help= + "Automatically call line magics without requiring explicit % prefix" + ).tag(config=True) + @observe('auto_magic') + def _auto_magic_changed(self, change): + self.shell.automagic = change['new'] + + _auto_status = [ + 'Automagic is OFF, % prefix IS needed for line magics.', + 'Automagic is ON, % prefix IS NOT needed for line magics.'] + + user_magics = Instance('IPython.core.magics.UserMagics', allow_none=True) + + def __init__(self, shell=None, config=None, user_magics=None, **traits): + + super(MagicsManager, self).__init__(shell=shell, config=config, + user_magics=user_magics, **traits) + self.magics = dict(line={}, cell={}) + # Let's add the user_magics to the registry for uniformity, so *all* + # registered magic containers can be found there. + self.registry[user_magics.__class__.__name__] = user_magics + + def auto_status(self): + """Return descriptive string with automagic status.""" + return self._auto_status[self.auto_magic] + + def lsmagic(self): + """Return a dict of currently available magic functions. + + The return dict has the keys 'line' and 'cell', corresponding to the + two types of magics we support. Each value is a list of names. + """ + return self.magics + + def lsmagic_docs(self, brief=False, missing=''): + """Return dict of documentation of magic functions. + + The return dict has the keys 'line' and 'cell', corresponding to the + two types of magics we support. Each value is a dict keyed by magic + name whose value is the function docstring. If a docstring is + unavailable, the value of `missing` is used instead. + + If brief is True, only the first line of each docstring will be returned. + """ + docs = {} + for m_type in self.magics: + m_docs = {} + for m_name, m_func in self.magics[m_type].items(): + if m_func.__doc__: + if brief: + m_docs[m_name] = m_func.__doc__.split('\n', 1)[0] + else: + m_docs[m_name] = m_func.__doc__.rstrip() + else: + m_docs[m_name] = missing + docs[m_type] = m_docs + return docs + + def register(self, *magic_objects): + """Register one or more instances of Magics. + + Take one or more classes or instances of classes that subclass the main + `core.Magic` class, and register them with IPython to use the magic + functions they provide. The registration process will then ensure that + any methods that have decorated to provide line and/or cell magics will + be recognized with the `%x`/`%%x` syntax as a line/cell magic + respectively. + + If classes are given, they will be instantiated with the default + constructor. 
If your classes need a custom constructor, you should + instanitate them first and pass the instance. + + The provided arguments can be an arbitrary mix of classes and instances. + + Parameters + ---------- + magic_objects : one or more classes or instances + """ + # Start by validating them to ensure they have all had their magic + # methods registered at the instance level + for m in magic_objects: + if not m.registered: + raise ValueError("Class of magics %r was constructed without " + "the @register_magics class decorator") + if isinstance(m, type): + # If we're given an uninstantiated class + m = m(shell=self.shell) + + # Now that we have an instance, we can register it and update the + # table of callables + self.registry[m.__class__.__name__] = m + for mtype in magic_kinds: + self.magics[mtype].update(m.magics[mtype]) + + def register_function(self, func, magic_kind='line', magic_name=None): + """Expose a standalone function as magic function for IPython. + + This will create an IPython magic (line, cell or both) from a + standalone function. The functions should have the following + signatures: + + * For line magics: `def f(line)` + * For cell magics: `def f(line, cell)` + * For a function that does both: `def f(line, cell=None)` + + In the latter case, the function will be called with `cell==None` when + invoked as `%f`, and with cell as a string when invoked as `%%f`. + + Parameters + ---------- + func : callable + Function to be registered as a magic. + + magic_kind : str + Kind of magic, one of 'line', 'cell' or 'line_cell' + + magic_name : optional str + If given, the name the magic will have in the IPython namespace. By + default, the name of the function itself is used. + """ + + # Create the new method in the user_magics and register it in the + # global table + validate_type(magic_kind) + magic_name = func.__name__ if magic_name is None else magic_name + setattr(self.user_magics, magic_name, func) + record_magic(self.magics, magic_kind, magic_name, func) + + def register_alias(self, alias_name, magic_name, magic_kind='line', magic_params=None): + """Register an alias to a magic function. + + The alias is an instance of :class:`MagicAlias`, which holds the + name and kind of the magic it should call. Binding is done at + call time, so if the underlying magic function is changed the alias + will call the new function. + + Parameters + ---------- + alias_name : str + The name of the magic to be registered. + + magic_name : str + The name of an existing magic. + + magic_kind : str + Kind of magic, one of 'line' or 'cell' + """ + + # `validate_type` is too permissive, as it allows 'line_cell' + # which we do not handle. + if magic_kind not in magic_kinds: + raise ValueError('magic_kind must be one of %s, %s given' % + magic_kinds, magic_kind) + + alias = MagicAlias(self.shell, magic_name, magic_kind, magic_params) + setattr(self.user_magics, alias_name, alias) + record_magic(self.magics, magic_kind, alias_name, alias) + +# Key base class that provides the central functionality for magics. + + +class Magics(Configurable): + """Base class for implementing magic functions. + + Shell functions which can be reached as %function_name. All magic + functions should accept a string, which they can parse for their own + needs. This can make some functions easier to type, eg `%cd ../` + vs. 
`%cd("../")` + + Classes providing magic functions need to subclass this class, and they + MUST: + + - Use the method decorators `@line_magic` and `@cell_magic` to decorate + individual methods as magic functions, AND + + - Use the class decorator `@magics_class` to ensure that the magic + methods are properly registered at the instance level upon instance + initialization. + + See :mod:`magic_functions` for examples of actual implementation classes. + """ + # Dict holding all command-line options for each magic. + options_table = None + # Dict for the mapping of magic names to methods, set by class decorator + magics = None + # Flag to check that the class decorator was properly applied + registered = False + # Instance of IPython shell + shell = None + + def __init__(self, shell=None, **kwargs): + if not(self.__class__.registered): + raise ValueError('Magics subclass without registration - ' + 'did you forget to apply @magics_class?') + if shell is not None: + if hasattr(shell, 'configurables'): + shell.configurables.append(self) + if hasattr(shell, 'config'): + kwargs.setdefault('parent', shell) + + self.shell = shell + self.options_table = {} + # The method decorators are run when the instance doesn't exist yet, so + # they can only record the names of the methods they are supposed to + # grab. Only now, that the instance exists, can we create the proper + # mapping to bound methods. So we read the info off the original names + # table and replace each method name by the actual bound method. + # But we mustn't clobber the *class* mapping, in case of multiple instances. + class_magics = self.magics + self.magics = {} + for mtype in magic_kinds: + tab = self.magics[mtype] = {} + cls_tab = class_magics[mtype] + for magic_name, meth_name in cls_tab.items(): + if isinstance(meth_name, str): + # it's a method name, grab it + tab[magic_name] = getattr(self, meth_name) + else: + # it's the real thing + tab[magic_name] = meth_name + # Configurable **needs** to be initiated at the end or the config + # magics get screwed up. + super(Magics, self).__init__(**kwargs) + + def arg_err(self,func): + """Print docstring if incorrect arguments were passed""" + print('Error in arguments:') + print(oinspect.getdoc(func)) + + def format_latex(self, strng): + """Format a string for latex inclusion.""" + + # Characters that need to be escaped for latex: + escape_re = re.compile(r'(%|_|\$|#|&)',re.MULTILINE) + # Magic command names as headers: + cmd_name_re = re.compile(r'^(%s.*?):' % ESC_MAGIC, + re.MULTILINE) + # Magic commands + cmd_re = re.compile(r'(?P<cmd>%s.+?\b)(?!\}\}:)' % ESC_MAGIC, + re.MULTILINE) + # Paragraph continue + par_re = re.compile(r'\\$',re.MULTILINE) + + # The "\n" symbol + newline_re = re.compile(r'\\n') + + # Now build the string for output: + #strng = cmd_name_re.sub(r'\n\\texttt{\\textsl{\\large \1}}:',strng) + strng = cmd_name_re.sub(r'\n\\bigskip\n\\texttt{\\textbf{ \1}}:', + strng) + strng = cmd_re.sub(r'\\texttt{\g<cmd>}',strng) + strng = par_re.sub(r'\\\\',strng) + strng = escape_re.sub(r'\\\1',strng) + strng = newline_re.sub(r'\\textbackslash{}n',strng) + return strng + + def parse_options(self, arg_str, opt_str, *long_opts, **kw): + """Parse options passed to an argument string. + + The interface is similar to that of :func:`getopt.getopt`, but it + returns a :class:`~IPython.utils.struct.Struct` with the options as keys + and the stripped argument string still as a string. + + arg_str is quoted as a true sys.argv vector by using shlex.split. 
+ This allows us to easily expand variables, glob files, quote + arguments, etc. + + Parameters + ---------- + + arg_str : str + The arguments to parse. + + opt_str : str + The options specification. + + mode : str, default 'string' + If given as 'list', the argument string is returned as a list (split + on whitespace) instead of a string. + + list_all : bool, default False + Put all option values in lists. Normally only options + appearing more than once are put in a list. + + posix : bool, default True + Whether to split the input line in POSIX mode or not, as per the + conventions outlined in the :mod:`shlex` module from the standard + library. + """ + + # inject default options at the beginning of the input line + caller = sys._getframe(1).f_code.co_name + arg_str = '%s %s' % (self.options_table.get(caller,''),arg_str) + + mode = kw.get('mode','string') + if mode not in ['string','list']: + raise ValueError('incorrect mode given: %s' % mode) + # Get options + list_all = kw.get('list_all',0) + posix = kw.get('posix', os.name == 'posix') + strict = kw.get('strict', True) + + # Check if we have more than one argument to warrant extra processing: + odict = {} # Dictionary with options + args = arg_str.split() + if len(args) >= 1: + # If the list of inputs only has 0 or 1 thing in it, there's no + # need to look for options + argv = arg_split(arg_str, posix, strict) + # Do regular option processing + try: + opts,args = getopt(argv, opt_str, long_opts) + except GetoptError as e: + raise UsageError('%s ( allowed: "%s" %s)' % (e.msg,opt_str, + " ".join(long_opts))) + for o,a in opts: + if o.startswith('--'): + o = o[2:] + else: + o = o[1:] + try: + odict[o].append(a) + except AttributeError: + odict[o] = [odict[o],a] + except KeyError: + if list_all: + odict[o] = [a] + else: + odict[o] = a + + # Prepare opts,args for return + opts = Struct(odict) + if mode == 'string': + args = ' '.join(args) + + return opts,args + + def default_option(self, fn, optstr): + """Make an entry in the options_table for fn, with value optstr""" + + if fn not in self.lsmagic(): + error("%s is not a magic function" % fn) + self.options_table[fn] = optstr + + +class MagicAlias(object): + """An alias to another magic function. + + An alias is determined by its magic name and magic kind. Lookup + is done at call time, so if the underlying magic changes the alias + will call the new function. + + Use the :meth:`MagicsManager.register_alias` method or the + `%alias_magic` magic function to create and register a new alias. + """ + def __init__(self, shell, magic_name, magic_kind, magic_params=None): + self.shell = shell + self.magic_name = magic_name + self.magic_params = magic_params + self.magic_kind = magic_kind + + self.pretty_target = '%s%s' % (magic_escapes[self.magic_kind], self.magic_name) + self.__doc__ = "Alias for `%s`." % self.pretty_target + + self._in_call = False + + def __call__(self, *args, **kwargs): + """Call the magic alias.""" + fn = self.shell.find_magic(self.magic_name, self.magic_kind) + if fn is None: + raise UsageError("Magic `%s` not found." % self.pretty_target) + + # Protect against infinite recursion. 
+ if self._in_call: + raise UsageError("Infinite recursion detected; " + "magic aliases cannot call themselves.") + self._in_call = True + try: + if self.magic_params: + args_list = list(args) + args_list[0] = self.magic_params + " " + args[0] + args = tuple(args_list) + return fn(*args, **kwargs) + finally: + self._in_call = False diff --git a/contrib/python/ipython/py3/IPython/core/magic_arguments.py b/contrib/python/ipython/py3/IPython/core/magic_arguments.py index 38e03aa176f..9231609572e 100644 --- a/contrib/python/ipython/py3/IPython/core/magic_arguments.py +++ b/contrib/python/ipython/py3/IPython/core/magic_arguments.py @@ -1,278 +1,278 @@ -''' A decorator-based method of constructing IPython magics with `argparse` -option handling. - -New magic functions can be defined like so:: - - from IPython.core.magic_arguments import (argument, magic_arguments, - parse_argstring) - - @magic_arguments() - @argument('-o', '--option', help='An optional argument.') - @argument('arg', type=int, help='An integer positional argument.') - def magic_cool(self, arg): - """ A really cool magic command. - - """ - args = parse_argstring(magic_cool, arg) - ... - -The `@magic_arguments` decorator marks the function as having argparse arguments. -The `@argument` decorator adds an argument using the same syntax as argparse's -`add_argument()` method. More sophisticated uses may also require the -`@argument_group` or `@kwds` decorator to customize the formatting and the -parsing. - -Help text for the magic is automatically generated from the docstring and the -arguments:: - - In[1]: %cool? - %cool [-o OPTION] arg - - A really cool magic command. - - positional arguments: - arg An integer positional argument. - - optional arguments: - -o OPTION, --option OPTION - An optional argument. - -Inheritance diagram: - -.. inheritance-diagram:: IPython.core.magic_arguments - :parts: 3 - -''' -#----------------------------------------------------------------------------- -# Copyright (C) 2010-2011, IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. -#----------------------------------------------------------------------------- -import argparse -import re - -# Our own imports -from IPython.core.error import UsageError -from IPython.utils.decorators import undoc -from IPython.utils.process import arg_split -from IPython.utils.text import dedent - -NAME_RE = re.compile(r"[a-zA-Z][a-zA-Z0-9_-]*$") - -@undoc -class MagicHelpFormatter(argparse.RawDescriptionHelpFormatter): - """A HelpFormatter with a couple of changes to meet our needs. - """ - # Modified to dedent text. - def _fill_text(self, text, width, indent): - return argparse.RawDescriptionHelpFormatter._fill_text(self, dedent(text), width, indent) - - # Modified to wrap argument placeholders in <> where necessary. - def _format_action_invocation(self, action): - if not action.option_strings: - metavar, = self._metavar_formatter(action, action.dest)(1) - return metavar - - else: - parts = [] - - # if the Optional doesn't take a value, format is: - # -s, --long - if action.nargs == 0: - parts.extend(action.option_strings) - - # if the Optional takes a value, format is: - # -s ARGS, --long ARGS - else: - default = action.dest.upper() - args_string = self._format_args(action, default) - # IPYTHON MODIFICATION: If args_string is not a plain name, wrap - # it in <> so it's valid RST. 
- if not NAME_RE.match(args_string): - args_string = "<%s>" % args_string - for option_string in action.option_strings: - parts.append('%s %s' % (option_string, args_string)) - - return ', '.join(parts) - - # Override the default prefix ('usage') to our % magic escape, - # in a code block. - def add_usage(self, usage, actions, groups, prefix="::\n\n %"): - super(MagicHelpFormatter, self).add_usage(usage, actions, groups, prefix) - -class MagicArgumentParser(argparse.ArgumentParser): - """ An ArgumentParser tweaked for use by IPython magics. - """ - def __init__(self, - prog=None, - usage=None, - description=None, - epilog=None, - parents=None, - formatter_class=MagicHelpFormatter, - prefix_chars='-', - argument_default=None, - conflict_handler='error', - add_help=False): - if parents is None: - parents = [] - super(MagicArgumentParser, self).__init__(prog=prog, usage=usage, - description=description, epilog=epilog, - parents=parents, formatter_class=formatter_class, - prefix_chars=prefix_chars, argument_default=argument_default, - conflict_handler=conflict_handler, add_help=add_help) - - def error(self, message): - """ Raise a catchable error instead of exiting. - """ - raise UsageError(message) - - def parse_argstring(self, argstring): - """ Split a string into an argument list and parse that argument list. - """ - argv = arg_split(argstring) - return self.parse_args(argv) - - -def construct_parser(magic_func): - """ Construct an argument parser using the function decorations. - """ - kwds = getattr(magic_func, 'argcmd_kwds', {}) - if 'description' not in kwds: - kwds['description'] = getattr(magic_func, '__doc__', None) - arg_name = real_name(magic_func) - parser = MagicArgumentParser(arg_name, **kwds) - # Reverse the list of decorators in order to apply them in the - # order in which they appear in the source. - group = None - for deco in magic_func.decorators[::-1]: - result = deco.add_to_parser(parser, group) - if result is not None: - group = result - - # Replace the magic function's docstring with the full help text. - magic_func.__doc__ = parser.format_help() - - return parser - - -def parse_argstring(magic_func, argstring): - """ Parse the string of arguments for the given magic function. - """ - return magic_func.parser.parse_argstring(argstring) - - -def real_name(magic_func): - """ Find the real name of the magic. - """ - magic_name = magic_func.__name__ - if magic_name.startswith('magic_'): - magic_name = magic_name[len('magic_'):] - return getattr(magic_func, 'argcmd_name', magic_name) - - -class ArgDecorator(object): - """ Base class for decorators to add ArgumentParser information to a method. - """ - - def __call__(self, func): - if not getattr(func, 'has_arguments', False): - func.has_arguments = True - func.decorators = [] - func.decorators.append(self) - return func - - def add_to_parser(self, parser, group): - """ Add this object's information to the parser, if necessary. - """ - pass - - -class magic_arguments(ArgDecorator): - """ Mark the magic as having argparse arguments and possibly adjust the - name. - """ - - def __init__(self, name=None): - self.name = name - - def __call__(self, func): - if not getattr(func, 'has_arguments', False): - func.has_arguments = True - func.decorators = [] - if self.name is not None: - func.argcmd_name = self.name - # This should be the first decorator in the list of decorators, thus the - # last to execute. Build the parser. 
- func.parser = construct_parser(func) - return func - - -class ArgMethodWrapper(ArgDecorator): - - """ - Base class to define a wrapper for ArgumentParser method. - - Child class must define either `_method_name` or `add_to_parser`. - - """ - - _method_name = None - - def __init__(self, *args, **kwds): - self.args = args - self.kwds = kwds - - def add_to_parser(self, parser, group): - """ Add this object's information to the parser. - """ - if group is not None: - parser = group - getattr(parser, self._method_name)(*self.args, **self.kwds) - return None - - -class argument(ArgMethodWrapper): - """ Store arguments and keywords to pass to add_argument(). - - Instances also serve to decorate command methods. - """ - _method_name = 'add_argument' - - -class defaults(ArgMethodWrapper): - """ Store arguments and keywords to pass to set_defaults(). - - Instances also serve to decorate command methods. - """ - _method_name = 'set_defaults' - - -class argument_group(ArgMethodWrapper): - """ Store arguments and keywords to pass to add_argument_group(). - - Instances also serve to decorate command methods. - """ - - def add_to_parser(self, parser, group): - """ Add this object's information to the parser. - """ - return parser.add_argument_group(*self.args, **self.kwds) - - -class kwds(ArgDecorator): - """ Provide other keywords to the sub-parser constructor. - """ - def __init__(self, **kwds): - self.kwds = kwds - - def __call__(self, func): - func = super(kwds, self).__call__(func) - func.argcmd_kwds = self.kwds - return func - - -__all__ = ['magic_arguments', 'argument', 'argument_group', 'kwds', - 'parse_argstring'] +''' A decorator-based method of constructing IPython magics with `argparse` +option handling. + +New magic functions can be defined like so:: + + from IPython.core.magic_arguments import (argument, magic_arguments, + parse_argstring) + + @magic_arguments() + @argument('-o', '--option', help='An optional argument.') + @argument('arg', type=int, help='An integer positional argument.') + def magic_cool(self, arg): + """ A really cool magic command. + + """ + args = parse_argstring(magic_cool, arg) + ... + +The `@magic_arguments` decorator marks the function as having argparse arguments. +The `@argument` decorator adds an argument using the same syntax as argparse's +`add_argument()` method. More sophisticated uses may also require the +`@argument_group` or `@kwds` decorator to customize the formatting and the +parsing. + +Help text for the magic is automatically generated from the docstring and the +arguments:: + + In[1]: %cool? + %cool [-o OPTION] arg + + A really cool magic command. + + positional arguments: + arg An integer positional argument. + + optional arguments: + -o OPTION, --option OPTION + An optional argument. + +Inheritance diagram: + +.. inheritance-diagram:: IPython.core.magic_arguments + :parts: 3 + +''' +#----------------------------------------------------------------------------- +# Copyright (C) 2010-2011, IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. 
+#----------------------------------------------------------------------------- +import argparse +import re + +# Our own imports +from IPython.core.error import UsageError +from IPython.utils.decorators import undoc +from IPython.utils.process import arg_split +from IPython.utils.text import dedent + +NAME_RE = re.compile(r"[a-zA-Z][a-zA-Z0-9_-]*$") + +@undoc +class MagicHelpFormatter(argparse.RawDescriptionHelpFormatter): + """A HelpFormatter with a couple of changes to meet our needs. + """ + # Modified to dedent text. + def _fill_text(self, text, width, indent): + return argparse.RawDescriptionHelpFormatter._fill_text(self, dedent(text), width, indent) + + # Modified to wrap argument placeholders in <> where necessary. + def _format_action_invocation(self, action): + if not action.option_strings: + metavar, = self._metavar_formatter(action, action.dest)(1) + return metavar + + else: + parts = [] + + # if the Optional doesn't take a value, format is: + # -s, --long + if action.nargs == 0: + parts.extend(action.option_strings) + + # if the Optional takes a value, format is: + # -s ARGS, --long ARGS + else: + default = action.dest.upper() + args_string = self._format_args(action, default) + # IPYTHON MODIFICATION: If args_string is not a plain name, wrap + # it in <> so it's valid RST. + if not NAME_RE.match(args_string): + args_string = "<%s>" % args_string + for option_string in action.option_strings: + parts.append('%s %s' % (option_string, args_string)) + + return ', '.join(parts) + + # Override the default prefix ('usage') to our % magic escape, + # in a code block. + def add_usage(self, usage, actions, groups, prefix="::\n\n %"): + super(MagicHelpFormatter, self).add_usage(usage, actions, groups, prefix) + +class MagicArgumentParser(argparse.ArgumentParser): + """ An ArgumentParser tweaked for use by IPython magics. + """ + def __init__(self, + prog=None, + usage=None, + description=None, + epilog=None, + parents=None, + formatter_class=MagicHelpFormatter, + prefix_chars='-', + argument_default=None, + conflict_handler='error', + add_help=False): + if parents is None: + parents = [] + super(MagicArgumentParser, self).__init__(prog=prog, usage=usage, + description=description, epilog=epilog, + parents=parents, formatter_class=formatter_class, + prefix_chars=prefix_chars, argument_default=argument_default, + conflict_handler=conflict_handler, add_help=add_help) + + def error(self, message): + """ Raise a catchable error instead of exiting. + """ + raise UsageError(message) + + def parse_argstring(self, argstring): + """ Split a string into an argument list and parse that argument list. + """ + argv = arg_split(argstring) + return self.parse_args(argv) + + +def construct_parser(magic_func): + """ Construct an argument parser using the function decorations. + """ + kwds = getattr(magic_func, 'argcmd_kwds', {}) + if 'description' not in kwds: + kwds['description'] = getattr(magic_func, '__doc__', None) + arg_name = real_name(magic_func) + parser = MagicArgumentParser(arg_name, **kwds) + # Reverse the list of decorators in order to apply them in the + # order in which they appear in the source. + group = None + for deco in magic_func.decorators[::-1]: + result = deco.add_to_parser(parser, group) + if result is not None: + group = result + + # Replace the magic function's docstring with the full help text. + magic_func.__doc__ = parser.format_help() + + return parser + + +def parse_argstring(magic_func, argstring): + """ Parse the string of arguments for the given magic function. 
+ """ + return magic_func.parser.parse_argstring(argstring) + + +def real_name(magic_func): + """ Find the real name of the magic. + """ + magic_name = magic_func.__name__ + if magic_name.startswith('magic_'): + magic_name = magic_name[len('magic_'):] + return getattr(magic_func, 'argcmd_name', magic_name) + + +class ArgDecorator(object): + """ Base class for decorators to add ArgumentParser information to a method. + """ + + def __call__(self, func): + if not getattr(func, 'has_arguments', False): + func.has_arguments = True + func.decorators = [] + func.decorators.append(self) + return func + + def add_to_parser(self, parser, group): + """ Add this object's information to the parser, if necessary. + """ + pass + + +class magic_arguments(ArgDecorator): + """ Mark the magic as having argparse arguments and possibly adjust the + name. + """ + + def __init__(self, name=None): + self.name = name + + def __call__(self, func): + if not getattr(func, 'has_arguments', False): + func.has_arguments = True + func.decorators = [] + if self.name is not None: + func.argcmd_name = self.name + # This should be the first decorator in the list of decorators, thus the + # last to execute. Build the parser. + func.parser = construct_parser(func) + return func + + +class ArgMethodWrapper(ArgDecorator): + + """ + Base class to define a wrapper for ArgumentParser method. + + Child class must define either `_method_name` or `add_to_parser`. + + """ + + _method_name = None + + def __init__(self, *args, **kwds): + self.args = args + self.kwds = kwds + + def add_to_parser(self, parser, group): + """ Add this object's information to the parser. + """ + if group is not None: + parser = group + getattr(parser, self._method_name)(*self.args, **self.kwds) + return None + + +class argument(ArgMethodWrapper): + """ Store arguments and keywords to pass to add_argument(). + + Instances also serve to decorate command methods. + """ + _method_name = 'add_argument' + + +class defaults(ArgMethodWrapper): + """ Store arguments and keywords to pass to set_defaults(). + + Instances also serve to decorate command methods. + """ + _method_name = 'set_defaults' + + +class argument_group(ArgMethodWrapper): + """ Store arguments and keywords to pass to add_argument_group(). + + Instances also serve to decorate command methods. + """ + + def add_to_parser(self, parser, group): + """ Add this object's information to the parser. + """ + return parser.add_argument_group(*self.args, **self.kwds) + + +class kwds(ArgDecorator): + """ Provide other keywords to the sub-parser constructor. + """ + def __init__(self, **kwds): + self.kwds = kwds + + def __call__(self, func): + func = super(kwds, self).__call__(func) + func.argcmd_kwds = self.kwds + return func + + +__all__ = ['magic_arguments', 'argument', 'argument_group', 'kwds', + 'parse_argstring'] diff --git a/contrib/python/ipython/py3/IPython/core/magics/__init__.py b/contrib/python/ipython/py3/IPython/core/magics/__init__.py index be494780694..a6c5f474c15 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/__init__.py +++ b/contrib/python/ipython/py3/IPython/core/magics/__init__.py @@ -1,42 +1,42 @@ -"""Implementation of all the magic functions built into IPython. -""" -#----------------------------------------------------------------------------- -# Copyright (c) 2012 The IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. 
-#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -from ..magic import Magics, magics_class -from .auto import AutoMagics -from .basic import BasicMagics, AsyncMagics -from .code import CodeMagics, MacroToEdit -from .config import ConfigMagics -from .display import DisplayMagics -from .execution import ExecutionMagics -from .extension import ExtensionMagics -from .history import HistoryMagics -from .logging import LoggingMagics -from .namespace import NamespaceMagics -from .osm import OSMagics -from .packaging import PackagingMagics -from .pylab import PylabMagics -from .script import ScriptMagics - -#----------------------------------------------------------------------------- -# Magic implementation classes -#----------------------------------------------------------------------------- - -@magics_class -class UserMagics(Magics): - """Placeholder for user-defined magics to be added at runtime. - - All magics are eventually merged into a single namespace at runtime, but we - use this class to isolate the magics defined dynamically by the user into - their own class. - """ +"""Implementation of all the magic functions built into IPython. +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012 The IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +from ..magic import Magics, magics_class +from .auto import AutoMagics +from .basic import BasicMagics, AsyncMagics +from .code import CodeMagics, MacroToEdit +from .config import ConfigMagics +from .display import DisplayMagics +from .execution import ExecutionMagics +from .extension import ExtensionMagics +from .history import HistoryMagics +from .logging import LoggingMagics +from .namespace import NamespaceMagics +from .osm import OSMagics +from .packaging import PackagingMagics +from .pylab import PylabMagics +from .script import ScriptMagics + +#----------------------------------------------------------------------------- +# Magic implementation classes +#----------------------------------------------------------------------------- + +@magics_class +class UserMagics(Magics): + """Placeholder for user-defined magics to be added at runtime. + + All magics are eventually merged into a single namespace at runtime, but we + use this class to isolate the magics defined dynamically by the user into + their own class. + """ diff --git a/contrib/python/ipython/py3/IPython/core/magics/auto.py b/contrib/python/ipython/py3/IPython/core/magics/auto.py index 846b79d3e5c..a18542f43d1 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/auto.py +++ b/contrib/python/ipython/py3/IPython/core/magics/auto.py @@ -1,128 +1,128 @@ -"""Implementation of magic functions that control various automatic behaviors. -""" -#----------------------------------------------------------------------------- -# Copyright (c) 2012 The IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. 
-# -# The full license is in the file COPYING.txt, distributed with this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -# Our own packages -from IPython.core.magic import Bunch, Magics, magics_class, line_magic -from IPython.testing.skipdoctest import skip_doctest -from logging import error - -#----------------------------------------------------------------------------- -# Magic implementation classes -#----------------------------------------------------------------------------- - -@magics_class -class AutoMagics(Magics): - """Magics that control various autoX behaviors.""" - - def __init__(self, shell): - super(AutoMagics, self).__init__(shell) - # namespace for holding state we may need - self._magic_state = Bunch() - - @line_magic - def automagic(self, parameter_s=''): - """Make magic functions callable without having to type the initial %. - - Without arguments toggles on/off (when off, you must call it as - %automagic, of course). With arguments it sets the value, and you can - use any of (case insensitive): - - - on, 1, True: to activate - - - off, 0, False: to deactivate. - - Note that magic functions have lowest priority, so if there's a - variable whose name collides with that of a magic fn, automagic won't - work for that function (you get the variable instead). However, if you - delete the variable (del var), the previously shadowed magic function - becomes visible to automagic again.""" - - arg = parameter_s.lower() - mman = self.shell.magics_manager - if arg in ('on', '1', 'true'): - val = True - elif arg in ('off', '0', 'false'): - val = False - else: - val = not mman.auto_magic - mman.auto_magic = val - print('\n' + self.shell.magics_manager.auto_status()) - - @skip_doctest - @line_magic - def autocall(self, parameter_s=''): - """Make functions callable without having to type parentheses. - - Usage: - - %autocall [mode] - - The mode can be one of: 0->Off, 1->Smart, 2->Full. If not given, the - value is toggled on and off (remembering the previous state). - - In more detail, these values mean: - - 0 -> fully disabled - - 1 -> active, but do not apply if there are no arguments on the line. - - In this mode, you get:: - - In [1]: callable - Out[1]: <built-in function callable> - - In [2]: callable 'hello' - ------> callable('hello') - Out[2]: False - - 2 -> Active always. 
Even if no arguments are present, the callable - object is called:: - - In [2]: float - ------> float() - Out[2]: 0.0 - - Note that even with autocall off, you can still use '/' at the start of - a line to treat the first argument on the command line as a function - and add parentheses to it:: - - In [8]: /str 43 - ------> str(43) - Out[8]: '43' - - # all-random (note for auto-testing) - """ - - if parameter_s: - arg = int(parameter_s) - else: - arg = 'toggle' - - if not arg in (0, 1, 2, 'toggle'): - error('Valid modes: (0->Off, 1->Smart, 2->Full') - return - - if arg in (0, 1, 2): - self.shell.autocall = arg - else: # toggle - if self.shell.autocall: - self._magic_state.autocall_save = self.shell.autocall - self.shell.autocall = 0 - else: - try: - self.shell.autocall = self._magic_state.autocall_save - except AttributeError: - self.shell.autocall = self._magic_state.autocall_save = 1 - - print("Automatic calling is:",['OFF','Smart','Full'][self.shell.autocall]) +"""Implementation of magic functions that control various automatic behaviors. +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012 The IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Our own packages +from IPython.core.magic import Bunch, Magics, magics_class, line_magic +from IPython.testing.skipdoctest import skip_doctest +from logging import error + +#----------------------------------------------------------------------------- +# Magic implementation classes +#----------------------------------------------------------------------------- + +@magics_class +class AutoMagics(Magics): + """Magics that control various autoX behaviors.""" + + def __init__(self, shell): + super(AutoMagics, self).__init__(shell) + # namespace for holding state we may need + self._magic_state = Bunch() + + @line_magic + def automagic(self, parameter_s=''): + """Make magic functions callable without having to type the initial %. + + Without arguments toggles on/off (when off, you must call it as + %automagic, of course). With arguments it sets the value, and you can + use any of (case insensitive): + + - on, 1, True: to activate + + - off, 0, False: to deactivate. + + Note that magic functions have lowest priority, so if there's a + variable whose name collides with that of a magic fn, automagic won't + work for that function (you get the variable instead). However, if you + delete the variable (del var), the previously shadowed magic function + becomes visible to automagic again.""" + + arg = parameter_s.lower() + mman = self.shell.magics_manager + if arg in ('on', '1', 'true'): + val = True + elif arg in ('off', '0', 'false'): + val = False + else: + val = not mman.auto_magic + mman.auto_magic = val + print('\n' + self.shell.magics_manager.auto_status()) + + @skip_doctest + @line_magic + def autocall(self, parameter_s=''): + """Make functions callable without having to type parentheses. + + Usage: + + %autocall [mode] + + The mode can be one of: 0->Off, 1->Smart, 2->Full. If not given, the + value is toggled on and off (remembering the previous state). 
+ + In more detail, these values mean: + + 0 -> fully disabled + + 1 -> active, but do not apply if there are no arguments on the line. + + In this mode, you get:: + + In [1]: callable + Out[1]: <built-in function callable> + + In [2]: callable 'hello' + ------> callable('hello') + Out[2]: False + + 2 -> Active always. Even if no arguments are present, the callable + object is called:: + + In [2]: float + ------> float() + Out[2]: 0.0 + + Note that even with autocall off, you can still use '/' at the start of + a line to treat the first argument on the command line as a function + and add parentheses to it:: + + In [8]: /str 43 + ------> str(43) + Out[8]: '43' + + # all-random (note for auto-testing) + """ + + if parameter_s: + arg = int(parameter_s) + else: + arg = 'toggle' + + if not arg in (0, 1, 2, 'toggle'): + error('Valid modes: (0->Off, 1->Smart, 2->Full') + return + + if arg in (0, 1, 2): + self.shell.autocall = arg + else: # toggle + if self.shell.autocall: + self._magic_state.autocall_save = self.shell.autocall + self.shell.autocall = 0 + else: + try: + self.shell.autocall = self._magic_state.autocall_save + except AttributeError: + self.shell.autocall = self._magic_state.autocall_save = 1 + + print("Automatic calling is:",['OFF','Smart','Full'][self.shell.autocall]) diff --git a/contrib/python/ipython/py3/IPython/core/magics/basic.py b/contrib/python/ipython/py3/IPython/core/magics/basic.py index 9b526f9fda0..72cfc804143 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/basic.py +++ b/contrib/python/ipython/py3/IPython/core/magics/basic.py @@ -1,664 +1,664 @@ -"""Implementation of basic magic functions.""" - - -import argparse -from logging import error -import io -from pprint import pformat -import sys -from warnings import warn - -from traitlets.utils.importstring import import_item -from IPython.core import magic_arguments, page -from IPython.core.error import UsageError -from IPython.core.magic import Magics, magics_class, line_magic, magic_escapes -from IPython.utils.text import format_screen, dedent, indent -from IPython.testing.skipdoctest import skip_doctest -from IPython.utils.ipstruct import Struct - - -class MagicsDisplay(object): - def __init__(self, magics_manager, ignore=None): - self.ignore = ignore if ignore else [] - self.magics_manager = magics_manager - - def _lsmagic(self): - """The main implementation of the %lsmagic""" - mesc = magic_escapes['line'] - cesc = magic_escapes['cell'] - mman = self.magics_manager - magics = mman.lsmagic() - out = ['Available line magics:', - mesc + (' '+mesc).join(sorted([m for m,v in magics['line'].items() if (v not in self.ignore)])), - '', - 'Available cell magics:', - cesc + (' '+cesc).join(sorted([m for m,v in magics['cell'].items() if (v not in self.ignore)])), - '', - mman.auto_status()] - return '\n'.join(out) - - def _repr_pretty_(self, p, cycle): - p.text(self._lsmagic()) - - def __str__(self): - return self._lsmagic() - - def _jsonable(self): - """turn magics dict into jsonable dict of the same structure - - replaces object instances with their class names as strings - """ - magic_dict = {} - mman = self.magics_manager - magics = mman.lsmagic() - for key, subdict in magics.items(): - d = {} - magic_dict[key] = d - for name, obj in subdict.items(): - try: - classname = obj.__self__.__class__.__name__ - except AttributeError: - classname = 'Other' - - d[name] = classname - return magic_dict - - def _repr_json_(self): - return self._jsonable() - - -@magics_class -class BasicMagics(Magics): - """Magics that 
provide central IPython functionality. - - These are various magics that don't fit into specific categories but that - are all part of the base 'IPython experience'.""" - - @magic_arguments.magic_arguments() - @magic_arguments.argument( - '-l', '--line', action='store_true', - help="""Create a line magic alias.""" - ) - @magic_arguments.argument( - '-c', '--cell', action='store_true', - help="""Create a cell magic alias.""" - ) - @magic_arguments.argument( - 'name', - help="""Name of the magic to be created.""" - ) - @magic_arguments.argument( - 'target', - help="""Name of the existing line or cell magic.""" - ) - @magic_arguments.argument( - '-p', '--params', default=None, - help="""Parameters passed to the magic function.""" - ) - @line_magic - def alias_magic(self, line=''): - """Create an alias for an existing line or cell magic. - - Examples - -------- - :: - - In [1]: %alias_magic t timeit - Created `%t` as an alias for `%timeit`. - Created `%%t` as an alias for `%%timeit`. - - In [2]: %t -n1 pass - 1 loops, best of 3: 954 ns per loop - - In [3]: %%t -n1 - ...: pass - ...: - 1 loops, best of 3: 954 ns per loop - - In [4]: %alias_magic --cell whereami pwd - UsageError: Cell magic function `%%pwd` not found. - In [5]: %alias_magic --line whereami pwd - Created `%whereami` as an alias for `%pwd`. - - In [6]: %whereami - Out[6]: u'/home/testuser' - - In [7]: %alias_magic h history "-p -l 30" --line - Created `%h` as an alias for `%history -l 30`. - """ - - args = magic_arguments.parse_argstring(self.alias_magic, line) - shell = self.shell - mman = self.shell.magics_manager - escs = ''.join(magic_escapes.values()) - - target = args.target.lstrip(escs) - name = args.name.lstrip(escs) - - params = args.params - if (params and - ((params.startswith('"') and params.endswith('"')) - or (params.startswith("'") and params.endswith("'")))): - params = params[1:-1] - - # Find the requested magics. - m_line = shell.find_magic(target, 'line') - m_cell = shell.find_magic(target, 'cell') - if args.line and m_line is None: - raise UsageError('Line magic function `%s%s` not found.' % - (magic_escapes['line'], target)) - if args.cell and m_cell is None: - raise UsageError('Cell magic function `%s%s` not found.' % - (magic_escapes['cell'], target)) - - # If --line and --cell are not specified, default to the ones - # that are available. - if not args.line and not args.cell: - if not m_line and not m_cell: - raise UsageError( - 'No line or cell magic with name `%s` found.' % target - ) - args.line = bool(m_line) - args.cell = bool(m_cell) - - params_str = "" if params is None else " " + params - - if args.line: - mman.register_alias(name, target, 'line', params) - print('Created `%s%s` as an alias for `%s%s%s`.' % ( - magic_escapes['line'], name, - magic_escapes['line'], target, params_str)) - - if args.cell: - mman.register_alias(name, target, 'cell', params) - print('Created `%s%s` as an alias for `%s%s%s`.' 
% ( - magic_escapes['cell'], name, - magic_escapes['cell'], target, params_str)) - - @line_magic - def lsmagic(self, parameter_s=''): - """List currently available magic functions.""" - return MagicsDisplay(self.shell.magics_manager, ignore=[]) - - def _magic_docs(self, brief=False, rest=False): - """Return docstrings from magic functions.""" - mman = self.shell.magics_manager - docs = mman.lsmagic_docs(brief, missing='No documentation') - - if rest: - format_string = '**%s%s**::\n\n%s\n\n' - else: - format_string = '%s%s:\n%s\n' - - return ''.join( - [format_string % (magic_escapes['line'], fname, - indent(dedent(fndoc))) - for fname, fndoc in sorted(docs['line'].items())] - + - [format_string % (magic_escapes['cell'], fname, - indent(dedent(fndoc))) - for fname, fndoc in sorted(docs['cell'].items())] - ) - - @line_magic - def magic(self, parameter_s=''): - """Print information about the magic function system. - - Supported formats: -latex, -brief, -rest - """ - - mode = '' - try: - mode = parameter_s.split()[0][1:] - except IndexError: - pass - - brief = (mode == 'brief') - rest = (mode == 'rest') - magic_docs = self._magic_docs(brief, rest) - - if mode == 'latex': - print(self.format_latex(magic_docs)) - return - else: - magic_docs = format_screen(magic_docs) - - out = [""" -IPython's 'magic' functions -=========================== - -The magic function system provides a series of functions which allow you to -control the behavior of IPython itself, plus a lot of system-type -features. There are two kinds of magics, line-oriented and cell-oriented. - -Line magics are prefixed with the % character and work much like OS -command-line calls: they get as an argument the rest of the line, where -arguments are passed without parentheses or quotes. For example, this will -time the given statement:: - - %timeit range(1000) - -Cell magics are prefixed with a double %%, and they are functions that get as -an argument not only the rest of the line, but also the lines below it in a -separate argument. These magics are called with two arguments: the rest of the -call line and the body of the cell, consisting of the lines below the first. -For example:: - - %%timeit x = numpy.random.randn((100, 100)) - numpy.linalg.svd(x) - -will time the execution of the numpy svd routine, running the assignment of x -as part of the setup phase, which is not timed. - -In a line-oriented client (the terminal or Qt console IPython), starting a new -input with %% will automatically enter cell mode, and IPython will continue -reading input until a blank line is given. In the notebook, simply type the -whole cell as one entity, but keep in mind that the %% escape can only be at -the very start of the cell. - -NOTE: If you have 'automagic' enabled (via the command line option or with the -%automagic function), you don't need to type in the % explicitly for line -magics; cell magics always require an explicit '%%' escape. By default, -IPython ships with automagic on, so you should only rarely need the % escape. - -Example: typing '%cd mydir' (without the quotes) changes your working directory -to 'mydir', if it exists. - -For a list of the available magic functions, use %lsmagic. For a description -of any of them, type %magic_name?, e.g. '%cd?'. 
- -Currently the magic system has the following functions:""", - magic_docs, - "Summary of magic functions (from %slsmagic):" % magic_escapes['line'], - str(self.lsmagic()), - ] - page.page('\n'.join(out)) - - - @line_magic - def page(self, parameter_s=''): - """Pretty print the object and display it through a pager. - - %page [options] OBJECT - - If no object is given, use _ (last output). - - Options: - - -r: page str(object), don't pretty-print it.""" - - # After a function contributed by Olivier Aubert, slightly modified. - - # Process options/args - opts, args = self.parse_options(parameter_s, 'r') - raw = 'r' in opts - - oname = args and args or '_' - info = self.shell._ofind(oname) - if info['found']: - txt = (raw and str or pformat)( info['obj'] ) - page.page(txt) - else: - print('Object `%s` not found' % oname) - - @line_magic - def pprint(self, parameter_s=''): - """Toggle pretty printing on/off.""" - ptformatter = self.shell.display_formatter.formatters['text/plain'] - ptformatter.pprint = bool(1 - ptformatter.pprint) - print('Pretty printing has been turned', - ['OFF','ON'][ptformatter.pprint]) - - @line_magic - def colors(self, parameter_s=''): - """Switch color scheme for prompts, info system and exception handlers. - - Currently implemented schemes: NoColor, Linux, LightBG. - - Color scheme names are not case-sensitive. - - Examples - -------- - To get a plain black and white terminal:: - - %colors nocolor - """ - def color_switch_err(name): - warn('Error changing %s color schemes.\n%s' % - (name, sys.exc_info()[1]), stacklevel=2) - - - new_scheme = parameter_s.strip() - if not new_scheme: - raise UsageError( - "%colors: you must specify a color scheme. See '%colors?'") - # local shortcut - shell = self.shell - - # Set shell colour scheme - try: - shell.colors = new_scheme - shell.refresh_style() - except: - color_switch_err('shell') - - # Set exception colors - try: - shell.InteractiveTB.set_colors(scheme = new_scheme) - shell.SyntaxTB.set_colors(scheme = new_scheme) - except: - color_switch_err('exception') - - # Set info (for 'object?') colors - if shell.color_info: - try: - shell.inspector.set_active_scheme(new_scheme) - except: - color_switch_err('object inspector') - else: - shell.inspector.set_active_scheme('NoColor') - - @line_magic - def xmode(self, parameter_s=''): - """Switch modes for the exception handlers. - - Valid modes: Plain, Context, Verbose, and Minimal. - - If called without arguments, acts as a toggle. - - When in verbose mode the value --show (and --hide) - will respectively show (or hide) frames with ``__tracebackhide__ = - True`` value set. - """ - - def xmode_switch_err(name): - warn('Error changing %s exception modes.\n%s' % - (name,sys.exc_info()[1])) - - shell = self.shell - if parameter_s.strip() == "--show": - shell.InteractiveTB.skip_hidden = False - return - if parameter_s.strip() == "--hide": - shell.InteractiveTB.skip_hidden = True - return - - new_mode = parameter_s.strip().capitalize() - try: - shell.InteractiveTB.set_mode(mode=new_mode) - print('Exception reporting mode:',shell.InteractiveTB.mode) - except: - xmode_switch_err('user') - - @line_magic - def quickref(self, arg): - """ Show a quick reference sheet """ - from IPython.core.usage import quick_reference - qr = quick_reference + self._magic_docs(brief=True) - page.page(qr) - - @line_magic - def doctest_mode(self, parameter_s=''): - """Toggle doctest mode on and off. 
- - This mode is intended to make IPython behave as much as possible like a - plain Python shell, from the perspective of how its prompts, exceptions - and output look. This makes it easy to copy and paste parts of a - session into doctests. It does so by: - - - Changing the prompts to the classic ``>>>`` ones. - - Changing the exception reporting mode to 'Plain'. - - Disabling pretty-printing of output. - - Note that IPython also supports the pasting of code snippets that have - leading '>>>' and '...' prompts in them. This means that you can paste - doctests from files or docstrings (even if they have leading - whitespace), and the code will execute correctly. You can then use - '%history -t' to see the translated history; this will give you the - input after removal of all the leading prompts and whitespace, which - can be pasted back into an editor. - - With these features, you can switch into this mode easily whenever you - need to do testing and changes to doctests, without having to leave - your existing IPython session. - """ - - # Shorthands - shell = self.shell - meta = shell.meta - disp_formatter = self.shell.display_formatter - ptformatter = disp_formatter.formatters['text/plain'] - # dstore is a data store kept in the instance metadata bag to track any - # changes we make, so we can undo them later. - dstore = meta.setdefault('doctest_mode',Struct()) - save_dstore = dstore.setdefault - - # save a few values we'll need to recover later - mode = save_dstore('mode',False) - save_dstore('rc_pprint',ptformatter.pprint) - save_dstore('xmode',shell.InteractiveTB.mode) - save_dstore('rc_separate_out',shell.separate_out) - save_dstore('rc_separate_out2',shell.separate_out2) - save_dstore('rc_separate_in',shell.separate_in) - save_dstore('rc_active_types',disp_formatter.active_types) - - if not mode: - # turn on - - # Prompt separators like plain python - shell.separate_in = '' - shell.separate_out = '' - shell.separate_out2 = '' - - - ptformatter.pprint = False - disp_formatter.active_types = ['text/plain'] - - shell.magic('xmode Plain') - else: - # turn off - shell.separate_in = dstore.rc_separate_in - - shell.separate_out = dstore.rc_separate_out - shell.separate_out2 = dstore.rc_separate_out2 - - ptformatter.pprint = dstore.rc_pprint - disp_formatter.active_types = dstore.rc_active_types - - shell.magic('xmode ' + dstore.xmode) - - # mode here is the state before we switch; switch_doctest_mode takes - # the mode we're switching to. - shell.switch_doctest_mode(not mode) - - # Store new mode and inform - dstore.mode = bool(not mode) - mode_label = ['OFF','ON'][dstore.mode] - print('Doctest mode is:', mode_label) - - @line_magic - def gui(self, parameter_s=''): - """Enable or disable IPython GUI event loop integration. - - %gui [GUINAME] - - This magic replaces IPython's threaded shells that were activated - using the (pylab/wthread/etc.) command line flags. GUI toolkits - can now be enabled at runtime and keyboard - interrupts should work without any problems. 
The following toolkits - are supported: wxPython, PyQt4, PyGTK, Tk and Cocoa (OSX):: - - %gui wx # enable wxPython event loop integration - %gui qt4|qt # enable PyQt4 event loop integration - %gui qt5 # enable PyQt5 event loop integration - %gui gtk # enable PyGTK event loop integration - %gui gtk3 # enable Gtk3 event loop integration - %gui gtk4 # enable Gtk4 event loop integration - %gui tk # enable Tk event loop integration - %gui osx # enable Cocoa event loop integration - # (requires %matplotlib 1.1) - %gui # disable all event loop integration - - WARNING: after any of these has been called you can simply create - an application object, but DO NOT start the event loop yourself, as - we have already handled that. - """ - opts, arg = self.parse_options(parameter_s, '') - if arg=='': arg = None - try: - return self.shell.enable_gui(arg) - except Exception as e: - # print simple error message, rather than traceback if we can't - # hook up the GUI - error(str(e)) - - @skip_doctest - @line_magic - def precision(self, s=''): - """Set floating point precision for pretty printing. - - Can set either integer precision or a format string. - - If numpy has been imported and precision is an int, - numpy display precision will also be set, via ``numpy.set_printoptions``. - - If no argument is given, defaults will be restored. - - Examples - -------- - :: - - In [1]: from math import pi - - In [2]: %precision 3 - Out[2]: u'%.3f' - - In [3]: pi - Out[3]: 3.142 - - In [4]: %precision %i - Out[4]: u'%i' - - In [5]: pi - Out[5]: 3 - - In [6]: %precision %e - Out[6]: u'%e' - - In [7]: pi**10 - Out[7]: 9.364805e+04 - - In [8]: %precision - Out[8]: u'%r' - - In [9]: pi**10 - Out[9]: 93648.047476082982 - """ - ptformatter = self.shell.display_formatter.formatters['text/plain'] - ptformatter.float_precision = s - return ptformatter.float_format - - @magic_arguments.magic_arguments() - @magic_arguments.argument( - '-e', '--export', action='store_true', default=False, - help=argparse.SUPPRESS - ) - @magic_arguments.argument( - 'filename', type=str, - help='Notebook name or filename' - ) - @line_magic - def notebook(self, s): - """Export and convert IPython notebooks. - - This function can export the current IPython history to a notebook file. - For example, to export the history to "foo.ipynb" do "%notebook foo.ipynb". - - The -e or --export flag is deprecated in IPython 5.2, and will be - removed in the future. - """ - args = magic_arguments.parse_argstring(self.notebook, s) - - from nbformat import write, v4 - - cells = [] - hist = list(self.shell.history_manager.get_range()) - if(len(hist)<=1): - raise ValueError('History is empty, cannot export') - for session, execution_count, source in hist[:-1]: - cells.append(v4.new_code_cell( - execution_count=execution_count, - source=source - )) - nb = v4.new_notebook(cells=cells) - with io.open(args.filename, 'w', encoding='utf-8') as f: - write(nb, f, version=4) - -@magics_class -class AsyncMagics(BasicMagics): - - @line_magic - def autoawait(self, parameter_s): - """ - Allow to change the status of the autoawait option. - - This allow you to set a specific asynchronous code runner. - - If no value is passed, print the currently used asynchronous integration - and whether it is activated. 
- - It can take a number of value evaluated in the following order: - - - False/false/off deactivate autoawait integration - - True/true/on activate autoawait integration using configured default - loop - - asyncio/curio/trio activate autoawait integration and use integration - with said library. - - - `sync` turn on the pseudo-sync integration (mostly used for - `IPython.embed()` which does not run IPython with a real eventloop and - deactivate running asynchronous code. Turning on Asynchronous code with - the pseudo sync loop is undefined behavior and may lead IPython to crash. - - If the passed parameter does not match any of the above and is a python - identifier, get said object from user namespace and set it as the - runner, and activate autoawait. - - If the object is a fully qualified object name, attempt to import it and - set it as the runner, and activate autoawait. - - - The exact behavior of autoawait is experimental and subject to change - across version of IPython and Python. - """ - - param = parameter_s.strip() - d = {True: "on", False: "off"} - - if not param: - print("IPython autoawait is `{}`, and set to use `{}`".format( - d[self.shell.autoawait], - self.shell.loop_runner - )) - return None - - if param.lower() in ('false', 'off'): - self.shell.autoawait = False - return None - if param.lower() in ('true', 'on'): - self.shell.autoawait = True - return None - - if param in self.shell.loop_runner_map: - self.shell.loop_runner, self.shell.autoawait = self.shell.loop_runner_map[param] - return None - - if param in self.shell.user_ns : - self.shell.loop_runner = self.shell.user_ns[param] - self.shell.autoawait = True - return None - - runner = import_item(param) - - self.shell.loop_runner = runner - self.shell.autoawait = True +"""Implementation of basic magic functions.""" + + +import argparse +from logging import error +import io +from pprint import pformat +import sys +from warnings import warn + +from traitlets.utils.importstring import import_item +from IPython.core import magic_arguments, page +from IPython.core.error import UsageError +from IPython.core.magic import Magics, magics_class, line_magic, magic_escapes +from IPython.utils.text import format_screen, dedent, indent +from IPython.testing.skipdoctest import skip_doctest +from IPython.utils.ipstruct import Struct + + +class MagicsDisplay(object): + def __init__(self, magics_manager, ignore=None): + self.ignore = ignore if ignore else [] + self.magics_manager = magics_manager + + def _lsmagic(self): + """The main implementation of the %lsmagic""" + mesc = magic_escapes['line'] + cesc = magic_escapes['cell'] + mman = self.magics_manager + magics = mman.lsmagic() + out = ['Available line magics:', + mesc + (' '+mesc).join(sorted([m for m,v in magics['line'].items() if (v not in self.ignore)])), + '', + 'Available cell magics:', + cesc + (' '+cesc).join(sorted([m for m,v in magics['cell'].items() if (v not in self.ignore)])), + '', + mman.auto_status()] + return '\n'.join(out) + + def _repr_pretty_(self, p, cycle): + p.text(self._lsmagic()) + + def __str__(self): + return self._lsmagic() + + def _jsonable(self): + """turn magics dict into jsonable dict of the same structure + + replaces object instances with their class names as strings + """ + magic_dict = {} + mman = self.magics_manager + magics = mman.lsmagic() + for key, subdict in magics.items(): + d = {} + magic_dict[key] = d + for name, obj in subdict.items(): + try: + classname = obj.__self__.__class__.__name__ + except AttributeError: + classname = 'Other' 
+ + d[name] = classname + return magic_dict + + def _repr_json_(self): + return self._jsonable() + + +@magics_class +class BasicMagics(Magics): + """Magics that provide central IPython functionality. + + These are various magics that don't fit into specific categories but that + are all part of the base 'IPython experience'.""" + + @magic_arguments.magic_arguments() + @magic_arguments.argument( + '-l', '--line', action='store_true', + help="""Create a line magic alias.""" + ) + @magic_arguments.argument( + '-c', '--cell', action='store_true', + help="""Create a cell magic alias.""" + ) + @magic_arguments.argument( + 'name', + help="""Name of the magic to be created.""" + ) + @magic_arguments.argument( + 'target', + help="""Name of the existing line or cell magic.""" + ) + @magic_arguments.argument( + '-p', '--params', default=None, + help="""Parameters passed to the magic function.""" + ) + @line_magic + def alias_magic(self, line=''): + """Create an alias for an existing line or cell magic. + + Examples + -------- + :: + + In [1]: %alias_magic t timeit + Created `%t` as an alias for `%timeit`. + Created `%%t` as an alias for `%%timeit`. + + In [2]: %t -n1 pass + 1 loops, best of 3: 954 ns per loop + + In [3]: %%t -n1 + ...: pass + ...: + 1 loops, best of 3: 954 ns per loop + + In [4]: %alias_magic --cell whereami pwd + UsageError: Cell magic function `%%pwd` not found. + In [5]: %alias_magic --line whereami pwd + Created `%whereami` as an alias for `%pwd`. + + In [6]: %whereami + Out[6]: u'/home/testuser' + + In [7]: %alias_magic h history "-p -l 30" --line + Created `%h` as an alias for `%history -l 30`. + """ + + args = magic_arguments.parse_argstring(self.alias_magic, line) + shell = self.shell + mman = self.shell.magics_manager + escs = ''.join(magic_escapes.values()) + + target = args.target.lstrip(escs) + name = args.name.lstrip(escs) + + params = args.params + if (params and + ((params.startswith('"') and params.endswith('"')) + or (params.startswith("'") and params.endswith("'")))): + params = params[1:-1] + + # Find the requested magics. + m_line = shell.find_magic(target, 'line') + m_cell = shell.find_magic(target, 'cell') + if args.line and m_line is None: + raise UsageError('Line magic function `%s%s` not found.' % + (magic_escapes['line'], target)) + if args.cell and m_cell is None: + raise UsageError('Cell magic function `%s%s` not found.' % + (magic_escapes['cell'], target)) + + # If --line and --cell are not specified, default to the ones + # that are available. + if not args.line and not args.cell: + if not m_line and not m_cell: + raise UsageError( + 'No line or cell magic with name `%s` found.' % target + ) + args.line = bool(m_line) + args.cell = bool(m_cell) + + params_str = "" if params is None else " " + params + + if args.line: + mman.register_alias(name, target, 'line', params) + print('Created `%s%s` as an alias for `%s%s%s`.' % ( + magic_escapes['line'], name, + magic_escapes['line'], target, params_str)) + + if args.cell: + mman.register_alias(name, target, 'cell', params) + print('Created `%s%s` as an alias for `%s%s%s`.' 
% ( + magic_escapes['cell'], name, + magic_escapes['cell'], target, params_str)) + + @line_magic + def lsmagic(self, parameter_s=''): + """List currently available magic functions.""" + return MagicsDisplay(self.shell.magics_manager, ignore=[]) + + def _magic_docs(self, brief=False, rest=False): + """Return docstrings from magic functions.""" + mman = self.shell.magics_manager + docs = mman.lsmagic_docs(brief, missing='No documentation') + + if rest: + format_string = '**%s%s**::\n\n%s\n\n' + else: + format_string = '%s%s:\n%s\n' + + return ''.join( + [format_string % (magic_escapes['line'], fname, + indent(dedent(fndoc))) + for fname, fndoc in sorted(docs['line'].items())] + + + [format_string % (magic_escapes['cell'], fname, + indent(dedent(fndoc))) + for fname, fndoc in sorted(docs['cell'].items())] + ) + + @line_magic + def magic(self, parameter_s=''): + """Print information about the magic function system. + + Supported formats: -latex, -brief, -rest + """ + + mode = '' + try: + mode = parameter_s.split()[0][1:] + except IndexError: + pass + + brief = (mode == 'brief') + rest = (mode == 'rest') + magic_docs = self._magic_docs(brief, rest) + + if mode == 'latex': + print(self.format_latex(magic_docs)) + return + else: + magic_docs = format_screen(magic_docs) + + out = [""" +IPython's 'magic' functions +=========================== + +The magic function system provides a series of functions which allow you to +control the behavior of IPython itself, plus a lot of system-type +features. There are two kinds of magics, line-oriented and cell-oriented. + +Line magics are prefixed with the % character and work much like OS +command-line calls: they get as an argument the rest of the line, where +arguments are passed without parentheses or quotes. For example, this will +time the given statement:: + + %timeit range(1000) + +Cell magics are prefixed with a double %%, and they are functions that get as +an argument not only the rest of the line, but also the lines below it in a +separate argument. These magics are called with two arguments: the rest of the +call line and the body of the cell, consisting of the lines below the first. +For example:: + + %%timeit x = numpy.random.randn((100, 100)) + numpy.linalg.svd(x) + +will time the execution of the numpy svd routine, running the assignment of x +as part of the setup phase, which is not timed. + +In a line-oriented client (the terminal or Qt console IPython), starting a new +input with %% will automatically enter cell mode, and IPython will continue +reading input until a blank line is given. In the notebook, simply type the +whole cell as one entity, but keep in mind that the %% escape can only be at +the very start of the cell. + +NOTE: If you have 'automagic' enabled (via the command line option or with the +%automagic function), you don't need to type in the % explicitly for line +magics; cell magics always require an explicit '%%' escape. By default, +IPython ships with automagic on, so you should only rarely need the % escape. + +Example: typing '%cd mydir' (without the quotes) changes your working directory +to 'mydir', if it exists. + +For a list of the available magic functions, use %lsmagic. For a description +of any of them, type %magic_name?, e.g. '%cd?'. 
+ +Currently the magic system has the following functions:""", + magic_docs, + "Summary of magic functions (from %slsmagic):" % magic_escapes['line'], + str(self.lsmagic()), + ] + page.page('\n'.join(out)) + + + @line_magic + def page(self, parameter_s=''): + """Pretty print the object and display it through a pager. + + %page [options] OBJECT + + If no object is given, use _ (last output). + + Options: + + -r: page str(object), don't pretty-print it.""" + + # After a function contributed by Olivier Aubert, slightly modified. + + # Process options/args + opts, args = self.parse_options(parameter_s, 'r') + raw = 'r' in opts + + oname = args and args or '_' + info = self.shell._ofind(oname) + if info['found']: + txt = (raw and str or pformat)( info['obj'] ) + page.page(txt) + else: + print('Object `%s` not found' % oname) + + @line_magic + def pprint(self, parameter_s=''): + """Toggle pretty printing on/off.""" + ptformatter = self.shell.display_formatter.formatters['text/plain'] + ptformatter.pprint = bool(1 - ptformatter.pprint) + print('Pretty printing has been turned', + ['OFF','ON'][ptformatter.pprint]) + + @line_magic + def colors(self, parameter_s=''): + """Switch color scheme for prompts, info system and exception handlers. + + Currently implemented schemes: NoColor, Linux, LightBG. + + Color scheme names are not case-sensitive. + + Examples + -------- + To get a plain black and white terminal:: + + %colors nocolor + """ + def color_switch_err(name): + warn('Error changing %s color schemes.\n%s' % + (name, sys.exc_info()[1]), stacklevel=2) + + + new_scheme = parameter_s.strip() + if not new_scheme: + raise UsageError( + "%colors: you must specify a color scheme. See '%colors?'") + # local shortcut + shell = self.shell + + # Set shell colour scheme + try: + shell.colors = new_scheme + shell.refresh_style() + except: + color_switch_err('shell') + + # Set exception colors + try: + shell.InteractiveTB.set_colors(scheme = new_scheme) + shell.SyntaxTB.set_colors(scheme = new_scheme) + except: + color_switch_err('exception') + + # Set info (for 'object?') colors + if shell.color_info: + try: + shell.inspector.set_active_scheme(new_scheme) + except: + color_switch_err('object inspector') + else: + shell.inspector.set_active_scheme('NoColor') + + @line_magic + def xmode(self, parameter_s=''): + """Switch modes for the exception handlers. + + Valid modes: Plain, Context, Verbose, and Minimal. + + If called without arguments, acts as a toggle. + + When in verbose mode the value --show (and --hide) + will respectively show (or hide) frames with ``__tracebackhide__ = + True`` value set. + """ + + def xmode_switch_err(name): + warn('Error changing %s exception modes.\n%s' % + (name,sys.exc_info()[1])) + + shell = self.shell + if parameter_s.strip() == "--show": + shell.InteractiveTB.skip_hidden = False + return + if parameter_s.strip() == "--hide": + shell.InteractiveTB.skip_hidden = True + return + + new_mode = parameter_s.strip().capitalize() + try: + shell.InteractiveTB.set_mode(mode=new_mode) + print('Exception reporting mode:',shell.InteractiveTB.mode) + except: + xmode_switch_err('user') + + @line_magic + def quickref(self, arg): + """ Show a quick reference sheet """ + from IPython.core.usage import quick_reference + qr = quick_reference + self._magic_docs(brief=True) + page.page(qr) + + @line_magic + def doctest_mode(self, parameter_s=''): + """Toggle doctest mode on and off. 
+ + This mode is intended to make IPython behave as much as possible like a + plain Python shell, from the perspective of how its prompts, exceptions + and output look. This makes it easy to copy and paste parts of a + session into doctests. It does so by: + + - Changing the prompts to the classic ``>>>`` ones. + - Changing the exception reporting mode to 'Plain'. + - Disabling pretty-printing of output. + + Note that IPython also supports the pasting of code snippets that have + leading '>>>' and '...' prompts in them. This means that you can paste + doctests from files or docstrings (even if they have leading + whitespace), and the code will execute correctly. You can then use + '%history -t' to see the translated history; this will give you the + input after removal of all the leading prompts and whitespace, which + can be pasted back into an editor. + + With these features, you can switch into this mode easily whenever you + need to do testing and changes to doctests, without having to leave + your existing IPython session. + """ + + # Shorthands + shell = self.shell + meta = shell.meta + disp_formatter = self.shell.display_formatter + ptformatter = disp_formatter.formatters['text/plain'] + # dstore is a data store kept in the instance metadata bag to track any + # changes we make, so we can undo them later. + dstore = meta.setdefault('doctest_mode',Struct()) + save_dstore = dstore.setdefault + + # save a few values we'll need to recover later + mode = save_dstore('mode',False) + save_dstore('rc_pprint',ptformatter.pprint) + save_dstore('xmode',shell.InteractiveTB.mode) + save_dstore('rc_separate_out',shell.separate_out) + save_dstore('rc_separate_out2',shell.separate_out2) + save_dstore('rc_separate_in',shell.separate_in) + save_dstore('rc_active_types',disp_formatter.active_types) + + if not mode: + # turn on + + # Prompt separators like plain python + shell.separate_in = '' + shell.separate_out = '' + shell.separate_out2 = '' + + + ptformatter.pprint = False + disp_formatter.active_types = ['text/plain'] + + shell.magic('xmode Plain') + else: + # turn off + shell.separate_in = dstore.rc_separate_in + + shell.separate_out = dstore.rc_separate_out + shell.separate_out2 = dstore.rc_separate_out2 + + ptformatter.pprint = dstore.rc_pprint + disp_formatter.active_types = dstore.rc_active_types + + shell.magic('xmode ' + dstore.xmode) + + # mode here is the state before we switch; switch_doctest_mode takes + # the mode we're switching to. + shell.switch_doctest_mode(not mode) + + # Store new mode and inform + dstore.mode = bool(not mode) + mode_label = ['OFF','ON'][dstore.mode] + print('Doctest mode is:', mode_label) + + @line_magic + def gui(self, parameter_s=''): + """Enable or disable IPython GUI event loop integration. + + %gui [GUINAME] + + This magic replaces IPython's threaded shells that were activated + using the (pylab/wthread/etc.) command line flags. GUI toolkits + can now be enabled at runtime and keyboard + interrupts should work without any problems. 
The following toolkits + are supported: wxPython, PyQt4, PyGTK, Tk and Cocoa (OSX):: + + %gui wx # enable wxPython event loop integration + %gui qt4|qt # enable PyQt4 event loop integration + %gui qt5 # enable PyQt5 event loop integration + %gui gtk # enable PyGTK event loop integration + %gui gtk3 # enable Gtk3 event loop integration + %gui gtk4 # enable Gtk4 event loop integration + %gui tk # enable Tk event loop integration + %gui osx # enable Cocoa event loop integration + # (requires %matplotlib 1.1) + %gui # disable all event loop integration + + WARNING: after any of these has been called you can simply create + an application object, but DO NOT start the event loop yourself, as + we have already handled that. + """ + opts, arg = self.parse_options(parameter_s, '') + if arg=='': arg = None + try: + return self.shell.enable_gui(arg) + except Exception as e: + # print simple error message, rather than traceback if we can't + # hook up the GUI + error(str(e)) + + @skip_doctest + @line_magic + def precision(self, s=''): + """Set floating point precision for pretty printing. + + Can set either integer precision or a format string. + + If numpy has been imported and precision is an int, + numpy display precision will also be set, via ``numpy.set_printoptions``. + + If no argument is given, defaults will be restored. + + Examples + -------- + :: + + In [1]: from math import pi + + In [2]: %precision 3 + Out[2]: u'%.3f' + + In [3]: pi + Out[3]: 3.142 + + In [4]: %precision %i + Out[4]: u'%i' + + In [5]: pi + Out[5]: 3 + + In [6]: %precision %e + Out[6]: u'%e' + + In [7]: pi**10 + Out[7]: 9.364805e+04 + + In [8]: %precision + Out[8]: u'%r' + + In [9]: pi**10 + Out[9]: 93648.047476082982 + """ + ptformatter = self.shell.display_formatter.formatters['text/plain'] + ptformatter.float_precision = s + return ptformatter.float_format + + @magic_arguments.magic_arguments() + @magic_arguments.argument( + '-e', '--export', action='store_true', default=False, + help=argparse.SUPPRESS + ) + @magic_arguments.argument( + 'filename', type=str, + help='Notebook name or filename' + ) + @line_magic + def notebook(self, s): + """Export and convert IPython notebooks. + + This function can export the current IPython history to a notebook file. + For example, to export the history to "foo.ipynb" do "%notebook foo.ipynb". + + The -e or --export flag is deprecated in IPython 5.2, and will be + removed in the future. + """ + args = magic_arguments.parse_argstring(self.notebook, s) + + from nbformat import write, v4 + + cells = [] + hist = list(self.shell.history_manager.get_range()) + if(len(hist)<=1): + raise ValueError('History is empty, cannot export') + for session, execution_count, source in hist[:-1]: + cells.append(v4.new_code_cell( + execution_count=execution_count, + source=source + )) + nb = v4.new_notebook(cells=cells) + with io.open(args.filename, 'w', encoding='utf-8') as f: + write(nb, f, version=4) + +@magics_class +class AsyncMagics(BasicMagics): + + @line_magic + def autoawait(self, parameter_s): + """ + Allow to change the status of the autoawait option. + + This allow you to set a specific asynchronous code runner. + + If no value is passed, print the currently used asynchronous integration + and whether it is activated. 
+ + It can take a number of value evaluated in the following order: + + - False/false/off deactivate autoawait integration + - True/true/on activate autoawait integration using configured default + loop + - asyncio/curio/trio activate autoawait integration and use integration + with said library. + + - `sync` turn on the pseudo-sync integration (mostly used for + `IPython.embed()` which does not run IPython with a real eventloop and + deactivate running asynchronous code. Turning on Asynchronous code with + the pseudo sync loop is undefined behavior and may lead IPython to crash. + + If the passed parameter does not match any of the above and is a python + identifier, get said object from user namespace and set it as the + runner, and activate autoawait. + + If the object is a fully qualified object name, attempt to import it and + set it as the runner, and activate autoawait. + + + The exact behavior of autoawait is experimental and subject to change + across version of IPython and Python. + """ + + param = parameter_s.strip() + d = {True: "on", False: "off"} + + if not param: + print("IPython autoawait is `{}`, and set to use `{}`".format( + d[self.shell.autoawait], + self.shell.loop_runner + )) + return None + + if param.lower() in ('false', 'off'): + self.shell.autoawait = False + return None + if param.lower() in ('true', 'on'): + self.shell.autoawait = True + return None + + if param in self.shell.loop_runner_map: + self.shell.loop_runner, self.shell.autoawait = self.shell.loop_runner_map[param] + return None + + if param in self.shell.user_ns : + self.shell.loop_runner = self.shell.user_ns[param] + self.shell.autoawait = True + return None + + runner = import_item(param) + + self.shell.loop_runner = runner + self.shell.autoawait = True diff --git a/contrib/python/ipython/py3/IPython/core/magics/code.py b/contrib/python/ipython/py3/IPython/core/magics/code.py index 0ce9df18f16..d446d35ac6b 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/code.py +++ b/contrib/python/ipython/py3/IPython/core/magics/code.py @@ -1,750 +1,750 @@ -"""Implementation of code management magic functions. -""" -#----------------------------------------------------------------------------- -# Copyright (c) 2012 The IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. 
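# The %autoawait magic above resolves its argument in a fixed order: off/on
# keywords first, then a known runner name, then an object from the user
# namespace, and finally a fully qualified import path.  A minimal sketch of
# that resolution order; `runner_map`, `user_ns`, and `import_by_name` are
# stand-ins assumed for this sketch, not names from the patch.
def resolve_autoawait(param, runner_map, user_ns, import_by_name):
    """Return (autoawait_state, runner) for a given %autoawait argument."""
    if param.lower() in ("false", "off"):
        return ("off", None)
    if param.lower() in ("true", "on"):
        return ("on", None)                    # keep the configured runner
    if param in runner_map:
        return ("on", runner_map[param])       # e.g. "asyncio", "curio", "trio"
    if param in user_ns:
        return ("on", user_ns[param])          # object defined interactively
    return ("on", import_by_name(param))       # dotted path, imported lazily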
-#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -# Stdlib -import inspect -import io -import os -import re -import sys -import ast -from itertools import chain -from urllib.request import Request, urlopen -from urllib.parse import urlencode - -# Our own packages -from IPython.core.error import TryNext, StdinNotImplementedError, UsageError -from IPython.core.macro import Macro -from IPython.core.magic import Magics, magics_class, line_magic -from IPython.core.oinspect import find_file, find_source_lines -from IPython.core.release import version -from IPython.testing.skipdoctest import skip_doctest -from IPython.utils.contexts import preserve_keys -from IPython.utils.path import get_py_filename -from warnings import warn -from logging import error -from IPython.utils.text import get_text_list - -#----------------------------------------------------------------------------- -# Magic implementation classes -#----------------------------------------------------------------------------- - -# Used for exception handling in magic_edit -class MacroToEdit(ValueError): pass - -ipython_input_pat = re.compile(r"<ipython\-input\-(\d+)-[a-z\d]+>$") - -# To match, e.g. 8-10 1:5 :10 3- -range_re = re.compile(r""" -(?P<start>\d+)? -((?P<sep>[\-:]) - (?P<end>\d+)?)? -$""", re.VERBOSE) - - -def extract_code_ranges(ranges_str): - """Turn a string of range for %%load into 2-tuples of (start, stop) - ready to use as a slice of the content split by lines. - - Examples - -------- - list(extract_input_ranges("5-10 2")) - [(4, 10), (1, 2)] - """ - for range_str in ranges_str.split(): - rmatch = range_re.match(range_str) - if not rmatch: - continue - sep = rmatch.group("sep") - start = rmatch.group("start") - end = rmatch.group("end") - - if sep == '-': - start = int(start) - 1 if start else None - end = int(end) if end else None - elif sep == ':': - start = int(start) - 1 if start else None - end = int(end) - 1 if end else None - else: - end = int(start) - start = int(start) - 1 - yield (start, end) - - -def extract_symbols(code, symbols): - """ - Return a tuple (blocks, not_found) - where ``blocks`` is a list of code fragments - for each symbol parsed from code, and ``not_found`` are - symbols not found in the code. - - For example:: - - In [1]: code = '''a = 10 - ...: def b(): return 42 - ...: class A: pass''' - - In [2]: extract_symbols(code, 'A,b,z') - Out[2]: (['class A: pass\\n', 'def b(): return 42\\n'], ['z']) - """ - symbols = symbols.split(',') - - # this will raise SyntaxError if code isn't valid Python - py_code = ast.parse(code) - - marks = [(getattr(s, 'name', None), s.lineno) for s in py_code.body] - code = code.split('\n') - - symbols_lines = {} - - # we already know the start_lineno of each symbol (marks). 
- # To find each end_lineno, we traverse in reverse order until each - # non-blank line - end = len(code) - for name, start in reversed(marks): - while not code[end - 1].strip(): - end -= 1 - if name: - symbols_lines[name] = (start - 1, end) - end = start - 1 - - # Now symbols_lines is a map - # {'symbol_name': (start_lineno, end_lineno), ...} - - # fill a list with chunks of codes for each requested symbol - blocks = [] - not_found = [] - for symbol in symbols: - if symbol in symbols_lines: - start, end = symbols_lines[symbol] - blocks.append('\n'.join(code[start:end]) + '\n') - else: - not_found.append(symbol) - - return blocks, not_found - -def strip_initial_indent(lines): - """For %load, strip indent from lines until finding an unindented line. - - https://github.com/ipython/ipython/issues/9775 - """ - indent_re = re.compile(r'\s+') - - it = iter(lines) - first_line = next(it) - indent_match = indent_re.match(first_line) - - if indent_match: - # First line was indented - indent = indent_match.group() - yield first_line[len(indent):] - - for line in it: - if line.startswith(indent): - yield line[len(indent):] - else: - # Less indented than the first line - stop dedenting - yield line - break - else: - yield first_line - - # Pass the remaining lines through without dedenting - for line in it: - yield line - - -class InteractivelyDefined(Exception): - """Exception for interactively defined variable in magic_edit""" - def __init__(self, index): - self.index = index - - -@magics_class -class CodeMagics(Magics): - """Magics related to code management (loading, saving, editing, ...).""" - - def __init__(self, *args, **kwargs): - self._knowntemps = set() - super(CodeMagics, self).__init__(*args, **kwargs) - - @line_magic - def save(self, parameter_s=''): - """Save a set of lines or a macro to a given filename. - - Usage:\\ - %save [options] filename n1-n2 n3-n4 ... n5 .. n6 ... - - Options: - - -r: use 'raw' input. By default, the 'processed' history is used, - so that magics are loaded in their transformed version to valid - Python. If this option is given, the raw input as typed as the - command line is used instead. - - -f: force overwrite. If file exists, %save will prompt for overwrite - unless -f is given. - - -a: append to the file instead of overwriting it. - - This function uses the same syntax as %history for input ranges, - then saves the lines to the filename you specify. - - It adds a '.py' extension to the file if you don't do so yourself, and - it asks for confirmation before overwriting existing files. - - If `-r` option is used, the default extension is `.ipy`. - """ - - opts,args = self.parse_options(parameter_s,'fra',mode='list') - if not args: - raise UsageError('Missing filename.') - raw = 'r' in opts - force = 'f' in opts - append = 'a' in opts - mode = 'a' if append else 'w' - ext = '.ipy' if raw else '.py' - fname, codefrom = args[0], " ".join(args[1:]) - if not fname.endswith(('.py','.ipy')): - fname += ext - file_exists = os.path.isfile(fname) - if file_exists and not force and not append: - try: - overwrite = self.shell.ask_yes_no('File `%s` exists. Overwrite (y/[N])? ' % fname, default='n') - except StdinNotImplementedError: - print("File `%s` exists. 
Use `%%save -f %s` to force overwrite" % (fname, parameter_s)) - return - if not overwrite : - print('Operation cancelled.') - return - try: - cmds = self.shell.find_user_code(codefrom,raw) - except (TypeError, ValueError) as e: - print(e.args[0]) - return - with io.open(fname, mode, encoding="utf-8") as f: - if not file_exists or not append: - f.write("# coding: utf-8\n") - f.write(cmds) - # make sure we end on a newline - if not cmds.endswith('\n'): - f.write('\n') - print('The following commands were written to file `%s`:' % fname) - print(cmds) - - @line_magic - def pastebin(self, parameter_s=''): - """Upload code to dpaste.com, returning the URL. - - Usage:\\ - %pastebin [-d "Custom description"][-e 24] 1-7 - - The argument can be an input history range, a filename, or the name of a - string or macro. - - Options: - - -d: Pass a custom description. The default will say - "Pasted from IPython". - -e: Pass number of days for the link to be expired. - The default will be 7 days. - """ - opts, args = self.parse_options(parameter_s, "d:e:") - - try: - code = self.shell.find_user_code(args) - except (ValueError, TypeError) as e: - print(e.args[0]) - return - - expiry_days = 7 - try: - expiry_days = int(opts.get("e", 7)) - except ValueError as e: - print(e.args[0].capitalize()) - return - if expiry_days < 1 or expiry_days > 365: - print("Expiry days should be in range of 1 to 365") - return - - post_data = urlencode( - { - "title": opts.get("d", "Pasted from IPython"), - "syntax": "python", - "content": code, - "expiry_days": expiry_days, - } - ).encode("utf-8") - - request = Request( - "https://dpaste.com/api/v2/", - headers={"User-Agent": "IPython v{}".format(version)}, - ) - response = urlopen(request, post_data) - return response.headers.get('Location') - - @line_magic - def loadpy(self, arg_s): - """Alias of `%load` - - `%loadpy` has gained some flexibility and dropped the requirement of a `.py` - extension. So it has been renamed simply into %load. You can look at - `%load`'s docstring for more info. - """ - self.load(arg_s) - - @line_magic - def load(self, arg_s): - """Load code into the current frontend. - - Usage:\\ - %load [options] source - - where source can be a filename, URL, input history range, macro, or - element in the user namespace - - Options: - - -r <lines>: Specify lines or ranges of lines to load from the source. - Ranges could be specified as x-y (x..y) or in python-style x:y - (x..(y-1)). Both limits x and y can be left blank (meaning the - beginning and end of the file, respectively). - - -s <symbols>: Specify function or classes to load from python source. - - -y : Don't ask confirmation for loading source above 200 000 characters. - - -n : Include the user's namespace when searching for source code. 
- - This magic command can either take a local filename, a URL, an history - range (see %history) or a macro as argument, it will prompt for - confirmation before loading source with more than 200 000 characters, unless - -y flag is passed or if the frontend does not support raw_input:: - - %load myscript.py - %load 7-27 - %load myMacro - %load http://www.example.com/myscript.py - %load -r 5-10 myscript.py - %load -r 10-20,30,40: foo.py - %load -s MyClass,wonder_function myscript.py - %load -n MyClass - %load -n my_module.wonder_function - """ - opts,args = self.parse_options(arg_s,'yns:r:') - - if not args: - raise UsageError('Missing filename, URL, input history range, ' - 'macro, or element in the user namespace.') - - search_ns = 'n' in opts - - contents = self.shell.find_user_code(args, search_ns=search_ns) - - if 's' in opts: - try: - blocks, not_found = extract_symbols(contents, opts['s']) - except SyntaxError: - # non python code - error("Unable to parse the input as valid Python code") - return - - if len(not_found) == 1: - warn('The symbol `%s` was not found' % not_found[0]) - elif len(not_found) > 1: - warn('The symbols %s were not found' % get_text_list(not_found, - wrap_item_with='`') - ) - - contents = '\n'.join(blocks) - - if 'r' in opts: - ranges = opts['r'].replace(',', ' ') - lines = contents.split('\n') - slices = extract_code_ranges(ranges) - contents = [lines[slice(*slc)] for slc in slices] - contents = '\n'.join(strip_initial_indent(chain.from_iterable(contents))) - - l = len(contents) - - # 200 000 is ~ 2500 full 80 character lines - # so in average, more than 5000 lines - if l > 200000 and 'y' not in opts: - try: - ans = self.shell.ask_yes_no(("The text you're trying to load seems pretty big"\ - " (%d characters). Continue (y/[N]) ?" % l), default='n' ) - except StdinNotImplementedError: - #assume yes if raw input not implemented - ans = True - - if ans is False : - print('Operation cancelled.') - return - - contents = "# %load {}\n".format(arg_s) + contents - - self.shell.set_next_input(contents, replace=True) - - @staticmethod - def _find_edit_target(shell, args, opts, last_call): - """Utility method used by magic_edit to find what to edit.""" - - def make_filename(arg): - "Make a filename from the given args" - try: - filename = get_py_filename(arg) - except IOError: - # If it ends with .py but doesn't already exist, assume we want - # a new file. - if arg.endswith('.py'): - filename = arg - else: - filename = None - return filename - - # Set a few locals from the options for convenience: - opts_prev = 'p' in opts - opts_raw = 'r' in opts - - # custom exceptions - class DataIsObject(Exception): pass - - # Default line number value - lineno = opts.get('n',None) - - if opts_prev: - args = '_%s' % last_call[0] - if args not in shell.user_ns: - args = last_call[1] - - # by default this is done with temp files, except when the given - # arg is a filename - use_temp = True - - data = '' - - # First, see if the arguments should be a filename. - filename = make_filename(args) - if filename: - use_temp = False - elif args: - # Mode where user specifies ranges of lines, like in %macro. - data = shell.extract_input_lines(args, opts_raw) - if not data: - try: - # Load the parameter given as a variable. 
If not a string, - # process it as an object instead (below) - - #print '*** args',args,'type',type(args) # dbg - data = eval(args, shell.user_ns) - if not isinstance(data, str): - raise DataIsObject - - except (NameError,SyntaxError): - # given argument is not a variable, try as a filename - filename = make_filename(args) - if filename is None: - warn("Argument given (%s) can't be found as a variable " - "or as a filename." % args) - return (None, None, None) - use_temp = False - - except DataIsObject: - # macros have a special edit function - if isinstance(data, Macro): - raise MacroToEdit(data) - - # For objects, try to edit the file where they are defined - filename = find_file(data) - if filename: - if 'fakemodule' in filename.lower() and \ - inspect.isclass(data): - # class created by %edit? Try to find source - # by looking for method definitions instead, the - # __module__ in those classes is FakeModule. - attrs = [getattr(data, aname) for aname in dir(data)] - for attr in attrs: - if not inspect.ismethod(attr): - continue - filename = find_file(attr) - if filename and \ - 'fakemodule' not in filename.lower(): - # change the attribute to be the edit - # target instead - data = attr - break - - m = ipython_input_pat.match(os.path.basename(filename)) - if m: - raise InteractivelyDefined(int(m.groups()[0])) - - datafile = 1 - if filename is None: - filename = make_filename(args) - datafile = 1 - if filename is not None: - # only warn about this if we get a real name - warn('Could not find file where `%s` is defined.\n' - 'Opening a file named `%s`' % (args, filename)) - # Now, make sure we can actually read the source (if it was - # in a temp file it's gone by now). - if datafile: - if lineno is None: - lineno = find_source_lines(data) - if lineno is None: - filename = make_filename(args) - if filename is None: - warn('The file where `%s` was defined ' - 'cannot be read or found.' % data) - return (None, None, None) - use_temp = False - - if use_temp: - filename = shell.mktempfile(data) - print('IPython will make a temporary file named:',filename) - - # use last_call to remember the state of the previous call, but don't - # let it be clobbered by successive '-p' calls. - try: - last_call[0] = shell.displayhook.prompt_count - if not opts_prev: - last_call[1] = args - except: - pass - - - return filename, lineno, use_temp - - def _edit_macro(self,mname,macro): - """open an editor with the macro data in a file""" - filename = self.shell.mktempfile(macro.value) - self.shell.hooks.editor(filename) - - # and make a new macro object, to replace the old one - with open(filename) as mfile: - mvalue = mfile.read() - self.shell.user_ns[mname] = Macro(mvalue) - - @skip_doctest - @line_magic - def edit(self, parameter_s='',last_call=['','']): - """Bring up an editor and execute the resulting code. - - Usage: - %edit [options] [args] - - %edit runs IPython's editor hook. The default version of this hook is - set to call the editor specified by your $EDITOR environment variable. - If this isn't found, it will default to vi under Linux/Unix and to - notepad under Windows. See the end of this docstring for how to change - the editor hook. - - You can also set the value of this editor via the - ``TerminalInteractiveShell.editor`` option in your configuration file. - This is useful if you wish to use a different editor from your typical - default with IPython (and for Windows users who typically don't set - environment variables). 
- - This command allows you to conveniently edit multi-line code right in - your IPython session. - - If called without arguments, %edit opens up an empty editor with a - temporary file and will execute the contents of this file when you - close it (don't forget to save it!). - - - Options: - - -n <number>: open the editor at a specified line number. By default, - the IPython editor hook uses the unix syntax 'editor +N filename', but - you can configure this by providing your own modified hook if your - favorite editor supports line-number specifications with a different - syntax. - - -p: this will call the editor with the same data as the previous time - it was used, regardless of how long ago (in your current session) it - was. - - -r: use 'raw' input. This option only applies to input taken from the - user's history. By default, the 'processed' history is used, so that - magics are loaded in their transformed version to valid Python. If - this option is given, the raw input as typed as the command line is - used instead. When you exit the editor, it will be executed by - IPython's own processor. - - -x: do not execute the edited code immediately upon exit. This is - mainly useful if you are editing programs which need to be called with - command line arguments, which you can then do using %run. - - - Arguments: - - If arguments are given, the following possibilities exist: - - - If the argument is a filename, IPython will load that into the - editor. It will execute its contents with execfile() when you exit, - loading any code in the file into your interactive namespace. - - - The arguments are ranges of input history, e.g. "7 ~1/4-6". - The syntax is the same as in the %history magic. - - - If the argument is a string variable, its contents are loaded - into the editor. You can thus edit any string which contains - python code (including the result of previous edits). - - - If the argument is the name of an object (other than a string), - IPython will try to locate the file where it was defined and open the - editor at the point where it is defined. You can use `%edit function` - to load an editor exactly at the point where 'function' is defined, - edit it and have the file be executed automatically. - - - If the object is a macro (see %macro for details), this opens up your - specified editor with a temporary file containing the macro's data. - Upon exit, the macro is reloaded with the contents of the file. - - Note: opening at an exact line is only supported under Unix, and some - editors (like kedit and gedit up to Gnome 2.8) do not understand the - '+NUMBER' parameter necessary for this feature. Good editors like - (X)Emacs, vi, jed, pico and joe all do. - - After executing your code, %edit will return as output the code you - typed in the editor (except when it was an existing file). This way - you can reload the code in further invocations of %edit as a variable, - via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of - the output. - - Note that %edit is also available through the alias %ed. - - This is an example of creating a simple function inside the editor and - then modifying it. First, start up the editor:: - - In [1]: edit - Editing... done. Executing edited code... - Out[1]: 'def foo():\\n print "foo() was defined in an editing - session"\\n' - - We can then call the function foo():: - - In [2]: foo() - foo() was defined in an editing session - - Now we edit foo. 
IPython automatically loads the editor with the - (temporary) file where foo() was previously defined:: - - In [3]: edit foo - Editing... done. Executing edited code... - - And if we call foo() again we get the modified version:: - - In [4]: foo() - foo() has now been changed! - - Here is an example of how to edit a code snippet successive - times. First we call the editor:: - - In [5]: edit - Editing... done. Executing edited code... - hello - Out[5]: "print 'hello'\\n" - - Now we call it again with the previous output (stored in _):: - - In [6]: edit _ - Editing... done. Executing edited code... - hello world - Out[6]: "print 'hello world'\\n" - - Now we call it with the output #8 (stored in _8, also as Out[8]):: - - In [7]: edit _8 - Editing... done. Executing edited code... - hello again - Out[7]: "print 'hello again'\\n" - - - Changing the default editor hook: - - If you wish to write your own editor hook, you can put it in a - configuration file which you load at startup time. The default hook - is defined in the IPython.core.hooks module, and you can use that as a - starting example for further modifications. That file also has - general instructions on how to set a new hook for use once you've - defined it.""" - opts,args = self.parse_options(parameter_s,'prxn:') - - try: - filename, lineno, is_temp = self._find_edit_target(self.shell, - args, opts, last_call) - except MacroToEdit as e: - self._edit_macro(args, e.args[0]) - return - except InteractivelyDefined as e: - print("Editing In[%i]" % e.index) - args = str(e.index) - filename, lineno, is_temp = self._find_edit_target(self.shell, - args, opts, last_call) - if filename is None: - # nothing was found, warnings have already been issued, - # just give up. - return - - if is_temp: - self._knowntemps.add(filename) - elif (filename in self._knowntemps): - is_temp = True - - - # do actual editing here - print('Editing...', end=' ') - sys.stdout.flush() - try: - # Quote filenames that may have spaces in them - if ' ' in filename: - filename = "'%s'" % filename - self.shell.hooks.editor(filename,lineno) - except TryNext: - warn('Could not open editor') - return - - # XXX TODO: should this be generalized for all string vars? - # For now, this is special-cased to blocks created by cpaste - if args.strip() == 'pasted_block': - with open(filename, 'r') as f: - self.shell.user_ns['pasted_block'] = f.read() - - if 'x' in opts: # -x prevents actual execution - print() - else: - print('done. Executing edited code...') - with preserve_keys(self.shell.user_ns, '__file__'): - if not is_temp: - self.shell.user_ns['__file__'] = filename - if 'r' in opts: # Untranslated IPython code - with open(filename, 'r') as f: - source = f.read() - self.shell.run_cell(source, store_history=False) - else: - self.shell.safe_execfile(filename, self.shell.user_ns, - self.shell.user_ns) - - if is_temp: - try: - with open(filename) as f: - return f.read() - except IOError as msg: - if msg.filename == filename: - warn('File not found. Did you forget to save?') - return - else: - self.shell.showtraceback() +"""Implementation of code management magic functions. +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012 The IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. 
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Stdlib +import inspect +import io +import os +import re +import sys +import ast +from itertools import chain +from urllib.request import Request, urlopen +from urllib.parse import urlencode + +# Our own packages +from IPython.core.error import TryNext, StdinNotImplementedError, UsageError +from IPython.core.macro import Macro +from IPython.core.magic import Magics, magics_class, line_magic +from IPython.core.oinspect import find_file, find_source_lines +from IPython.core.release import version +from IPython.testing.skipdoctest import skip_doctest +from IPython.utils.contexts import preserve_keys +from IPython.utils.path import get_py_filename +from warnings import warn +from logging import error +from IPython.utils.text import get_text_list + +#----------------------------------------------------------------------------- +# Magic implementation classes +#----------------------------------------------------------------------------- + +# Used for exception handling in magic_edit +class MacroToEdit(ValueError): pass + +ipython_input_pat = re.compile(r"<ipython\-input\-(\d+)-[a-z\d]+>$") + +# To match, e.g. 8-10 1:5 :10 3- +range_re = re.compile(r""" +(?P<start>\d+)? +((?P<sep>[\-:]) + (?P<end>\d+)?)? +$""", re.VERBOSE) + + +def extract_code_ranges(ranges_str): + """Turn a string of range for %%load into 2-tuples of (start, stop) + ready to use as a slice of the content split by lines. + + Examples + -------- + list(extract_input_ranges("5-10 2")) + [(4, 10), (1, 2)] + """ + for range_str in ranges_str.split(): + rmatch = range_re.match(range_str) + if not rmatch: + continue + sep = rmatch.group("sep") + start = rmatch.group("start") + end = rmatch.group("end") + + if sep == '-': + start = int(start) - 1 if start else None + end = int(end) if end else None + elif sep == ':': + start = int(start) - 1 if start else None + end = int(end) - 1 if end else None + else: + end = int(start) + start = int(start) - 1 + yield (start, end) + + +def extract_symbols(code, symbols): + """ + Return a tuple (blocks, not_found) + where ``blocks`` is a list of code fragments + for each symbol parsed from code, and ``not_found`` are + symbols not found in the code. + + For example:: + + In [1]: code = '''a = 10 + ...: def b(): return 42 + ...: class A: pass''' + + In [2]: extract_symbols(code, 'A,b,z') + Out[2]: (['class A: pass\\n', 'def b(): return 42\\n'], ['z']) + """ + symbols = symbols.split(',') + + # this will raise SyntaxError if code isn't valid Python + py_code = ast.parse(code) + + marks = [(getattr(s, 'name', None), s.lineno) for s in py_code.body] + code = code.split('\n') + + symbols_lines = {} + + # we already know the start_lineno of each symbol (marks). 
+ # To find each end_lineno, we traverse in reverse order until each + # non-blank line + end = len(code) + for name, start in reversed(marks): + while not code[end - 1].strip(): + end -= 1 + if name: + symbols_lines[name] = (start - 1, end) + end = start - 1 + + # Now symbols_lines is a map + # {'symbol_name': (start_lineno, end_lineno), ...} + + # fill a list with chunks of codes for each requested symbol + blocks = [] + not_found = [] + for symbol in symbols: + if symbol in symbols_lines: + start, end = symbols_lines[symbol] + blocks.append('\n'.join(code[start:end]) + '\n') + else: + not_found.append(symbol) + + return blocks, not_found + +def strip_initial_indent(lines): + """For %load, strip indent from lines until finding an unindented line. + + https://github.com/ipython/ipython/issues/9775 + """ + indent_re = re.compile(r'\s+') + + it = iter(lines) + first_line = next(it) + indent_match = indent_re.match(first_line) + + if indent_match: + # First line was indented + indent = indent_match.group() + yield first_line[len(indent):] + + for line in it: + if line.startswith(indent): + yield line[len(indent):] + else: + # Less indented than the first line - stop dedenting + yield line + break + else: + yield first_line + + # Pass the remaining lines through without dedenting + for line in it: + yield line + + +class InteractivelyDefined(Exception): + """Exception for interactively defined variable in magic_edit""" + def __init__(self, index): + self.index = index + + +@magics_class +class CodeMagics(Magics): + """Magics related to code management (loading, saving, editing, ...).""" + + def __init__(self, *args, **kwargs): + self._knowntemps = set() + super(CodeMagics, self).__init__(*args, **kwargs) + + @line_magic + def save(self, parameter_s=''): + """Save a set of lines or a macro to a given filename. + + Usage:\\ + %save [options] filename n1-n2 n3-n4 ... n5 .. n6 ... + + Options: + + -r: use 'raw' input. By default, the 'processed' history is used, + so that magics are loaded in their transformed version to valid + Python. If this option is given, the raw input as typed as the + command line is used instead. + + -f: force overwrite. If file exists, %save will prompt for overwrite + unless -f is given. + + -a: append to the file instead of overwriting it. + + This function uses the same syntax as %history for input ranges, + then saves the lines to the filename you specify. + + It adds a '.py' extension to the file if you don't do so yourself, and + it asks for confirmation before overwriting existing files. + + If `-r` option is used, the default extension is `.ipy`. + """ + + opts,args = self.parse_options(parameter_s,'fra',mode='list') + if not args: + raise UsageError('Missing filename.') + raw = 'r' in opts + force = 'f' in opts + append = 'a' in opts + mode = 'a' if append else 'w' + ext = '.ipy' if raw else '.py' + fname, codefrom = args[0], " ".join(args[1:]) + if not fname.endswith(('.py','.ipy')): + fname += ext + file_exists = os.path.isfile(fname) + if file_exists and not force and not append: + try: + overwrite = self.shell.ask_yes_no('File `%s` exists. Overwrite (y/[N])? ' % fname, default='n') + except StdinNotImplementedError: + print("File `%s` exists. 
Use `%%save -f %s` to force overwrite" % (fname, parameter_s)) + return + if not overwrite : + print('Operation cancelled.') + return + try: + cmds = self.shell.find_user_code(codefrom,raw) + except (TypeError, ValueError) as e: + print(e.args[0]) + return + with io.open(fname, mode, encoding="utf-8") as f: + if not file_exists or not append: + f.write("# coding: utf-8\n") + f.write(cmds) + # make sure we end on a newline + if not cmds.endswith('\n'): + f.write('\n') + print('The following commands were written to file `%s`:' % fname) + print(cmds) + + @line_magic + def pastebin(self, parameter_s=''): + """Upload code to dpaste.com, returning the URL. + + Usage:\\ + %pastebin [-d "Custom description"][-e 24] 1-7 + + The argument can be an input history range, a filename, or the name of a + string or macro. + + Options: + + -d: Pass a custom description. The default will say + "Pasted from IPython". + -e: Pass number of days for the link to be expired. + The default will be 7 days. + """ + opts, args = self.parse_options(parameter_s, "d:e:") + + try: + code = self.shell.find_user_code(args) + except (ValueError, TypeError) as e: + print(e.args[0]) + return + + expiry_days = 7 + try: + expiry_days = int(opts.get("e", 7)) + except ValueError as e: + print(e.args[0].capitalize()) + return + if expiry_days < 1 or expiry_days > 365: + print("Expiry days should be in range of 1 to 365") + return + + post_data = urlencode( + { + "title": opts.get("d", "Pasted from IPython"), + "syntax": "python", + "content": code, + "expiry_days": expiry_days, + } + ).encode("utf-8") + + request = Request( + "https://dpaste.com/api/v2/", + headers={"User-Agent": "IPython v{}".format(version)}, + ) + response = urlopen(request, post_data) + return response.headers.get('Location') + + @line_magic + def loadpy(self, arg_s): + """Alias of `%load` + + `%loadpy` has gained some flexibility and dropped the requirement of a `.py` + extension. So it has been renamed simply into %load. You can look at + `%load`'s docstring for more info. + """ + self.load(arg_s) + + @line_magic + def load(self, arg_s): + """Load code into the current frontend. + + Usage:\\ + %load [options] source + + where source can be a filename, URL, input history range, macro, or + element in the user namespace + + Options: + + -r <lines>: Specify lines or ranges of lines to load from the source. + Ranges could be specified as x-y (x..y) or in python-style x:y + (x..(y-1)). Both limits x and y can be left blank (meaning the + beginning and end of the file, respectively). + + -s <symbols>: Specify function or classes to load from python source. + + -y : Don't ask confirmation for loading source above 200 000 characters. + + -n : Include the user's namespace when searching for source code. 
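A minimal sketch of the -r range grammar described above, exercised through the module-level extract_code_ranges helper defined earlier in this file (the first result is the one given in that helper's own docstring):

    from IPython.core.magics.code import extract_code_ranges

    list(extract_code_ranges("5-10 2"))    # [(4, 10), (1, 2)]: 1-based input, '-' ranges are end-inclusive
    list(extract_code_ranges("10:20 :5"))  # python-style ':' ranges; a blank limit means start or end of file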
+ + This magic command can either take a local filename, a URL, an history + range (see %history) or a macro as argument, it will prompt for + confirmation before loading source with more than 200 000 characters, unless + -y flag is passed or if the frontend does not support raw_input:: + + %load myscript.py + %load 7-27 + %load myMacro + %load http://www.example.com/myscript.py + %load -r 5-10 myscript.py + %load -r 10-20,30,40: foo.py + %load -s MyClass,wonder_function myscript.py + %load -n MyClass + %load -n my_module.wonder_function + """ + opts,args = self.parse_options(arg_s,'yns:r:') + + if not args: + raise UsageError('Missing filename, URL, input history range, ' + 'macro, or element in the user namespace.') + + search_ns = 'n' in opts + + contents = self.shell.find_user_code(args, search_ns=search_ns) + + if 's' in opts: + try: + blocks, not_found = extract_symbols(contents, opts['s']) + except SyntaxError: + # non python code + error("Unable to parse the input as valid Python code") + return + + if len(not_found) == 1: + warn('The symbol `%s` was not found' % not_found[0]) + elif len(not_found) > 1: + warn('The symbols %s were not found' % get_text_list(not_found, + wrap_item_with='`') + ) + + contents = '\n'.join(blocks) + + if 'r' in opts: + ranges = opts['r'].replace(',', ' ') + lines = contents.split('\n') + slices = extract_code_ranges(ranges) + contents = [lines[slice(*slc)] for slc in slices] + contents = '\n'.join(strip_initial_indent(chain.from_iterable(contents))) + + l = len(contents) + + # 200 000 is ~ 2500 full 80 character lines + # so in average, more than 5000 lines + if l > 200000 and 'y' not in opts: + try: + ans = self.shell.ask_yes_no(("The text you're trying to load seems pretty big"\ + " (%d characters). Continue (y/[N]) ?" % l), default='n' ) + except StdinNotImplementedError: + #assume yes if raw input not implemented + ans = True + + if ans is False : + print('Operation cancelled.') + return + + contents = "# %load {}\n".format(arg_s) + contents + + self.shell.set_next_input(contents, replace=True) + + @staticmethod + def _find_edit_target(shell, args, opts, last_call): + """Utility method used by magic_edit to find what to edit.""" + + def make_filename(arg): + "Make a filename from the given args" + try: + filename = get_py_filename(arg) + except IOError: + # If it ends with .py but doesn't already exist, assume we want + # a new file. + if arg.endswith('.py'): + filename = arg + else: + filename = None + return filename + + # Set a few locals from the options for convenience: + opts_prev = 'p' in opts + opts_raw = 'r' in opts + + # custom exceptions + class DataIsObject(Exception): pass + + # Default line number value + lineno = opts.get('n',None) + + if opts_prev: + args = '_%s' % last_call[0] + if args not in shell.user_ns: + args = last_call[1] + + # by default this is done with temp files, except when the given + # arg is a filename + use_temp = True + + data = '' + + # First, see if the arguments should be a filename. + filename = make_filename(args) + if filename: + use_temp = False + elif args: + # Mode where user specifies ranges of lines, like in %macro. + data = shell.extract_input_lines(args, opts_raw) + if not data: + try: + # Load the parameter given as a variable. 
If not a string, + # process it as an object instead (below) + + #print '*** args',args,'type',type(args) # dbg + data = eval(args, shell.user_ns) + if not isinstance(data, str): + raise DataIsObject + + except (NameError,SyntaxError): + # given argument is not a variable, try as a filename + filename = make_filename(args) + if filename is None: + warn("Argument given (%s) can't be found as a variable " + "or as a filename." % args) + return (None, None, None) + use_temp = False + + except DataIsObject: + # macros have a special edit function + if isinstance(data, Macro): + raise MacroToEdit(data) + + # For objects, try to edit the file where they are defined + filename = find_file(data) + if filename: + if 'fakemodule' in filename.lower() and \ + inspect.isclass(data): + # class created by %edit? Try to find source + # by looking for method definitions instead, the + # __module__ in those classes is FakeModule. + attrs = [getattr(data, aname) for aname in dir(data)] + for attr in attrs: + if not inspect.ismethod(attr): + continue + filename = find_file(attr) + if filename and \ + 'fakemodule' not in filename.lower(): + # change the attribute to be the edit + # target instead + data = attr + break + + m = ipython_input_pat.match(os.path.basename(filename)) + if m: + raise InteractivelyDefined(int(m.groups()[0])) + + datafile = 1 + if filename is None: + filename = make_filename(args) + datafile = 1 + if filename is not None: + # only warn about this if we get a real name + warn('Could not find file where `%s` is defined.\n' + 'Opening a file named `%s`' % (args, filename)) + # Now, make sure we can actually read the source (if it was + # in a temp file it's gone by now). + if datafile: + if lineno is None: + lineno = find_source_lines(data) + if lineno is None: + filename = make_filename(args) + if filename is None: + warn('The file where `%s` was defined ' + 'cannot be read or found.' % data) + return (None, None, None) + use_temp = False + + if use_temp: + filename = shell.mktempfile(data) + print('IPython will make a temporary file named:',filename) + + # use last_call to remember the state of the previous call, but don't + # let it be clobbered by successive '-p' calls. + try: + last_call[0] = shell.displayhook.prompt_count + if not opts_prev: + last_call[1] = args + except: + pass + + + return filename, lineno, use_temp + + def _edit_macro(self,mname,macro): + """open an editor with the macro data in a file""" + filename = self.shell.mktempfile(macro.value) + self.shell.hooks.editor(filename) + + # and make a new macro object, to replace the old one + with open(filename) as mfile: + mvalue = mfile.read() + self.shell.user_ns[mname] = Macro(mvalue) + + @skip_doctest + @line_magic + def edit(self, parameter_s='',last_call=['','']): + """Bring up an editor and execute the resulting code. + + Usage: + %edit [options] [args] + + %edit runs IPython's editor hook. The default version of this hook is + set to call the editor specified by your $EDITOR environment variable. + If this isn't found, it will default to vi under Linux/Unix and to + notepad under Windows. See the end of this docstring for how to change + the editor hook. + + You can also set the value of this editor via the + ``TerminalInteractiveShell.editor`` option in your configuration file. + This is useful if you wish to use a different editor from your typical + default with IPython (and for Windows users who typically don't set + environment variables). 
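As the paragraph above notes, the editor used by %edit can be pinned in the configuration file rather than through $EDITOR; a minimal sketch for ipython_config.py (the editor command itself is illustrative, any program on your PATH works):

    c = get_config()
    c.TerminalInteractiveShell.editor = 'vim'   # illustrative; e.g. 'nano', or 'notepad' on Windows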
+ + This command allows you to conveniently edit multi-line code right in + your IPython session. + + If called without arguments, %edit opens up an empty editor with a + temporary file and will execute the contents of this file when you + close it (don't forget to save it!). + + + Options: + + -n <number>: open the editor at a specified line number. By default, + the IPython editor hook uses the unix syntax 'editor +N filename', but + you can configure this by providing your own modified hook if your + favorite editor supports line-number specifications with a different + syntax. + + -p: this will call the editor with the same data as the previous time + it was used, regardless of how long ago (in your current session) it + was. + + -r: use 'raw' input. This option only applies to input taken from the + user's history. By default, the 'processed' history is used, so that + magics are loaded in their transformed version to valid Python. If + this option is given, the raw input as typed as the command line is + used instead. When you exit the editor, it will be executed by + IPython's own processor. + + -x: do not execute the edited code immediately upon exit. This is + mainly useful if you are editing programs which need to be called with + command line arguments, which you can then do using %run. + + + Arguments: + + If arguments are given, the following possibilities exist: + + - If the argument is a filename, IPython will load that into the + editor. It will execute its contents with execfile() when you exit, + loading any code in the file into your interactive namespace. + + - The arguments are ranges of input history, e.g. "7 ~1/4-6". + The syntax is the same as in the %history magic. + + - If the argument is a string variable, its contents are loaded + into the editor. You can thus edit any string which contains + python code (including the result of previous edits). + + - If the argument is the name of an object (other than a string), + IPython will try to locate the file where it was defined and open the + editor at the point where it is defined. You can use `%edit function` + to load an editor exactly at the point where 'function' is defined, + edit it and have the file be executed automatically. + + - If the object is a macro (see %macro for details), this opens up your + specified editor with a temporary file containing the macro's data. + Upon exit, the macro is reloaded with the contents of the file. + + Note: opening at an exact line is only supported under Unix, and some + editors (like kedit and gedit up to Gnome 2.8) do not understand the + '+NUMBER' parameter necessary for this feature. Good editors like + (X)Emacs, vi, jed, pico and joe all do. + + After executing your code, %edit will return as output the code you + typed in the editor (except when it was an existing file). This way + you can reload the code in further invocations of %edit as a variable, + via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of + the output. + + Note that %edit is also available through the alias %ed. + + This is an example of creating a simple function inside the editor and + then modifying it. First, start up the editor:: + + In [1]: edit + Editing... done. Executing edited code... + Out[1]: 'def foo():\\n print "foo() was defined in an editing + session"\\n' + + We can then call the function foo():: + + In [2]: foo() + foo() was defined in an editing session + + Now we edit foo. 
IPython automatically loads the editor with the + (temporary) file where foo() was previously defined:: + + In [3]: edit foo + Editing... done. Executing edited code... + + And if we call foo() again we get the modified version:: + + In [4]: foo() + foo() has now been changed! + + Here is an example of how to edit a code snippet successive + times. First we call the editor:: + + In [5]: edit + Editing... done. Executing edited code... + hello + Out[5]: "print 'hello'\\n" + + Now we call it again with the previous output (stored in _):: + + In [6]: edit _ + Editing... done. Executing edited code... + hello world + Out[6]: "print 'hello world'\\n" + + Now we call it with the output #8 (stored in _8, also as Out[8]):: + + In [7]: edit _8 + Editing... done. Executing edited code... + hello again + Out[7]: "print 'hello again'\\n" + + + Changing the default editor hook: + + If you wish to write your own editor hook, you can put it in a + configuration file which you load at startup time. The default hook + is defined in the IPython.core.hooks module, and you can use that as a + starting example for further modifications. That file also has + general instructions on how to set a new hook for use once you've + defined it.""" + opts,args = self.parse_options(parameter_s,'prxn:') + + try: + filename, lineno, is_temp = self._find_edit_target(self.shell, + args, opts, last_call) + except MacroToEdit as e: + self._edit_macro(args, e.args[0]) + return + except InteractivelyDefined as e: + print("Editing In[%i]" % e.index) + args = str(e.index) + filename, lineno, is_temp = self._find_edit_target(self.shell, + args, opts, last_call) + if filename is None: + # nothing was found, warnings have already been issued, + # just give up. + return + + if is_temp: + self._knowntemps.add(filename) + elif (filename in self._knowntemps): + is_temp = True + + + # do actual editing here + print('Editing...', end=' ') + sys.stdout.flush() + try: + # Quote filenames that may have spaces in them + if ' ' in filename: + filename = "'%s'" % filename + self.shell.hooks.editor(filename,lineno) + except TryNext: + warn('Could not open editor') + return + + # XXX TODO: should this be generalized for all string vars? + # For now, this is special-cased to blocks created by cpaste + if args.strip() == 'pasted_block': + with open(filename, 'r') as f: + self.shell.user_ns['pasted_block'] = f.read() + + if 'x' in opts: # -x prevents actual execution + print() + else: + print('done. Executing edited code...') + with preserve_keys(self.shell.user_ns, '__file__'): + if not is_temp: + self.shell.user_ns['__file__'] = filename + if 'r' in opts: # Untranslated IPython code + with open(filename, 'r') as f: + source = f.read() + self.shell.run_cell(source, store_history=False) + else: + self.shell.safe_execfile(filename, self.shell.user_ns, + self.shell.user_ns) + + if is_temp: + try: + with open(filename) as f: + return f.read() + except IOError as msg: + if msg.filename == filename: + warn('File not found. Did you forget to save?') + return + else: + self.shell.showtraceback() diff --git a/contrib/python/ipython/py3/IPython/core/magics/config.py b/contrib/python/ipython/py3/IPython/core/magics/config.py index 233c4432290..97b13df02e6 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/config.py +++ b/contrib/python/ipython/py3/IPython/core/magics/config.py @@ -1,158 +1,158 @@ -"""Implementation of configuration-related magic functions. 
-""" -#----------------------------------------------------------------------------- -# Copyright (c) 2012 The IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -# Stdlib -import re - -# Our own packages -from IPython.core.error import UsageError -from IPython.core.magic import Magics, magics_class, line_magic -from logging import error - -#----------------------------------------------------------------------------- -# Magic implementation classes -#----------------------------------------------------------------------------- - -reg = re.compile(r'^\w+\.\w+$') -@magics_class -class ConfigMagics(Magics): - - def __init__(self, shell): - super(ConfigMagics, self).__init__(shell) - self.configurables = [] - - @line_magic - def config(self, s): - """configure IPython - - %config Class[.trait=value] - - This magic exposes most of the IPython config system. Any - Configurable class should be able to be configured with the simple - line:: - - %config Class.trait=value - - Where `value` will be resolved in the user's namespace, if it is an - expression or variable name. - - Examples - -------- - - To see what classes are available for config, pass no arguments:: - - In [1]: %config - Available objects for config: - TerminalInteractiveShell - HistoryManager - PrefilterManager - AliasManager - IPCompleter - DisplayFormatter - - To view what is configurable on a given class, just pass the class - name:: - - In [2]: %config IPCompleter - IPCompleter options - ----------------- - IPCompleter.omit__names=<Enum> - Current: 2 - Choices: (0, 1, 2) - Instruct the completer to omit private method names - Specifically, when completing on ``object.<tab>``. - When 2 [default]: all names that start with '_' will be excluded. - When 1: all 'magic' names (``__foo__``) will be excluded. - When 0: nothing will be excluded. - IPCompleter.merge_completions=<CBool> - Current: True - Whether to merge completion results into a single list - If False, only the completion results from the first non-empty - completer will be returned. - IPCompleter.limit_to__all__=<CBool> - Current: False - Instruct the completer to use __all__ for the completion - Specifically, when completing on ``object.<tab>``. - When True: only those names in obj.__all__ will be included. - When False [default]: the __all__ attribute is ignored - IPCompleter.greedy=<CBool> - Current: False - Activate greedy completion - This will enable completion on elements of lists, results of - function calls, etc., but can be unsafe because the code is - actually evaluated on TAB. - - but the real use is in setting values:: - - In [3]: %config IPCompleter.greedy = True - - and these values are read from the user_ns if they are variables:: - - In [4]: feeling_greedy=False - - In [5]: %config IPCompleter.greedy = feeling_greedy - - """ - from traitlets.config.loader import Config - # some IPython objects are Configurable, but do not yet have - # any configurable traits. 
Exclude them from the effects of - # this magic, as their presence is just noise: - configurables = sorted(set([ c for c in self.shell.configurables - if c.__class__.class_traits(config=True) - ]), key=lambda x: x.__class__.__name__) - classnames = [ c.__class__.__name__ for c in configurables ] - - line = s.strip() - if not line: - # print available configurable names - print("Available objects for config:") - for name in classnames: - print(" ", name) - return - elif line in classnames: - # `%config TerminalInteractiveShell` will print trait info for - # TerminalInteractiveShell - c = configurables[classnames.index(line)] - cls = c.__class__ - help = cls.class_get_help(c) - # strip leading '--' from cl-args: - help = re.sub(re.compile(r'^--', re.MULTILINE), '', help) - print(help) - return - elif reg.match(line): - cls, attr = line.split('.') - return getattr(configurables[classnames.index(cls)],attr) - elif '=' not in line: - msg = "Invalid config statement: %r, "\ - "should be `Class.trait = value`." - - ll = line.lower() - for classname in classnames: - if ll == classname.lower(): - msg = msg + '\nDid you mean %s (note the case)?' % classname - break - - raise UsageError( msg % line) - - # otherwise, assume we are setting configurables. - # leave quotes on args when splitting, because we want - # unquoted args to eval in user_ns - cfg = Config() - exec("cfg."+line, locals(), self.shell.user_ns) - - for configurable in configurables: - try: - configurable.update_config(cfg) - except Exception as e: - error(e) +"""Implementation of configuration-related magic functions. +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012 The IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Stdlib +import re + +# Our own packages +from IPython.core.error import UsageError +from IPython.core.magic import Magics, magics_class, line_magic +from logging import error + +#----------------------------------------------------------------------------- +# Magic implementation classes +#----------------------------------------------------------------------------- + +reg = re.compile(r'^\w+\.\w+$') +@magics_class +class ConfigMagics(Magics): + + def __init__(self, shell): + super(ConfigMagics, self).__init__(shell) + self.configurables = [] + + @line_magic + def config(self, s): + """configure IPython + + %config Class[.trait=value] + + This magic exposes most of the IPython config system. Any + Configurable class should be able to be configured with the simple + line:: + + %config Class.trait=value + + Where `value` will be resolved in the user's namespace, if it is an + expression or variable name. 
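A rough sketch of the traitlets machinery this magic drives, using the same trait as the examples that follow (the magic additionally resolves the right-hand side in the user namespace before applying it):

    from traitlets.config.loader import Config

    cfg = Config()
    cfg.IPCompleter.greedy = True    # what `%config IPCompleter.greedy = True` builds internally
    # the magic then pushes the fragment to every live configurable:
    #     for configurable in shell.configurables:
    #         configurable.update_config(cfg)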
+ + Examples + -------- + + To see what classes are available for config, pass no arguments:: + + In [1]: %config + Available objects for config: + TerminalInteractiveShell + HistoryManager + PrefilterManager + AliasManager + IPCompleter + DisplayFormatter + + To view what is configurable on a given class, just pass the class + name:: + + In [2]: %config IPCompleter + IPCompleter options + ----------------- + IPCompleter.omit__names=<Enum> + Current: 2 + Choices: (0, 1, 2) + Instruct the completer to omit private method names + Specifically, when completing on ``object.<tab>``. + When 2 [default]: all names that start with '_' will be excluded. + When 1: all 'magic' names (``__foo__``) will be excluded. + When 0: nothing will be excluded. + IPCompleter.merge_completions=<CBool> + Current: True + Whether to merge completion results into a single list + If False, only the completion results from the first non-empty + completer will be returned. + IPCompleter.limit_to__all__=<CBool> + Current: False + Instruct the completer to use __all__ for the completion + Specifically, when completing on ``object.<tab>``. + When True: only those names in obj.__all__ will be included. + When False [default]: the __all__ attribute is ignored + IPCompleter.greedy=<CBool> + Current: False + Activate greedy completion + This will enable completion on elements of lists, results of + function calls, etc., but can be unsafe because the code is + actually evaluated on TAB. + + but the real use is in setting values:: + + In [3]: %config IPCompleter.greedy = True + + and these values are read from the user_ns if they are variables:: + + In [4]: feeling_greedy=False + + In [5]: %config IPCompleter.greedy = feeling_greedy + + """ + from traitlets.config.loader import Config + # some IPython objects are Configurable, but do not yet have + # any configurable traits. Exclude them from the effects of + # this magic, as their presence is just noise: + configurables = sorted(set([ c for c in self.shell.configurables + if c.__class__.class_traits(config=True) + ]), key=lambda x: x.__class__.__name__) + classnames = [ c.__class__.__name__ for c in configurables ] + + line = s.strip() + if not line: + # print available configurable names + print("Available objects for config:") + for name in classnames: + print(" ", name) + return + elif line in classnames: + # `%config TerminalInteractiveShell` will print trait info for + # TerminalInteractiveShell + c = configurables[classnames.index(line)] + cls = c.__class__ + help = cls.class_get_help(c) + # strip leading '--' from cl-args: + help = re.sub(re.compile(r'^--', re.MULTILINE), '', help) + print(help) + return + elif reg.match(line): + cls, attr = line.split('.') + return getattr(configurables[classnames.index(cls)],attr) + elif '=' not in line: + msg = "Invalid config statement: %r, "\ + "should be `Class.trait = value`." + + ll = line.lower() + for classname in classnames: + if ll == classname.lower(): + msg = msg + '\nDid you mean %s (note the case)?' % classname + break + + raise UsageError( msg % line) + + # otherwise, assume we are setting configurables. 
+ # leave quotes on args when splitting, because we want + # unquoted args to eval in user_ns + cfg = Config() + exec("cfg."+line, locals(), self.shell.user_ns) + + for configurable in configurables: + try: + configurable.update_config(cfg) + except Exception as e: + error(e) diff --git a/contrib/python/ipython/py3/IPython/core/magics/display.py b/contrib/python/ipython/py3/IPython/core/magics/display.py index 3e063cef9dd..07853944715 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/display.py +++ b/contrib/python/ipython/py3/IPython/core/magics/display.py @@ -1,82 +1,82 @@ -"""Simple magics for display formats""" -#----------------------------------------------------------------------------- -# Copyright (c) 2012 The IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -# Our own packages -from IPython.core.display import display, Javascript, Latex, SVG, HTML, Markdown -from IPython.core.magic import ( - Magics, magics_class, cell_magic -) -from IPython.core import magic_arguments - -#----------------------------------------------------------------------------- -# Magic implementation classes -#----------------------------------------------------------------------------- - - -@magics_class -class DisplayMagics(Magics): - """Magics for displaying various output types with literals - - Defines javascript/latex/svg/html cell magics for writing - blocks in those languages, to be rendered in the frontend. - """ - - @cell_magic - def js(self, line, cell): - """Run the cell block of Javascript code - - Alias of `%%javascript` - """ - self.javascript(line, cell) - - @cell_magic - def javascript(self, line, cell): - """Run the cell block of Javascript code""" - display(Javascript(cell)) - - - @cell_magic - def latex(self, line, cell): - """Render the cell as a block of latex - - The subset of latex which is support depends on the implementation in - the client. In the Jupyter Notebook, this magic only renders the subset - of latex defined by MathJax - [here](https://docs.mathjax.org/en/v2.5-latest/tex.html).""" - display(Latex(cell)) - - @cell_magic - def svg(self, line, cell): - """Render the cell as an SVG literal""" - display(SVG(cell)) - - @magic_arguments.magic_arguments() - @magic_arguments.argument( - '--isolated', action='store_true', default=False, - help="""Annotate the cell as 'isolated'. -Isolated cells are rendered inside their own <iframe> tag""" - ) - @cell_magic - def html(self, line, cell): - """Render the cell as a block of HTML""" - args = magic_arguments.parse_argstring(self.html, line) - html = HTML(cell) - if args.isolated: - display(html, metadata={'text/html':{'isolated':True}}) - else: - display(html) - - @cell_magic - def markdown(self, line, cell): - """Render the cell as Markdown text block""" - display(Markdown(cell)) +"""Simple magics for display formats""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012 The IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. 
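These cell magics are thin wrappers over IPython's display machinery; for instance the --isolated flag of %%html simply attaches display metadata, roughly as in this sketch (using the public IPython.display names):

    from IPython.display import HTML, display

    display(HTML("<h1>rendered in its own iframe</h1>"),
            metadata={'text/html': {'isolated': True}})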
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Our own packages +from IPython.core.display import display, Javascript, Latex, SVG, HTML, Markdown +from IPython.core.magic import ( + Magics, magics_class, cell_magic +) +from IPython.core import magic_arguments + +#----------------------------------------------------------------------------- +# Magic implementation classes +#----------------------------------------------------------------------------- + + +@magics_class +class DisplayMagics(Magics): + """Magics for displaying various output types with literals + + Defines javascript/latex/svg/html cell magics for writing + blocks in those languages, to be rendered in the frontend. + """ + + @cell_magic + def js(self, line, cell): + """Run the cell block of Javascript code + + Alias of `%%javascript` + """ + self.javascript(line, cell) + + @cell_magic + def javascript(self, line, cell): + """Run the cell block of Javascript code""" + display(Javascript(cell)) + + + @cell_magic + def latex(self, line, cell): + """Render the cell as a block of latex + + The subset of latex which is support depends on the implementation in + the client. In the Jupyter Notebook, this magic only renders the subset + of latex defined by MathJax + [here](https://docs.mathjax.org/en/v2.5-latest/tex.html).""" + display(Latex(cell)) + + @cell_magic + def svg(self, line, cell): + """Render the cell as an SVG literal""" + display(SVG(cell)) + + @magic_arguments.magic_arguments() + @magic_arguments.argument( + '--isolated', action='store_true', default=False, + help="""Annotate the cell as 'isolated'. +Isolated cells are rendered inside their own <iframe> tag""" + ) + @cell_magic + def html(self, line, cell): + """Render the cell as a block of HTML""" + args = magic_arguments.parse_argstring(self.html, line) + html = HTML(cell) + if args.isolated: + display(html, metadata={'text/html':{'isolated':True}}) + else: + display(html) + + @cell_magic + def markdown(self, line, cell): + """Render the cell as Markdown text block""" + display(Markdown(cell)) diff --git a/contrib/python/ipython/py3/IPython/core/magics/execution.py b/contrib/python/ipython/py3/IPython/core/magics/execution.py index 00af7f17c9f..6b651939f8f 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/execution.py +++ b/contrib/python/ipython/py3/IPython/core/magics/execution.py @@ -1,1526 +1,1526 @@ -# -*- coding: utf-8 -*- -"""Implementation of execution-related magic functions.""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. 
- - -import ast -import bdb -import builtins as builtin_mod -import gc -import itertools -import os -import shlex -import sys -import time -import timeit -import math -import re -from pdb import Restart - -# cProfile was added in Python2.5 -try: - import cProfile as profile - import pstats -except ImportError: - # profile isn't bundled by default in Debian for license reasons - try: - import profile, pstats - except ImportError: - profile = pstats = None - -from IPython.core import oinspect -from IPython.core import magic_arguments -from IPython.core import page -from IPython.core.error import UsageError -from IPython.core.macro import Macro -from IPython.core.magic import (Magics, magics_class, line_magic, cell_magic, - line_cell_magic, on_off, needs_local_scope, - no_var_expand) -from IPython.testing.skipdoctest import skip_doctest -from IPython.utils.contexts import preserve_keys -from IPython.utils.capture import capture_output -from IPython.utils.ipstruct import Struct -from IPython.utils.module_paths import find_mod -from IPython.utils.path import get_py_filename, shellglob -from IPython.utils.timing import clock, clock2 -from warnings import warn -from logging import error -from io import StringIO - -if sys.version_info > (3,8): - from ast import Module -else : - # mock the new API, ignore second argument - # see https://github.com/ipython/ipython/issues/11590 - from ast import Module as OriginalModule - Module = lambda nodelist, type_ignores: OriginalModule(nodelist) - - -#----------------------------------------------------------------------------- -# Magic implementation classes -#----------------------------------------------------------------------------- - - -class TimeitResult(object): - """ - Object returned by the timeit magic with info about the run. - - Contains the following attributes : - - loops: (int) number of loops done per measurement - repeat: (int) number of times the measurement has been repeated - best: (float) best execution time / number - all_runs: (list of float) execution time of each run (in s) - compile_time: (float) time of statement compilation (s) - - """ - def __init__(self, loops, repeat, best, worst, all_runs, compile_time, precision): - self.loops = loops - self.repeat = repeat - self.best = best - self.worst = worst - self.all_runs = all_runs - self.compile_time = compile_time - self._precision = precision - self.timings = [ dt / self.loops for dt in all_runs] - - @property - def average(self): - return math.fsum(self.timings) / len(self.timings) - - @property - def stdev(self): - mean = self.average - return (math.fsum([(x - mean) ** 2 for x in self.timings]) / len(self.timings)) ** 0.5 - - def __str__(self): - pm = '+-' - if hasattr(sys.stdout, 'encoding') and sys.stdout.encoding: - try: - u'\xb1'.encode(sys.stdout.encoding) - pm = u'\xb1' - except: - pass - return ( - u"{mean} {pm} {std} per loop (mean {pm} std. dev. of {runs} run{run_plural}, {loops} loop{loop_plural} each)" - .format( - pm = pm, - runs = self.repeat, - loops = self.loops, - loop_plural = "" if self.loops == 1 else "s", - run_plural = "" if self.repeat == 1 else "s", - mean = _format_time(self.average, self._precision), - std = _format_time(self.stdev, self._precision)) - ) - - def _repr_pretty_(self, p , cycle): - unic = self.__str__() - p.text(u'<TimeitResult : '+unic+u'>') - - -class TimeitTemplateFiller(ast.NodeTransformer): - """Fill in the AST template for timing execution. 
- - This is quite closely tied to the template definition, which is in - :meth:`ExecutionMagics.timeit`. - """ - def __init__(self, ast_setup, ast_stmt): - self.ast_setup = ast_setup - self.ast_stmt = ast_stmt - - def visit_FunctionDef(self, node): - "Fill in the setup statement" - self.generic_visit(node) - if node.name == "inner": - node.body[:1] = self.ast_setup.body - - return node - - def visit_For(self, node): - "Fill in the statement to be timed" - if getattr(getattr(node.body[0], 'value', None), 'id', None) == 'stmt': - node.body = self.ast_stmt.body - return node - - -class Timer(timeit.Timer): - """Timer class that explicitly uses self.inner - - which is an undocumented implementation detail of CPython, - not shared by PyPy. - """ - # Timer.timeit copied from CPython 3.4.2 - def timeit(self, number=timeit.default_number): - """Time 'number' executions of the main statement. - - To be precise, this executes the setup statement once, and - then returns the time it takes to execute the main statement - a number of times, as a float measured in seconds. The - argument is the number of times through the loop, defaulting - to one million. The main statement, the setup statement and - the timer function to be used are passed to the constructor. - """ - it = itertools.repeat(None, number) - gcold = gc.isenabled() - gc.disable() - try: - timing = self.inner(it, self.timer) - finally: - if gcold: - gc.enable() - return timing - - -@magics_class -class ExecutionMagics(Magics): - """Magics related to code execution, debugging, profiling, etc. - - """ - - def __init__(self, shell): - super(ExecutionMagics, self).__init__(shell) - if profile is None: - self.prun = self.profile_missing_notice - # Default execution function used to actually run user code. - self.default_runner = None - - def profile_missing_notice(self, *args, **kwargs): - error("""\ -The profile module could not be found. It has been removed from the standard -python packages because of its non-free license. To use profiling, install the -python-profiler package from non-free.""") - - @skip_doctest - @no_var_expand - @line_cell_magic - def prun(self, parameter_s='', cell=None): - - """Run a statement through the python code profiler. - - Usage, in line mode: - %prun [options] statement - - Usage, in cell mode: - %%prun [options] [statement] - code... - code... - - In cell mode, the additional code lines are appended to the (possibly - empty) statement in the first line. Cell mode allows you to easily - profile multiline blocks without having to put them in a separate - function. - - The given statement (which doesn't require quote marks) is run via the - python profiler in a manner similar to the profile.run() function. - Namespaces are internally managed to work correctly; profile.run - cannot be used in IPython because it makes certain assumptions about - namespaces which do not hold under IPython. - - Options: - - -l <limit> - you can place restrictions on what or how much of the - profile gets printed. The limit value can be: - - * A string: only information for function names containing this string - is printed. - - * An integer: only these many lines are printed. - - * A float (between 0 and 1): this fraction of the report is printed - (for example, use a limit of 0.4 to see the topmost 40% only). - - You can combine several limits with repeated use of the option. For - example, ``-l __init__ -l 5`` will print only the topmost 5 lines of - information about class constructors. 
- - -r - return the pstats.Stats object generated by the profiling. This - object has all the information about the profile in it, and you can - later use it for further analysis or in other functions. - - -s <key> - sort profile by given key. You can provide more than one key - by using the option several times: '-s key1 -s key2 -s key3...'. The - default sorting key is 'time'. - - The following is copied verbatim from the profile documentation - referenced below: - - When more than one key is provided, additional keys are used as - secondary criteria when the there is equality in all keys selected - before them. - - Abbreviations can be used for any key names, as long as the - abbreviation is unambiguous. The following are the keys currently - defined: - - ============ ===================== - Valid Arg Meaning - ============ ===================== - "calls" call count - "cumulative" cumulative time - "file" file name - "module" file name - "pcalls" primitive call count - "line" line number - "name" function name - "nfl" name/file/line - "stdname" standard name - "time" internal time - ============ ===================== - - Note that all sorts on statistics are in descending order (placing - most time consuming items first), where as name, file, and line number - searches are in ascending order (i.e., alphabetical). The subtle - distinction between "nfl" and "stdname" is that the standard name is a - sort of the name as printed, which means that the embedded line - numbers get compared in an odd way. For example, lines 3, 20, and 40 - would (if the file names were the same) appear in the string order - "20" "3" and "40". In contrast, "nfl" does a numeric compare of the - line numbers. In fact, sort_stats("nfl") is the same as - sort_stats("name", "file", "line"). - - -T <filename> - save profile results as shown on screen to a text - file. The profile is still shown on screen. - - -D <filename> - save (via dump_stats) profile statistics to given - filename. This data is in a format understood by the pstats module, and - is generated by a call to the dump_stats() method of profile - objects. The profile is still shown on screen. - - -q - suppress output to the pager. Best used with -T and/or -D above. - - If you want to run complete programs under the profiler's control, use - ``%run -p [prof_opts] filename.py [args to program]`` where prof_opts - contains profiler specific options as described here. - - You can read the complete documentation for the profile module with:: - - In [1]: import profile; profile.help() - - .. versionchanged:: 7.3 - User variables are no longer expanded, - the magic line is always left unmodified. - - """ - opts, arg_str = self.parse_options(parameter_s, 'D:l:rs:T:q', - list_all=True, posix=False) - if cell is not None: - arg_str += '\n' + cell - arg_str = self.shell.transform_cell(arg_str) - return self._run_with_profiler(arg_str, opts, self.shell.user_ns) - - def _run_with_profiler(self, code, opts, namespace): - """ - Run `code` with profiler. Used by ``%prun`` and ``%run -p``. - - Parameters - ---------- - code : str - Code to be executed. - opts : Struct - Options parsed by `self.parse_options`. - namespace : dict - A dictionary for Python namespace (e.g., `self.shell.user_ns`). 
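A stand-alone sketch of the cProfile/pstats pattern that _run_with_profiler wraps (the profiled statement and the print limit are illustrative):

    import cProfile as profile
    import io
    import pstats

    prof = profile.Profile()
    prof.runctx("sum(range(100000))", {}, {})   # run the statement under the profiler
    out = io.StringIO()
    stats = pstats.Stats(prof, stream=out).strip_dirs().sort_stats("time")
    stats.print_stats(5)                        # like `%prun -l 5`: only the topmost 5 lines
    print(out.getvalue())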
- - """ - - # Fill default values for unspecified options: - opts.merge(Struct(D=[''], l=[], s=['time'], T=[''])) - - prof = profile.Profile() - try: - prof = prof.runctx(code, namespace, namespace) - sys_exit = '' - except SystemExit: - sys_exit = """*** SystemExit exception caught in code being profiled.""" - - stats = pstats.Stats(prof).strip_dirs().sort_stats(*opts.s) - - lims = opts.l - if lims: - lims = [] # rebuild lims with ints/floats/strings - for lim in opts.l: - try: - lims.append(int(lim)) - except ValueError: - try: - lims.append(float(lim)) - except ValueError: - lims.append(lim) - - # Trap output. - stdout_trap = StringIO() - stats_stream = stats.stream - try: - stats.stream = stdout_trap - stats.print_stats(*lims) - finally: - stats.stream = stats_stream - - output = stdout_trap.getvalue() - output = output.rstrip() - - if 'q' not in opts: - page.page(output) - print(sys_exit, end=' ') - - dump_file = opts.D[0] - text_file = opts.T[0] - if dump_file: - prof.dump_stats(dump_file) - print('\n*** Profile stats marshalled to file',\ - repr(dump_file)+'.',sys_exit) - if text_file: - with open(text_file, 'w') as pfile: - pfile.write(output) - print('\n*** Profile printout saved to text file',\ - repr(text_file)+'.',sys_exit) - - if 'r' in opts: - return stats - else: - return None - - @line_magic - def pdb(self, parameter_s=''): - """Control the automatic calling of the pdb interactive debugger. - - Call as '%pdb on', '%pdb 1', '%pdb off' or '%pdb 0'. If called without - argument it works as a toggle. - - When an exception is triggered, IPython can optionally call the - interactive pdb debugger after the traceback printout. %pdb toggles - this feature on and off. - - The initial state of this feature is set in your configuration - file (the option is ``InteractiveShell.pdb``). - - If you want to just activate the debugger AFTER an exception has fired, - without having to type '%pdb on' and rerunning your code, you can use - the %debug magic.""" - - par = parameter_s.strip().lower() - - if par: - try: - new_pdb = {'off':0,'0':0,'on':1,'1':1}[par] - except KeyError: - print ('Incorrect argument. Use on/1, off/0, ' - 'or nothing for a toggle.') - return - else: - # toggle - new_pdb = not self.shell.call_pdb - - # set on the shell - self.shell.call_pdb = new_pdb - print('Automatic pdb calling has been turned',on_off(new_pdb)) - - @skip_doctest - @magic_arguments.magic_arguments() - @magic_arguments.argument('--breakpoint', '-b', metavar='FILE:LINE', - help=""" - Set break point at LINE in FILE. - """ - ) - @magic_arguments.argument('statement', nargs='*', - help=""" - Code to run in debugger. - You can omit this in cell magic mode. - """ - ) - @no_var_expand - @line_cell_magic - def debug(self, line='', cell=None): - """Activate the interactive debugger. - - This magic command support two ways of activating debugger. - One is to activate debugger before executing code. This way, you - can set a break point, to step through the code from the point. - You can use this mode by giving statements to execute and optionally - a breakpoint. - - The other one is to activate debugger in post-mortem mode. You can - activate this mode simply running %debug without any argument. - If an exception has just occurred, this lets you inspect its stack - frames interactively. Note that this will always work only on the last - traceback that occurred, so you must call this quickly after an - exception that you wish to inspect has fired, because if another one - occurs, it clobbers the previous one. 
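Typical invocations of the two debugger entry points described above (the file name, line number and function are made up for illustration):

    In [1]: %pdb on                           # enter pdb automatically whenever an exception fires
    In [2]: %debug                            # post-mortem inspection of the most recent traceback
    In [3]: %debug -b myscript.py:42 main()   # set a breakpoint first, then run the statement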
- - If you want IPython to automatically do this on every exception, see - the %pdb magic for more details. - - .. versionchanged:: 7.3 - When running code, user variables are no longer expanded, - the magic line is always left unmodified. - - """ - args = magic_arguments.parse_argstring(self.debug, line) - - if not (args.breakpoint or args.statement or cell): - self._debug_post_mortem() - elif not (args.breakpoint or cell): - # If there is no breakpoints, the line is just code to execute - self._debug_exec(line, None) - else: - # Here we try to reconstruct the code from the output of - # parse_argstring. This might not work if the code has spaces - # For example this fails for `print("a b")` - code = "\n".join(args.statement) - if cell: - code += "\n" + cell - self._debug_exec(code, args.breakpoint) - - def _debug_post_mortem(self): - self.shell.debugger(force=True) - - def _debug_exec(self, code, breakpoint): - if breakpoint: - (filename, bp_line) = breakpoint.rsplit(':', 1) - bp_line = int(bp_line) - else: - (filename, bp_line) = (None, None) - self._run_with_debugger(code, self.shell.user_ns, filename, bp_line) - - @line_magic - def tb(self, s): - """Print the last traceback. - - Optionally, specify an exception reporting mode, tuning the - verbosity of the traceback. By default the currently-active exception - mode is used. See %xmode for changing exception reporting modes. - - Valid modes: Plain, Context, Verbose, and Minimal. - """ - interactive_tb = self.shell.InteractiveTB - if s: - # Switch exception reporting mode for this one call. - # Ensure it is switched back. - def xmode_switch_err(name): - warn('Error changing %s exception modes.\n%s' % - (name,sys.exc_info()[1])) - - new_mode = s.strip().capitalize() - original_mode = interactive_tb.mode - try: - try: - interactive_tb.set_mode(mode=new_mode) - except Exception: - xmode_switch_err('user') - else: - self.shell.showtraceback() - finally: - interactive_tb.set_mode(mode=original_mode) - else: - self.shell.showtraceback() - - @skip_doctest - @line_magic - def run(self, parameter_s='', runner=None, - file_finder=get_py_filename): - """Run the named file inside IPython as a program. - - Usage:: - - %run [-n -i -e -G] - [( -t [-N<N>] | -d [-b<N>] | -p [profile options] )] - ( -m mod | filename ) [args] - - The filename argument should be either a pure Python script (with - extension ``.py``), or a file with custom IPython syntax (such as - magics). If the latter, the file can be either a script with ``.ipy`` - extension, or a Jupyter notebook with ``.ipynb`` extension. When running - a Jupyter notebook, the output from print statements and other - displayed objects will appear in the terminal (even matplotlib figures - will open, if a terminal-compliant backend is being used). Note that, - at the system command line, the ``jupyter run`` command offers similar - functionality for executing notebooks (albeit currently with some - differences in supported options). - - Parameters after the filename are passed as command-line arguments to - the program (put in sys.argv). Then, control returns to IPython's - prompt. - - This is similar to running at a system prompt ``python file args``, - but with the advantage of giving you IPython's tracebacks, and of - loading all variables into your interactive namespace for further use - (unless -p is used, see below). - - The file is executed in a namespace initially consisting only of - ``__name__=='__main__'`` and sys.argv constructed as indicated. 
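For instance, hedged examples of the breakpoint form of ``%debug`` and of ``%tb`` with an explicit mode (``myscript.py`` is a hypothetical file)::

    In [1]: %debug -b myscript.py:10 import myscript   # stop at line 10 before running the statement

    In [2]: %tb Verbose                                 # reprint the last traceback in Verbose mode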
It thus - sees its environment as if it were being run as a stand-alone program - (except for sharing global objects such as previously imported - modules). But after execution, the IPython interactive namespace gets - updated with all variables defined in the program (except for __name__ - and sys.argv). This allows for very convenient loading of code for - interactive work, while giving each program a 'clean sheet' to run in. - - Arguments are expanded using shell-like glob match. Patterns - '*', '?', '[seq]' and '[!seq]' can be used. Additionally, - tilde '~' will be expanded into user's home directory. Unlike - real shells, quotation does not suppress expansions. Use - *two* back slashes (e.g. ``\\\\*``) to suppress expansions. - To completely disable these expansions, you can use -G flag. - - On Windows systems, the use of single quotes `'` when specifying - a file is not supported. Use double quotes `"`. - - Options: - - -n - __name__ is NOT set to '__main__', but to the running file's name - without extension (as python does under import). This allows running - scripts and reloading the definitions in them without calling code - protected by an ``if __name__ == "__main__"`` clause. - - -i - run the file in IPython's namespace instead of an empty one. This - is useful if you are experimenting with code written in a text editor - which depends on variables defined interactively. - - -e - ignore sys.exit() calls or SystemExit exceptions in the script - being run. This is particularly useful if IPython is being used to - run unittests, which always exit with a sys.exit() call. In such - cases you are interested in the output of the test results, not in - seeing a traceback of the unittest module. - - -t - print timing information at the end of the run. IPython will give - you an estimated CPU time consumption for your script, which under - Unix uses the resource module to avoid the wraparound problems of - time.clock(). Under Unix, an estimate of time spent on system tasks - is also given (for Windows platforms this is reported as 0.0). - - If -t is given, an additional ``-N<N>`` option can be given, where <N> - must be an integer indicating how many times you want the script to - run. The final timing report will include total and per run results. - - For example (testing the script uniq_stable.py):: - - In [1]: run -t uniq_stable - - IPython CPU timings (estimated): - User : 0.19597 s. - System: 0.0 s. - - In [2]: run -t -N5 uniq_stable - - IPython CPU timings (estimated): - Total runs performed: 5 - Times : Total Per run - User : 0.910862 s, 0.1821724 s. - System: 0.0 s, 0.0 s. - - -d - run your program under the control of pdb, the Python debugger. - This allows you to execute your program step by step, watch variables, - etc. Internally, what IPython does is similar to calling:: - - pdb.run('execfile("YOURFILENAME")') - - with a breakpoint set on line 1 of your file. You can change the line - number for this automatic breakpoint to be <N> by using the -bN option - (where N must be an integer). For example:: - - %run -d -b40 myscript - - will set the first breakpoint at line 40 in myscript.py. Note that - the first breakpoint must be set on a line which actually does - something (not a comment or docstring) for it to stop execution. - - Or you can specify a breakpoint in a different file:: - - %run -d -b myotherfile.py:20 myscript - - When the pdb debugger starts, you will see a (Pdb) prompt. 
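As an illustrative sketch of the ``-n`` and ``-i`` switches (``helpers.py`` and ``some_helper`` are hypothetical), combining them reloads a script's definitions into the interactive namespace without triggering its ``if __name__ == "__main__"`` block::

    In [1]: %run -n -i helpers.py   # __name__ is set to 'helpers', run in IPython's namespace

    In [2]: some_helper(42)         # a function defined in helpers.py, now directly available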
You must - first enter 'c' (without quotes) to start execution up to the first - breakpoint. - - Entering 'help' gives information about the use of the debugger. You - can easily see pdb's full documentation with "import pdb;pdb.help()" - at a prompt. - - -p - run program under the control of the Python profiler module (which - prints a detailed report of execution times, function calls, etc). - - You can pass other options after -p which affect the behavior of the - profiler itself. See the docs for %prun for details. - - In this mode, the program's variables do NOT propagate back to the - IPython interactive namespace (because they remain in the namespace - where the profiler executes them). - - Internally this triggers a call to %prun, see its documentation for - details on the options available specifically for profiling. - - There is one special usage for which the text above doesn't apply: - if the filename ends with .ipy[nb], the file is run as ipython script, - just as if the commands were written on IPython prompt. - - -m - specify module name to load instead of script path. Similar to - the -m option for the python interpreter. Use this option last if you - want to combine with other %run options. Unlike the python interpreter - only source modules are allowed no .pyc or .pyo files. - For example:: - - %run -m example - - will run the example module. - - -G - disable shell-like glob expansion of arguments. - - """ - - # Logic to handle issue #3664 - # Add '--' after '-m <module_name>' to ignore additional args passed to a module. - if '-m' in parameter_s and '--' not in parameter_s: - argv = shlex.split(parameter_s, posix=(os.name == 'posix')) - for idx, arg in enumerate(argv): - if arg and arg.startswith('-') and arg != '-': - if arg == '-m': - argv.insert(idx + 2, '--') - break - else: - # Positional arg, break - break - parameter_s = ' '.join(shlex.quote(arg) for arg in argv) - - # get arguments and set sys.argv for program to be run. - opts, arg_lst = self.parse_options(parameter_s, - 'nidtN:b:pD:l:rs:T:em:G', - mode='list', list_all=1) - if "m" in opts: - modulename = opts["m"][0] - modpath = find_mod(modulename) - if modpath is None: - msg = '%r is not a valid modulename on sys.path'%modulename - raise Exception(msg) - arg_lst = [modpath] + arg_lst - try: - fpath = None # initialize to make sure fpath is in scope later - fpath = arg_lst[0] - filename = file_finder(fpath) - except IndexError: - msg = 'you must provide at least a filename.' - raise Exception(msg) - except IOError as e: - try: - msg = str(e) - except UnicodeError: - msg = e.message - if os.name == 'nt' and re.match(r"^'.*'$",fpath): - warn('For Windows, use double quotes to wrap a filename: %run "mypath\\myfile.py"') - raise Exception(msg) - except TypeError: - if fpath in sys.meta_path: - filename = "" - else: - raise - - if filename.lower().endswith(('.ipy', '.ipynb')): - with preserve_keys(self.shell.user_ns, '__file__'): - self.shell.user_ns['__file__'] = filename - self.shell.safe_execfile_ipy(filename, raise_exceptions=True) - return - - # Control the response to exit() calls made by the script being run - exit_ignore = 'e' in opts - - # Make sure that the running script gets a proper sys.argv as if it - # were run from a system shell. 
- save_argv = sys.argv # save it for later restoring - - if 'G' in opts: - args = arg_lst[1:] - else: - # tilde and glob expansion - args = shellglob(map(os.path.expanduser, arg_lst[1:])) - - sys.argv = [filename] + args # put in the proper filename - - if 'n' in opts: - name = os.path.splitext(os.path.basename(filename))[0] - else: - name = '__main__' - - if 'i' in opts: - # Run in user's interactive namespace - prog_ns = self.shell.user_ns - __name__save = self.shell.user_ns['__name__'] - prog_ns['__name__'] = name - main_mod = self.shell.user_module - - # Since '%run foo' emulates 'python foo.py' at the cmd line, we must - # set the __file__ global in the script's namespace - # TK: Is this necessary in interactive mode? - prog_ns['__file__'] = filename - else: - # Run in a fresh, empty namespace - - # The shell MUST hold a reference to prog_ns so after %run - # exits, the python deletion mechanism doesn't zero it out - # (leaving dangling references). See interactiveshell for details - main_mod = self.shell.new_main_mod(filename, name) - prog_ns = main_mod.__dict__ - - # pickle fix. See interactiveshell for an explanation. But we need to - # make sure that, if we overwrite __main__, we replace it at the end - main_mod_name = prog_ns['__name__'] - - if main_mod_name == '__main__': - restore_main = sys.modules['__main__'] - else: - restore_main = False - - # This needs to be undone at the end to prevent holding references to - # every single object ever created. - sys.modules[main_mod_name] = main_mod - - if 'p' in opts or 'd' in opts: - if 'm' in opts: - code = 'run_module(modulename, prog_ns)' - code_ns = { - 'run_module': self.shell.safe_run_module, - 'prog_ns': prog_ns, - 'modulename': modulename, - } - else: - if 'd' in opts: - # allow exceptions to raise in debug mode - code = 'execfile(filename, prog_ns, raise_exceptions=True)' - else: - code = 'execfile(filename, prog_ns)' - code_ns = { - 'execfile': self.shell.safe_execfile, - 'prog_ns': prog_ns, - 'filename': get_py_filename(filename), - } - - try: - stats = None - if 'p' in opts: - stats = self._run_with_profiler(code, opts, code_ns) - else: - if 'd' in opts: - bp_file, bp_line = parse_breakpoint( - opts.get('b', ['1'])[0], filename) - self._run_with_debugger( - code, code_ns, filename, bp_line, bp_file) - else: - if 'm' in opts: - def run(): - self.shell.safe_run_module(modulename, prog_ns) - else: - if runner is None: - runner = self.default_runner - if runner is None: - runner = self.shell.safe_execfile - - def run(): - runner(filename, prog_ns, prog_ns, - exit_ignore=exit_ignore) - - if 't' in opts: - # timed execution - try: - nruns = int(opts['N'][0]) - if nruns < 1: - error('Number of runs must be >=1') - return - except (KeyError): - nruns = 1 - self._run_with_timing(run, nruns) - else: - # regular execution - run() - - if 'i' in opts: - self.shell.user_ns['__name__'] = __name__save - else: - # update IPython interactive namespace - - # Some forms of read errors on the file may mean the - # __name__ key was never set; using pop we don't have to - # worry about a possible KeyError. - prog_ns.pop('__name__', None) - - with preserve_keys(self.shell.user_ns, '__file__'): - self.shell.user_ns.update(prog_ns) - finally: - # It's a bit of a mystery why, but __builtins__ can change from - # being a module to becoming a dict missing some key data after - # %run. 
As best I can see, this is NOT something IPython is doing - # at all, and similar problems have been reported before: - # http://coding.derkeiler.com/Archive/Python/comp.lang.python/2004-10/0188.html - # Since this seems to be done by the interpreter itself, the best - # we can do is to at least restore __builtins__ for the user on - # exit. - self.shell.user_ns['__builtins__'] = builtin_mod - - # Ensure key global structures are restored - sys.argv = save_argv - if restore_main: - sys.modules['__main__'] = restore_main - if '__mp_main__' in sys.modules: - sys.modules['__mp_main__'] = restore_main - else: - # Remove from sys.modules the reference to main_mod we'd - # added. Otherwise it will trap references to objects - # contained therein. - del sys.modules[main_mod_name] - - return stats - - def _run_with_debugger(self, code, code_ns, filename=None, - bp_line=None, bp_file=None): - """ - Run `code` in debugger with a break point. - - Parameters - ---------- - code : str - Code to execute. - code_ns : dict - A namespace in which `code` is executed. - filename : str - `code` is ran as if it is in `filename`. - bp_line : int, optional - Line number of the break point. - bp_file : str, optional - Path to the file in which break point is specified. - `filename` is used if not given. - - Raises - ------ - UsageError - If the break point given by `bp_line` is not valid. - - """ - deb = self.shell.InteractiveTB.pdb - if not deb: - self.shell.InteractiveTB.pdb = self.shell.InteractiveTB.debugger_cls() - deb = self.shell.InteractiveTB.pdb - - # deb.checkline() fails if deb.curframe exists but is None; it can - # handle it not existing. https://github.com/ipython/ipython/issues/10028 - if hasattr(deb, 'curframe'): - del deb.curframe - - # reset Breakpoint state, which is moronically kept - # in a class - bdb.Breakpoint.next = 1 - bdb.Breakpoint.bplist = {} - bdb.Breakpoint.bpbynumber = [None] - deb.clear_all_breaks() - if bp_line is not None: - # Set an initial breakpoint to stop execution - maxtries = 10 - bp_file = bp_file or filename - checkline = deb.checkline(bp_file, bp_line) - if not checkline: - for bp in range(bp_line + 1, bp_line + maxtries + 1): - if deb.checkline(bp_file, bp): - break - else: - msg = ("\nI failed to find a valid line to set " - "a breakpoint\n" - "after trying up to line: %s.\n" - "Please set a valid breakpoint manually " - "with the -b option." % bp) - raise UsageError(msg) - # if we find a good linenumber, set the breakpoint - deb.do_break('%s:%s' % (bp_file, bp_line)) - - if filename: - # Mimic Pdb._runscript(...) - deb._wait_for_mainpyfile = True - deb.mainpyfile = deb.canonic(filename) - - # Start file run - print("NOTE: Enter 'c' at the %s prompt to continue execution." % deb.prompt) - try: - if filename: - # save filename so it can be used by methods on the deb object - deb._exec_filename = filename - while True: - try: - trace = sys.gettrace() - deb.run(code, code_ns) - except Restart: - print("Restarting") - if filename: - deb._wait_for_mainpyfile = True - deb.mainpyfile = deb.canonic(filename) - continue - else: - break - finally: - sys.settrace(trace) - - - except: - etype, value, tb = sys.exc_info() - # Skip three frames in the traceback: the %run one, - # one inside bdb.py, and the command-line typed by the - # user (run by exec in pdb itself). - self.shell.InteractiveTB(etype, value, tb, tb_offset=3) - - @staticmethod - def _run_with_timing(run, nruns): - """ - Run function `run` and print timing information. 
- - Parameters - ---------- - run : callable - Any callable object which takes no argument. - nruns : int - Number of times to execute `run`. - - """ - twall0 = time.perf_counter() - if nruns == 1: - t0 = clock2() - run() - t1 = clock2() - t_usr = t1[0] - t0[0] - t_sys = t1[1] - t0[1] - print("\nIPython CPU timings (estimated):") - print(" User : %10.2f s." % t_usr) - print(" System : %10.2f s." % t_sys) - else: - runs = range(nruns) - t0 = clock2() - for nr in runs: - run() - t1 = clock2() - t_usr = t1[0] - t0[0] - t_sys = t1[1] - t0[1] - print("\nIPython CPU timings (estimated):") - print("Total runs performed:", nruns) - print(" Times : %10s %10s" % ('Total', 'Per run')) - print(" User : %10.2f s, %10.2f s." % (t_usr, t_usr / nruns)) - print(" System : %10.2f s, %10.2f s." % (t_sys, t_sys / nruns)) - twall1 = time.perf_counter() - print("Wall time: %10.2f s." % (twall1 - twall0)) - - @skip_doctest - @no_var_expand - @line_cell_magic - @needs_local_scope - def timeit(self, line='', cell=None, local_ns=None): - """Time execution of a Python statement or expression - - Usage, in line mode: - %timeit [-n<N> -r<R> [-t|-c] -q -p<P> -o] statement - or in cell mode: - %%timeit [-n<N> -r<R> [-t|-c] -q -p<P> -o] setup_code - code - code... - - Time execution of a Python statement or expression using the timeit - module. This function can be used both as a line and cell magic: - - - In line mode you can time a single-line statement (though multiple - ones can be chained with using semicolons). - - - In cell mode, the statement in the first line is used as setup code - (executed but not timed) and the body of the cell is timed. The cell - body has access to any variables created in the setup code. - - Options: - -n<N>: execute the given statement <N> times in a loop. If <N> is not - provided, <N> is determined so as to get sufficient accuracy. - - -r<R>: number of repeats <R>, each consisting of <N> loops, and take the - best result. - Default: 7 - - -t: use time.time to measure the time, which is the default on Unix. - This function measures wall time. - - -c: use time.clock to measure the time, which is the default on - Windows and measures wall time. On Unix, resource.getrusage is used - instead and returns the CPU user time. - - -p<P>: use a precision of <P> digits to display the timing result. - Default: 3 - - -q: Quiet, do not print result. - - -o: return a TimeitResult that can be stored in a variable to inspect - the result in more details. - - .. versionchanged:: 7.3 - User variables are no longer expanded, - the magic line is always left unmodified. - - Examples - -------- - :: - - In [1]: %timeit pass - 8.26 ns ± 0.12 ns per loop (mean ± std. dev. of 7 runs, 100000000 loops each) - - In [2]: u = None - - In [3]: %timeit u is None - 29.9 ns ± 0.643 ns per loop (mean ± std. dev. of 7 runs, 10000000 loops each) - - In [4]: %timeit -r 4 u == None - - In [5]: import time - - In [6]: %timeit -n1 time.sleep(2) - - - The times reported by %timeit will be slightly higher than those - reported by the timeit.py script when variables are accessed. This is - due to the fact that %timeit executes the statement in the namespace - of the shell, compared with timeit.py, which uses a single setup - statement to import function or create variables. 
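As a hedged illustration of the ``-o`` flag, the returned ``TimeitResult`` can be stored and inspected afterwards (the timed statement is arbitrary)::

    In [7]: res = %timeit -o -q sum(range(1000))

    In [8]: res.best, res.average, res.stdev   # per-loop timings, in seconds

    In [9]: res.loops, res.repeat              # loop count and number of repeats used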
Generally, the bias - does not matter as long as results from timeit.py are not mixed with - those from %timeit.""" - - opts, stmt = self.parse_options(line,'n:r:tcp:qo', - posix=False, strict=False) - if stmt == "" and cell is None: - return - - timefunc = timeit.default_timer - number = int(getattr(opts, "n", 0)) - default_repeat = 7 if timeit.default_repeat < 7 else timeit.default_repeat - repeat = int(getattr(opts, "r", default_repeat)) - precision = int(getattr(opts, "p", 3)) - quiet = 'q' in opts - return_result = 'o' in opts - if hasattr(opts, "t"): - timefunc = time.time - if hasattr(opts, "c"): - timefunc = clock - - timer = Timer(timer=timefunc) - # this code has tight coupling to the inner workings of timeit.Timer, - # but is there a better way to achieve that the code stmt has access - # to the shell namespace? - transform = self.shell.transform_cell - - if cell is None: - # called as line magic - ast_setup = self.shell.compile.ast_parse("pass") - ast_stmt = self.shell.compile.ast_parse(transform(stmt)) - else: - ast_setup = self.shell.compile.ast_parse(transform(stmt)) - ast_stmt = self.shell.compile.ast_parse(transform(cell)) - - ast_setup = self.shell.transform_ast(ast_setup) - ast_stmt = self.shell.transform_ast(ast_stmt) - - # Check that these compile to valid Python code *outside* the timer func - # Invalid code may become valid when put inside the function & loop, - # which messes up error messages. - # https://github.com/ipython/ipython/issues/10636 - self.shell.compile(ast_setup, "<magic-timeit-setup>", "exec") - self.shell.compile(ast_stmt, "<magic-timeit-stmt>", "exec") - - # This codestring is taken from timeit.template - we fill it in as an - # AST, so that we can apply our AST transformations to the user code - # without affecting the timing code. - timeit_ast_template = ast.parse('def inner(_it, _timer):\n' - ' setup\n' - ' _t0 = _timer()\n' - ' for _i in _it:\n' - ' stmt\n' - ' _t1 = _timer()\n' - ' return _t1 - _t0\n') - - timeit_ast = TimeitTemplateFiller(ast_setup, ast_stmt).visit(timeit_ast_template) - timeit_ast = ast.fix_missing_locations(timeit_ast) - - # Track compilation time so it can be reported if too long - # Minimum time above which compilation time will be reported - tc_min = 0.1 - - t0 = clock() - code = self.shell.compile(timeit_ast, "<magic-timeit>", "exec") - tc = clock()-t0 - - ns = {} - glob = self.shell.user_ns - # handles global vars with same name as local vars. We store them in conflict_globs. - conflict_globs = {} - if local_ns and cell is None: - for var_name, var_val in glob.items(): - if var_name in local_ns: - conflict_globs[var_name] = var_val - glob.update(local_ns) - - exec(code, glob, ns) - timer.inner = ns["inner"] - - # This is used to check if there is a huge difference between the - # best and worst timings. - # Issue: https://github.com/ipython/ipython/issues/6471 - if number == 0: - # determine number so that 0.2 <= total time < 2.0 - for index in range(0, 10): - number = 10 ** index - time_number = timer.timeit(number) - if time_number >= 0.2: - break - - all_runs = timer.repeat(repeat, number) - best = min(all_runs) / number - worst = max(all_runs) / number - timeit_result = TimeitResult(number, repeat, best, worst, all_runs, tc, precision) - - # Restore global vars from conflict_globs - if conflict_globs: - glob.update(conflict_globs) - - if not quiet : - # Check best timing is greater than zero to avoid a - # ZeroDivisionError. 
- # In cases where the slowest timing is lesser than a microsecond - # we assume that it does not really matter if the fastest - # timing is 4 times faster than the slowest timing or not. - if worst > 4 * best and best > 0 and worst > 1e-6: - print("The slowest run took %0.2f times longer than the " - "fastest. This could mean that an intermediate result " - "is being cached." % (worst / best)) +# -*- coding: utf-8 -*- +"""Implementation of execution-related magic functions.""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + + +import ast +import bdb +import builtins as builtin_mod +import gc +import itertools +import os +import shlex +import sys +import time +import timeit +import math +import re +from pdb import Restart + +# cProfile was added in Python2.5 +try: + import cProfile as profile + import pstats +except ImportError: + # profile isn't bundled by default in Debian for license reasons + try: + import profile, pstats + except ImportError: + profile = pstats = None + +from IPython.core import oinspect +from IPython.core import magic_arguments +from IPython.core import page +from IPython.core.error import UsageError +from IPython.core.macro import Macro +from IPython.core.magic import (Magics, magics_class, line_magic, cell_magic, + line_cell_magic, on_off, needs_local_scope, + no_var_expand) +from IPython.testing.skipdoctest import skip_doctest +from IPython.utils.contexts import preserve_keys +from IPython.utils.capture import capture_output +from IPython.utils.ipstruct import Struct +from IPython.utils.module_paths import find_mod +from IPython.utils.path import get_py_filename, shellglob +from IPython.utils.timing import clock, clock2 +from warnings import warn +from logging import error +from io import StringIO + +if sys.version_info > (3,8): + from ast import Module +else : + # mock the new API, ignore second argument + # see https://github.com/ipython/ipython/issues/11590 + from ast import Module as OriginalModule + Module = lambda nodelist, type_ignores: OriginalModule(nodelist) + + +#----------------------------------------------------------------------------- +# Magic implementation classes +#----------------------------------------------------------------------------- + + +class TimeitResult(object): + """ + Object returned by the timeit magic with info about the run. + + Contains the following attributes : + + loops: (int) number of loops done per measurement + repeat: (int) number of times the measurement has been repeated + best: (float) best execution time / number + all_runs: (list of float) execution time of each run (in s) + compile_time: (float) time of statement compilation (s) + + """ + def __init__(self, loops, repeat, best, worst, all_runs, compile_time, precision): + self.loops = loops + self.repeat = repeat + self.best = best + self.worst = worst + self.all_runs = all_runs + self.compile_time = compile_time + self._precision = precision + self.timings = [ dt / self.loops for dt in all_runs] + + @property + def average(self): + return math.fsum(self.timings) / len(self.timings) + + @property + def stdev(self): + mean = self.average + return (math.fsum([(x - mean) ** 2 for x in self.timings]) / len(self.timings)) ** 0.5 + + def __str__(self): + pm = '+-' + if hasattr(sys.stdout, 'encoding') and sys.stdout.encoding: + try: + u'\xb1'.encode(sys.stdout.encoding) + pm = u'\xb1' + except: + pass + return ( + u"{mean} {pm} {std} per loop (mean {pm} std. dev. 
of {runs} run{run_plural}, {loops} loop{loop_plural} each)" + .format( + pm = pm, + runs = self.repeat, + loops = self.loops, + loop_plural = "" if self.loops == 1 else "s", + run_plural = "" if self.repeat == 1 else "s", + mean = _format_time(self.average, self._precision), + std = _format_time(self.stdev, self._precision)) + ) + + def _repr_pretty_(self, p , cycle): + unic = self.__str__() + p.text(u'<TimeitResult : '+unic+u'>') + + +class TimeitTemplateFiller(ast.NodeTransformer): + """Fill in the AST template for timing execution. + + This is quite closely tied to the template definition, which is in + :meth:`ExecutionMagics.timeit`. + """ + def __init__(self, ast_setup, ast_stmt): + self.ast_setup = ast_setup + self.ast_stmt = ast_stmt + + def visit_FunctionDef(self, node): + "Fill in the setup statement" + self.generic_visit(node) + if node.name == "inner": + node.body[:1] = self.ast_setup.body + + return node + + def visit_For(self, node): + "Fill in the statement to be timed" + if getattr(getattr(node.body[0], 'value', None), 'id', None) == 'stmt': + node.body = self.ast_stmt.body + return node + + +class Timer(timeit.Timer): + """Timer class that explicitly uses self.inner + + which is an undocumented implementation detail of CPython, + not shared by PyPy. + """ + # Timer.timeit copied from CPython 3.4.2 + def timeit(self, number=timeit.default_number): + """Time 'number' executions of the main statement. + + To be precise, this executes the setup statement once, and + then returns the time it takes to execute the main statement + a number of times, as a float measured in seconds. The + argument is the number of times through the loop, defaulting + to one million. The main statement, the setup statement and + the timer function to be used are passed to the constructor. + """ + it = itertools.repeat(None, number) + gcold = gc.isenabled() + gc.disable() + try: + timing = self.inner(it, self.timer) + finally: + if gcold: + gc.enable() + return timing + + +@magics_class +class ExecutionMagics(Magics): + """Magics related to code execution, debugging, profiling, etc. + + """ + + def __init__(self, shell): + super(ExecutionMagics, self).__init__(shell) + if profile is None: + self.prun = self.profile_missing_notice + # Default execution function used to actually run user code. + self.default_runner = None + + def profile_missing_notice(self, *args, **kwargs): + error("""\ +The profile module could not be found. It has been removed from the standard +python packages because of its non-free license. To use profiling, install the +python-profiler package from non-free.""") + + @skip_doctest + @no_var_expand + @line_cell_magic + def prun(self, parameter_s='', cell=None): + + """Run a statement through the python code profiler. + + Usage, in line mode: + %prun [options] statement + + Usage, in cell mode: + %%prun [options] [statement] + code... + code... + + In cell mode, the additional code lines are appended to the (possibly + empty) statement in the first line. Cell mode allows you to easily + profile multiline blocks without having to put them in a separate + function. + + The given statement (which doesn't require quote marks) is run via the + python profiler in a manner similar to the profile.run() function. + Namespaces are internally managed to work correctly; profile.run + cannot be used in IPython because it makes certain assumptions about + namespaces which do not hold under IPython. 
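A hedged sketch of the cell-magic form just mentioned, profiling a small multi-line block (the loop is purely illustrative)::

    In [1]: %%prun -s cumulative
       ...: total = 0
       ...: for i in range(10000):
       ...:     total += i ** 2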
+ + Options: + + -l <limit> + you can place restrictions on what or how much of the + profile gets printed. The limit value can be: + + * A string: only information for function names containing this string + is printed. + + * An integer: only these many lines are printed. + + * A float (between 0 and 1): this fraction of the report is printed + (for example, use a limit of 0.4 to see the topmost 40% only). + + You can combine several limits with repeated use of the option. For + example, ``-l __init__ -l 5`` will print only the topmost 5 lines of + information about class constructors. + + -r + return the pstats.Stats object generated by the profiling. This + object has all the information about the profile in it, and you can + later use it for further analysis or in other functions. + + -s <key> + sort profile by given key. You can provide more than one key + by using the option several times: '-s key1 -s key2 -s key3...'. The + default sorting key is 'time'. + + The following is copied verbatim from the profile documentation + referenced below: + + When more than one key is provided, additional keys are used as + secondary criteria when the there is equality in all keys selected + before them. + + Abbreviations can be used for any key names, as long as the + abbreviation is unambiguous. The following are the keys currently + defined: + + ============ ===================== + Valid Arg Meaning + ============ ===================== + "calls" call count + "cumulative" cumulative time + "file" file name + "module" file name + "pcalls" primitive call count + "line" line number + "name" function name + "nfl" name/file/line + "stdname" standard name + "time" internal time + ============ ===================== + + Note that all sorts on statistics are in descending order (placing + most time consuming items first), where as name, file, and line number + searches are in ascending order (i.e., alphabetical). The subtle + distinction between "nfl" and "stdname" is that the standard name is a + sort of the name as printed, which means that the embedded line + numbers get compared in an odd way. For example, lines 3, 20, and 40 + would (if the file names were the same) appear in the string order + "20" "3" and "40". In contrast, "nfl" does a numeric compare of the + line numbers. In fact, sort_stats("nfl") is the same as + sort_stats("name", "file", "line"). + + -T <filename> + save profile results as shown on screen to a text + file. The profile is still shown on screen. + + -D <filename> + save (via dump_stats) profile statistics to given + filename. This data is in a format understood by the pstats module, and + is generated by a call to the dump_stats() method of profile + objects. The profile is still shown on screen. + + -q + suppress output to the pager. Best used with -T and/or -D above. + + If you want to run complete programs under the profiler's control, use + ``%run -p [prof_opts] filename.py [args to program]`` where prof_opts + contains profiler specific options as described here. + + You can read the complete documentation for the profile module with:: + + In [1]: import profile; profile.help() + + .. versionchanged:: 7.3 + User variables are no longer expanded, + the magic line is always left unmodified. 
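To illustrate the ``-D`` option above, a dump written this way can later be reloaded with the ``pstats`` module (the profiled call and file name are hypothetical)::

    In [1]: %prun -q -D prof.out my_func()   # writes binary stats to prof.out

    In [2]: import pstats

    In [3]: pstats.Stats('prof.out').strip_dirs().sort_stats('time').print_stats(5)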
+ + """ + opts, arg_str = self.parse_options(parameter_s, 'D:l:rs:T:q', + list_all=True, posix=False) + if cell is not None: + arg_str += '\n' + cell + arg_str = self.shell.transform_cell(arg_str) + return self._run_with_profiler(arg_str, opts, self.shell.user_ns) + + def _run_with_profiler(self, code, opts, namespace): + """ + Run `code` with profiler. Used by ``%prun`` and ``%run -p``. + + Parameters + ---------- + code : str + Code to be executed. + opts : Struct + Options parsed by `self.parse_options`. + namespace : dict + A dictionary for Python namespace (e.g., `self.shell.user_ns`). + + """ + + # Fill default values for unspecified options: + opts.merge(Struct(D=[''], l=[], s=['time'], T=[''])) + + prof = profile.Profile() + try: + prof = prof.runctx(code, namespace, namespace) + sys_exit = '' + except SystemExit: + sys_exit = """*** SystemExit exception caught in code being profiled.""" + + stats = pstats.Stats(prof).strip_dirs().sort_stats(*opts.s) + + lims = opts.l + if lims: + lims = [] # rebuild lims with ints/floats/strings + for lim in opts.l: + try: + lims.append(int(lim)) + except ValueError: + try: + lims.append(float(lim)) + except ValueError: + lims.append(lim) + + # Trap output. + stdout_trap = StringIO() + stats_stream = stats.stream + try: + stats.stream = stdout_trap + stats.print_stats(*lims) + finally: + stats.stream = stats_stream + + output = stdout_trap.getvalue() + output = output.rstrip() + + if 'q' not in opts: + page.page(output) + print(sys_exit, end=' ') + + dump_file = opts.D[0] + text_file = opts.T[0] + if dump_file: + prof.dump_stats(dump_file) + print('\n*** Profile stats marshalled to file',\ + repr(dump_file)+'.',sys_exit) + if text_file: + with open(text_file, 'w') as pfile: + pfile.write(output) + print('\n*** Profile printout saved to text file',\ + repr(text_file)+'.',sys_exit) + + if 'r' in opts: + return stats + else: + return None + + @line_magic + def pdb(self, parameter_s=''): + """Control the automatic calling of the pdb interactive debugger. + + Call as '%pdb on', '%pdb 1', '%pdb off' or '%pdb 0'. If called without + argument it works as a toggle. + + When an exception is triggered, IPython can optionally call the + interactive pdb debugger after the traceback printout. %pdb toggles + this feature on and off. + + The initial state of this feature is set in your configuration + file (the option is ``InteractiveShell.pdb``). + + If you want to just activate the debugger AFTER an exception has fired, + without having to type '%pdb on' and rerunning your code, you can use + the %debug magic.""" + + par = parameter_s.strip().lower() + + if par: + try: + new_pdb = {'off':0,'0':0,'on':1,'1':1}[par] + except KeyError: + print ('Incorrect argument. Use on/1, off/0, ' + 'or nothing for a toggle.') + return + else: + # toggle + new_pdb = not self.shell.call_pdb + + # set on the shell + self.shell.call_pdb = new_pdb + print('Automatic pdb calling has been turned',on_off(new_pdb)) + + @skip_doctest + @magic_arguments.magic_arguments() + @magic_arguments.argument('--breakpoint', '-b', metavar='FILE:LINE', + help=""" + Set break point at LINE in FILE. + """ + ) + @magic_arguments.argument('statement', nargs='*', + help=""" + Code to run in debugger. + You can omit this in cell magic mode. + """ + ) + @no_var_expand + @line_cell_magic + def debug(self, line='', cell=None): + """Activate the interactive debugger. + + This magic command support two ways of activating debugger. + One is to activate debugger before executing code. 
This way, you + can set a break point, to step through the code from the point. + You can use this mode by giving statements to execute and optionally + a breakpoint. + + The other one is to activate debugger in post-mortem mode. You can + activate this mode simply running %debug without any argument. + If an exception has just occurred, this lets you inspect its stack + frames interactively. Note that this will always work only on the last + traceback that occurred, so you must call this quickly after an + exception that you wish to inspect has fired, because if another one + occurs, it clobbers the previous one. + + If you want IPython to automatically do this on every exception, see + the %pdb magic for more details. + + .. versionchanged:: 7.3 + When running code, user variables are no longer expanded, + the magic line is always left unmodified. + + """ + args = magic_arguments.parse_argstring(self.debug, line) + + if not (args.breakpoint or args.statement or cell): + self._debug_post_mortem() + elif not (args.breakpoint or cell): + # If there is no breakpoints, the line is just code to execute + self._debug_exec(line, None) + else: + # Here we try to reconstruct the code from the output of + # parse_argstring. This might not work if the code has spaces + # For example this fails for `print("a b")` + code = "\n".join(args.statement) + if cell: + code += "\n" + cell + self._debug_exec(code, args.breakpoint) + + def _debug_post_mortem(self): + self.shell.debugger(force=True) + + def _debug_exec(self, code, breakpoint): + if breakpoint: + (filename, bp_line) = breakpoint.rsplit(':', 1) + bp_line = int(bp_line) + else: + (filename, bp_line) = (None, None) + self._run_with_debugger(code, self.shell.user_ns, filename, bp_line) + + @line_magic + def tb(self, s): + """Print the last traceback. + + Optionally, specify an exception reporting mode, tuning the + verbosity of the traceback. By default the currently-active exception + mode is used. See %xmode for changing exception reporting modes. + + Valid modes: Plain, Context, Verbose, and Minimal. + """ + interactive_tb = self.shell.InteractiveTB + if s: + # Switch exception reporting mode for this one call. + # Ensure it is switched back. + def xmode_switch_err(name): + warn('Error changing %s exception modes.\n%s' % + (name,sys.exc_info()[1])) + + new_mode = s.strip().capitalize() + original_mode = interactive_tb.mode + try: + try: + interactive_tb.set_mode(mode=new_mode) + except Exception: + xmode_switch_err('user') + else: + self.shell.showtraceback() + finally: + interactive_tb.set_mode(mode=original_mode) + else: + self.shell.showtraceback() + + @skip_doctest + @line_magic + def run(self, parameter_s='', runner=None, + file_finder=get_py_filename): + """Run the named file inside IPython as a program. + + Usage:: + + %run [-n -i -e -G] + [( -t [-N<N>] | -d [-b<N>] | -p [profile options] )] + ( -m mod | filename ) [args] + + The filename argument should be either a pure Python script (with + extension ``.py``), or a file with custom IPython syntax (such as + magics). If the latter, the file can be either a script with ``.ipy`` + extension, or a Jupyter notebook with ``.ipynb`` extension. When running + a Jupyter notebook, the output from print statements and other + displayed objects will appear in the terminal (even matplotlib figures + will open, if a terminal-compliant backend is being used). 
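For example (``analysis.ipynb`` being a hypothetical notebook), running a notebook this way simply replays its cells in the terminal::

    In [1]: %run analysis.ipynb   # cell outputs are printed; figures open if the backend allows it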
Note that, + at the system command line, the ``jupyter run`` command offers similar + functionality for executing notebooks (albeit currently with some + differences in supported options). + + Parameters after the filename are passed as command-line arguments to + the program (put in sys.argv). Then, control returns to IPython's + prompt. + + This is similar to running at a system prompt ``python file args``, + but with the advantage of giving you IPython's tracebacks, and of + loading all variables into your interactive namespace for further use + (unless -p is used, see below). + + The file is executed in a namespace initially consisting only of + ``__name__=='__main__'`` and sys.argv constructed as indicated. It thus + sees its environment as if it were being run as a stand-alone program + (except for sharing global objects such as previously imported + modules). But after execution, the IPython interactive namespace gets + updated with all variables defined in the program (except for __name__ + and sys.argv). This allows for very convenient loading of code for + interactive work, while giving each program a 'clean sheet' to run in. + + Arguments are expanded using shell-like glob match. Patterns + '*', '?', '[seq]' and '[!seq]' can be used. Additionally, + tilde '~' will be expanded into user's home directory. Unlike + real shells, quotation does not suppress expansions. Use + *two* back slashes (e.g. ``\\\\*``) to suppress expansions. + To completely disable these expansions, you can use -G flag. + + On Windows systems, the use of single quotes `'` when specifying + a file is not supported. Use double quotes `"`. + + Options: + + -n + __name__ is NOT set to '__main__', but to the running file's name + without extension (as python does under import). This allows running + scripts and reloading the definitions in them without calling code + protected by an ``if __name__ == "__main__"`` clause. + + -i + run the file in IPython's namespace instead of an empty one. This + is useful if you are experimenting with code written in a text editor + which depends on variables defined interactively. + + -e + ignore sys.exit() calls or SystemExit exceptions in the script + being run. This is particularly useful if IPython is being used to + run unittests, which always exit with a sys.exit() call. In such + cases you are interested in the output of the test results, not in + seeing a traceback of the unittest module. + + -t + print timing information at the end of the run. IPython will give + you an estimated CPU time consumption for your script, which under + Unix uses the resource module to avoid the wraparound problems of + time.clock(). Under Unix, an estimate of time spent on system tasks + is also given (for Windows platforms this is reported as 0.0). + + If -t is given, an additional ``-N<N>`` option can be given, where <N> + must be an integer indicating how many times you want the script to + run. The final timing report will include total and per run results. + + For example (testing the script uniq_stable.py):: + + In [1]: run -t uniq_stable + + IPython CPU timings (estimated): + User : 0.19597 s. + System: 0.0 s. + + In [2]: run -t -N5 uniq_stable + + IPython CPU timings (estimated): + Total runs performed: 5 + Times : Total Per run + User : 0.910862 s, 0.1821724 s. + System: 0.0 s, 0.0 s. + + -d + run your program under the control of pdb, the Python debugger. + This allows you to execute your program step by step, watch variables, + etc. 
Internally, what IPython does is similar to calling:: + + pdb.run('execfile("YOURFILENAME")') + + with a breakpoint set on line 1 of your file. You can change the line + number for this automatic breakpoint to be <N> by using the -bN option + (where N must be an integer). For example:: + + %run -d -b40 myscript + + will set the first breakpoint at line 40 in myscript.py. Note that + the first breakpoint must be set on a line which actually does + something (not a comment or docstring) for it to stop execution. + + Or you can specify a breakpoint in a different file:: + + %run -d -b myotherfile.py:20 myscript + + When the pdb debugger starts, you will see a (Pdb) prompt. You must + first enter 'c' (without quotes) to start execution up to the first + breakpoint. + + Entering 'help' gives information about the use of the debugger. You + can easily see pdb's full documentation with "import pdb;pdb.help()" + at a prompt. + + -p + run program under the control of the Python profiler module (which + prints a detailed report of execution times, function calls, etc). + + You can pass other options after -p which affect the behavior of the + profiler itself. See the docs for %prun for details. + + In this mode, the program's variables do NOT propagate back to the + IPython interactive namespace (because they remain in the namespace + where the profiler executes them). + + Internally this triggers a call to %prun, see its documentation for + details on the options available specifically for profiling. + + There is one special usage for which the text above doesn't apply: + if the filename ends with .ipy[nb], the file is run as ipython script, + just as if the commands were written on IPython prompt. + + -m + specify module name to load instead of script path. Similar to + the -m option for the python interpreter. Use this option last if you + want to combine with other %run options. Unlike the python interpreter + only source modules are allowed no .pyc or .pyo files. + For example:: + + %run -m example + + will run the example module. + + -G + disable shell-like glob expansion of arguments. + + """ + + # Logic to handle issue #3664 + # Add '--' after '-m <module_name>' to ignore additional args passed to a module. + if '-m' in parameter_s and '--' not in parameter_s: + argv = shlex.split(parameter_s, posix=(os.name == 'posix')) + for idx, arg in enumerate(argv): + if arg and arg.startswith('-') and arg != '-': + if arg == '-m': + argv.insert(idx + 2, '--') + break + else: + # Positional arg, break + break + parameter_s = ' '.join(shlex.quote(arg) for arg in argv) + + # get arguments and set sys.argv for program to be run. + opts, arg_lst = self.parse_options(parameter_s, + 'nidtN:b:pD:l:rs:T:em:G', + mode='list', list_all=1) + if "m" in opts: + modulename = opts["m"][0] + modpath = find_mod(modulename) + if modpath is None: + msg = '%r is not a valid modulename on sys.path'%modulename + raise Exception(msg) + arg_lst = [modpath] + arg_lst + try: + fpath = None # initialize to make sure fpath is in scope later + fpath = arg_lst[0] + filename = file_finder(fpath) + except IndexError: + msg = 'you must provide at least a filename.' 
+ raise Exception(msg) + except IOError as e: + try: + msg = str(e) + except UnicodeError: + msg = e.message + if os.name == 'nt' and re.match(r"^'.*'$",fpath): + warn('For Windows, use double quotes to wrap a filename: %run "mypath\\myfile.py"') + raise Exception(msg) + except TypeError: + if fpath in sys.meta_path: + filename = "" + else: + raise + + if filename.lower().endswith(('.ipy', '.ipynb')): + with preserve_keys(self.shell.user_ns, '__file__'): + self.shell.user_ns['__file__'] = filename + self.shell.safe_execfile_ipy(filename, raise_exceptions=True) + return + + # Control the response to exit() calls made by the script being run + exit_ignore = 'e' in opts + + # Make sure that the running script gets a proper sys.argv as if it + # were run from a system shell. + save_argv = sys.argv # save it for later restoring + + if 'G' in opts: + args = arg_lst[1:] + else: + # tilde and glob expansion + args = shellglob(map(os.path.expanduser, arg_lst[1:])) + + sys.argv = [filename] + args # put in the proper filename + + if 'n' in opts: + name = os.path.splitext(os.path.basename(filename))[0] + else: + name = '__main__' + + if 'i' in opts: + # Run in user's interactive namespace + prog_ns = self.shell.user_ns + __name__save = self.shell.user_ns['__name__'] + prog_ns['__name__'] = name + main_mod = self.shell.user_module + + # Since '%run foo' emulates 'python foo.py' at the cmd line, we must + # set the __file__ global in the script's namespace + # TK: Is this necessary in interactive mode? + prog_ns['__file__'] = filename + else: + # Run in a fresh, empty namespace + + # The shell MUST hold a reference to prog_ns so after %run + # exits, the python deletion mechanism doesn't zero it out + # (leaving dangling references). See interactiveshell for details + main_mod = self.shell.new_main_mod(filename, name) + prog_ns = main_mod.__dict__ + + # pickle fix. See interactiveshell for an explanation. But we need to + # make sure that, if we overwrite __main__, we replace it at the end + main_mod_name = prog_ns['__name__'] + + if main_mod_name == '__main__': + restore_main = sys.modules['__main__'] + else: + restore_main = False + + # This needs to be undone at the end to prevent holding references to + # every single object ever created. 
+ sys.modules[main_mod_name] = main_mod + + if 'p' in opts or 'd' in opts: + if 'm' in opts: + code = 'run_module(modulename, prog_ns)' + code_ns = { + 'run_module': self.shell.safe_run_module, + 'prog_ns': prog_ns, + 'modulename': modulename, + } + else: + if 'd' in opts: + # allow exceptions to raise in debug mode + code = 'execfile(filename, prog_ns, raise_exceptions=True)' + else: + code = 'execfile(filename, prog_ns)' + code_ns = { + 'execfile': self.shell.safe_execfile, + 'prog_ns': prog_ns, + 'filename': get_py_filename(filename), + } + + try: + stats = None + if 'p' in opts: + stats = self._run_with_profiler(code, opts, code_ns) + else: + if 'd' in opts: + bp_file, bp_line = parse_breakpoint( + opts.get('b', ['1'])[0], filename) + self._run_with_debugger( + code, code_ns, filename, bp_line, bp_file) + else: + if 'm' in opts: + def run(): + self.shell.safe_run_module(modulename, prog_ns) + else: + if runner is None: + runner = self.default_runner + if runner is None: + runner = self.shell.safe_execfile + + def run(): + runner(filename, prog_ns, prog_ns, + exit_ignore=exit_ignore) + + if 't' in opts: + # timed execution + try: + nruns = int(opts['N'][0]) + if nruns < 1: + error('Number of runs must be >=1') + return + except (KeyError): + nruns = 1 + self._run_with_timing(run, nruns) + else: + # regular execution + run() + + if 'i' in opts: + self.shell.user_ns['__name__'] = __name__save + else: + # update IPython interactive namespace + + # Some forms of read errors on the file may mean the + # __name__ key was never set; using pop we don't have to + # worry about a possible KeyError. + prog_ns.pop('__name__', None) + + with preserve_keys(self.shell.user_ns, '__file__'): + self.shell.user_ns.update(prog_ns) + finally: + # It's a bit of a mystery why, but __builtins__ can change from + # being a module to becoming a dict missing some key data after + # %run. As best I can see, this is NOT something IPython is doing + # at all, and similar problems have been reported before: + # http://coding.derkeiler.com/Archive/Python/comp.lang.python/2004-10/0188.html + # Since this seems to be done by the interpreter itself, the best + # we can do is to at least restore __builtins__ for the user on + # exit. + self.shell.user_ns['__builtins__'] = builtin_mod + + # Ensure key global structures are restored + sys.argv = save_argv + if restore_main: + sys.modules['__main__'] = restore_main + if '__mp_main__' in sys.modules: + sys.modules['__mp_main__'] = restore_main + else: + # Remove from sys.modules the reference to main_mod we'd + # added. Otherwise it will trap references to objects + # contained therein. + del sys.modules[main_mod_name] + + return stats + + def _run_with_debugger(self, code, code_ns, filename=None, + bp_line=None, bp_file=None): + """ + Run `code` in debugger with a break point. + + Parameters + ---------- + code : str + Code to execute. + code_ns : dict + A namespace in which `code` is executed. + filename : str + `code` is ran as if it is in `filename`. + bp_line : int, optional + Line number of the break point. + bp_file : str, optional + Path to the file in which break point is specified. + `filename` is used if not given. + + Raises + ------ + UsageError + If the break point given by `bp_line` is not valid. + + """ + deb = self.shell.InteractiveTB.pdb + if not deb: + self.shell.InteractiveTB.pdb = self.shell.InteractiveTB.debugger_cls() + deb = self.shell.InteractiveTB.pdb + + # deb.checkline() fails if deb.curframe exists but is None; it can + # handle it not existing. 
https://github.com/ipython/ipython/issues/10028 + if hasattr(deb, 'curframe'): + del deb.curframe + + # reset Breakpoint state, which is moronically kept + # in a class + bdb.Breakpoint.next = 1 + bdb.Breakpoint.bplist = {} + bdb.Breakpoint.bpbynumber = [None] + deb.clear_all_breaks() + if bp_line is not None: + # Set an initial breakpoint to stop execution + maxtries = 10 + bp_file = bp_file or filename + checkline = deb.checkline(bp_file, bp_line) + if not checkline: + for bp in range(bp_line + 1, bp_line + maxtries + 1): + if deb.checkline(bp_file, bp): + break + else: + msg = ("\nI failed to find a valid line to set " + "a breakpoint\n" + "after trying up to line: %s.\n" + "Please set a valid breakpoint manually " + "with the -b option." % bp) + raise UsageError(msg) + # if we find a good linenumber, set the breakpoint + deb.do_break('%s:%s' % (bp_file, bp_line)) + + if filename: + # Mimic Pdb._runscript(...) + deb._wait_for_mainpyfile = True + deb.mainpyfile = deb.canonic(filename) + + # Start file run + print("NOTE: Enter 'c' at the %s prompt to continue execution." % deb.prompt) + try: + if filename: + # save filename so it can be used by methods on the deb object + deb._exec_filename = filename + while True: + try: + trace = sys.gettrace() + deb.run(code, code_ns) + except Restart: + print("Restarting") + if filename: + deb._wait_for_mainpyfile = True + deb.mainpyfile = deb.canonic(filename) + continue + else: + break + finally: + sys.settrace(trace) - print( timeit_result ) - - if tc > tc_min: - print("Compiler time: %.2f s" % tc) - if return_result: - return timeit_result - - @skip_doctest - @no_var_expand - @needs_local_scope - @line_cell_magic - def time(self,line='', cell=None, local_ns=None): - """Time execution of a Python statement or expression. - - The CPU and wall clock times are printed, and the value of the - expression (if any) is returned. Note that under Win32, system time - is always reported as 0, since it can not be measured. - - This function can be used both as a line and cell magic: - - - In line mode you can time a single-line statement (though multiple - ones can be chained with using semicolons). - - - In cell mode, you can time the cell body (a directly - following statement raises an error). - - This function provides very basic timing functionality. Use the timeit - magic for more control over the measurement. - - .. versionchanged:: 7.3 - User variables are no longer expanded, - the magic line is always left unmodified. - - Examples - -------- - :: - - In [1]: %time 2**128 - CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s - Wall time: 0.00 - Out[1]: 340282366920938463463374607431768211456L - - In [2]: n = 1000000 - - In [3]: %time sum(range(n)) - CPU times: user 1.20 s, sys: 0.05 s, total: 1.25 s - Wall time: 1.37 - Out[3]: 499999500000L - - In [4]: %time print 'hello world' - hello world - CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s - Wall time: 0.00 - - - .. note:: - The time needed by Python to compile the given expression will be - reported if it is more than 0.1s. 
- - In the example below, the actual exponentiation is done by Python - at compilation time, so while the expression can take a noticeable - amount of time to compute, that time is purely due to the - compilation:: - - In [5]: %time 3**9999; - CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s - Wall time: 0.00 s - - In [6]: %time 3**999999; - CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s - Wall time: 0.00 s - Compiler : 0.78 s - """ - # fail immediately if the given expression can't be compiled - - if line and cell: - raise UsageError("Can't use statement directly after '%%time'!") - - if cell: - expr = self.shell.transform_cell(cell) - else: - expr = self.shell.transform_cell(line) - - # Minimum time above which parse time will be reported - tp_min = 0.1 - - t0 = clock() - expr_ast = self.shell.compile.ast_parse(expr) - tp = clock()-t0 - - # Apply AST transformations - expr_ast = self.shell.transform_ast(expr_ast) - - # Minimum time above which compilation time will be reported - tc_min = 0.1 - - expr_val=None - if len(expr_ast.body)==1 and isinstance(expr_ast.body[0], ast.Expr): - mode = 'eval' - source = '<timed eval>' - expr_ast = ast.Expression(expr_ast.body[0].value) - else: - mode = 'exec' - source = '<timed exec>' - # multi-line %%time case - if len(expr_ast.body) > 1 and isinstance(expr_ast.body[-1], ast.Expr): - expr_val= expr_ast.body[-1] - expr_ast = expr_ast.body[:-1] - expr_ast = Module(expr_ast, []) - expr_val = ast.Expression(expr_val.value) - - t0 = clock() - code = self.shell.compile(expr_ast, source, mode) - tc = clock()-t0 - - # skew measurement as little as possible - glob = self.shell.user_ns - wtime = time.time - # time execution - wall_st = wtime() - if mode=='eval': - st = clock2() - try: - out = eval(code, glob, local_ns) - except: - self.shell.showtraceback() - return - end = clock2() - else: - st = clock2() - try: - exec(code, glob, local_ns) - out=None - # multi-line %%time case - if expr_val is not None: - code_2 = self.shell.compile(expr_val, source, 'eval') - out = eval(code_2, glob, local_ns) - except: - self.shell.showtraceback() - return - end = clock2() - - wall_end = wtime() - # Compute actual times and report - wall_time = wall_end-wall_st - cpu_user = end[0]-st[0] - cpu_sys = end[1]-st[1] - cpu_tot = cpu_user+cpu_sys - # On windows cpu_sys is always zero, so no new information to the next print - if sys.platform != 'win32': - print("CPU times: user %s, sys: %s, total: %s" % \ - (_format_time(cpu_user),_format_time(cpu_sys),_format_time(cpu_tot))) - print("Wall time: %s" % _format_time(wall_time)) - if tc > tc_min: - print("Compiler : %s" % _format_time(tc)) - if tp > tp_min: - print("Parser : %s" % _format_time(tp)) - return out - - @skip_doctest - @line_magic - def macro(self, parameter_s=''): - """Define a macro for future re-execution. It accepts ranges of history, - filenames or string objects. - - Usage:\\ - %macro [options] name n1-n2 n3-n4 ... n5 .. n6 ... - - Options: - - -r: use 'raw' input. By default, the 'processed' history is used, - so that magics are loaded in their transformed version to valid - Python. If this option is given, the raw input as typed at the - command line is used instead. + + except: + etype, value, tb = sys.exc_info() + # Skip three frames in the traceback: the %run one, + # one inside bdb.py, and the command-line typed by the + # user (run by exec in pdb itself). 
+ self.shell.InteractiveTB(etype, value, tb, tb_offset=3) + + @staticmethod + def _run_with_timing(run, nruns): + """ + Run function `run` and print timing information. + + Parameters + ---------- + run : callable + Any callable object which takes no argument. + nruns : int + Number of times to execute `run`. + + """ + twall0 = time.perf_counter() + if nruns == 1: + t0 = clock2() + run() + t1 = clock2() + t_usr = t1[0] - t0[0] + t_sys = t1[1] - t0[1] + print("\nIPython CPU timings (estimated):") + print(" User : %10.2f s." % t_usr) + print(" System : %10.2f s." % t_sys) + else: + runs = range(nruns) + t0 = clock2() + for nr in runs: + run() + t1 = clock2() + t_usr = t1[0] - t0[0] + t_sys = t1[1] - t0[1] + print("\nIPython CPU timings (estimated):") + print("Total runs performed:", nruns) + print(" Times : %10s %10s" % ('Total', 'Per run')) + print(" User : %10.2f s, %10.2f s." % (t_usr, t_usr / nruns)) + print(" System : %10.2f s, %10.2f s." % (t_sys, t_sys / nruns)) + twall1 = time.perf_counter() + print("Wall time: %10.2f s." % (twall1 - twall0)) + + @skip_doctest + @no_var_expand + @line_cell_magic + @needs_local_scope + def timeit(self, line='', cell=None, local_ns=None): + """Time execution of a Python statement or expression + + Usage, in line mode: + %timeit [-n<N> -r<R> [-t|-c] -q -p<P> -o] statement + or in cell mode: + %%timeit [-n<N> -r<R> [-t|-c] -q -p<P> -o] setup_code + code + code... + + Time execution of a Python statement or expression using the timeit + module. This function can be used both as a line and cell magic: + + - In line mode you can time a single-line statement (though multiple + ones can be chained with using semicolons). + + - In cell mode, the statement in the first line is used as setup code + (executed but not timed) and the body of the cell is timed. The cell + body has access to any variables created in the setup code. + + Options: + -n<N>: execute the given statement <N> times in a loop. If <N> is not + provided, <N> is determined so as to get sufficient accuracy. + + -r<R>: number of repeats <R>, each consisting of <N> loops, and take the + best result. + Default: 7 + + -t: use time.time to measure the time, which is the default on Unix. + This function measures wall time. + + -c: use time.clock to measure the time, which is the default on + Windows and measures wall time. On Unix, resource.getrusage is used + instead and returns the CPU user time. + + -p<P>: use a precision of <P> digits to display the timing result. + Default: 3 + + -q: Quiet, do not print result. + + -o: return a TimeitResult that can be stored in a variable to inspect + the result in more details. + + .. versionchanged:: 7.3 + User variables are no longer expanded, + the magic line is always left unmodified. + + Examples + -------- + :: + + In [1]: %timeit pass + 8.26 ns ± 0.12 ns per loop (mean ± std. dev. of 7 runs, 100000000 loops each) + + In [2]: u = None + + In [3]: %timeit u is None + 29.9 ns ± 0.643 ns per loop (mean ± std. dev. of 7 runs, 10000000 loops each) + + In [4]: %timeit -r 4 u == None + + In [5]: import time + + In [6]: %timeit -n1 time.sleep(2) + + + The times reported by %timeit will be slightly higher than those + reported by the timeit.py script when variables are accessed. This is + due to the fact that %timeit executes the statement in the namespace + of the shell, compared with timeit.py, which uses a single setup + statement to import function or create variables. 
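The namespace point above can be illustrated with the standard-library timeit module directly; this is only a rough sketch of what the %%timeit setup/statement split corresponds to (the variable names here are illustrative, not part of IPython's code):

    import timeit

    # setup runs once per repeat and is not timed; stmt is the timed statement
    timer = timeit.Timer(stmt="sum(data)",
                         setup="data = list(range(1000))")
    runs = timer.repeat(repeat=7, number=10000)   # 7 repeats of 10000 loops each
    best = min(runs) / 10000                      # best per-loop time, in seconds
    print("%.3g s per loop (best of 7 runs)" % best)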
Generally, the bias + does not matter as long as results from timeit.py are not mixed with + those from %timeit.""" + + opts, stmt = self.parse_options(line,'n:r:tcp:qo', + posix=False, strict=False) + if stmt == "" and cell is None: + return + + timefunc = timeit.default_timer + number = int(getattr(opts, "n", 0)) + default_repeat = 7 if timeit.default_repeat < 7 else timeit.default_repeat + repeat = int(getattr(opts, "r", default_repeat)) + precision = int(getattr(opts, "p", 3)) + quiet = 'q' in opts + return_result = 'o' in opts + if hasattr(opts, "t"): + timefunc = time.time + if hasattr(opts, "c"): + timefunc = clock + + timer = Timer(timer=timefunc) + # this code has tight coupling to the inner workings of timeit.Timer, + # but is there a better way to achieve that the code stmt has access + # to the shell namespace? + transform = self.shell.transform_cell + + if cell is None: + # called as line magic + ast_setup = self.shell.compile.ast_parse("pass") + ast_stmt = self.shell.compile.ast_parse(transform(stmt)) + else: + ast_setup = self.shell.compile.ast_parse(transform(stmt)) + ast_stmt = self.shell.compile.ast_parse(transform(cell)) + + ast_setup = self.shell.transform_ast(ast_setup) + ast_stmt = self.shell.transform_ast(ast_stmt) + + # Check that these compile to valid Python code *outside* the timer func + # Invalid code may become valid when put inside the function & loop, + # which messes up error messages. + # https://github.com/ipython/ipython/issues/10636 + self.shell.compile(ast_setup, "<magic-timeit-setup>", "exec") + self.shell.compile(ast_stmt, "<magic-timeit-stmt>", "exec") + + # This codestring is taken from timeit.template - we fill it in as an + # AST, so that we can apply our AST transformations to the user code + # without affecting the timing code. + timeit_ast_template = ast.parse('def inner(_it, _timer):\n' + ' setup\n' + ' _t0 = _timer()\n' + ' for _i in _it:\n' + ' stmt\n' + ' _t1 = _timer()\n' + ' return _t1 - _t0\n') + + timeit_ast = TimeitTemplateFiller(ast_setup, ast_stmt).visit(timeit_ast_template) + timeit_ast = ast.fix_missing_locations(timeit_ast) + + # Track compilation time so it can be reported if too long + # Minimum time above which compilation time will be reported + tc_min = 0.1 + + t0 = clock() + code = self.shell.compile(timeit_ast, "<magic-timeit>", "exec") + tc = clock()-t0 + + ns = {} + glob = self.shell.user_ns + # handles global vars with same name as local vars. We store them in conflict_globs. + conflict_globs = {} + if local_ns and cell is None: + for var_name, var_val in glob.items(): + if var_name in local_ns: + conflict_globs[var_name] = var_val + glob.update(local_ns) + + exec(code, glob, ns) + timer.inner = ns["inner"] + + # This is used to check if there is a huge difference between the + # best and worst timings. + # Issue: https://github.com/ipython/ipython/issues/6471 + if number == 0: + # determine number so that 0.2 <= total time < 2.0 + for index in range(0, 10): + number = 10 ** index + time_number = timer.timeit(number) + if time_number >= 0.2: + break + + all_runs = timer.repeat(repeat, number) + best = min(all_runs) / number + worst = max(all_runs) / number + timeit_result = TimeitResult(number, repeat, best, worst, all_runs, tc, precision) + + # Restore global vars from conflict_globs + if conflict_globs: + glob.update(conflict_globs) + + if not quiet : + # Check best timing is greater than zero to avoid a + # ZeroDivisionError. 
+ # In cases where the slowest timing is lesser than a microsecond + # we assume that it does not really matter if the fastest + # timing is 4 times faster than the slowest timing or not. + if worst > 4 * best and best > 0 and worst > 1e-6: + print("The slowest run took %0.2f times longer than the " + "fastest. This could mean that an intermediate result " + "is being cached." % (worst / best)) - -q: quiet macro definition. By default, a tag line is printed - to indicate the macro has been created, and then the contents of - the macro are printed. If this option is given, then no printout - is produced once the macro is created. - - This will define a global variable called `name` which is a string - made of joining the slices and lines you specify (n1,n2,... numbers - above) from your input history into a single string. This variable - acts like an automatic function which re-executes those lines as if - you had typed them. You just type 'name' at the prompt and the code - executes. - - The syntax for indicating input ranges is described in %history. - - Note: as a 'hidden' feature, you can also use traditional python slice - notation, where N:M means numbers N through M-1. - - For example, if your history contains (print using %hist -n ):: - - 44: x=1 - 45: y=3 - 46: z=x+y - 47: print x - 48: a=5 - 49: print 'x',x,'y',y - - you can create a macro with lines 44 through 47 (included) and line 49 - called my_macro with:: - - In [55]: %macro my_macro 44-47 49 - - Now, typing `my_macro` (without quotes) will re-execute all this code - in one pass. - - You don't need to give the line-numbers in order, and any given line - number can appear multiple times. You can assemble macros with any - lines from your input history in any order. - - The macro is a simple object which holds its value in an attribute, - but IPython's display system checks for macros and executes them as - code instead of printing them when you type their name. - - You can view a macro's contents by explicitly printing it with:: - - print macro_name - - """ - opts,args = self.parse_options(parameter_s,'rq',mode='list') - if not args: # List existing macros - return sorted(k for k,v in self.shell.user_ns.items() if isinstance(v, Macro)) - if len(args) == 1: - raise UsageError( - "%macro insufficient args; usage '%macro name n1-n2 n3-4...") - name, codefrom = args[0], " ".join(args[1:]) - - #print 'rng',ranges # dbg - try: - lines = self.shell.find_user_code(codefrom, 'r' in opts) - except (ValueError, TypeError) as e: - print(e.args[0]) - return - macro = Macro(lines) - self.shell.define_macro(name, macro) - if not ( 'q' in opts) : - print('Macro `%s` created. To execute, type its name (without quotes).' % name) - print('=== Macro contents: ===') - print(macro, end=' ') - - @magic_arguments.magic_arguments() - @magic_arguments.argument('output', type=str, default='', nargs='?', - help="""The name of the variable in which to store output. - This is a utils.io.CapturedIO object with stdout/err attributes - for the text of the captured output. - - CapturedOutput also has a show() method for displaying the output, - and __call__ as well, so you can use that to quickly display the - output. - - If unspecified, captured output is discarded. 
- """ - ) - @magic_arguments.argument('--no-stderr', action="store_true", - help="""Don't capture stderr.""" - ) - @magic_arguments.argument('--no-stdout', action="store_true", - help="""Don't capture stdout.""" - ) - @magic_arguments.argument('--no-display', action="store_true", - help="""Don't capture IPython's rich display.""" - ) - @cell_magic - def capture(self, line, cell): - """run the cell, capturing stdout, stderr, and IPython's rich display() calls.""" - args = magic_arguments.parse_argstring(self.capture, line) - out = not args.no_stdout - err = not args.no_stderr - disp = not args.no_display - with capture_output(out, err, disp) as io: - self.shell.run_cell(cell) - if args.output: - self.shell.user_ns[args.output] = io - -def parse_breakpoint(text, current_file): - '''Returns (file, line) for file:line and (current_file, line) for line''' - colon = text.find(':') - if colon == -1: - return current_file, int(text) - else: - return text[:colon], int(text[colon+1:]) - -def _format_time(timespan, precision=3): - """Formats the timespan in a human readable form""" - - if timespan >= 60.0: - # we have more than a minute, format that in a human readable form - # Idea from http://snipplr.com/view/5713/ - parts = [("d", 60*60*24),("h", 60*60),("min", 60), ("s", 1)] - time = [] - leftover = timespan - for suffix, length in parts: - value = int(leftover / length) - if value > 0: - leftover = leftover % length - time.append(u'%s%s' % (str(value), suffix)) - if leftover < 1: - break - return " ".join(time) - - - # Unfortunately the unicode 'micro' symbol can cause problems in - # certain terminals. - # See bug: https://bugs.launchpad.net/ipython/+bug/348466 - # Try to prevent crashes by being more secure than it needs to - # E.g. eclipse is able to print a µ, but has no sys.stdout.encoding set. - units = [u"s", u"ms",u'us',"ns"] # the save value - if hasattr(sys.stdout, 'encoding') and sys.stdout.encoding: - try: - u'\xb5'.encode(sys.stdout.encoding) - units = [u"s", u"ms",u'\xb5s',"ns"] - except: - pass - scaling = [1, 1e3, 1e6, 1e9] - - if timespan > 0.0: - order = min(-int(math.floor(math.log10(timespan)) // 3), 3) - else: - order = 3 - return u"%.*g %s" % (precision, timespan * scaling[order], units[order]) + print( timeit_result ) + + if tc > tc_min: + print("Compiler time: %.2f s" % tc) + if return_result: + return timeit_result + + @skip_doctest + @no_var_expand + @needs_local_scope + @line_cell_magic + def time(self,line='', cell=None, local_ns=None): + """Time execution of a Python statement or expression. + + The CPU and wall clock times are printed, and the value of the + expression (if any) is returned. Note that under Win32, system time + is always reported as 0, since it can not be measured. + + This function can be used both as a line and cell magic: + + - In line mode you can time a single-line statement (though multiple + ones can be chained with using semicolons). + + - In cell mode, you can time the cell body (a directly + following statement raises an error). + + This function provides very basic timing functionality. Use the timeit + magic for more control over the measurement. + + .. versionchanged:: 7.3 + User variables are no longer expanded, + the magic line is always left unmodified. 
+ + Examples + -------- + :: + + In [1]: %time 2**128 + CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s + Wall time: 0.00 + Out[1]: 340282366920938463463374607431768211456L + + In [2]: n = 1000000 + + In [3]: %time sum(range(n)) + CPU times: user 1.20 s, sys: 0.05 s, total: 1.25 s + Wall time: 1.37 + Out[3]: 499999500000L + + In [4]: %time print 'hello world' + hello world + CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s + Wall time: 0.00 + + + .. note:: + The time needed by Python to compile the given expression will be + reported if it is more than 0.1s. + + In the example below, the actual exponentiation is done by Python + at compilation time, so while the expression can take a noticeable + amount of time to compute, that time is purely due to the + compilation:: + + In [5]: %time 3**9999; + CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s + Wall time: 0.00 s + + In [6]: %time 3**999999; + CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s + Wall time: 0.00 s + Compiler : 0.78 s + """ + # fail immediately if the given expression can't be compiled + + if line and cell: + raise UsageError("Can't use statement directly after '%%time'!") + + if cell: + expr = self.shell.transform_cell(cell) + else: + expr = self.shell.transform_cell(line) + + # Minimum time above which parse time will be reported + tp_min = 0.1 + + t0 = clock() + expr_ast = self.shell.compile.ast_parse(expr) + tp = clock()-t0 + + # Apply AST transformations + expr_ast = self.shell.transform_ast(expr_ast) + + # Minimum time above which compilation time will be reported + tc_min = 0.1 + + expr_val=None + if len(expr_ast.body)==1 and isinstance(expr_ast.body[0], ast.Expr): + mode = 'eval' + source = '<timed eval>' + expr_ast = ast.Expression(expr_ast.body[0].value) + else: + mode = 'exec' + source = '<timed exec>' + # multi-line %%time case + if len(expr_ast.body) > 1 and isinstance(expr_ast.body[-1], ast.Expr): + expr_val= expr_ast.body[-1] + expr_ast = expr_ast.body[:-1] + expr_ast = Module(expr_ast, []) + expr_val = ast.Expression(expr_val.value) + + t0 = clock() + code = self.shell.compile(expr_ast, source, mode) + tc = clock()-t0 + + # skew measurement as little as possible + glob = self.shell.user_ns + wtime = time.time + # time execution + wall_st = wtime() + if mode=='eval': + st = clock2() + try: + out = eval(code, glob, local_ns) + except: + self.shell.showtraceback() + return + end = clock2() + else: + st = clock2() + try: + exec(code, glob, local_ns) + out=None + # multi-line %%time case + if expr_val is not None: + code_2 = self.shell.compile(expr_val, source, 'eval') + out = eval(code_2, glob, local_ns) + except: + self.shell.showtraceback() + return + end = clock2() + + wall_end = wtime() + # Compute actual times and report + wall_time = wall_end-wall_st + cpu_user = end[0]-st[0] + cpu_sys = end[1]-st[1] + cpu_tot = cpu_user+cpu_sys + # On windows cpu_sys is always zero, so no new information to the next print + if sys.platform != 'win32': + print("CPU times: user %s, sys: %s, total: %s" % \ + (_format_time(cpu_user),_format_time(cpu_sys),_format_time(cpu_tot))) + print("Wall time: %s" % _format_time(wall_time)) + if tc > tc_min: + print("Compiler : %s" % _format_time(tc)) + if tp > tp_min: + print("Parser : %s" % _format_time(tp)) + return out + + @skip_doctest + @line_magic + def macro(self, parameter_s=''): + """Define a macro for future re-execution. It accepts ranges of history, + filenames or string objects. + + Usage:\\ + %macro [options] name n1-n2 n3-n4 ... n5 .. n6 ... 
+ + Options: + + -r: use 'raw' input. By default, the 'processed' history is used, + so that magics are loaded in their transformed version to valid + Python. If this option is given, the raw input as typed at the + command line is used instead. + + -q: quiet macro definition. By default, a tag line is printed + to indicate the macro has been created, and then the contents of + the macro are printed. If this option is given, then no printout + is produced once the macro is created. + + This will define a global variable called `name` which is a string + made of joining the slices and lines you specify (n1,n2,... numbers + above) from your input history into a single string. This variable + acts like an automatic function which re-executes those lines as if + you had typed them. You just type 'name' at the prompt and the code + executes. + + The syntax for indicating input ranges is described in %history. + + Note: as a 'hidden' feature, you can also use traditional python slice + notation, where N:M means numbers N through M-1. + + For example, if your history contains (print using %hist -n ):: + + 44: x=1 + 45: y=3 + 46: z=x+y + 47: print x + 48: a=5 + 49: print 'x',x,'y',y + + you can create a macro with lines 44 through 47 (included) and line 49 + called my_macro with:: + + In [55]: %macro my_macro 44-47 49 + + Now, typing `my_macro` (without quotes) will re-execute all this code + in one pass. + + You don't need to give the line-numbers in order, and any given line + number can appear multiple times. You can assemble macros with any + lines from your input history in any order. + + The macro is a simple object which holds its value in an attribute, + but IPython's display system checks for macros and executes them as + code instead of printing them when you type their name. + + You can view a macro's contents by explicitly printing it with:: + + print macro_name + + """ + opts,args = self.parse_options(parameter_s,'rq',mode='list') + if not args: # List existing macros + return sorted(k for k,v in self.shell.user_ns.items() if isinstance(v, Macro)) + if len(args) == 1: + raise UsageError( + "%macro insufficient args; usage '%macro name n1-n2 n3-4...") + name, codefrom = args[0], " ".join(args[1:]) + + #print 'rng',ranges # dbg + try: + lines = self.shell.find_user_code(codefrom, 'r' in opts) + except (ValueError, TypeError) as e: + print(e.args[0]) + return + macro = Macro(lines) + self.shell.define_macro(name, macro) + if not ( 'q' in opts) : + print('Macro `%s` created. To execute, type its name (without quotes).' % name) + print('=== Macro contents: ===') + print(macro, end=' ') + + @magic_arguments.magic_arguments() + @magic_arguments.argument('output', type=str, default='', nargs='?', + help="""The name of the variable in which to store output. + This is a utils.io.CapturedIO object with stdout/err attributes + for the text of the captured output. + + CapturedOutput also has a show() method for displaying the output, + and __call__ as well, so you can use that to quickly display the + output. + + If unspecified, captured output is discarded. 
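A minimal, hypothetical session showing the captured object described above (names and output are illustrative):

    In [1]: %%capture captured
       ...: print("hello")
       ...:

    In [2]: captured.stdout
    Out[2]: 'hello\n'

    In [3]: captured.show()
    hello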
+ """ + ) + @magic_arguments.argument('--no-stderr', action="store_true", + help="""Don't capture stderr.""" + ) + @magic_arguments.argument('--no-stdout', action="store_true", + help="""Don't capture stdout.""" + ) + @magic_arguments.argument('--no-display', action="store_true", + help="""Don't capture IPython's rich display.""" + ) + @cell_magic + def capture(self, line, cell): + """run the cell, capturing stdout, stderr, and IPython's rich display() calls.""" + args = magic_arguments.parse_argstring(self.capture, line) + out = not args.no_stdout + err = not args.no_stderr + disp = not args.no_display + with capture_output(out, err, disp) as io: + self.shell.run_cell(cell) + if args.output: + self.shell.user_ns[args.output] = io + +def parse_breakpoint(text, current_file): + '''Returns (file, line) for file:line and (current_file, line) for line''' + colon = text.find(':') + if colon == -1: + return current_file, int(text) + else: + return text[:colon], int(text[colon+1:]) + +def _format_time(timespan, precision=3): + """Formats the timespan in a human readable form""" + + if timespan >= 60.0: + # we have more than a minute, format that in a human readable form + # Idea from http://snipplr.com/view/5713/ + parts = [("d", 60*60*24),("h", 60*60),("min", 60), ("s", 1)] + time = [] + leftover = timespan + for suffix, length in parts: + value = int(leftover / length) + if value > 0: + leftover = leftover % length + time.append(u'%s%s' % (str(value), suffix)) + if leftover < 1: + break + return " ".join(time) + + + # Unfortunately the unicode 'micro' symbol can cause problems in + # certain terminals. + # See bug: https://bugs.launchpad.net/ipython/+bug/348466 + # Try to prevent crashes by being more secure than it needs to + # E.g. eclipse is able to print a µ, but has no sys.stdout.encoding set. + units = [u"s", u"ms",u'us',"ns"] # the save value + if hasattr(sys.stdout, 'encoding') and sys.stdout.encoding: + try: + u'\xb5'.encode(sys.stdout.encoding) + units = [u"s", u"ms",u'\xb5s',"ns"] + except: + pass + scaling = [1, 1e3, 1e6, 1e9] + + if timespan > 0.0: + order = min(-int(math.floor(math.log10(timespan)) // 3), 3) + else: + order = 3 + return u"%.*g %s" % (precision, timespan * scaling[order], units[order]) diff --git a/contrib/python/ipython/py3/IPython/core/magics/extension.py b/contrib/python/ipython/py3/IPython/core/magics/extension.py index cffa89c5662..ba93b3be754 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/extension.py +++ b/contrib/python/ipython/py3/IPython/core/magics/extension.py @@ -1,63 +1,63 @@ -"""Implementation of magic functions for the extension machinery. -""" -#----------------------------------------------------------------------------- -# Copyright (c) 2012 The IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. 
-#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - - -# Our own packages -from IPython.core.error import UsageError -from IPython.core.magic import Magics, magics_class, line_magic - -#----------------------------------------------------------------------------- -# Magic implementation classes -#----------------------------------------------------------------------------- - -@magics_class -class ExtensionMagics(Magics): - """Magics to manage the IPython extensions system.""" - - @line_magic - def load_ext(self, module_str): - """Load an IPython extension by its module name.""" - if not module_str: - raise UsageError('Missing module name.') - res = self.shell.extension_manager.load_extension(module_str) - - if res == 'already loaded': - print("The %s extension is already loaded. To reload it, use:" % module_str) - print(" %reload_ext", module_str) - elif res == 'no load function': - print("The %s module is not an IPython extension." % module_str) - - @line_magic - def unload_ext(self, module_str): - """Unload an IPython extension by its module name. - - Not all extensions can be unloaded, only those which define an - ``unload_ipython_extension`` function. - """ - if not module_str: - raise UsageError('Missing module name.') - - res = self.shell.extension_manager.unload_extension(module_str) - - if res == 'no unload function': - print("The %s extension doesn't define how to unload it." % module_str) - elif res == "not loaded": - print("The %s extension is not loaded." % module_str) - - @line_magic - def reload_ext(self, module_str): - """Reload an IPython extension by its module name.""" - if not module_str: - raise UsageError('Missing module name.') - self.shell.extension_manager.reload_extension(module_str) +"""Implementation of magic functions for the extension machinery. +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012 The IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + + +# Our own packages +from IPython.core.error import UsageError +from IPython.core.magic import Magics, magics_class, line_magic + +#----------------------------------------------------------------------------- +# Magic implementation classes +#----------------------------------------------------------------------------- + +@magics_class +class ExtensionMagics(Magics): + """Magics to manage the IPython extensions system.""" + + @line_magic + def load_ext(self, module_str): + """Load an IPython extension by its module name.""" + if not module_str: + raise UsageError('Missing module name.') + res = self.shell.extension_manager.load_extension(module_str) + + if res == 'already loaded': + print("The %s extension is already loaded. To reload it, use:" % module_str) + print(" %reload_ext", module_str) + elif res == 'no load function': + print("The %s module is not an IPython extension." 
% module_str) + + @line_magic + def unload_ext(self, module_str): + """Unload an IPython extension by its module name. + + Not all extensions can be unloaded, only those which define an + ``unload_ipython_extension`` function. + """ + if not module_str: + raise UsageError('Missing module name.') + + res = self.shell.extension_manager.unload_extension(module_str) + + if res == 'no unload function': + print("The %s extension doesn't define how to unload it." % module_str) + elif res == "not loaded": + print("The %s extension is not loaded." % module_str) + + @line_magic + def reload_ext(self, module_str): + """Reload an IPython extension by its module name.""" + if not module_str: + raise UsageError('Missing module name.') + self.shell.extension_manager.reload_extension(module_str) diff --git a/contrib/python/ipython/py3/IPython/core/magics/history.py b/contrib/python/ipython/py3/IPython/core/magics/history.py index fd397d9cf4b..5af09e5ce10 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/history.py +++ b/contrib/python/ipython/py3/IPython/core/magics/history.py @@ -1,319 +1,319 @@ -"""Implementation of magic functions related to History. -""" -#----------------------------------------------------------------------------- -# Copyright (c) 2012, IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -# Stdlib -import os -import sys -from io import open as io_open - -# Our own packages -from IPython.core.error import StdinNotImplementedError -from IPython.core.magic import Magics, magics_class, line_magic -from IPython.core.magic_arguments import (argument, magic_arguments, - parse_argstring) -from IPython.testing.skipdoctest import skip_doctest -from IPython.utils import io - -#----------------------------------------------------------------------------- -# Magics class implementation -#----------------------------------------------------------------------------- - - -_unspecified = object() - - -@magics_class -class HistoryMagics(Magics): - - @magic_arguments() - @argument( - '-n', dest='print_nums', action='store_true', default=False, - help=""" - print line numbers for each input. - This feature is only available if numbered prompts are in use. - """) - @argument( - '-o', dest='get_output', action='store_true', default=False, - help="also print outputs for each input.") - @argument( - '-p', dest='pyprompts', action='store_true', default=False, - help=""" - print classic '>>>' python prompts before each input. - This is useful for making documentation, and in conjunction - with -o, for producing doctest-ready output. - """) - @argument( - '-t', dest='raw', action='store_false', default=True, - help=""" - print the 'translated' history, as IPython understands it. - IPython filters your input and converts it all into valid Python - source before executing it (things like magics or aliases are turned - into function calls, for example). With this option, you'll see the - native history instead of the user-entered version: '%%cd /' will be - seen as 'get_ipython().run_line_magic("cd", "/")' instead of '%%cd /'. 
- """) - @argument( - '-f', dest='filename', - help=""" - FILENAME: instead of printing the output to the screen, redirect - it to the given file. The file is always overwritten, though *when - it can*, IPython asks for confirmation first. In particular, running - the command 'history -f FILENAME' from the IPython Notebook - interface will replace FILENAME even if it already exists *without* - confirmation. - """) - @argument( - '-g', dest='pattern', nargs='*', default=None, - help=""" - treat the arg as a glob pattern to search for in (full) history. - This includes the saved history (almost all commands ever written). - The pattern may contain '?' to match one unknown character and '*' - to match any number of unknown characters. Use '%%hist -g' to show - full saved history (may be very long). - """) - @argument( - '-l', dest='limit', type=int, nargs='?', default=_unspecified, - help=""" - get the last n lines from all sessions. Specify n as a single - arg, or the default is the last 10 lines. - """) - @argument( - '-u', dest='unique', action='store_true', - help=""" - when searching history using `-g`, show only unique history. - """) - @argument('range', nargs='*') - @skip_doctest - @line_magic - def history(self, parameter_s = ''): - """Print input history (_i<n> variables), with most recent last. - - By default, input history is printed without line numbers so it can be - directly pasted into an editor. Use -n to show them. - - By default, all input history from the current session is displayed. - Ranges of history can be indicated using the syntax: - - ``4`` - Line 4, current session - ``4-6`` - Lines 4-6, current session - ``243/1-5`` - Lines 1-5, session 243 - ``~2/7`` - Line 7, session 2 before current - ``~8/1-~6/5`` - From the first line of 8 sessions ago, to the fifth line of 6 - sessions ago. - - Multiple ranges can be entered, separated by spaces - - The same syntax is used by %macro, %save, %edit, %rerun - - Examples - -------- - :: - - In [6]: %history -n 4-6 - 4:a = 12 - 5:print a**2 - 6:%history -n 4-6 - - """ - - args = parse_argstring(self.history, parameter_s) - - # For brevity - history_manager = self.shell.history_manager - - def _format_lineno(session, line): - """Helper function to format line numbers properly.""" - if session in (0, history_manager.session_number): - return str(line) - return "%s/%s" % (session, line) - - # Check if output to specific file was requested. - outfname = args.filename - if not outfname: - outfile = sys.stdout # default - # We don't want to close stdout at the end! - close_at_end = False - else: - if os.path.exists(outfname): - try: - ans = io.ask_yes_no("File %r exists. Overwrite?" 
% outfname) - except StdinNotImplementedError: - ans = True - if not ans: - print('Aborting.') - return - print("Overwriting file.") - outfile = io_open(outfname, 'w', encoding='utf-8') - close_at_end = True - - print_nums = args.print_nums - get_output = args.get_output - pyprompts = args.pyprompts - raw = args.raw - - pattern = None - limit = None if args.limit is _unspecified else args.limit - - if args.pattern is not None: - if args.pattern: - pattern = "*" + " ".join(args.pattern) + "*" - else: - pattern = "*" - hist = history_manager.search(pattern, raw=raw, output=get_output, - n=limit, unique=args.unique) - print_nums = True - elif args.limit is not _unspecified: - n = 10 if limit is None else limit - hist = history_manager.get_tail(n, raw=raw, output=get_output) - else: - if args.range: # Get history by ranges - hist = history_manager.get_range_by_str(" ".join(args.range), - raw, get_output) - else: # Just get history for the current session - hist = history_manager.get_range(raw=raw, output=get_output) - - # We could be displaying the entire history, so let's not try to pull - # it into a list in memory. Anything that needs more space will just - # misalign. - width = 4 - - for session, lineno, inline in hist: - # Print user history with tabs expanded to 4 spaces. The GUI - # clients use hard tabs for easier usability in auto-indented code, - # but we want to produce PEP-8 compliant history for safe pasting - # into an editor. - if get_output: - inline, output = inline - inline = inline.expandtabs(4).rstrip() - - multiline = "\n" in inline - line_sep = '\n' if multiline else ' ' - if print_nums: - print(u'%s:%s' % (_format_lineno(session, lineno).rjust(width), - line_sep), file=outfile, end=u'') - if pyprompts: - print(u">>> ", end=u"", file=outfile) - if multiline: - inline = "\n... ".join(inline.splitlines()) + "\n..." - print(inline, file=outfile) - if get_output and output: - print(output, file=outfile) - - if close_at_end: - outfile.close() - - @line_magic - def recall(self, arg): - r"""Repeat a command, or get command to input line for editing. - - %recall and %rep are equivalent. - - - %recall (no arguments): - - Place a string version of last computation result (stored in the - special '_' variable) to the next input prompt. Allows you to create - elaborate command lines without using copy-paste:: - - In[1]: l = ["hei", "vaan"] - In[2]: "".join(l) - Out[2]: heivaan - In[3]: %recall - In[4]: heivaan_ <== cursor blinking - - %recall 45 - - Place history line 45 on the next input prompt. Use %hist to find - out the number. - - %recall 1-4 - - Combine the specified lines into one cell, and place it on the next - input prompt. See %history for the slice syntax. - - %recall foo+bar - - If foo+bar can be evaluated in the user namespace, the result is - placed at the next input prompt. Otherwise, the history is searched - for lines which contain that substring, and the most recent one is - placed at the next input prompt. 
- """ - if not arg: # Last output - self.shell.set_next_input(str(self.shell.user_ns["_"])) - return - # Get history range - histlines = self.shell.history_manager.get_range_by_str(arg) - cmd = "\n".join(x[2] for x in histlines) - if cmd: - self.shell.set_next_input(cmd.rstrip()) - return - - try: # Variable in user namespace - cmd = str(eval(arg, self.shell.user_ns)) - except Exception: # Search for term in history - histlines = self.shell.history_manager.search("*"+arg+"*") - for h in reversed([x[2] for x in histlines]): - if 'recall' in h or 'rep' in h: - continue - self.shell.set_next_input(h.rstrip()) - return - else: - self.shell.set_next_input(cmd.rstrip()) - return - print("Couldn't evaluate or find in history:", arg) - - @line_magic - def rerun(self, parameter_s=''): - """Re-run previous input - - By default, you can specify ranges of input history to be repeated - (as with %history). With no arguments, it will repeat the last line. - - Options: - - -l <n> : Repeat the last n lines of input, not including the - current command. - - -g foo : Repeat the most recent line which contains foo - """ - opts, args = self.parse_options(parameter_s, 'l:g:', mode='string') - if "l" in opts: # Last n lines - n = int(opts['l']) - hist = self.shell.history_manager.get_tail(n) - elif "g" in opts: # Search - p = "*"+opts['g']+"*" - hist = list(self.shell.history_manager.search(p)) - for l in reversed(hist): - if "rerun" not in l[2]: - hist = [l] # The last match which isn't a %rerun - break - else: - hist = [] # No matches except %rerun - elif args: # Specify history ranges - hist = self.shell.history_manager.get_range_by_str(args) - else: # Last line - hist = self.shell.history_manager.get_tail(1) - hist = [x[2] for x in hist] - if not hist: - print("No lines in history match specification") - return - histlines = "\n".join(hist) - print("=== Executing: ===") - print(histlines) - print("=== Output: ===") - self.shell.run_cell("\n".join(hist), store_history=False) +"""Implementation of magic functions related to History. +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012, IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Stdlib +import os +import sys +from io import open as io_open + +# Our own packages +from IPython.core.error import StdinNotImplementedError +from IPython.core.magic import Magics, magics_class, line_magic +from IPython.core.magic_arguments import (argument, magic_arguments, + parse_argstring) +from IPython.testing.skipdoctest import skip_doctest +from IPython.utils import io + +#----------------------------------------------------------------------------- +# Magics class implementation +#----------------------------------------------------------------------------- + + +_unspecified = object() + + +@magics_class +class HistoryMagics(Magics): + + @magic_arguments() + @argument( + '-n', dest='print_nums', action='store_true', default=False, + help=""" + print line numbers for each input. + This feature is only available if numbered prompts are in use. 
+ """) + @argument( + '-o', dest='get_output', action='store_true', default=False, + help="also print outputs for each input.") + @argument( + '-p', dest='pyprompts', action='store_true', default=False, + help=""" + print classic '>>>' python prompts before each input. + This is useful for making documentation, and in conjunction + with -o, for producing doctest-ready output. + """) + @argument( + '-t', dest='raw', action='store_false', default=True, + help=""" + print the 'translated' history, as IPython understands it. + IPython filters your input and converts it all into valid Python + source before executing it (things like magics or aliases are turned + into function calls, for example). With this option, you'll see the + native history instead of the user-entered version: '%%cd /' will be + seen as 'get_ipython().run_line_magic("cd", "/")' instead of '%%cd /'. + """) + @argument( + '-f', dest='filename', + help=""" + FILENAME: instead of printing the output to the screen, redirect + it to the given file. The file is always overwritten, though *when + it can*, IPython asks for confirmation first. In particular, running + the command 'history -f FILENAME' from the IPython Notebook + interface will replace FILENAME even if it already exists *without* + confirmation. + """) + @argument( + '-g', dest='pattern', nargs='*', default=None, + help=""" + treat the arg as a glob pattern to search for in (full) history. + This includes the saved history (almost all commands ever written). + The pattern may contain '?' to match one unknown character and '*' + to match any number of unknown characters. Use '%%hist -g' to show + full saved history (may be very long). + """) + @argument( + '-l', dest='limit', type=int, nargs='?', default=_unspecified, + help=""" + get the last n lines from all sessions. Specify n as a single + arg, or the default is the last 10 lines. + """) + @argument( + '-u', dest='unique', action='store_true', + help=""" + when searching history using `-g`, show only unique history. + """) + @argument('range', nargs='*') + @skip_doctest + @line_magic + def history(self, parameter_s = ''): + """Print input history (_i<n> variables), with most recent last. + + By default, input history is printed without line numbers so it can be + directly pasted into an editor. Use -n to show them. + + By default, all input history from the current session is displayed. + Ranges of history can be indicated using the syntax: + + ``4`` + Line 4, current session + ``4-6`` + Lines 4-6, current session + ``243/1-5`` + Lines 1-5, session 243 + ``~2/7`` + Line 7, session 2 before current + ``~8/1-~6/5`` + From the first line of 8 sessions ago, to the fifth line of 6 + sessions ago. + + Multiple ranges can be entered, separated by spaces + + The same syntax is used by %macro, %save, %edit, %rerun + + Examples + -------- + :: + + In [6]: %history -n 4-6 + 4:a = 12 + 5:print a**2 + 6:%history -n 4-6 + + """ + + args = parse_argstring(self.history, parameter_s) + + # For brevity + history_manager = self.shell.history_manager + + def _format_lineno(session, line): + """Helper function to format line numbers properly.""" + if session in (0, history_manager.session_number): + return str(line) + return "%s/%s" % (session, line) + + # Check if output to specific file was requested. + outfname = args.filename + if not outfname: + outfile = sys.stdout # default + # We don't want to close stdout at the end! + close_at_end = False + else: + if os.path.exists(outfname): + try: + ans = io.ask_yes_no("File %r exists. 
Overwrite?" % outfname) + except StdinNotImplementedError: + ans = True + if not ans: + print('Aborting.') + return + print("Overwriting file.") + outfile = io_open(outfname, 'w', encoding='utf-8') + close_at_end = True + + print_nums = args.print_nums + get_output = args.get_output + pyprompts = args.pyprompts + raw = args.raw + + pattern = None + limit = None if args.limit is _unspecified else args.limit + + if args.pattern is not None: + if args.pattern: + pattern = "*" + " ".join(args.pattern) + "*" + else: + pattern = "*" + hist = history_manager.search(pattern, raw=raw, output=get_output, + n=limit, unique=args.unique) + print_nums = True + elif args.limit is not _unspecified: + n = 10 if limit is None else limit + hist = history_manager.get_tail(n, raw=raw, output=get_output) + else: + if args.range: # Get history by ranges + hist = history_manager.get_range_by_str(" ".join(args.range), + raw, get_output) + else: # Just get history for the current session + hist = history_manager.get_range(raw=raw, output=get_output) + + # We could be displaying the entire history, so let's not try to pull + # it into a list in memory. Anything that needs more space will just + # misalign. + width = 4 + + for session, lineno, inline in hist: + # Print user history with tabs expanded to 4 spaces. The GUI + # clients use hard tabs for easier usability in auto-indented code, + # but we want to produce PEP-8 compliant history for safe pasting + # into an editor. + if get_output: + inline, output = inline + inline = inline.expandtabs(4).rstrip() + + multiline = "\n" in inline + line_sep = '\n' if multiline else ' ' + if print_nums: + print(u'%s:%s' % (_format_lineno(session, lineno).rjust(width), + line_sep), file=outfile, end=u'') + if pyprompts: + print(u">>> ", end=u"", file=outfile) + if multiline: + inline = "\n... ".join(inline.splitlines()) + "\n..." + print(inline, file=outfile) + if get_output and output: + print(output, file=outfile) + + if close_at_end: + outfile.close() + + @line_magic + def recall(self, arg): + r"""Repeat a command, or get command to input line for editing. + + %recall and %rep are equivalent. + + - %recall (no arguments): + + Place a string version of last computation result (stored in the + special '_' variable) to the next input prompt. Allows you to create + elaborate command lines without using copy-paste:: + + In[1]: l = ["hei", "vaan"] + In[2]: "".join(l) + Out[2]: heivaan + In[3]: %recall + In[4]: heivaan_ <== cursor blinking + + %recall 45 + + Place history line 45 on the next input prompt. Use %hist to find + out the number. + + %recall 1-4 + + Combine the specified lines into one cell, and place it on the next + input prompt. See %history for the slice syntax. + + %recall foo+bar + + If foo+bar can be evaluated in the user namespace, the result is + placed at the next input prompt. Otherwise, the history is searched + for lines which contain that substring, and the most recent one is + placed at the next input prompt. 
+ """ + if not arg: # Last output + self.shell.set_next_input(str(self.shell.user_ns["_"])) + return + # Get history range + histlines = self.shell.history_manager.get_range_by_str(arg) + cmd = "\n".join(x[2] for x in histlines) + if cmd: + self.shell.set_next_input(cmd.rstrip()) + return + + try: # Variable in user namespace + cmd = str(eval(arg, self.shell.user_ns)) + except Exception: # Search for term in history + histlines = self.shell.history_manager.search("*"+arg+"*") + for h in reversed([x[2] for x in histlines]): + if 'recall' in h or 'rep' in h: + continue + self.shell.set_next_input(h.rstrip()) + return + else: + self.shell.set_next_input(cmd.rstrip()) + return + print("Couldn't evaluate or find in history:", arg) + + @line_magic + def rerun(self, parameter_s=''): + """Re-run previous input + + By default, you can specify ranges of input history to be repeated + (as with %history). With no arguments, it will repeat the last line. + + Options: + + -l <n> : Repeat the last n lines of input, not including the + current command. + + -g foo : Repeat the most recent line which contains foo + """ + opts, args = self.parse_options(parameter_s, 'l:g:', mode='string') + if "l" in opts: # Last n lines + n = int(opts['l']) + hist = self.shell.history_manager.get_tail(n) + elif "g" in opts: # Search + p = "*"+opts['g']+"*" + hist = list(self.shell.history_manager.search(p)) + for l in reversed(hist): + if "rerun" not in l[2]: + hist = [l] # The last match which isn't a %rerun + break + else: + hist = [] # No matches except %rerun + elif args: # Specify history ranges + hist = self.shell.history_manager.get_range_by_str(args) + else: # Last line + hist = self.shell.history_manager.get_tail(1) + hist = [x[2] for x in hist] + if not hist: + print("No lines in history match specification") + return + histlines = "\n".join(hist) + print("=== Executing: ===") + print(histlines) + print("=== Output: ===") + self.shell.run_cell("\n".join(hist), store_history=False) diff --git a/contrib/python/ipython/py3/IPython/core/magics/logging.py b/contrib/python/ipython/py3/IPython/core/magics/logging.py index d884710d790..b6b8d8a5af6 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/logging.py +++ b/contrib/python/ipython/py3/IPython/core/magics/logging.py @@ -1,195 +1,195 @@ -"""Implementation of magic functions for IPython's own logging. -""" -#----------------------------------------------------------------------------- -# Copyright (c) 2012 The IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. 
-#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -# Stdlib -import os -import sys - -# Our own packages -from IPython.core.magic import Magics, magics_class, line_magic -from warnings import warn -from traitlets import Bool - -#----------------------------------------------------------------------------- -# Magic implementation classes -#----------------------------------------------------------------------------- - -@magics_class -class LoggingMagics(Magics): - """Magics related to all logging machinery.""" - - quiet = Bool(False, help= - """ - Suppress output of log state when logging is enabled - """ - ).tag(config=True) - - @line_magic - def logstart(self, parameter_s=''): - """Start logging anywhere in a session. - - %logstart [-o|-r|-t|-q] [log_name [log_mode]] - - If no name is given, it defaults to a file named 'ipython_log.py' in your - current directory, in 'rotate' mode (see below). - - '%logstart name' saves to file 'name' in 'backup' mode. It saves your - history up to that point and then continues logging. - - %logstart takes a second optional parameter: logging mode. This can be one - of (note that the modes are given unquoted): - - append - Keep logging at the end of any existing file. - - backup - Rename any existing file to name~ and start name. - - global - Append to a single logfile in your home directory. - - over - Overwrite any existing log. - - rotate - Create rotating logs: name.1~, name.2~, etc. - - Options: - - -o - log also IPython's output. In this mode, all commands which - generate an Out[NN] prompt are recorded to the logfile, right after - their corresponding input line. The output lines are always - prepended with a '#[Out]# ' marker, so that the log remains valid - Python code. - - Since this marker is always the same, filtering only the output from - a log is very easy, using for example a simple awk call:: - - awk -F'#\\[Out\\]# ' '{if($2) {print $2}}' ipython_log.py - - -r - log 'raw' input. Normally, IPython's logs contain the processed - input, so that user lines are logged in their final form, converted - into valid Python. For example, %Exit is logged as - _ip.magic("Exit"). If the -r flag is given, all input is logged - exactly as typed, with no transformations applied. - - -t - put timestamps before each input line logged (these are put in - comments). - - -q - suppress output of logstate message when logging is invoked - """ - - opts,par = self.parse_options(parameter_s,'ortq') - log_output = 'o' in opts - log_raw_input = 'r' in opts - timestamp = 't' in opts - quiet = 'q' in opts - - logger = self.shell.logger - - # if no args are given, the defaults set in the logger constructor by - # ipython remain valid - if par: - try: - logfname,logmode = par.split() - except: - logfname = par - logmode = 'backup' - else: - logfname = logger.logfname - logmode = logger.logmode - # put logfname into rc struct as if it had been called on the command - # line, so it ends up saved in the log header Save it in case we need - # to restore it... 
- old_logfile = self.shell.logfile - if logfname: - logfname = os.path.expanduser(logfname) - self.shell.logfile = logfname - - loghead = u'# IPython log file\n\n' - try: - logger.logstart(logfname, loghead, logmode, log_output, timestamp, - log_raw_input) - except: - self.shell.logfile = old_logfile - warn("Couldn't start log: %s" % sys.exc_info()[1]) - else: - # log input history up to this point, optionally interleaving - # output if requested - - if timestamp: - # disable timestamping for the previous history, since we've - # lost those already (no time machine here). - logger.timestamp = False - - if log_raw_input: - input_hist = self.shell.history_manager.input_hist_raw - else: - input_hist = self.shell.history_manager.input_hist_parsed - - if log_output: - log_write = logger.log_write - output_hist = self.shell.history_manager.output_hist - for n in range(1,len(input_hist)-1): - log_write(input_hist[n].rstrip() + u'\n') - if n in output_hist: - log_write(repr(output_hist[n]),'output') - else: - logger.log_write(u'\n'.join(input_hist[1:])) - logger.log_write(u'\n') - if timestamp: - # re-enable timestamping - logger.timestamp = True - - if not (self.quiet or quiet): - print ('Activating auto-logging. ' - 'Current session state plus future input saved.') - logger.logstate() - - @line_magic - def logstop(self, parameter_s=''): - """Fully stop logging and close log file. - - In order to start logging again, a new %logstart call needs to be made, - possibly (though not necessarily) with a new filename, mode and other - options.""" - self.shell.logger.logstop() - - @line_magic - def logoff(self, parameter_s=''): - """Temporarily stop logging. - - You must have previously started logging.""" - self.shell.logger.switch_log(0) - - @line_magic - def logon(self, parameter_s=''): - """Restart logging. - - This function is for restarting logging which you've temporarily - stopped with %logoff. For starting logging for the first time, you - must use the %logstart function, which allows you to specify an - optional log filename.""" - - self.shell.logger.switch_log(1) - - @line_magic - def logstate(self, parameter_s=''): - """Print the status of the logging system.""" - - self.shell.logger.logstate() +"""Implementation of magic functions for IPython's own logging. +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012 The IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Stdlib +import os +import sys + +# Our own packages +from IPython.core.magic import Magics, magics_class, line_magic +from warnings import warn +from traitlets import Bool + +#----------------------------------------------------------------------------- +# Magic implementation classes +#----------------------------------------------------------------------------- + +@magics_class +class LoggingMagics(Magics): + """Magics related to all logging machinery.""" + + quiet = Bool(False, help= + """ + Suppress output of log state when logging is enabled + """ + ).tag(config=True) + + @line_magic + def logstart(self, parameter_s=''): + """Start logging anywhere in a session. 
+ + %logstart [-o|-r|-t|-q] [log_name [log_mode]] + + If no name is given, it defaults to a file named 'ipython_log.py' in your + current directory, in 'rotate' mode (see below). + + '%logstart name' saves to file 'name' in 'backup' mode. It saves your + history up to that point and then continues logging. + + %logstart takes a second optional parameter: logging mode. This can be one + of (note that the modes are given unquoted): + + append + Keep logging at the end of any existing file. + + backup + Rename any existing file to name~ and start name. + + global + Append to a single logfile in your home directory. + + over + Overwrite any existing log. + + rotate + Create rotating logs: name.1~, name.2~, etc. + + Options: + + -o + log also IPython's output. In this mode, all commands which + generate an Out[NN] prompt are recorded to the logfile, right after + their corresponding input line. The output lines are always + prepended with a '#[Out]# ' marker, so that the log remains valid + Python code. + + Since this marker is always the same, filtering only the output from + a log is very easy, using for example a simple awk call:: + + awk -F'#\\[Out\\]# ' '{if($2) {print $2}}' ipython_log.py + + -r + log 'raw' input. Normally, IPython's logs contain the processed + input, so that user lines are logged in their final form, converted + into valid Python. For example, %Exit is logged as + _ip.magic("Exit"). If the -r flag is given, all input is logged + exactly as typed, with no transformations applied. + + -t + put timestamps before each input line logged (these are put in + comments). + + -q + suppress output of logstate message when logging is invoked + """ + + opts,par = self.parse_options(parameter_s,'ortq') + log_output = 'o' in opts + log_raw_input = 'r' in opts + timestamp = 't' in opts + quiet = 'q' in opts + + logger = self.shell.logger + + # if no args are given, the defaults set in the logger constructor by + # ipython remain valid + if par: + try: + logfname,logmode = par.split() + except: + logfname = par + logmode = 'backup' + else: + logfname = logger.logfname + logmode = logger.logmode + # put logfname into rc struct as if it had been called on the command + # line, so it ends up saved in the log header Save it in case we need + # to restore it... + old_logfile = self.shell.logfile + if logfname: + logfname = os.path.expanduser(logfname) + self.shell.logfile = logfname + + loghead = u'# IPython log file\n\n' + try: + logger.logstart(logfname, loghead, logmode, log_output, timestamp, + log_raw_input) + except: + self.shell.logfile = old_logfile + warn("Couldn't start log: %s" % sys.exc_info()[1]) + else: + # log input history up to this point, optionally interleaving + # output if requested + + if timestamp: + # disable timestamping for the previous history, since we've + # lost those already (no time machine here). + logger.timestamp = False + + if log_raw_input: + input_hist = self.shell.history_manager.input_hist_raw + else: + input_hist = self.shell.history_manager.input_hist_parsed + + if log_output: + log_write = logger.log_write + output_hist = self.shell.history_manager.output_hist + for n in range(1,len(input_hist)-1): + log_write(input_hist[n].rstrip() + u'\n') + if n in output_hist: + log_write(repr(output_hist[n]),'output') + else: + logger.log_write(u'\n'.join(input_hist[1:])) + logger.log_write(u'\n') + if timestamp: + # re-enable timestamping + logger.timestamp = True + + if not (self.quiet or quiet): + print ('Activating auto-logging. 
' + 'Current session state plus future input saved.') + logger.logstate() + + @line_magic + def logstop(self, parameter_s=''): + """Fully stop logging and close log file. + + In order to start logging again, a new %logstart call needs to be made, + possibly (though not necessarily) with a new filename, mode and other + options.""" + self.shell.logger.logstop() + + @line_magic + def logoff(self, parameter_s=''): + """Temporarily stop logging. + + You must have previously started logging.""" + self.shell.logger.switch_log(0) + + @line_magic + def logon(self, parameter_s=''): + """Restart logging. + + This function is for restarting logging which you've temporarily + stopped with %logoff. For starting logging for the first time, you + must use the %logstart function, which allows you to specify an + optional log filename.""" + + self.shell.logger.switch_log(1) + + @line_magic + def logstate(self, parameter_s=''): + """Print the status of the logging system.""" + + self.shell.logger.logstate() diff --git a/contrib/python/ipython/py3/IPython/core/magics/namespace.py b/contrib/python/ipython/py3/IPython/core/magics/namespace.py index 5c3156d4d26..5cc2d81ca2a 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/namespace.py +++ b/contrib/python/ipython/py3/IPython/core/magics/namespace.py @@ -1,712 +1,712 @@ -"""Implementation of namespace-related magic functions. -""" -#----------------------------------------------------------------------------- -# Copyright (c) 2012 The IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -# Stdlib -import gc -import re -import sys - -# Our own packages -from IPython.core import page -from IPython.core.error import StdinNotImplementedError, UsageError -from IPython.core.magic import Magics, magics_class, line_magic -from IPython.testing.skipdoctest import skip_doctest -from IPython.utils.encoding import DEFAULT_ENCODING -from IPython.utils.openpy import read_py_file -from IPython.utils.path import get_py_filename - -#----------------------------------------------------------------------------- -# Magic implementation classes -#----------------------------------------------------------------------------- - -@magics_class -class NamespaceMagics(Magics): - """Magics to manage various aspects of the user's namespace. - - These include listing variables, introspecting into them, etc. - """ - - @line_magic - def pinfo(self, parameter_s='', namespaces=None): - """Provide detailed information about an object. - - '%pinfo object' is just a synonym for object? or ?object.""" - - #print 'pinfo par: <%s>' % parameter_s # dbg - # detail_level: 0 -> obj? , 1 -> obj?? - detail_level = 0 - # We need to detect if we got called as 'pinfo pinfo foo', which can - # happen if the user types 'pinfo foo?' at the cmd line. 
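The question-mark detection described in the comment above comes down to a single regular expression (the pattern below is copied from the line that follows); its effect can be seen in isolation::

    import re

    _pat = re.compile(r'(pinfo )?(\?*)(.*?)(\??$)')

    for arg in ('foo', 'foo?', '??foo', 'pinfo foo?'):
        pinfo, q1, oname, q2 = _pat.match(arg).groups()
        # leading/trailing '?' (or a doubled 'pinfo') selects the ?? detail level
        detail_level = 1 if (pinfo or q1 or q2) else 0
        print('%-11s -> name=%-4s detail_level=%d' % (arg, oname, detail_level))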
- pinfo,qmark1,oname,qmark2 = \ - re.match(r'(pinfo )?(\?*)(.*?)(\??$)',parameter_s).groups() - if pinfo or qmark1 or qmark2: - detail_level = 1 - if "*" in oname: - self.psearch(oname) - else: - self.shell._inspect('pinfo', oname, detail_level=detail_level, - namespaces=namespaces) - - @line_magic - def pinfo2(self, parameter_s='', namespaces=None): - """Provide extra detailed information about an object. - - '%pinfo2 object' is just a synonym for object?? or ??object.""" - self.shell._inspect('pinfo', parameter_s, detail_level=1, - namespaces=namespaces) - - @skip_doctest - @line_magic - def pdef(self, parameter_s='', namespaces=None): - """Print the call signature for any callable object. - - If the object is a class, print the constructor information. - - Examples - -------- - :: - - In [3]: %pdef urllib.urlopen - urllib.urlopen(url, data=None, proxies=None) - """ - self.shell._inspect('pdef',parameter_s, namespaces) - - @line_magic - def pdoc(self, parameter_s='', namespaces=None): - """Print the docstring for an object. - - If the given object is a class, it will print both the class and the - constructor docstrings.""" - self.shell._inspect('pdoc',parameter_s, namespaces) - - @line_magic - def psource(self, parameter_s='', namespaces=None): - """Print (or run through pager) the source code for an object.""" - if not parameter_s: - raise UsageError('Missing object name.') - self.shell._inspect('psource',parameter_s, namespaces) - - @line_magic - def pfile(self, parameter_s='', namespaces=None): - """Print (or run through pager) the file where an object is defined. - - The file opens at the line where the object definition begins. IPython - will honor the environment variable PAGER if set, and otherwise will - do its best to print the file in a convenient form. - - If the given argument is not an object currently defined, IPython will - try to interpret it as a filename (automatically adding a .py extension - if needed). You can thus use %pfile as a syntax highlighting code - viewer.""" - - # first interpret argument as an object name - out = self.shell._inspect('pfile',parameter_s, namespaces) - # if not, try the input as a filename - if out == 'not found': - try: - filename = get_py_filename(parameter_s) - except IOError as msg: - print(msg) - return - page.page(self.shell.pycolorize(read_py_file(filename, skip_encoding_cookie=False))) - - @line_magic - def psearch(self, parameter_s=''): - """Search for object in namespaces by wildcard. - - %psearch [options] PATTERN [OBJECT TYPE] - - Note: ? can be used as a synonym for %psearch, at the beginning or at - the end: both a*? and ?a* are equivalent to '%psearch a*'. Still, the - rest of the command line must be unchanged (options come first), so - for example the following forms are equivalent - - %psearch -i a* function - -i a* function? - ?-i a* function - - Arguments: - - PATTERN - - where PATTERN is a string containing * as a wildcard similar to its - use in a shell. The pattern is matched in all namespaces on the - search path. By default objects starting with a single _ are not - matched, many IPython generated objects have a single - underscore. The default is case insensitive matching. Matching is - also done on the attributes of objects and not only on the objects - in a module. - - [OBJECT TYPE] - - Is the name of a python type from the types module. The name is - given in lowercase without the ending type, ex. StringType is - written string. By adding a type here only objects matching the - given type are matched. 
Using all here makes the pattern match all - types (this is the default). - - Options: - - -a: makes the pattern match even objects whose names start with a - single underscore. These names are normally omitted from the - search. - - -i/-c: make the pattern case insensitive/sensitive. If neither of - these options are given, the default is read from your configuration - file, with the option ``InteractiveShell.wildcards_case_sensitive``. - If this option is not specified in your configuration file, IPython's - internal default is to do a case sensitive search. - - -e/-s NAMESPACE: exclude/search a given namespace. The pattern you - specify can be searched in any of the following namespaces: - 'builtin', 'user', 'user_global','internal', 'alias', where - 'builtin' and 'user' are the search defaults. Note that you should - not use quotes when specifying namespaces. - - -l: List all available object types for object matching. This function - can be used without arguments. - - 'Builtin' contains the python module builtin, 'user' contains all - user data, 'alias' only contain the shell aliases and no python - objects, 'internal' contains objects used by IPython. The - 'user_global' namespace is only used by embedded IPython instances, - and it contains module-level globals. You can add namespaces to the - search with -s or exclude them with -e (these options can be given - more than once). - - Examples - -------- - :: - - %psearch a* -> objects beginning with an a - %psearch -e builtin a* -> objects NOT in the builtin space starting in a - %psearch a* function -> all functions beginning with an a - %psearch re.e* -> objects beginning with an e in module re - %psearch r*.e* -> objects that start with e in modules starting in r - %psearch r*.* string -> all strings in modules beginning with r - - Case sensitive search:: - - %psearch -c a* list all object beginning with lower case a - - Show objects beginning with a single _:: - - %psearch -a _* list objects beginning with a single underscore - - List available objects:: - - %psearch -l list all available object types - """ - # default namespaces to be searched - def_search = ['user_local', 'user_global', 'builtin'] - - # Process options/args - opts,args = self.parse_options(parameter_s,'cias:e:l',list_all=True) - opt = opts.get - shell = self.shell - psearch = shell.inspector.psearch - - # select list object types - list_types = False - if 'l' in opts: - list_types = True - - # select case options - if 'i' in opts: - ignore_case = True - elif 'c' in opts: - ignore_case = False - else: - ignore_case = not shell.wildcards_case_sensitive - - # Build list of namespaces to search from user options - def_search.extend(opt('s',[])) - ns_exclude = ns_exclude=opt('e',[]) - ns_search = [nm for nm in def_search if nm not in ns_exclude] - - # Call the actual search - try: - psearch(args,shell.ns_table,ns_search, - show_all=opt('a'),ignore_case=ignore_case, list_types=list_types) - except: - shell.showtraceback() - - @skip_doctest - @line_magic - def who_ls(self, parameter_s=''): - """Return a sorted list of all interactive variables. - - If arguments are given, only variables of types matching these - arguments are returned. 
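The type filter is a plain comparison against ``type(obj).__name__``; roughly (a standalone sketch over an ordinary dict, not the real user namespace)::

    def who_ls_sketch(user_ns, *type_names):
        # Keep public names only, optionally restricted to the given type names.
        names = [n for n in user_ns if not n.startswith('_')]
        if type_names:
            wanted = set(type_names)
            names = [n for n in names if type(user_ns[n]).__name__ in wanted]
        return sorted(names)

    ns = {'alpha': 123, 'beta': 'test', '_hidden': None}
    print(who_ls_sketch(ns))          # ['alpha', 'beta']
    print(who_ls_sketch(ns, 'int'))   # ['alpha']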
- - Examples - -------- - - Define two variables and list them with who_ls:: - - In [1]: alpha = 123 - - In [2]: beta = 'test' - - In [3]: %who_ls - Out[3]: ['alpha', 'beta'] - - In [4]: %who_ls int - Out[4]: ['alpha'] - - In [5]: %who_ls str - Out[5]: ['beta'] - """ - - user_ns = self.shell.user_ns - user_ns_hidden = self.shell.user_ns_hidden - nonmatching = object() # This can never be in user_ns - out = [ i for i in user_ns - if not i.startswith('_') \ - and (user_ns[i] is not user_ns_hidden.get(i, nonmatching)) ] - - typelist = parameter_s.split() - if typelist: - typeset = set(typelist) - out = [i for i in out if type(user_ns[i]).__name__ in typeset] - - out.sort() - return out - - @skip_doctest - @line_magic - def who(self, parameter_s=''): - """Print all interactive variables, with some minimal formatting. - - If any arguments are given, only variables whose type matches one of - these are printed. For example:: - - %who function str - - will only list functions and strings, excluding all other types of - variables. To find the proper type names, simply use type(var) at a - command line to see how python prints type names. For example: - - :: - - In [1]: type('hello')\\ - Out[1]: <type 'str'> - - indicates that the type name for strings is 'str'. - - ``%who`` always excludes executed names loaded through your configuration - file and things which are internal to IPython. - - This is deliberate, as typically you may load many modules and the - purpose of %who is to show you only what you've manually defined. - - Examples - -------- - - Define two variables and list them with who:: - - In [1]: alpha = 123 - - In [2]: beta = 'test' - - In [3]: %who - alpha beta - - In [4]: %who int - alpha - - In [5]: %who str - beta - """ - - varlist = self.who_ls(parameter_s) - if not varlist: - if parameter_s: - print('No variables match your requested type.') - else: - print('Interactive namespace is empty.') - return - - # if we have variables, move on... - count = 0 - for i in varlist: - print(i+'\t', end=' ') - count += 1 - if count > 8: - count = 0 - print() - print() - - @skip_doctest - @line_magic - def whos(self, parameter_s=''): - """Like %who, but gives some extra information about each variable. - - The same type filtering of %who can be applied here. - - For all variables, the type is printed. Additionally it prints: - - - For {},[],(): their length. - - - For numpy arrays, a summary with shape, number of - elements, typecode and size in memory. - - - Everything else: a string representation, snipping their middle if - too long. - - Examples - -------- - - Define two variables and list them with whos:: - - In [1]: alpha = 123 - - In [2]: beta = 'test' - - In [3]: %whos - Variable Type Data/Info - -------------------------------- - alpha int 123 - beta str test - """ - - varnames = self.who_ls(parameter_s) - if not varnames: - if parameter_s: - print('No variables match your requested type.') - else: - print('Interactive namespace is empty.') - return - - # if we have variables, move on... 
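For NumPy arrays the Data/Info column reports shape, element count, dtype and byte size, adding a kb/Mb hint once the buffer is large; a rough standalone equivalent (assuming NumPy is installed)::

    import numpy as np

    def array_summary(a):
        # Shape rendered as '10x10', then elems/dtype/bytes, plus a kb/Mb
        # hint above ~100 kB (sketch of the %whos formatting only).
        shape = 'x'.join(str(d) for d in a.shape)
        line = '%s: %d elems, type `%s`, %d bytes' % (shape, a.size, a.dtype, a.nbytes)
        if a.nbytes >= 100000:
            if a.nbytes >= 1048576:
                line += ' (%.1f Mb)' % (a.nbytes / 1048576.0)
            else:
                line += ' (%.1f kb)' % (a.nbytes / 1024.0)
        return line

    print(array_summary(np.zeros((10, 10))))
    print(array_summary(np.zeros((1000, 1000))))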
- - # for these types, show len() instead of data: - seq_types = ['dict', 'list', 'tuple'] - - # for numpy arrays, display summary info - ndarray_type = None - if 'numpy' in sys.modules: - try: - from numpy import ndarray - except ImportError: - pass - else: - ndarray_type = ndarray.__name__ - - # Find all variable names and types so we can figure out column sizes - - # some types are well known and can be shorter - abbrevs = {'IPython.core.macro.Macro' : 'Macro'} - def type_name(v): - tn = type(v).__name__ - return abbrevs.get(tn,tn) - - varlist = [self.shell.user_ns[n] for n in varnames] - - typelist = [] - for vv in varlist: - tt = type_name(vv) - - if tt=='instance': - typelist.append( abbrevs.get(str(vv.__class__), - str(vv.__class__))) - else: - typelist.append(tt) - - # column labels and # of spaces as separator - varlabel = 'Variable' - typelabel = 'Type' - datalabel = 'Data/Info' - colsep = 3 - # variable format strings - vformat = "{0:<{varwidth}}{1:<{typewidth}}" - aformat = "%s: %s elems, type `%s`, %s bytes" - # find the size of the columns to format the output nicely - varwidth = max(max(map(len,varnames)), len(varlabel)) + colsep - typewidth = max(max(map(len,typelist)), len(typelabel)) + colsep - # table header - print(varlabel.ljust(varwidth) + typelabel.ljust(typewidth) + \ - ' '+datalabel+'\n' + '-'*(varwidth+typewidth+len(datalabel)+1)) - # and the table itself - kb = 1024 - Mb = 1048576 # kb**2 - for vname,var,vtype in zip(varnames,varlist,typelist): - print(vformat.format(vname, vtype, varwidth=varwidth, typewidth=typewidth), end=' ') - if vtype in seq_types: - print("n="+str(len(var))) - elif vtype == ndarray_type: - vshape = str(var.shape).replace(',','').replace(' ','x')[1:-1] - if vtype==ndarray_type: - # numpy - vsize = var.size - vbytes = vsize*var.itemsize - vdtype = var.dtype - - if vbytes < 100000: - print(aformat % (vshape, vsize, vdtype, vbytes)) - else: - print(aformat % (vshape, vsize, vdtype, vbytes), end=' ') - if vbytes < Mb: - print('(%s kb)' % (vbytes/kb,)) - else: - print('(%s Mb)' % (vbytes/Mb,)) - else: - try: - vstr = str(var) - except UnicodeEncodeError: - vstr = var.encode(DEFAULT_ENCODING, - 'backslashreplace') - except: - vstr = "<object with id %d (str() failed)>" % id(var) - vstr = vstr.replace('\n', '\\n') - if len(vstr) < 50: - print(vstr) - else: - print(vstr[:25] + "<...>" + vstr[-25:]) - - @line_magic - def reset(self, parameter_s=''): - """Resets the namespace by removing all names defined by the user, if - called without arguments, or by removing some types of objects, such - as everything currently in IPython's In[] and Out[] containers (see - the parameters for details). - - Parameters - ---------- - -f : force reset without asking for confirmation. - - -s : 'Soft' reset: Only clears your namespace, leaving history intact. - References to objects may be kept. By default (without this option), - we do a 'hard' reset, giving you a new session and removing all - references to objects from the current session. - - --aggressive: Try to aggressively remove modules from sys.modules ; this - may allow you to reimport Python modules that have been updated and - pick up changes, but can have unattended consequences. 
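The optional targets ('in', 'out', 'dhist', 'array') listed below each clear one specific cache rather than the whole namespace; the dispatch amounts to roughly this (plain containers standing in for the shell's real history structures)::

    # Hypothetical stand-ins for the shell's caches (sketch only).
    user_ns = {'_oh': {1: 4, 2: 9}, '_dh': ['/tmp', '/home/user']}

    def reset_target(target):
        target = target.lower()
        if target == 'out':
            print("Flushing output cache (%d entries)" % len(user_ns['_oh']))
            user_ns['_oh'].clear()
        elif target == 'dhist':
            print("Flushing directory history")
            del user_ns['_dh'][:]
        else:
            print("Don't know how to reset", target)

    for t in ('out', 'dhist'):
        reset_target(t)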
- - in : reset input history - - out : reset output history - - dhist : reset directory history - - array : reset only variables that are NumPy arrays - - See Also - -------- - reset_selective : invoked as ``%reset_selective`` - - Examples - -------- - :: - - In [6]: a = 1 - - In [7]: a - Out[7]: 1 - - In [8]: 'a' in get_ipython().user_ns - Out[8]: True - - In [9]: %reset -f - - In [1]: 'a' in get_ipython().user_ns - Out[1]: False - - In [2]: %reset -f in - Flushing input history - - In [3]: %reset -f dhist in - Flushing directory history - Flushing input history - - Notes - ----- - Calling this magic from clients that do not implement standard input, - such as the ipython notebook interface, will reset the namespace - without confirmation. - """ - opts, args = self.parse_options(parameter_s, "sf", "aggressive", mode="list") - if "f" in opts: - ans = True - else: - try: - ans = self.shell.ask_yes_no( - "Once deleted, variables cannot be recovered. Proceed (y/[n])?", - default='n') - except StdinNotImplementedError: - ans = True - if not ans: - print('Nothing done.') - return - - if 's' in opts: # Soft reset - user_ns = self.shell.user_ns - for i in self.who_ls(): - del(user_ns[i]) - elif len(args) == 0: # Hard reset - self.shell.reset(new_session=False, aggressive=("aggressive" in opts)) - - # reset in/out/dhist/array: previously extensinions/clearcmd.py - ip = self.shell - user_ns = self.shell.user_ns # local lookup, heavily used - - for target in args: - target = target.lower() # make matches case insensitive - if target == 'out': - print("Flushing output cache (%d entries)" % len(user_ns['_oh'])) - self.shell.displayhook.flush() - - elif target == 'in': - print("Flushing input history") - pc = self.shell.displayhook.prompt_count + 1 - for n in range(1, pc): - key = '_i'+repr(n) - user_ns.pop(key,None) - user_ns.update(dict(_i=u'',_ii=u'',_iii=u'')) - hm = ip.history_manager - # don't delete these, as %save and %macro depending on the - # length of these lists to be preserved - hm.input_hist_parsed[:] = [''] * pc - hm.input_hist_raw[:] = [''] * pc - # hm has internal machinery for _i,_ii,_iii, clear it out - hm._i = hm._ii = hm._iii = hm._i00 = u'' - - elif target == 'array': - # Support cleaning up numpy arrays - try: - from numpy import ndarray - # This must be done with items and not iteritems because - # we're going to modify the dict in-place. - for x,val in list(user_ns.items()): - if isinstance(val,ndarray): - del user_ns[x] - except ImportError: - print("reset array only works if Numpy is available.") - - elif target == 'dhist': - print("Flushing directory history") - del user_ns['_dh'][:] - - else: - print("Don't know how to reset ", end=' ') - print(target + ", please run `%reset?` for details") - - gc.collect() - - @line_magic - def reset_selective(self, parameter_s=''): - """Resets the namespace by removing names defined by the user. - - Input/Output history are left around in case you need them. - - %reset_selective [-f] regex - - No action is taken if regex is not included - - Options - -f : force reset without asking for confirmation. 
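Matching in %reset_selective is an unanchored ``re.search`` over every interactive name, so short patterns delete aggressively; a small standalone sketch::

    import re

    def reset_selective_sketch(user_ns, pattern):
        # Delete every user-defined name whose name matches the regex
        # anywhere ('b' would also hit 'b1m', 'b2s', ...).
        rx = re.compile(pattern)
        for name in [n for n in user_ns if not n.startswith('_')]:
            if rx.search(name):
                del user_ns[name]

    ns = {'a': 1, 'b': 2, 'b1m': 4, 'b2m': 5, 'c': 3}
    reset_selective_sketch(ns, 'b[2-3]m')
    print(sorted(ns))   # ['a', 'b', 'b1m', 'c']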
- - See Also - -------- - reset : invoked as ``%reset`` - - Examples - -------- - - We first fully reset the namespace so your output looks identical to - this example for pedagogical reasons; in practice you do not need a - full reset:: - - In [1]: %reset -f - - Now, with a clean namespace we can make a few variables and use - ``%reset_selective`` to only delete names that match our regexp:: - - In [2]: a=1; b=2; c=3; b1m=4; b2m=5; b3m=6; b4m=7; b2s=8 - - In [3]: who_ls - Out[3]: ['a', 'b', 'b1m', 'b2m', 'b2s', 'b3m', 'b4m', 'c'] - - In [4]: %reset_selective -f b[2-3]m - - In [5]: who_ls - Out[5]: ['a', 'b', 'b1m', 'b2s', 'b4m', 'c'] - - In [6]: %reset_selective -f d - - In [7]: who_ls - Out[7]: ['a', 'b', 'b1m', 'b2s', 'b4m', 'c'] - - In [8]: %reset_selective -f c - - In [9]: who_ls - Out[9]: ['a', 'b', 'b1m', 'b2s', 'b4m'] - - In [10]: %reset_selective -f b - - In [11]: who_ls - Out[11]: ['a'] - - Notes - ----- - Calling this magic from clients that do not implement standard input, - such as the ipython notebook interface, will reset the namespace - without confirmation. - """ - - opts, regex = self.parse_options(parameter_s,'f') - - if 'f' in opts: - ans = True - else: - try: - ans = self.shell.ask_yes_no( - "Once deleted, variables cannot be recovered. Proceed (y/[n])? ", - default='n') - except StdinNotImplementedError: - ans = True - if not ans: - print('Nothing done.') - return - user_ns = self.shell.user_ns - if not regex: - print('No regex pattern specified. Nothing done.') - return - else: - try: - m = re.compile(regex) - except TypeError: - raise TypeError('regex must be a string or compiled pattern') - for i in self.who_ls(): - if m.search(i): - del(user_ns[i]) - - @line_magic - def xdel(self, parameter_s=''): - """Delete a variable, trying to clear it from anywhere that - IPython's machinery has references to it. By default, this uses - the identity of the named object in the user namespace to remove - references held under other names. The object is also removed - from the output history. - - Options - -n : Delete the specified name from all namespaces, without - checking their identity. - """ - opts, varname = self.parse_options(parameter_s,'n') - try: - self.shell.del_var(varname, ('n' in opts)) - except (NameError, ValueError) as e: - print(type(e).__name__ +": "+ str(e)) +"""Implementation of namespace-related magic functions. +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012 The IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. 
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Stdlib +import gc +import re +import sys + +# Our own packages +from IPython.core import page +from IPython.core.error import StdinNotImplementedError, UsageError +from IPython.core.magic import Magics, magics_class, line_magic +from IPython.testing.skipdoctest import skip_doctest +from IPython.utils.encoding import DEFAULT_ENCODING +from IPython.utils.openpy import read_py_file +from IPython.utils.path import get_py_filename + +#----------------------------------------------------------------------------- +# Magic implementation classes +#----------------------------------------------------------------------------- + +@magics_class +class NamespaceMagics(Magics): + """Magics to manage various aspects of the user's namespace. + + These include listing variables, introspecting into them, etc. + """ + + @line_magic + def pinfo(self, parameter_s='', namespaces=None): + """Provide detailed information about an object. + + '%pinfo object' is just a synonym for object? or ?object.""" + + #print 'pinfo par: <%s>' % parameter_s # dbg + # detail_level: 0 -> obj? , 1 -> obj?? + detail_level = 0 + # We need to detect if we got called as 'pinfo pinfo foo', which can + # happen if the user types 'pinfo foo?' at the cmd line. + pinfo,qmark1,oname,qmark2 = \ + re.match(r'(pinfo )?(\?*)(.*?)(\??$)',parameter_s).groups() + if pinfo or qmark1 or qmark2: + detail_level = 1 + if "*" in oname: + self.psearch(oname) + else: + self.shell._inspect('pinfo', oname, detail_level=detail_level, + namespaces=namespaces) + + @line_magic + def pinfo2(self, parameter_s='', namespaces=None): + """Provide extra detailed information about an object. + + '%pinfo2 object' is just a synonym for object?? or ??object.""" + self.shell._inspect('pinfo', parameter_s, detail_level=1, + namespaces=namespaces) + + @skip_doctest + @line_magic + def pdef(self, parameter_s='', namespaces=None): + """Print the call signature for any callable object. + + If the object is a class, print the constructor information. + + Examples + -------- + :: + + In [3]: %pdef urllib.urlopen + urllib.urlopen(url, data=None, proxies=None) + """ + self.shell._inspect('pdef',parameter_s, namespaces) + + @line_magic + def pdoc(self, parameter_s='', namespaces=None): + """Print the docstring for an object. + + If the given object is a class, it will print both the class and the + constructor docstrings.""" + self.shell._inspect('pdoc',parameter_s, namespaces) + + @line_magic + def psource(self, parameter_s='', namespaces=None): + """Print (or run through pager) the source code for an object.""" + if not parameter_s: + raise UsageError('Missing object name.') + self.shell._inspect('psource',parameter_s, namespaces) + + @line_magic + def pfile(self, parameter_s='', namespaces=None): + """Print (or run through pager) the file where an object is defined. + + The file opens at the line where the object definition begins. IPython + will honor the environment variable PAGER if set, and otherwise will + do its best to print the file in a convenient form. + + If the given argument is not an object currently defined, IPython will + try to interpret it as a filename (automatically adding a .py extension + if needed). 
You can thus use %pfile as a syntax highlighting code + viewer.""" + + # first interpret argument as an object name + out = self.shell._inspect('pfile',parameter_s, namespaces) + # if not, try the input as a filename + if out == 'not found': + try: + filename = get_py_filename(parameter_s) + except IOError as msg: + print(msg) + return + page.page(self.shell.pycolorize(read_py_file(filename, skip_encoding_cookie=False))) + + @line_magic + def psearch(self, parameter_s=''): + """Search for object in namespaces by wildcard. + + %psearch [options] PATTERN [OBJECT TYPE] + + Note: ? can be used as a synonym for %psearch, at the beginning or at + the end: both a*? and ?a* are equivalent to '%psearch a*'. Still, the + rest of the command line must be unchanged (options come first), so + for example the following forms are equivalent + + %psearch -i a* function + -i a* function? + ?-i a* function + + Arguments: + + PATTERN + + where PATTERN is a string containing * as a wildcard similar to its + use in a shell. The pattern is matched in all namespaces on the + search path. By default objects starting with a single _ are not + matched, many IPython generated objects have a single + underscore. The default is case insensitive matching. Matching is + also done on the attributes of objects and not only on the objects + in a module. + + [OBJECT TYPE] + + Is the name of a python type from the types module. The name is + given in lowercase without the ending type, ex. StringType is + written string. By adding a type here only objects matching the + given type are matched. Using all here makes the pattern match all + types (this is the default). + + Options: + + -a: makes the pattern match even objects whose names start with a + single underscore. These names are normally omitted from the + search. + + -i/-c: make the pattern case insensitive/sensitive. If neither of + these options are given, the default is read from your configuration + file, with the option ``InteractiveShell.wildcards_case_sensitive``. + If this option is not specified in your configuration file, IPython's + internal default is to do a case sensitive search. + + -e/-s NAMESPACE: exclude/search a given namespace. The pattern you + specify can be searched in any of the following namespaces: + 'builtin', 'user', 'user_global','internal', 'alias', where + 'builtin' and 'user' are the search defaults. Note that you should + not use quotes when specifying namespaces. + + -l: List all available object types for object matching. This function + can be used without arguments. + + 'Builtin' contains the python module builtin, 'user' contains all + user data, 'alias' only contain the shell aliases and no python + objects, 'internal' contains objects used by IPython. The + 'user_global' namespace is only used by embedded IPython instances, + and it contains module-level globals. You can add namespaces to the + search with -s or exclude them with -e (these options can be given + more than once). 
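The PATTERN is shell-style rather than a regular expression; its matching over several namespaces can be sketched with ``fnmatch`` (an illustrative stand-in, not IPython's actual inspector)::

    from fnmatch import fnmatchcase

    def psearch_sketch(pattern, namespaces, show_all=False):
        # Search several name->object mappings with a shell-style wildcard,
        # skipping single-underscore names unless show_all is set.
        hits = []
        for ns_name, ns in namespaces.items():
            for name in ns:
                if name.startswith('_') and not show_all:
                    continue
                if fnmatchcase(name, pattern):
                    hits.append('%s.%s' % (ns_name, name))
        return sorted(hits)

    spaces = {'user': {'alpha': 1, 'args': (), '_tmp': None},
              'builtin': {'abs': abs}}
    print(psearch_sketch('a*', spaces))   # ['builtin.abs', 'user.alpha', 'user.args']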
+ + Examples + -------- + :: + + %psearch a* -> objects beginning with an a + %psearch -e builtin a* -> objects NOT in the builtin space starting in a + %psearch a* function -> all functions beginning with an a + %psearch re.e* -> objects beginning with an e in module re + %psearch r*.e* -> objects that start with e in modules starting in r + %psearch r*.* string -> all strings in modules beginning with r + + Case sensitive search:: + + %psearch -c a* list all object beginning with lower case a + + Show objects beginning with a single _:: + + %psearch -a _* list objects beginning with a single underscore + + List available objects:: + + %psearch -l list all available object types + """ + # default namespaces to be searched + def_search = ['user_local', 'user_global', 'builtin'] + + # Process options/args + opts,args = self.parse_options(parameter_s,'cias:e:l',list_all=True) + opt = opts.get + shell = self.shell + psearch = shell.inspector.psearch + + # select list object types + list_types = False + if 'l' in opts: + list_types = True + + # select case options + if 'i' in opts: + ignore_case = True + elif 'c' in opts: + ignore_case = False + else: + ignore_case = not shell.wildcards_case_sensitive + + # Build list of namespaces to search from user options + def_search.extend(opt('s',[])) + ns_exclude = ns_exclude=opt('e',[]) + ns_search = [nm for nm in def_search if nm not in ns_exclude] + + # Call the actual search + try: + psearch(args,shell.ns_table,ns_search, + show_all=opt('a'),ignore_case=ignore_case, list_types=list_types) + except: + shell.showtraceback() + + @skip_doctest + @line_magic + def who_ls(self, parameter_s=''): + """Return a sorted list of all interactive variables. + + If arguments are given, only variables of types matching these + arguments are returned. + + Examples + -------- + + Define two variables and list them with who_ls:: + + In [1]: alpha = 123 + + In [2]: beta = 'test' + + In [3]: %who_ls + Out[3]: ['alpha', 'beta'] + + In [4]: %who_ls int + Out[4]: ['alpha'] + + In [5]: %who_ls str + Out[5]: ['beta'] + """ + + user_ns = self.shell.user_ns + user_ns_hidden = self.shell.user_ns_hidden + nonmatching = object() # This can never be in user_ns + out = [ i for i in user_ns + if not i.startswith('_') \ + and (user_ns[i] is not user_ns_hidden.get(i, nonmatching)) ] + + typelist = parameter_s.split() + if typelist: + typeset = set(typelist) + out = [i for i in out if type(user_ns[i]).__name__ in typeset] + + out.sort() + return out + + @skip_doctest + @line_magic + def who(self, parameter_s=''): + """Print all interactive variables, with some minimal formatting. + + If any arguments are given, only variables whose type matches one of + these are printed. For example:: + + %who function str + + will only list functions and strings, excluding all other types of + variables. To find the proper type names, simply use type(var) at a + command line to see how python prints type names. For example: + + :: + + In [1]: type('hello')\\ + Out[1]: <type 'str'> + + indicates that the type name for strings is 'str'. + + ``%who`` always excludes executed names loaded through your configuration + file and things which are internal to IPython. + + This is deliberate, as typically you may load many modules and the + purpose of %who is to show you only what you've manually defined. 
+ + Examples + -------- + + Define two variables and list them with who:: + + In [1]: alpha = 123 + + In [2]: beta = 'test' + + In [3]: %who + alpha beta + + In [4]: %who int + alpha + + In [5]: %who str + beta + """ + + varlist = self.who_ls(parameter_s) + if not varlist: + if parameter_s: + print('No variables match your requested type.') + else: + print('Interactive namespace is empty.') + return + + # if we have variables, move on... + count = 0 + for i in varlist: + print(i+'\t', end=' ') + count += 1 + if count > 8: + count = 0 + print() + print() + + @skip_doctest + @line_magic + def whos(self, parameter_s=''): + """Like %who, but gives some extra information about each variable. + + The same type filtering of %who can be applied here. + + For all variables, the type is printed. Additionally it prints: + + - For {},[],(): their length. + + - For numpy arrays, a summary with shape, number of + elements, typecode and size in memory. + + - Everything else: a string representation, snipping their middle if + too long. + + Examples + -------- + + Define two variables and list them with whos:: + + In [1]: alpha = 123 + + In [2]: beta = 'test' + + In [3]: %whos + Variable Type Data/Info + -------------------------------- + alpha int 123 + beta str test + """ + + varnames = self.who_ls(parameter_s) + if not varnames: + if parameter_s: + print('No variables match your requested type.') + else: + print('Interactive namespace is empty.') + return + + # if we have variables, move on... + + # for these types, show len() instead of data: + seq_types = ['dict', 'list', 'tuple'] + + # for numpy arrays, display summary info + ndarray_type = None + if 'numpy' in sys.modules: + try: + from numpy import ndarray + except ImportError: + pass + else: + ndarray_type = ndarray.__name__ + + # Find all variable names and types so we can figure out column sizes + + # some types are well known and can be shorter + abbrevs = {'IPython.core.macro.Macro' : 'Macro'} + def type_name(v): + tn = type(v).__name__ + return abbrevs.get(tn,tn) + + varlist = [self.shell.user_ns[n] for n in varnames] + + typelist = [] + for vv in varlist: + tt = type_name(vv) + + if tt=='instance': + typelist.append( abbrevs.get(str(vv.__class__), + str(vv.__class__))) + else: + typelist.append(tt) + + # column labels and # of spaces as separator + varlabel = 'Variable' + typelabel = 'Type' + datalabel = 'Data/Info' + colsep = 3 + # variable format strings + vformat = "{0:<{varwidth}}{1:<{typewidth}}" + aformat = "%s: %s elems, type `%s`, %s bytes" + # find the size of the columns to format the output nicely + varwidth = max(max(map(len,varnames)), len(varlabel)) + colsep + typewidth = max(max(map(len,typelist)), len(typelabel)) + colsep + # table header + print(varlabel.ljust(varwidth) + typelabel.ljust(typewidth) + \ + ' '+datalabel+'\n' + '-'*(varwidth+typewidth+len(datalabel)+1)) + # and the table itself + kb = 1024 + Mb = 1048576 # kb**2 + for vname,var,vtype in zip(varnames,varlist,typelist): + print(vformat.format(vname, vtype, varwidth=varwidth, typewidth=typewidth), end=' ') + if vtype in seq_types: + print("n="+str(len(var))) + elif vtype == ndarray_type: + vshape = str(var.shape).replace(',','').replace(' ','x')[1:-1] + if vtype==ndarray_type: + # numpy + vsize = var.size + vbytes = vsize*var.itemsize + vdtype = var.dtype + + if vbytes < 100000: + print(aformat % (vshape, vsize, vdtype, vbytes)) + else: + print(aformat % (vshape, vsize, vdtype, vbytes), end=' ') + if vbytes < Mb: + print('(%s kb)' % (vbytes/kb,)) + else: + 
print('(%s Mb)' % (vbytes/Mb,)) + else: + try: + vstr = str(var) + except UnicodeEncodeError: + vstr = var.encode(DEFAULT_ENCODING, + 'backslashreplace') + except: + vstr = "<object with id %d (str() failed)>" % id(var) + vstr = vstr.replace('\n', '\\n') + if len(vstr) < 50: + print(vstr) + else: + print(vstr[:25] + "<...>" + vstr[-25:]) + + @line_magic + def reset(self, parameter_s=''): + """Resets the namespace by removing all names defined by the user, if + called without arguments, or by removing some types of objects, such + as everything currently in IPython's In[] and Out[] containers (see + the parameters for details). + + Parameters + ---------- + -f : force reset without asking for confirmation. + + -s : 'Soft' reset: Only clears your namespace, leaving history intact. + References to objects may be kept. By default (without this option), + we do a 'hard' reset, giving you a new session and removing all + references to objects from the current session. + + --aggressive: Try to aggressively remove modules from sys.modules ; this + may allow you to reimport Python modules that have been updated and + pick up changes, but can have unattended consequences. + + in : reset input history + + out : reset output history + + dhist : reset directory history + + array : reset only variables that are NumPy arrays + + See Also + -------- + reset_selective : invoked as ``%reset_selective`` + + Examples + -------- + :: + + In [6]: a = 1 + + In [7]: a + Out[7]: 1 + + In [8]: 'a' in get_ipython().user_ns + Out[8]: True + + In [9]: %reset -f + + In [1]: 'a' in get_ipython().user_ns + Out[1]: False + + In [2]: %reset -f in + Flushing input history + + In [3]: %reset -f dhist in + Flushing directory history + Flushing input history + + Notes + ----- + Calling this magic from clients that do not implement standard input, + such as the ipython notebook interface, will reset the namespace + without confirmation. + """ + opts, args = self.parse_options(parameter_s, "sf", "aggressive", mode="list") + if "f" in opts: + ans = True + else: + try: + ans = self.shell.ask_yes_no( + "Once deleted, variables cannot be recovered. 
Proceed (y/[n])?", + default='n') + except StdinNotImplementedError: + ans = True + if not ans: + print('Nothing done.') + return + + if 's' in opts: # Soft reset + user_ns = self.shell.user_ns + for i in self.who_ls(): + del(user_ns[i]) + elif len(args) == 0: # Hard reset + self.shell.reset(new_session=False, aggressive=("aggressive" in opts)) + + # reset in/out/dhist/array: previously extensinions/clearcmd.py + ip = self.shell + user_ns = self.shell.user_ns # local lookup, heavily used + + for target in args: + target = target.lower() # make matches case insensitive + if target == 'out': + print("Flushing output cache (%d entries)" % len(user_ns['_oh'])) + self.shell.displayhook.flush() + + elif target == 'in': + print("Flushing input history") + pc = self.shell.displayhook.prompt_count + 1 + for n in range(1, pc): + key = '_i'+repr(n) + user_ns.pop(key,None) + user_ns.update(dict(_i=u'',_ii=u'',_iii=u'')) + hm = ip.history_manager + # don't delete these, as %save and %macro depending on the + # length of these lists to be preserved + hm.input_hist_parsed[:] = [''] * pc + hm.input_hist_raw[:] = [''] * pc + # hm has internal machinery for _i,_ii,_iii, clear it out + hm._i = hm._ii = hm._iii = hm._i00 = u'' + + elif target == 'array': + # Support cleaning up numpy arrays + try: + from numpy import ndarray + # This must be done with items and not iteritems because + # we're going to modify the dict in-place. + for x,val in list(user_ns.items()): + if isinstance(val,ndarray): + del user_ns[x] + except ImportError: + print("reset array only works if Numpy is available.") + + elif target == 'dhist': + print("Flushing directory history") + del user_ns['_dh'][:] + + else: + print("Don't know how to reset ", end=' ') + print(target + ", please run `%reset?` for details") + + gc.collect() + + @line_magic + def reset_selective(self, parameter_s=''): + """Resets the namespace by removing names defined by the user. + + Input/Output history are left around in case you need them. + + %reset_selective [-f] regex + + No action is taken if regex is not included + + Options + -f : force reset without asking for confirmation. + + See Also + -------- + reset : invoked as ``%reset`` + + Examples + -------- + + We first fully reset the namespace so your output looks identical to + this example for pedagogical reasons; in practice you do not need a + full reset:: + + In [1]: %reset -f + + Now, with a clean namespace we can make a few variables and use + ``%reset_selective`` to only delete names that match our regexp:: + + In [2]: a=1; b=2; c=3; b1m=4; b2m=5; b3m=6; b4m=7; b2s=8 + + In [3]: who_ls + Out[3]: ['a', 'b', 'b1m', 'b2m', 'b2s', 'b3m', 'b4m', 'c'] + + In [4]: %reset_selective -f b[2-3]m + + In [5]: who_ls + Out[5]: ['a', 'b', 'b1m', 'b2s', 'b4m', 'c'] + + In [6]: %reset_selective -f d + + In [7]: who_ls + Out[7]: ['a', 'b', 'b1m', 'b2s', 'b4m', 'c'] + + In [8]: %reset_selective -f c + + In [9]: who_ls + Out[9]: ['a', 'b', 'b1m', 'b2s', 'b4m'] + + In [10]: %reset_selective -f b + + In [11]: who_ls + Out[11]: ['a'] + + Notes + ----- + Calling this magic from clients that do not implement standard input, + such as the ipython notebook interface, will reset the namespace + without confirmation. + """ + + opts, regex = self.parse_options(parameter_s,'f') + + if 'f' in opts: + ans = True + else: + try: + ans = self.shell.ask_yes_no( + "Once deleted, variables cannot be recovered. Proceed (y/[n])? 
", + default='n') + except StdinNotImplementedError: + ans = True + if not ans: + print('Nothing done.') + return + user_ns = self.shell.user_ns + if not regex: + print('No regex pattern specified. Nothing done.') + return + else: + try: + m = re.compile(regex) + except TypeError: + raise TypeError('regex must be a string or compiled pattern') + for i in self.who_ls(): + if m.search(i): + del(user_ns[i]) + + @line_magic + def xdel(self, parameter_s=''): + """Delete a variable, trying to clear it from anywhere that + IPython's machinery has references to it. By default, this uses + the identity of the named object in the user namespace to remove + references held under other names. The object is also removed + from the output history. + + Options + -n : Delete the specified name from all namespaces, without + checking their identity. + """ + opts, varname = self.parse_options(parameter_s,'n') + try: + self.shell.del_var(varname, ('n' in opts)) + except (NameError, ValueError) as e: + print(type(e).__name__ +": "+ str(e)) diff --git a/contrib/python/ipython/py3/IPython/core/magics/osm.py b/contrib/python/ipython/py3/IPython/core/magics/osm.py index 5c5d13098f1..90da7e22803 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/osm.py +++ b/contrib/python/ipython/py3/IPython/core/magics/osm.py @@ -1,857 +1,857 @@ -"""Implementation of magic functions for interaction with the OS. - -Note: this module is named 'osm' instead of 'os' to avoid a collision with the -builtin. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import io -import os -import re -import sys -from pprint import pformat - -from IPython.core import magic_arguments -from IPython.core import oinspect -from IPython.core import page -from IPython.core.alias import AliasError, Alias -from IPython.core.error import UsageError -from IPython.core.magic import ( - Magics, compress_dhist, magics_class, line_magic, cell_magic, line_cell_magic -) -from IPython.testing.skipdoctest import skip_doctest -from IPython.utils.openpy import source_to_unicode -from IPython.utils.process import abbrev_cwd -from IPython.utils.terminal import set_term_title -from traitlets import Bool -from warnings import warn - - -@magics_class -class OSMagics(Magics): - """Magics to interact with the underlying OS (shell-type functionality). - """ - - cd_force_quiet = Bool(False, - help="Force %cd magic to be quiet even if -q is not passed." - ).tag(config=True) - - def __init__(self, shell=None, **kwargs): - - # Now define isexec in a cross platform manner. - self.is_posix = False - self.execre = None - if os.name == 'posix': - self.is_posix = True - else: - try: - winext = os.environ['pathext'].replace(';','|').replace('.','') - except KeyError: - winext = 'exe|com|bat|py' - try: - self.execre = re.compile(r'(.*)\.(%s)$' % winext,re.IGNORECASE) - except re.error: - warn("Seems like your pathext environmental " - "variable is malformed. 
Please check it to " - "enable a proper handle of file extensions " - "managed for your system") - winext = 'exe|com|bat|py' - self.execre = re.compile(r'(.*)\.(%s)$' % winext,re.IGNORECASE) - - # call up the chain - super().__init__(shell=shell, **kwargs) - - - @skip_doctest - def _isexec_POSIX(self, file): - """ - Test for executable on a POSIX system - """ - if os.access(file.path, os.X_OK): - # will fail on maxOS if access is not X_OK - return file.is_file() - return False - - - - @skip_doctest - def _isexec_WIN(self, file): - """ - Test for executable file on non POSIX system - """ - return file.is_file() and self.execre.match(file.name) is not None - - @skip_doctest - def isexec(self, file): - """ - Test for executable file on non POSIX system - """ - if self.is_posix: - return self._isexec_POSIX(file) - else: - return self._isexec_WIN(file) - - - @skip_doctest - @line_magic - def alias(self, parameter_s=''): - """Define an alias for a system command. - - '%alias alias_name cmd' defines 'alias_name' as an alias for 'cmd' - - Then, typing 'alias_name params' will execute the system command 'cmd - params' (from your underlying operating system). - - Aliases have lower precedence than magic functions and Python normal - variables, so if 'foo' is both a Python variable and an alias, the - alias can not be executed until 'del foo' removes the Python variable. - - You can use the %l specifier in an alias definition to represent the - whole line when the alias is called. For example:: - - In [2]: alias bracket echo "Input in brackets: <%l>" - In [3]: bracket hello world - Input in brackets: <hello world> - - You can also define aliases with parameters using %s specifiers (one - per parameter):: - - In [1]: alias parts echo first %s second %s - In [2]: %parts A B - first A second B - In [3]: %parts A - Incorrect number of arguments: 2 expected. - parts is an alias to: 'echo first %s second %s' - - Note that %l and %s are mutually exclusive. You can only use one or - the other in your aliases. - - Aliases expand Python variables just like system calls using ! or !! - do: all expressions prefixed with '$' get expanded. For details of - the semantic rules, see PEP-215: - http://www.python.org/peps/pep-0215.html. This is the library used by - IPython for variable expansion. If you want to access a true shell - variable, an extra $ is necessary to prevent its expansion by - IPython:: - - In [6]: alias show echo - In [7]: PATH='A Python string' - In [8]: show $PATH - A Python string - In [9]: show $$PATH - /usr/local/lf9560/bin:/usr/local/intel/compiler70/ia32/bin:... - - You can use the alias facility to access all of $PATH. See the %rehashx - function, which automatically creates aliases for the contents of your - $PATH. - - If called with no parameters, %alias prints the current alias table - for your system. For posix systems, the default aliases are 'cat', - 'cp', 'mv', 'rm', 'rmdir', and 'mkdir', and other platform-specific - aliases are added. For windows-based systems, the default aliases are - 'copy', 'ddir', 'echo', 'ls', 'ldir', 'mkdir', 'ren', and 'rmdir'. - - You can see the definition of alias by adding a question mark in the - end:: - - In [1]: cat? 
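Parameter substitution in aliases behaves like simple string interpolation: %l takes the whole remainder of the line, while each %s consumes one argument; roughly (a standalone sketch, not IPython's AliasManager)::

    def expand_alias_sketch(cmd, rest):
        # '%l' takes the full remainder of the line; otherwise each '%s'
        # consumes one whitespace-separated argument (sketch only).
        if '%l' in cmd:
            return cmd.replace('%l', rest)
        args = rest.split()
        nargs = cmd.count('%s')
        if len(args) != nargs:
            raise ValueError('Incorrect number of arguments: %d expected.' % nargs)
        return cmd % tuple(args)

    print(expand_alias_sketch('echo "Input in brackets: <%l>"', 'hello world'))
    print(expand_alias_sketch('echo first %s second %s', 'A B'))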
- Repr: <alias cat for 'cat'>""" - - par = parameter_s.strip() - if not par: - aliases = sorted(self.shell.alias_manager.aliases) - # stored = self.shell.db.get('stored_aliases', {} ) - # for k, v in stored: - # atab.append(k, v[0]) - - print("Total number of aliases:", len(aliases)) - sys.stdout.flush() - return aliases - - # Now try to define a new one - try: - alias,cmd = par.split(None, 1) - except TypeError: - print(oinspect.getdoc(self.alias)) - return - - try: - self.shell.alias_manager.define_alias(alias, cmd) - except AliasError as e: - print(e) - # end magic_alias - - @line_magic - def unalias(self, parameter_s=''): - """Remove an alias""" - - aname = parameter_s.strip() - try: - self.shell.alias_manager.undefine_alias(aname) - except ValueError as e: - print(e) - return - - stored = self.shell.db.get('stored_aliases', {} ) - if aname in stored: - print("Removing %stored alias",aname) - del stored[aname] - self.shell.db['stored_aliases'] = stored - - @line_magic - def rehashx(self, parameter_s=''): - """Update the alias table with all executable files in $PATH. - - rehashx explicitly checks that every entry in $PATH is a file - with execute access (os.X_OK). - - Under Windows, it checks executability as a match against a - '|'-separated string of extensions, stored in the IPython config - variable win_exec_ext. This defaults to 'exe|com|bat'. - - This function also resets the root module cache of module completer, - used on slow filesystems. - """ - from IPython.core.alias import InvalidAliasError - - # for the benefit of module completer in ipy_completers.py - del self.shell.db['rootmodules_cache'] - - path = [os.path.abspath(os.path.expanduser(p)) for p in - os.environ.get('PATH','').split(os.pathsep)] - - syscmdlist = [] - savedir = os.getcwd() - - # Now walk the paths looking for executables to alias. - try: - # write the whole loop for posix/Windows so we don't have an if in - # the innermost part - if self.is_posix: - for pdir in path: - try: - os.chdir(pdir) - except OSError: - continue - - # for python 3.6+ rewrite to: with os.scandir(pdir) as dirlist: - dirlist = os.scandir(path=pdir) - for ff in dirlist: - if self.isexec(ff): - fname = ff.name - try: - # Removes dots from the name since ipython - # will assume names with dots to be python. - if not self.shell.alias_manager.is_alias(fname): - self.shell.alias_manager.define_alias( - fname.replace('.',''), fname) - except InvalidAliasError: - pass - else: - syscmdlist.append(fname) - else: - no_alias = Alias.blacklist - for pdir in path: - try: - os.chdir(pdir) - except OSError: - continue - - # for python 3.6+ rewrite to: with os.scandir(pdir) as dirlist: - dirlist = os.scandir(pdir) - for ff in dirlist: - fname = ff.name - base, ext = os.path.splitext(fname) - if self.isexec(ff) and base.lower() not in no_alias: - if ext.lower() == '.exe': - fname = base - try: - # Removes dots from the name since ipython - # will assume names with dots to be python. - self.shell.alias_manager.define_alias( - base.lower().replace('.',''), fname) - except InvalidAliasError: - pass - syscmdlist.append(fname) - - self.shell.db['syscmdlist'] = syscmdlist - finally: - os.chdir(savedir) - - @skip_doctest - @line_magic - def pwd(self, parameter_s=''): - """Return the current working directory path. 
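The scan that %rehashx performs over $PATH (shown above) amounts to walking each directory and keeping executable regular files; a rough, read-only sketch for POSIX systems::

    import os

    def scan_path_sketch():
        # Collect names of executable regular files found on $PATH
        # (POSIX check via os.X_OK; sketch of what %rehashx turns into aliases).
        found = set()
        for pdir in os.environ.get('PATH', '').split(os.pathsep):
            try:
                entries = os.scandir(pdir)
            except OSError:
                continue
            for entry in entries:
                if entry.is_file() and os.access(entry.path, os.X_OK):
                    found.add(entry.name)
        return sorted(found)

    print(len(scan_path_sketch()), 'executables found on $PATH')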
- - Examples - -------- - :: - - In [9]: pwd - Out[9]: '/home/tsuser/sprint/ipython' - """ - try: - return os.getcwd() - except FileNotFoundError: - raise UsageError("CWD no longer exists - please use %cd to change directory.") - - @skip_doctest - @line_magic - def cd(self, parameter_s=''): - """Change the current working directory. - - This command automatically maintains an internal list of directories - you visit during your IPython session, in the variable _dh. The - command %dhist shows this history nicely formatted. You can also - do 'cd -<tab>' to see directory history conveniently. - - Usage: - - cd 'dir': changes to directory 'dir'. - - cd -: changes to the last visited directory. - - cd -<n>: changes to the n-th directory in the directory history. - - cd --foo: change to directory that matches 'foo' in history - - cd -b <bookmark_name>: jump to a bookmark set by %bookmark - (note: cd <bookmark_name> is enough if there is no - directory <bookmark_name>, but a bookmark with the name exists.) - 'cd -b <tab>' allows you to tab-complete bookmark names. - - Options: - - -q: quiet. Do not print the working directory after the cd command is - executed. By default IPython's cd command does print this directory, - since the default prompts do not display path information. - - Note that !cd doesn't work for this purpose because the shell where - !command runs is immediately discarded after executing 'command'. - - Examples - -------- - :: - - In [10]: cd parent/child - /home/tsuser/parent/child - """ - - try: - oldcwd = os.getcwd() - except FileNotFoundError: - # Happens if the CWD has been deleted. - oldcwd = None - - numcd = re.match(r'(-)(\d+)$',parameter_s) - # jump in directory history by number - if numcd: - nn = int(numcd.group(2)) - try: - ps = self.shell.user_ns['_dh'][nn] - except IndexError: - print('The requested directory does not exist in history.') - return - else: - opts = {} - elif parameter_s.startswith('--'): - ps = None - fallback = None - pat = parameter_s[2:] - dh = self.shell.user_ns['_dh'] - # first search only by basename (last component) - for ent in reversed(dh): - if pat in os.path.basename(ent) and os.path.isdir(ent): - ps = ent - break - - if fallback is None and pat in ent and os.path.isdir(ent): - fallback = ent - - # if we have no last part match, pick the first full path match - if ps is None: - ps = fallback - - if ps is None: - print("No matching entry in directory history") - return - else: - opts = {} - - - else: - opts, ps = self.parse_options(parameter_s, 'qb', mode='string') - # jump to previous - if ps == '-': - try: - ps = self.shell.user_ns['_dh'][-2] - except IndexError: - raise UsageError('%cd -: No previous directory to change to.') - # jump to bookmark if needed - else: - if not os.path.isdir(ps) or 'b' in opts: - bkms = self.shell.db.get('bookmarks', {}) - - if ps in bkms: - target = bkms[ps] - print('(bookmark:%s) -> %s' % (ps, target)) - ps = target - else: - if 'b' in opts: - raise UsageError("Bookmark '%s' not found. " - "Use '%%bookmark -l' to see your bookmarks." 
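The ``cd --foo`` form searches the directory history most-recent-first, preferring a match on the last path component and only then falling back to a substring match anywhere in the path; in isolation (existence checks dropped for brevity)::

    import os

    def match_dhist_sketch(pattern, dhist):
        # Prefer a basename match, fall back to a substring match anywhere.
        fallback = None
        for ent in reversed(dhist):
            if pattern in os.path.basename(ent):
                return ent
            if fallback is None and pattern in ent:
                fallback = ent
        return fallback

    history = ['/home/user/projects/ipython', '/tmp/ipython-logs', '/home/user/data']
    print(match_dhist_sketch('ipython', history))   # '/tmp/ipython-logs'
    print(match_dhist_sketch('user', history))      # '/home/user/data'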
% ps) - - # at this point ps should point to the target dir - if ps: - try: - os.chdir(os.path.expanduser(ps)) - if hasattr(self.shell, 'term_title') and self.shell.term_title: - set_term_title(self.shell.term_title_format.format(cwd=abbrev_cwd())) - except OSError: - print(sys.exc_info()[1]) - else: - cwd = os.getcwd() - dhist = self.shell.user_ns['_dh'] - if oldcwd != cwd: - dhist.append(cwd) - self.shell.db['dhist'] = compress_dhist(dhist)[-100:] - - else: - os.chdir(self.shell.home_dir) - if hasattr(self.shell, 'term_title') and self.shell.term_title: - set_term_title(self.shell.term_title_format.format(cwd="~")) - cwd = os.getcwd() - dhist = self.shell.user_ns['_dh'] - - if oldcwd != cwd: - dhist.append(cwd) - self.shell.db['dhist'] = compress_dhist(dhist)[-100:] - if not 'q' in opts and not self.cd_force_quiet and self.shell.user_ns['_dh']: - print(self.shell.user_ns['_dh'][-1]) - - @line_magic - def env(self, parameter_s=''): - """Get, set, or list environment variables. - - Usage:\\ - - %env: lists all environment variables/values - %env var: get value for var - %env var val: set value for var - %env var=val: set value for var - %env var=$val: set value for var, using python expansion if possible - """ - if parameter_s.strip(): - split = '=' if '=' in parameter_s else ' ' - bits = parameter_s.split(split) - if len(bits) == 1: - key = parameter_s.strip() - if key in os.environ: - return os.environ[key] - else: - err = "Environment does not have key: {0}".format(key) - raise UsageError(err) - if len(bits) > 1: - return self.set_env(parameter_s) - env = dict(os.environ) - # hide likely secrets when printing the whole environment - for key in list(env): - if any(s in key.lower() for s in ('key', 'token', 'secret')): - env[key] = '<hidden>' - - return env - - @line_magic - def set_env(self, parameter_s): - """Set environment variables. Assumptions are that either "val" is a - name in the user namespace, or val is something that evaluates to a - string. - - Usage:\\ - %set_env var val: set value for var - %set_env var=val: set value for var - %set_env var=$val: set value for var, using python expansion if possible - """ - split = '=' if '=' in parameter_s else ' ' - bits = parameter_s.split(split, 1) - if not parameter_s.strip() or len(bits)<2: - raise UsageError("usage is 'set_env var=val'") - var = bits[0].strip() - val = bits[1].strip() - if re.match(r'.*\s.*', var): - # an environment variable with whitespace is almost certainly - # not what the user intended. what's more likely is the wrong - # split was chosen, ie for "set_env cmd_args A=B", we chose - # '=' for the split and should have chosen ' '. to get around - # this, users should just assign directly to os.environ or use - # standard magic {var} expansion. - err = "refusing to set env var with whitespace: '{0}'" - err = err.format(val) - raise UsageError(err) - os.environ[var] = val - print('env: {0}={1}'.format(var,val)) - - @line_magic - def pushd(self, parameter_s=''): - """Place the current dir on stack and change directory. - - Usage:\\ - %pushd ['dirname'] - """ - - dir_s = self.shell.dir_stack - tgt = os.path.expanduser(parameter_s) - cwd = os.getcwd().replace(self.shell.home_dir,'~') - if tgt: - self.cd(parameter_s) - dir_s.insert(0,cwd) - return self.shell.magic('dirs') - - @line_magic - def popd(self, parameter_s=''): - """Change to directory popped off the top of the stack. 
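When %env lists the whole environment it masks anything whose name looks like a credential before returning it; the filter shown above is essentially::

    import os

    def masked_environ_sketch():
        # Hide values whose variable name suggests a secret (sketch of the
        # filtering %env applies before printing the full environment).
        env = dict(os.environ)
        for key in list(env):
            if any(s in key.lower() for s in ('key', 'token', 'secret')):
                env[key] = '<hidden>'
        return env

    masked = masked_environ_sketch()
    print(len(masked), 'variables,', sum(v == '<hidden>' for v in masked.values()), 'hidden')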
- """ - if not self.shell.dir_stack: - raise UsageError("%popd on empty stack") - top = self.shell.dir_stack.pop(0) - self.cd(top) - print("popd ->",top) - - @line_magic - def dirs(self, parameter_s=''): - """Return the current directory stack.""" - - return self.shell.dir_stack - - @line_magic - def dhist(self, parameter_s=''): - """Print your history of visited directories. - - %dhist -> print full history\\ - %dhist n -> print last n entries only\\ - %dhist n1 n2 -> print entries between n1 and n2 (n2 not included)\\ - - This history is automatically maintained by the %cd command, and - always available as the global list variable _dh. You can use %cd -<n> - to go to directory number <n>. - - Note that most of time, you should view directory history by entering - cd -<TAB>. - - """ - - dh = self.shell.user_ns['_dh'] - if parameter_s: - try: - args = map(int,parameter_s.split()) - except: - self.arg_err(self.dhist) - return - if len(args) == 1: - ini,fin = max(len(dh)-(args[0]),0),len(dh) - elif len(args) == 2: - ini,fin = args - fin = min(fin, len(dh)) - else: - self.arg_err(self.dhist) - return - else: - ini,fin = 0,len(dh) - print('Directory history (kept in _dh)') - for i in range(ini, fin): - print("%d: %s" % (i, dh[i])) - - @skip_doctest - @line_magic - def sc(self, parameter_s=''): - """Shell capture - run shell command and capture output (DEPRECATED use !). - - DEPRECATED. Suboptimal, retained for backwards compatibility. - - You should use the form 'var = !command' instead. Example: - - "%sc -l myfiles = ls ~" should now be written as - - "myfiles = !ls ~" - - myfiles.s, myfiles.l and myfiles.n still apply as documented - below. - - -- - %sc [options] varname=command - - IPython will run the given command using commands.getoutput(), and - will then update the user's interactive namespace with a variable - called varname, containing the value of the call. Your command can - contain shell wildcards, pipes, etc. - - The '=' sign in the syntax is mandatory, and the variable name you - supply must follow Python's standard conventions for valid names. - - (A special format without variable name exists for internal use) - - Options: - - -l: list output. Split the output on newlines into a list before - assigning it to the given variable. By default the output is stored - as a single string. - - -v: verbose. Print the contents of the variable. - - In most cases you should not need to split as a list, because the - returned value is a special type of string which can automatically - provide its contents either as a list (split on newlines) or as a - space-separated string. These are convenient, respectively, either - for sequential processing or to be passed to a shell command. 
- - For example:: - - # Capture into variable a - In [1]: sc a=ls *py - - # a is a string with embedded newlines - In [2]: a - Out[2]: 'setup.py\\nwin32_manual_post_install.py' - - # which can be seen as a list: - In [3]: a.l - Out[3]: ['setup.py', 'win32_manual_post_install.py'] - - # or as a whitespace-separated string: - In [4]: a.s - Out[4]: 'setup.py win32_manual_post_install.py' - - # a.s is useful to pass as a single command line: - In [5]: !wc -l $a.s - 146 setup.py - 130 win32_manual_post_install.py - 276 total - - # while the list form is useful to loop over: - In [6]: for f in a.l: - ...: !wc -l $f - ...: - 146 setup.py - 130 win32_manual_post_install.py - - Similarly, the lists returned by the -l option are also special, in - the sense that you can equally invoke the .s attribute on them to - automatically get a whitespace-separated string from their contents:: - - In [7]: sc -l b=ls *py - - In [8]: b - Out[8]: ['setup.py', 'win32_manual_post_install.py'] - - In [9]: b.s - Out[9]: 'setup.py win32_manual_post_install.py' - - In summary, both the lists and strings used for output capture have - the following special attributes:: - - .l (or .list) : value as list. - .n (or .nlstr): value as newline-separated string. - .s (or .spstr): value as space-separated string. - """ - - opts,args = self.parse_options(parameter_s, 'lv') - # Try to get a variable name and command to run - try: - # the variable name must be obtained from the parse_options - # output, which uses shlex.split to strip options out. - var,_ = args.split('=', 1) - var = var.strip() - # But the command has to be extracted from the original input - # parameter_s, not on what parse_options returns, to avoid the - # quote stripping which shlex.split performs on it. - _,cmd = parameter_s.split('=', 1) - except ValueError: - var,cmd = '','' - # If all looks ok, proceed - split = 'l' in opts - out = self.shell.getoutput(cmd, split=split) - if 'v' in opts: - print('%s ==\n%s' % (var, pformat(out))) - if var: - self.shell.user_ns.update({var:out}) - else: - return out - - @line_cell_magic - def sx(self, line='', cell=None): - """Shell execute - run shell command and capture output (!! is short-hand). - - %sx command - - IPython will run the given command using commands.getoutput(), and - return the result formatted as a list (split on '\\n'). Since the - output is _returned_, it will be stored in ipython's regular output - cache Out[N] and in the '_N' automatic variables. - - Notes: - - 1) If an input line begins with '!!', then %sx is automatically - invoked. That is, while:: - - !ls - - causes ipython to simply issue system('ls'), typing:: - - !!ls - - is a shorthand equivalent to:: - - %sx ls - - 2) %sx differs from %sc in that %sx automatically splits into a list, - like '%sc -l'. The reason for this is to make it as easy as possible - to process line-oriented shell output via further python commands. - %sc is meant to provide much finer control, but requires more - typing. - - 3) Just like %sc -l, this is a list with special attributes: - :: - - .l (or .list) : value as list. - .n (or .nlstr): value as newline-separated string. - .s (or .spstr): value as whitespace-separated string. 
- - This is very useful when trying to use such lists as arguments to - system commands.""" - - if cell is None: - # line magic - return self.shell.getoutput(line) - else: - opts,args = self.parse_options(line, '', 'out=') - output = self.shell.getoutput(cell) - out_name = opts.get('out', opts.get('o')) - if out_name: - self.shell.user_ns[out_name] = output - else: - return output - - system = line_cell_magic('system')(sx) - bang = cell_magic('!')(sx) - - @line_magic - def bookmark(self, parameter_s=''): - """Manage IPython's bookmark system. - - %bookmark <name> - set bookmark to current dir - %bookmark <name> <dir> - set bookmark to <dir> - %bookmark -l - list all bookmarks - %bookmark -d <name> - remove bookmark - %bookmark -r - remove all bookmarks - - You can later on access a bookmarked folder with:: - - %cd -b <name> - - or simply '%cd <name>' if there is no directory called <name> AND - there is such a bookmark defined. - - Your bookmarks persist through IPython sessions, but they are - associated with each profile.""" - - opts,args = self.parse_options(parameter_s,'drl',mode='list') - if len(args) > 2: - raise UsageError("%bookmark: too many arguments") - - bkms = self.shell.db.get('bookmarks',{}) - - if 'd' in opts: - try: - todel = args[0] - except IndexError: - raise UsageError( - "%bookmark -d: must provide a bookmark to delete") - else: - try: - del bkms[todel] - except KeyError: - raise UsageError( - "%%bookmark -d: Can't delete bookmark '%s'" % todel) - - elif 'r' in opts: - bkms = {} - elif 'l' in opts: - bks = sorted(bkms) - if bks: - size = max(map(len, bks)) - else: - size = 0 - fmt = '%-'+str(size)+'s -> %s' - print('Current bookmarks:') - for bk in bks: - print(fmt % (bk, bkms[bk])) - else: - if not args: - raise UsageError("%bookmark: You must specify the bookmark name") - elif len(args)==1: - bkms[args[0]] = os.getcwd() - elif len(args)==2: - bkms[args[0]] = args[1] - self.shell.db['bookmarks'] = bkms - - @line_magic - def pycat(self, parameter_s=''): - """Show a syntax-highlighted file through a pager. - - This magic is similar to the cat utility, but it will assume the file - to be Python source and will show it with syntax highlighting. - - This magic command can either take a local filename, an url, - an history range (see %history) or a macro as argument :: - - %pycat myscript.py - %pycat 7-27 - %pycat myMacro - %pycat http://www.example.com/myscript.py - """ - if not parameter_s: - raise UsageError('Missing filename, URL, input history range, ' - 'or macro.') - - try : - cont = self.shell.find_user_code(parameter_s, skip_encoding_cookie=False) - except (ValueError, IOError): - print("Error: no such file, variable, URL, history range or macro") - return - - page.page(self.shell.pycolorize(source_to_unicode(cont))) - - @magic_arguments.magic_arguments() - @magic_arguments.argument( - '-a', '--append', action='store_true', default=False, - help='Append contents of the cell to an existing file. ' - 'The file will be created if it does not exist.' - ) - @magic_arguments.argument( - 'filename', type=str, - help='file to write' - ) - @cell_magic - def writefile(self, line, cell): - """Write the contents of the cell to a file. - - The file will be overwritten unless the -a (--append) flag is specified. 
- """ - args = magic_arguments.parse_argstring(self.writefile, line) - if re.match(r'^(\'.*\')|(".*")$', args.filename): - filename = os.path.expanduser(args.filename[1:-1]) - else: - filename = os.path.expanduser(args.filename) - - if os.path.exists(filename): - if args.append: - print("Appending to %s" % filename) - else: - print("Overwriting %s" % filename) - else: - print("Writing %s" % filename) - - mode = 'a' if args.append else 'w' - with io.open(filename, mode, encoding='utf-8') as f: - f.write(cell) +"""Implementation of magic functions for interaction with the OS. + +Note: this module is named 'osm' instead of 'os' to avoid a collision with the +builtin. +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import io +import os +import re +import sys +from pprint import pformat + +from IPython.core import magic_arguments +from IPython.core import oinspect +from IPython.core import page +from IPython.core.alias import AliasError, Alias +from IPython.core.error import UsageError +from IPython.core.magic import ( + Magics, compress_dhist, magics_class, line_magic, cell_magic, line_cell_magic +) +from IPython.testing.skipdoctest import skip_doctest +from IPython.utils.openpy import source_to_unicode +from IPython.utils.process import abbrev_cwd +from IPython.utils.terminal import set_term_title +from traitlets import Bool +from warnings import warn + + +@magics_class +class OSMagics(Magics): + """Magics to interact with the underlying OS (shell-type functionality). + """ + + cd_force_quiet = Bool(False, + help="Force %cd magic to be quiet even if -q is not passed." + ).tag(config=True) + + def __init__(self, shell=None, **kwargs): + + # Now define isexec in a cross platform manner. + self.is_posix = False + self.execre = None + if os.name == 'posix': + self.is_posix = True + else: + try: + winext = os.environ['pathext'].replace(';','|').replace('.','') + except KeyError: + winext = 'exe|com|bat|py' + try: + self.execre = re.compile(r'(.*)\.(%s)$' % winext,re.IGNORECASE) + except re.error: + warn("Seems like your pathext environmental " + "variable is malformed. Please check it to " + "enable a proper handle of file extensions " + "managed for your system") + winext = 'exe|com|bat|py' + self.execre = re.compile(r'(.*)\.(%s)$' % winext,re.IGNORECASE) + + # call up the chain + super().__init__(shell=shell, **kwargs) + + + @skip_doctest + def _isexec_POSIX(self, file): + """ + Test for executable on a POSIX system + """ + if os.access(file.path, os.X_OK): + # will fail on maxOS if access is not X_OK + return file.is_file() + return False + + + + @skip_doctest + def _isexec_WIN(self, file): + """ + Test for executable file on non POSIX system + """ + return file.is_file() and self.execre.match(file.name) is not None + + @skip_doctest + def isexec(self, file): + """ + Test for executable file on non POSIX system + """ + if self.is_posix: + return self._isexec_POSIX(file) + else: + return self._isexec_WIN(file) + + + @skip_doctest + @line_magic + def alias(self, parameter_s=''): + """Define an alias for a system command. + + '%alias alias_name cmd' defines 'alias_name' as an alias for 'cmd' + + Then, typing 'alias_name params' will execute the system command 'cmd + params' (from your underlying operating system). + + Aliases have lower precedence than magic functions and Python normal + variables, so if 'foo' is both a Python variable and an alias, the + alias can not be executed until 'del foo' removes the Python variable. 
+ + You can use the %l specifier in an alias definition to represent the + whole line when the alias is called. For example:: + + In [2]: alias bracket echo "Input in brackets: <%l>" + In [3]: bracket hello world + Input in brackets: <hello world> + + You can also define aliases with parameters using %s specifiers (one + per parameter):: + + In [1]: alias parts echo first %s second %s + In [2]: %parts A B + first A second B + In [3]: %parts A + Incorrect number of arguments: 2 expected. + parts is an alias to: 'echo first %s second %s' + + Note that %l and %s are mutually exclusive. You can only use one or + the other in your aliases. + + Aliases expand Python variables just like system calls using ! or !! + do: all expressions prefixed with '$' get expanded. For details of + the semantic rules, see PEP-215: + http://www.python.org/peps/pep-0215.html. This is the library used by + IPython for variable expansion. If you want to access a true shell + variable, an extra $ is necessary to prevent its expansion by + IPython:: + + In [6]: alias show echo + In [7]: PATH='A Python string' + In [8]: show $PATH + A Python string + In [9]: show $$PATH + /usr/local/lf9560/bin:/usr/local/intel/compiler70/ia32/bin:... + + You can use the alias facility to access all of $PATH. See the %rehashx + function, which automatically creates aliases for the contents of your + $PATH. + + If called with no parameters, %alias prints the current alias table + for your system. For posix systems, the default aliases are 'cat', + 'cp', 'mv', 'rm', 'rmdir', and 'mkdir', and other platform-specific + aliases are added. For windows-based systems, the default aliases are + 'copy', 'ddir', 'echo', 'ls', 'ldir', 'mkdir', 'ren', and 'rmdir'. + + You can see the definition of alias by adding a question mark in the + end:: + + In [1]: cat? + Repr: <alias cat for 'cat'>""" + + par = parameter_s.strip() + if not par: + aliases = sorted(self.shell.alias_manager.aliases) + # stored = self.shell.db.get('stored_aliases', {} ) + # for k, v in stored: + # atab.append(k, v[0]) + + print("Total number of aliases:", len(aliases)) + sys.stdout.flush() + return aliases + + # Now try to define a new one + try: + alias,cmd = par.split(None, 1) + except TypeError: + print(oinspect.getdoc(self.alias)) + return + + try: + self.shell.alias_manager.define_alias(alias, cmd) + except AliasError as e: + print(e) + # end magic_alias + + @line_magic + def unalias(self, parameter_s=''): + """Remove an alias""" + + aname = parameter_s.strip() + try: + self.shell.alias_manager.undefine_alias(aname) + except ValueError as e: + print(e) + return + + stored = self.shell.db.get('stored_aliases', {} ) + if aname in stored: + print("Removing %stored alias",aname) + del stored[aname] + self.shell.db['stored_aliases'] = stored + + @line_magic + def rehashx(self, parameter_s=''): + """Update the alias table with all executable files in $PATH. + + rehashx explicitly checks that every entry in $PATH is a file + with execute access (os.X_OK). + + Under Windows, it checks executability as a match against a + '|'-separated string of extensions, stored in the IPython config + variable win_exec_ext. This defaults to 'exe|com|bat'. + + This function also resets the root module cache of module completer, + used on slow filesystems. 
+ """ + from IPython.core.alias import InvalidAliasError + + # for the benefit of module completer in ipy_completers.py + del self.shell.db['rootmodules_cache'] + + path = [os.path.abspath(os.path.expanduser(p)) for p in + os.environ.get('PATH','').split(os.pathsep)] + + syscmdlist = [] + savedir = os.getcwd() + + # Now walk the paths looking for executables to alias. + try: + # write the whole loop for posix/Windows so we don't have an if in + # the innermost part + if self.is_posix: + for pdir in path: + try: + os.chdir(pdir) + except OSError: + continue + + # for python 3.6+ rewrite to: with os.scandir(pdir) as dirlist: + dirlist = os.scandir(path=pdir) + for ff in dirlist: + if self.isexec(ff): + fname = ff.name + try: + # Removes dots from the name since ipython + # will assume names with dots to be python. + if not self.shell.alias_manager.is_alias(fname): + self.shell.alias_manager.define_alias( + fname.replace('.',''), fname) + except InvalidAliasError: + pass + else: + syscmdlist.append(fname) + else: + no_alias = Alias.blacklist + for pdir in path: + try: + os.chdir(pdir) + except OSError: + continue + + # for python 3.6+ rewrite to: with os.scandir(pdir) as dirlist: + dirlist = os.scandir(pdir) + for ff in dirlist: + fname = ff.name + base, ext = os.path.splitext(fname) + if self.isexec(ff) and base.lower() not in no_alias: + if ext.lower() == '.exe': + fname = base + try: + # Removes dots from the name since ipython + # will assume names with dots to be python. + self.shell.alias_manager.define_alias( + base.lower().replace('.',''), fname) + except InvalidAliasError: + pass + syscmdlist.append(fname) + + self.shell.db['syscmdlist'] = syscmdlist + finally: + os.chdir(savedir) + + @skip_doctest + @line_magic + def pwd(self, parameter_s=''): + """Return the current working directory path. + + Examples + -------- + :: + + In [9]: pwd + Out[9]: '/home/tsuser/sprint/ipython' + """ + try: + return os.getcwd() + except FileNotFoundError: + raise UsageError("CWD no longer exists - please use %cd to change directory.") + + @skip_doctest + @line_magic + def cd(self, parameter_s=''): + """Change the current working directory. + + This command automatically maintains an internal list of directories + you visit during your IPython session, in the variable _dh. The + command %dhist shows this history nicely formatted. You can also + do 'cd -<tab>' to see directory history conveniently. + + Usage: + + cd 'dir': changes to directory 'dir'. + + cd -: changes to the last visited directory. + + cd -<n>: changes to the n-th directory in the directory history. + + cd --foo: change to directory that matches 'foo' in history + + cd -b <bookmark_name>: jump to a bookmark set by %bookmark + (note: cd <bookmark_name> is enough if there is no + directory <bookmark_name>, but a bookmark with the name exists.) + 'cd -b <tab>' allows you to tab-complete bookmark names. + + Options: + + -q: quiet. Do not print the working directory after the cd command is + executed. By default IPython's cd command does print this directory, + since the default prompts do not display path information. + + Note that !cd doesn't work for this purpose because the shell where + !command runs is immediately discarded after executing 'command'. + + Examples + -------- + :: + + In [10]: cd parent/child + /home/tsuser/parent/child + """ + + try: + oldcwd = os.getcwd() + except FileNotFoundError: + # Happens if the CWD has been deleted. 
+ oldcwd = None + + numcd = re.match(r'(-)(\d+)$',parameter_s) + # jump in directory history by number + if numcd: + nn = int(numcd.group(2)) + try: + ps = self.shell.user_ns['_dh'][nn] + except IndexError: + print('The requested directory does not exist in history.') + return + else: + opts = {} + elif parameter_s.startswith('--'): + ps = None + fallback = None + pat = parameter_s[2:] + dh = self.shell.user_ns['_dh'] + # first search only by basename (last component) + for ent in reversed(dh): + if pat in os.path.basename(ent) and os.path.isdir(ent): + ps = ent + break + + if fallback is None and pat in ent and os.path.isdir(ent): + fallback = ent + + # if we have no last part match, pick the first full path match + if ps is None: + ps = fallback + + if ps is None: + print("No matching entry in directory history") + return + else: + opts = {} + + + else: + opts, ps = self.parse_options(parameter_s, 'qb', mode='string') + # jump to previous + if ps == '-': + try: + ps = self.shell.user_ns['_dh'][-2] + except IndexError: + raise UsageError('%cd -: No previous directory to change to.') + # jump to bookmark if needed + else: + if not os.path.isdir(ps) or 'b' in opts: + bkms = self.shell.db.get('bookmarks', {}) + + if ps in bkms: + target = bkms[ps] + print('(bookmark:%s) -> %s' % (ps, target)) + ps = target + else: + if 'b' in opts: + raise UsageError("Bookmark '%s' not found. " + "Use '%%bookmark -l' to see your bookmarks." % ps) + + # at this point ps should point to the target dir + if ps: + try: + os.chdir(os.path.expanduser(ps)) + if hasattr(self.shell, 'term_title') and self.shell.term_title: + set_term_title(self.shell.term_title_format.format(cwd=abbrev_cwd())) + except OSError: + print(sys.exc_info()[1]) + else: + cwd = os.getcwd() + dhist = self.shell.user_ns['_dh'] + if oldcwd != cwd: + dhist.append(cwd) + self.shell.db['dhist'] = compress_dhist(dhist)[-100:] + + else: + os.chdir(self.shell.home_dir) + if hasattr(self.shell, 'term_title') and self.shell.term_title: + set_term_title(self.shell.term_title_format.format(cwd="~")) + cwd = os.getcwd() + dhist = self.shell.user_ns['_dh'] + + if oldcwd != cwd: + dhist.append(cwd) + self.shell.db['dhist'] = compress_dhist(dhist)[-100:] + if not 'q' in opts and not self.cd_force_quiet and self.shell.user_ns['_dh']: + print(self.shell.user_ns['_dh'][-1]) + + @line_magic + def env(self, parameter_s=''): + """Get, set, or list environment variables. + + Usage:\\ + + %env: lists all environment variables/values + %env var: get value for var + %env var val: set value for var + %env var=val: set value for var + %env var=$val: set value for var, using python expansion if possible + """ + if parameter_s.strip(): + split = '=' if '=' in parameter_s else ' ' + bits = parameter_s.split(split) + if len(bits) == 1: + key = parameter_s.strip() + if key in os.environ: + return os.environ[key] + else: + err = "Environment does not have key: {0}".format(key) + raise UsageError(err) + if len(bits) > 1: + return self.set_env(parameter_s) + env = dict(os.environ) + # hide likely secrets when printing the whole environment + for key in list(env): + if any(s in key.lower() for s in ('key', 'token', 'secret')): + env[key] = '<hidden>' + + return env + + @line_magic + def set_env(self, parameter_s): + """Set environment variables. Assumptions are that either "val" is a + name in the user namespace, or val is something that evaluates to a + string. 
+ + Usage:\\ + %set_env var val: set value for var + %set_env var=val: set value for var + %set_env var=$val: set value for var, using python expansion if possible + """ + split = '=' if '=' in parameter_s else ' ' + bits = parameter_s.split(split, 1) + if not parameter_s.strip() or len(bits)<2: + raise UsageError("usage is 'set_env var=val'") + var = bits[0].strip() + val = bits[1].strip() + if re.match(r'.*\s.*', var): + # an environment variable with whitespace is almost certainly + # not what the user intended. what's more likely is the wrong + # split was chosen, ie for "set_env cmd_args A=B", we chose + # '=' for the split and should have chosen ' '. to get around + # this, users should just assign directly to os.environ or use + # standard magic {var} expansion. + err = "refusing to set env var with whitespace: '{0}'" + err = err.format(val) + raise UsageError(err) + os.environ[var] = val + print('env: {0}={1}'.format(var,val)) + + @line_magic + def pushd(self, parameter_s=''): + """Place the current dir on stack and change directory. + + Usage:\\ + %pushd ['dirname'] + """ + + dir_s = self.shell.dir_stack + tgt = os.path.expanduser(parameter_s) + cwd = os.getcwd().replace(self.shell.home_dir,'~') + if tgt: + self.cd(parameter_s) + dir_s.insert(0,cwd) + return self.shell.magic('dirs') + + @line_magic + def popd(self, parameter_s=''): + """Change to directory popped off the top of the stack. + """ + if not self.shell.dir_stack: + raise UsageError("%popd on empty stack") + top = self.shell.dir_stack.pop(0) + self.cd(top) + print("popd ->",top) + + @line_magic + def dirs(self, parameter_s=''): + """Return the current directory stack.""" + + return self.shell.dir_stack + + @line_magic + def dhist(self, parameter_s=''): + """Print your history of visited directories. + + %dhist -> print full history\\ + %dhist n -> print last n entries only\\ + %dhist n1 n2 -> print entries between n1 and n2 (n2 not included)\\ + + This history is automatically maintained by the %cd command, and + always available as the global list variable _dh. You can use %cd -<n> + to go to directory number <n>. + + Note that most of time, you should view directory history by entering + cd -<TAB>. + + """ + + dh = self.shell.user_ns['_dh'] + if parameter_s: + try: + args = map(int,parameter_s.split()) + except: + self.arg_err(self.dhist) + return + if len(args) == 1: + ini,fin = max(len(dh)-(args[0]),0),len(dh) + elif len(args) == 2: + ini,fin = args + fin = min(fin, len(dh)) + else: + self.arg_err(self.dhist) + return + else: + ini,fin = 0,len(dh) + print('Directory history (kept in _dh)') + for i in range(ini, fin): + print("%d: %s" % (i, dh[i])) + + @skip_doctest + @line_magic + def sc(self, parameter_s=''): + """Shell capture - run shell command and capture output (DEPRECATED use !). + + DEPRECATED. Suboptimal, retained for backwards compatibility. + + You should use the form 'var = !command' instead. Example: + + "%sc -l myfiles = ls ~" should now be written as + + "myfiles = !ls ~" + + myfiles.s, myfiles.l and myfiles.n still apply as documented + below. + + -- + %sc [options] varname=command + + IPython will run the given command using commands.getoutput(), and + will then update the user's interactive namespace with a variable + called varname, containing the value of the call. Your command can + contain shell wildcards, pipes, etc. + + The '=' sign in the syntax is mandatory, and the variable name you + supply must follow Python's standard conventions for valid names. 
+ + (A special format without variable name exists for internal use) + + Options: + + -l: list output. Split the output on newlines into a list before + assigning it to the given variable. By default the output is stored + as a single string. + + -v: verbose. Print the contents of the variable. + + In most cases you should not need to split as a list, because the + returned value is a special type of string which can automatically + provide its contents either as a list (split on newlines) or as a + space-separated string. These are convenient, respectively, either + for sequential processing or to be passed to a shell command. + + For example:: + + # Capture into variable a + In [1]: sc a=ls *py + + # a is a string with embedded newlines + In [2]: a + Out[2]: 'setup.py\\nwin32_manual_post_install.py' + + # which can be seen as a list: + In [3]: a.l + Out[3]: ['setup.py', 'win32_manual_post_install.py'] + + # or as a whitespace-separated string: + In [4]: a.s + Out[4]: 'setup.py win32_manual_post_install.py' + + # a.s is useful to pass as a single command line: + In [5]: !wc -l $a.s + 146 setup.py + 130 win32_manual_post_install.py + 276 total + + # while the list form is useful to loop over: + In [6]: for f in a.l: + ...: !wc -l $f + ...: + 146 setup.py + 130 win32_manual_post_install.py + + Similarly, the lists returned by the -l option are also special, in + the sense that you can equally invoke the .s attribute on them to + automatically get a whitespace-separated string from their contents:: + + In [7]: sc -l b=ls *py + + In [8]: b + Out[8]: ['setup.py', 'win32_manual_post_install.py'] + + In [9]: b.s + Out[9]: 'setup.py win32_manual_post_install.py' + + In summary, both the lists and strings used for output capture have + the following special attributes:: + + .l (or .list) : value as list. + .n (or .nlstr): value as newline-separated string. + .s (or .spstr): value as space-separated string. + """ + + opts,args = self.parse_options(parameter_s, 'lv') + # Try to get a variable name and command to run + try: + # the variable name must be obtained from the parse_options + # output, which uses shlex.split to strip options out. + var,_ = args.split('=', 1) + var = var.strip() + # But the command has to be extracted from the original input + # parameter_s, not on what parse_options returns, to avoid the + # quote stripping which shlex.split performs on it. + _,cmd = parameter_s.split('=', 1) + except ValueError: + var,cmd = '','' + # If all looks ok, proceed + split = 'l' in opts + out = self.shell.getoutput(cmd, split=split) + if 'v' in opts: + print('%s ==\n%s' % (var, pformat(out))) + if var: + self.shell.user_ns.update({var:out}) + else: + return out + + @line_cell_magic + def sx(self, line='', cell=None): + """Shell execute - run shell command and capture output (!! is short-hand). + + %sx command + + IPython will run the given command using commands.getoutput(), and + return the result formatted as a list (split on '\\n'). Since the + output is _returned_, it will be stored in ipython's regular output + cache Out[N] and in the '_N' automatic variables. + + Notes: + + 1) If an input line begins with '!!', then %sx is automatically + invoked. That is, while:: + + !ls + + causes ipython to simply issue system('ls'), typing:: + + !!ls + + is a shorthand equivalent to:: + + %sx ls + + 2) %sx differs from %sc in that %sx automatically splits into a list, + like '%sc -l'. 
The reason for this is to make it as easy as possible + to process line-oriented shell output via further python commands. + %sc is meant to provide much finer control, but requires more + typing. + + 3) Just like %sc -l, this is a list with special attributes: + :: + + .l (or .list) : value as list. + .n (or .nlstr): value as newline-separated string. + .s (or .spstr): value as whitespace-separated string. + + This is very useful when trying to use such lists as arguments to + system commands.""" + + if cell is None: + # line magic + return self.shell.getoutput(line) + else: + opts,args = self.parse_options(line, '', 'out=') + output = self.shell.getoutput(cell) + out_name = opts.get('out', opts.get('o')) + if out_name: + self.shell.user_ns[out_name] = output + else: + return output + + system = line_cell_magic('system')(sx) + bang = cell_magic('!')(sx) + + @line_magic + def bookmark(self, parameter_s=''): + """Manage IPython's bookmark system. + + %bookmark <name> - set bookmark to current dir + %bookmark <name> <dir> - set bookmark to <dir> + %bookmark -l - list all bookmarks + %bookmark -d <name> - remove bookmark + %bookmark -r - remove all bookmarks + + You can later on access a bookmarked folder with:: + + %cd -b <name> + + or simply '%cd <name>' if there is no directory called <name> AND + there is such a bookmark defined. + + Your bookmarks persist through IPython sessions, but they are + associated with each profile.""" + + opts,args = self.parse_options(parameter_s,'drl',mode='list') + if len(args) > 2: + raise UsageError("%bookmark: too many arguments") + + bkms = self.shell.db.get('bookmarks',{}) + + if 'd' in opts: + try: + todel = args[0] + except IndexError: + raise UsageError( + "%bookmark -d: must provide a bookmark to delete") + else: + try: + del bkms[todel] + except KeyError: + raise UsageError( + "%%bookmark -d: Can't delete bookmark '%s'" % todel) + + elif 'r' in opts: + bkms = {} + elif 'l' in opts: + bks = sorted(bkms) + if bks: + size = max(map(len, bks)) + else: + size = 0 + fmt = '%-'+str(size)+'s -> %s' + print('Current bookmarks:') + for bk in bks: + print(fmt % (bk, bkms[bk])) + else: + if not args: + raise UsageError("%bookmark: You must specify the bookmark name") + elif len(args)==1: + bkms[args[0]] = os.getcwd() + elif len(args)==2: + bkms[args[0]] = args[1] + self.shell.db['bookmarks'] = bkms + + @line_magic + def pycat(self, parameter_s=''): + """Show a syntax-highlighted file through a pager. + + This magic is similar to the cat utility, but it will assume the file + to be Python source and will show it with syntax highlighting. + + This magic command can either take a local filename, an url, + an history range (see %history) or a macro as argument :: + + %pycat myscript.py + %pycat 7-27 + %pycat myMacro + %pycat http://www.example.com/myscript.py + """ + if not parameter_s: + raise UsageError('Missing filename, URL, input history range, ' + 'or macro.') + + try : + cont = self.shell.find_user_code(parameter_s, skip_encoding_cookie=False) + except (ValueError, IOError): + print("Error: no such file, variable, URL, history range or macro") + return + + page.page(self.shell.pycolorize(source_to_unicode(cont))) + + @magic_arguments.magic_arguments() + @magic_arguments.argument( + '-a', '--append', action='store_true', default=False, + help='Append contents of the cell to an existing file. ' + 'The file will be created if it does not exist.' 
+ ) + @magic_arguments.argument( + 'filename', type=str, + help='file to write' + ) + @cell_magic + def writefile(self, line, cell): + """Write the contents of the cell to a file. + + The file will be overwritten unless the -a (--append) flag is specified. + """ + args = magic_arguments.parse_argstring(self.writefile, line) + if re.match(r'^(\'.*\')|(".*")$', args.filename): + filename = os.path.expanduser(args.filename[1:-1]) + else: + filename = os.path.expanduser(args.filename) + + if os.path.exists(filename): + if args.append: + print("Appending to %s" % filename) + else: + print("Overwriting %s" % filename) + else: + print("Writing %s" % filename) + + mode = 'a' if args.append else 'w' + with io.open(filename, mode, encoding='utf-8') as f: + f.write(cell) diff --git a/contrib/python/ipython/py3/IPython/core/magics/packaging.py b/contrib/python/ipython/py3/IPython/core/magics/packaging.py index 52644b361ae..04bde051ae0 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/packaging.py +++ b/contrib/python/ipython/py3/IPython/core/magics/packaging.py @@ -1,110 +1,110 @@ -"""Implementation of packaging-related magic functions. -""" -#----------------------------------------------------------------------------- -# Copyright (c) 2018 The IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. -#----------------------------------------------------------------------------- - -import os -import re -import shlex -import sys - -from IPython.core.magic import Magics, magics_class, line_magic - - -def _is_conda_environment(): - """Return True if the current Python executable is in a conda env""" - # TODO: does this need to change on windows? - conda_history = os.path.join(sys.prefix, 'conda-meta', 'history') - return os.path.exists(conda_history) - - -def _get_conda_executable(): - """Find the path to the conda executable""" - # Check if there is a conda executable in the same directory as the Python executable. - # This is the case within conda's root environment. - conda = os.path.join(os.path.dirname(sys.executable), 'conda') - if os.path.isfile(conda): - return conda - - # Otherwise, attempt to extract the executable from conda history. - # This applies in any conda environment. - R = re.compile(r"^#\s*cmd:\s*(?P<command>.*conda)\s[create|install]") - with open(os.path.join(sys.prefix, 'conda-meta', 'history')) as f: - for line in f: - match = R.match(line) - if match: - return match.groupdict()['command'] - - # Fallback: assume conda is available on the system path. - return "conda" - - -CONDA_COMMANDS_REQUIRING_PREFIX = { - 'install', 'list', 'remove', 'uninstall', 'update', 'upgrade', -} -CONDA_COMMANDS_REQUIRING_YES = { - 'install', 'remove', 'uninstall', 'update', 'upgrade', -} -CONDA_ENV_FLAGS = {'-p', '--prefix', '-n', '--name'} -CONDA_YES_FLAGS = {'-y', '--y'} - - -@magics_class -class PackagingMagics(Magics): - """Magics related to packaging & installation""" - - @line_magic - def pip(self, line): - """Run the pip package manager within the current kernel. - - Usage: - %pip install [pkgs] - """ - python = sys.executable - if sys.platform == "win32": - python = '"' + python + '"' - else: - python = shlex.quote(python) - - self.shell.system(" ".join([python, "-m", "pip", line])) - - print("Note: you may need to restart the kernel to use updated packages.") - - @line_magic - def conda(self, line): - """Run the conda package manager within the current kernel. 
- - Usage: - %conda install [pkgs] - """ - if not _is_conda_environment(): - raise ValueError("The python kernel does not appear to be a conda environment. " - "Please use ``%pip install`` instead.") - - conda = _get_conda_executable() - args = shlex.split(line) - command = args[0] - args = args[1:] - extra_args = [] - - # When the subprocess does not allow us to respond "yes" during the installation, - # we need to insert --yes in the argument list for some commands - stdin_disabled = getattr(self.shell, 'kernel', None) is not None - needs_yes = command in CONDA_COMMANDS_REQUIRING_YES - has_yes = set(args).intersection(CONDA_YES_FLAGS) - if stdin_disabled and needs_yes and not has_yes: - extra_args.append("--yes") - - # Add --prefix to point conda installation to the current environment - needs_prefix = command in CONDA_COMMANDS_REQUIRING_PREFIX - has_prefix = set(args).intersection(CONDA_ENV_FLAGS) - if needs_prefix and not has_prefix: - extra_args.extend(["--prefix", sys.prefix]) - - self.shell.system(' '.join([conda, command] + extra_args + args)) - print("\nNote: you may need to restart the kernel to use updated packages.") +"""Implementation of packaging-related magic functions. +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2018 The IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +import os +import re +import shlex +import sys + +from IPython.core.magic import Magics, magics_class, line_magic + + +def _is_conda_environment(): + """Return True if the current Python executable is in a conda env""" + # TODO: does this need to change on windows? + conda_history = os.path.join(sys.prefix, 'conda-meta', 'history') + return os.path.exists(conda_history) + + +def _get_conda_executable(): + """Find the path to the conda executable""" + # Check if there is a conda executable in the same directory as the Python executable. + # This is the case within conda's root environment. + conda = os.path.join(os.path.dirname(sys.executable), 'conda') + if os.path.isfile(conda): + return conda + + # Otherwise, attempt to extract the executable from conda history. + # This applies in any conda environment. + R = re.compile(r"^#\s*cmd:\s*(?P<command>.*conda)\s[create|install]") + with open(os.path.join(sys.prefix, 'conda-meta', 'history')) as f: + for line in f: + match = R.match(line) + if match: + return match.groupdict()['command'] + + # Fallback: assume conda is available on the system path. + return "conda" + + +CONDA_COMMANDS_REQUIRING_PREFIX = { + 'install', 'list', 'remove', 'uninstall', 'update', 'upgrade', +} +CONDA_COMMANDS_REQUIRING_YES = { + 'install', 'remove', 'uninstall', 'update', 'upgrade', +} +CONDA_ENV_FLAGS = {'-p', '--prefix', '-n', '--name'} +CONDA_YES_FLAGS = {'-y', '--y'} + + +@magics_class +class PackagingMagics(Magics): + """Magics related to packaging & installation""" + + @line_magic + def pip(self, line): + """Run the pip package manager within the current kernel. 
+ + Usage: + %pip install [pkgs] + """ + python = sys.executable + if sys.platform == "win32": + python = '"' + python + '"' + else: + python = shlex.quote(python) + + self.shell.system(" ".join([python, "-m", "pip", line])) + + print("Note: you may need to restart the kernel to use updated packages.") + + @line_magic + def conda(self, line): + """Run the conda package manager within the current kernel. + + Usage: + %conda install [pkgs] + """ + if not _is_conda_environment(): + raise ValueError("The python kernel does not appear to be a conda environment. " + "Please use ``%pip install`` instead.") + + conda = _get_conda_executable() + args = shlex.split(line) + command = args[0] + args = args[1:] + extra_args = [] + + # When the subprocess does not allow us to respond "yes" during the installation, + # we need to insert --yes in the argument list for some commands + stdin_disabled = getattr(self.shell, 'kernel', None) is not None + needs_yes = command in CONDA_COMMANDS_REQUIRING_YES + has_yes = set(args).intersection(CONDA_YES_FLAGS) + if stdin_disabled and needs_yes and not has_yes: + extra_args.append("--yes") + + # Add --prefix to point conda installation to the current environment + needs_prefix = command in CONDA_COMMANDS_REQUIRING_PREFIX + has_prefix = set(args).intersection(CONDA_ENV_FLAGS) + if needs_prefix and not has_prefix: + extra_args.extend(["--prefix", sys.prefix]) + + self.shell.system(' '.join([conda, command] + extra_args + args)) + print("\nNote: you may need to restart the kernel to use updated packages.") diff --git a/contrib/python/ipython/py3/IPython/core/magics/pylab.py b/contrib/python/ipython/py3/IPython/core/magics/pylab.py index 8b59599c08c..9ec441a3e2c 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/pylab.py +++ b/contrib/python/ipython/py3/IPython/core/magics/pylab.py @@ -1,166 +1,166 @@ -"""Implementation of magic functions for matplotlib/pylab support. -""" -#----------------------------------------------------------------------------- -# Copyright (c) 2012 The IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -# Our own packages -from traitlets.config.application import Application -from IPython.core import magic_arguments -from IPython.core.magic import Magics, magics_class, line_magic -from IPython.testing.skipdoctest import skip_doctest -from warnings import warn -from IPython.core.pylabtools import backends - -#----------------------------------------------------------------------------- -# Magic implementation classes -#----------------------------------------------------------------------------- - -magic_gui_arg = magic_arguments.argument( - 'gui', nargs='?', - help="""Name of the matplotlib backend to use %s. - If given, the corresponding matplotlib backend is used, - otherwise it will be matplotlib's default - (which you can set in your matplotlib config file). 
- """ % str(tuple(sorted(backends.keys()))) -) - - -@magics_class -class PylabMagics(Magics): - """Magics related to matplotlib's pylab support""" - - @skip_doctest - @line_magic - @magic_arguments.magic_arguments() - @magic_arguments.argument('-l', '--list', action='store_true', - help='Show available matplotlib backends') - @magic_gui_arg - def matplotlib(self, line=''): - """Set up matplotlib to work interactively. - - This function lets you activate matplotlib interactive support - at any point during an IPython session. It does not import anything - into the interactive namespace. - - If you are using the inline matplotlib backend in the IPython Notebook - you can set which figure formats are enabled using the following:: - - In [1]: from IPython.display import set_matplotlib_formats - - In [2]: set_matplotlib_formats('pdf', 'svg') - - The default for inline figures sets `bbox_inches` to 'tight'. This can - cause discrepancies between the displayed image and the identical - image created using `savefig`. This behavior can be disabled using the - `%config` magic:: - - In [3]: %config InlineBackend.print_figure_kwargs = {'bbox_inches':None} - - In addition, see the docstring of - `IPython.display.set_matplotlib_formats` and - `IPython.display.set_matplotlib_close` for more information on - changing additional behaviors of the inline backend. - - Examples - -------- - To enable the inline backend for usage with the IPython Notebook:: - - In [1]: %matplotlib inline - - In this case, where the matplotlib default is TkAgg:: - - In [2]: %matplotlib - Using matplotlib backend: TkAgg - - But you can explicitly request a different GUI backend:: - - In [3]: %matplotlib qt - - You can list the available backends using the -l/--list option:: - - In [4]: %matplotlib --list - Available matplotlib backends: ['osx', 'qt4', 'qt5', 'gtk3', 'gtk4', 'notebook', 'wx', 'qt', 'nbagg', - 'gtk', 'tk', 'inline'] - """ - args = magic_arguments.parse_argstring(self.matplotlib, line) - if args.list: - backends_list = list(backends.keys()) - print("Available matplotlib backends: %s" % backends_list) - else: - gui, backend = self.shell.enable_matplotlib(args.gui.lower() if isinstance(args.gui, str) else args.gui) - self._show_matplotlib_backend(args.gui, backend) - - @skip_doctest - @line_magic - @magic_arguments.magic_arguments() - @magic_arguments.argument( - '--no-import-all', action='store_true', default=None, - help="""Prevent IPython from performing ``import *`` into the interactive namespace. - - You can govern the default behavior of this flag with the - InteractiveShellApp.pylab_import_all configurable. - """ - ) - @magic_gui_arg - def pylab(self, line=''): - """Load numpy and matplotlib to work interactively. - - This function lets you activate pylab (matplotlib, numpy and - interactive support) at any point during an IPython session. - - %pylab makes the following imports:: - - import numpy - import matplotlib - from matplotlib import pylab, mlab, pyplot - np = numpy - plt = pyplot - - from IPython.display import display - from IPython.core.pylabtools import figsize, getfigs - - from pylab import * - from numpy import * - - If you pass `--no-import-all`, the last two `*` imports will be excluded. - - See the %matplotlib magic for more details about activating matplotlib - without affecting the interactive namespace. 
- """ - args = magic_arguments.parse_argstring(self.pylab, line) - if args.no_import_all is None: - # get default from Application - if Application.initialized(): - app = Application.instance() - try: - import_all = app.pylab_import_all - except AttributeError: - import_all = True - else: - # nothing specified, no app - default True - import_all = True - else: - # invert no-import flag - import_all = not args.no_import_all - - gui, backend, clobbered = self.shell.enable_pylab(args.gui, import_all=import_all) - self._show_matplotlib_backend(args.gui, backend) - print("Populating the interactive namespace from numpy and matplotlib") - if clobbered: - warn("pylab import has clobbered these variables: %s" % clobbered + - "\n`%matplotlib` prevents importing * from pylab and numpy" - ) - - def _show_matplotlib_backend(self, gui, backend): - """show matplotlib message backend message""" - if not gui or gui == 'auto': - print("Using matplotlib backend: %s" % backend) +"""Implementation of magic functions for matplotlib/pylab support. +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012 The IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Our own packages +from traitlets.config.application import Application +from IPython.core import magic_arguments +from IPython.core.magic import Magics, magics_class, line_magic +from IPython.testing.skipdoctest import skip_doctest +from warnings import warn +from IPython.core.pylabtools import backends + +#----------------------------------------------------------------------------- +# Magic implementation classes +#----------------------------------------------------------------------------- + +magic_gui_arg = magic_arguments.argument( + 'gui', nargs='?', + help="""Name of the matplotlib backend to use %s. + If given, the corresponding matplotlib backend is used, + otherwise it will be matplotlib's default + (which you can set in your matplotlib config file). + """ % str(tuple(sorted(backends.keys()))) +) + + +@magics_class +class PylabMagics(Magics): + """Magics related to matplotlib's pylab support""" + + @skip_doctest + @line_magic + @magic_arguments.magic_arguments() + @magic_arguments.argument('-l', '--list', action='store_true', + help='Show available matplotlib backends') + @magic_gui_arg + def matplotlib(self, line=''): + """Set up matplotlib to work interactively. + + This function lets you activate matplotlib interactive support + at any point during an IPython session. It does not import anything + into the interactive namespace. + + If you are using the inline matplotlib backend in the IPython Notebook + you can set which figure formats are enabled using the following:: + + In [1]: from IPython.display import set_matplotlib_formats + + In [2]: set_matplotlib_formats('pdf', 'svg') + + The default for inline figures sets `bbox_inches` to 'tight'. This can + cause discrepancies between the displayed image and the identical + image created using `savefig`. 
This behavior can be disabled using the + `%config` magic:: + + In [3]: %config InlineBackend.print_figure_kwargs = {'bbox_inches':None} + + In addition, see the docstring of + `IPython.display.set_matplotlib_formats` and + `IPython.display.set_matplotlib_close` for more information on + changing additional behaviors of the inline backend. + + Examples + -------- + To enable the inline backend for usage with the IPython Notebook:: + + In [1]: %matplotlib inline + + In this case, where the matplotlib default is TkAgg:: + + In [2]: %matplotlib + Using matplotlib backend: TkAgg + + But you can explicitly request a different GUI backend:: + + In [3]: %matplotlib qt + + You can list the available backends using the -l/--list option:: + + In [4]: %matplotlib --list + Available matplotlib backends: ['osx', 'qt4', 'qt5', 'gtk3', 'gtk4', 'notebook', 'wx', 'qt', 'nbagg', + 'gtk', 'tk', 'inline'] + """ + args = magic_arguments.parse_argstring(self.matplotlib, line) + if args.list: + backends_list = list(backends.keys()) + print("Available matplotlib backends: %s" % backends_list) + else: + gui, backend = self.shell.enable_matplotlib(args.gui.lower() if isinstance(args.gui, str) else args.gui) + self._show_matplotlib_backend(args.gui, backend) + + @skip_doctest + @line_magic + @magic_arguments.magic_arguments() + @magic_arguments.argument( + '--no-import-all', action='store_true', default=None, + help="""Prevent IPython from performing ``import *`` into the interactive namespace. + + You can govern the default behavior of this flag with the + InteractiveShellApp.pylab_import_all configurable. + """ + ) + @magic_gui_arg + def pylab(self, line=''): + """Load numpy and matplotlib to work interactively. + + This function lets you activate pylab (matplotlib, numpy and + interactive support) at any point during an IPython session. + + %pylab makes the following imports:: + + import numpy + import matplotlib + from matplotlib import pylab, mlab, pyplot + np = numpy + plt = pyplot + + from IPython.display import display + from IPython.core.pylabtools import figsize, getfigs + + from pylab import * + from numpy import * + + If you pass `--no-import-all`, the last two `*` imports will be excluded. + + See the %matplotlib magic for more details about activating matplotlib + without affecting the interactive namespace. 
+ """ + args = magic_arguments.parse_argstring(self.pylab, line) + if args.no_import_all is None: + # get default from Application + if Application.initialized(): + app = Application.instance() + try: + import_all = app.pylab_import_all + except AttributeError: + import_all = True + else: + # nothing specified, no app - default True + import_all = True + else: + # invert no-import flag + import_all = not args.no_import_all + + gui, backend, clobbered = self.shell.enable_pylab(args.gui, import_all=import_all) + self._show_matplotlib_backend(args.gui, backend) + print("Populating the interactive namespace from numpy and matplotlib") + if clobbered: + warn("pylab import has clobbered these variables: %s" % clobbered + + "\n`%matplotlib` prevents importing * from pylab and numpy" + ) + + def _show_matplotlib_backend(self, gui, backend): + """show matplotlib message backend message""" + if not gui or gui == 'auto': + print("Using matplotlib backend: %s" % backend) diff --git a/contrib/python/ipython/py3/IPython/core/magics/script.py b/contrib/python/ipython/py3/IPython/core/magics/script.py index 6b505f43624..8b7f6f94e09 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/script.py +++ b/contrib/python/ipython/py3/IPython/core/magics/script.py @@ -1,294 +1,294 @@ -"""Magic functions for running cells in various scripts.""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import errno -import os -import sys -import signal -import time -from subprocess import Popen, PIPE, CalledProcessError -import atexit - -from IPython.core import magic_arguments -from IPython.core.magic import ( - Magics, magics_class, line_magic, cell_magic -) -from IPython.lib.backgroundjobs import BackgroundJobManager -from IPython.utils import py3compat -from IPython.utils.process import arg_split -from traitlets import List, Dict, default - -#----------------------------------------------------------------------------- -# Magic implementation classes -#----------------------------------------------------------------------------- - -def script_args(f): - """single decorator for adding script args""" - args = [ - magic_arguments.argument( - '--out', type=str, - help="""The variable in which to store stdout from the script. - If the script is backgrounded, this will be the stdout *pipe*, - instead of the stderr text itself and will not be auto closed. - """ - ), - magic_arguments.argument( - '--err', type=str, - help="""The variable in which to store stderr from the script. - If the script is backgrounded, this will be the stderr *pipe*, - instead of the stderr text itself and will not be autoclosed. - """ - ), - magic_arguments.argument( - '--bg', action="store_true", - help="""Whether to run the script in the background. - If given, the only way to see the output of the command is - with --out/err. - """ - ), - magic_arguments.argument( - '--proc', type=str, - help="""The variable in which to store Popen instance. - This is used only when --bg option is given. - """ - ), - magic_arguments.argument( - '--no-raise-error', action="store_false", dest='raise_error', - help="""Whether you should raise an error message in addition to - a stream on stderr if you get a nonzero exit code. 
- """ - ) - ] - for arg in args: - f = arg(f) - return f - -@magics_class -class ScriptMagics(Magics): - """Magics for talking to scripts - - This defines a base `%%script` cell magic for running a cell - with a program in a subprocess, and registers a few top-level - magics that call %%script with common interpreters. - """ - script_magics = List( - help="""Extra script cell magics to define - - This generates simple wrappers of `%%script foo` as `%%foo`. - - If you want to add script magics that aren't on your path, - specify them in script_paths - """, - ).tag(config=True) - @default('script_magics') - def _script_magics_default(self): - """default to a common list of programs""" - - defaults = [ - 'sh', - 'bash', - 'perl', - 'ruby', - 'python', - 'python2', - 'python3', - 'pypy', - ] - if os.name == 'nt': - defaults.extend([ - 'cmd', - ]) - - return defaults - - script_paths = Dict( - help="""Dict mapping short 'ruby' names to full paths, such as '/opt/secret/bin/ruby' - - Only necessary for items in script_magics where the default path will not - find the right interpreter. - """ - ).tag(config=True) - - def __init__(self, shell=None): - super(ScriptMagics, self).__init__(shell=shell) - self._generate_script_magics() - self.job_manager = BackgroundJobManager() - self.bg_processes = [] - atexit.register(self.kill_bg_processes) - - def __del__(self): - self.kill_bg_processes() - - def _generate_script_magics(self): - cell_magics = self.magics['cell'] - for name in self.script_magics: - cell_magics[name] = self._make_script_magic(name) - - def _make_script_magic(self, name): - """make a named magic, that calls %%script with a particular program""" - # expand to explicit path if necessary: - script = self.script_paths.get(name, name) - - @magic_arguments.magic_arguments() - @script_args - def named_script_magic(line, cell): - # if line, add it as cl-flags - if line: - line = "%s %s" % (script, line) - else: - line = script - return self.shebang(line, cell) - - # write a basic docstring: - named_script_magic.__doc__ = \ - """%%{name} script magic - - Run cells with {script} in a subprocess. - - This is a shortcut for `%%script {script}` - """.format(**locals()) - - return named_script_magic - - @magic_arguments.magic_arguments() - @script_args - @cell_magic("script") - def shebang(self, line, cell): - """Run a cell via a shell command - - The `%%script` line is like the #! line of script, - specifying a program (bash, perl, ruby, etc.) with which to run. - - The rest of the cell is run by that program. 
- - Examples - -------- - :: - - In [1]: %%script bash - ...: for i in 1 2 3; do - ...: echo $i - ...: done - 1 - 2 - 3 - """ - argv = arg_split(line, posix = not sys.platform.startswith('win')) - args, cmd = self.shebang.parser.parse_known_args(argv) - - try: - p = Popen(cmd, stdout=PIPE, stderr=PIPE, stdin=PIPE) - except OSError as e: - if e.errno == errno.ENOENT: - print("Couldn't find program: %r" % cmd[0]) - return - else: - raise - - if not cell.endswith('\n'): - cell += '\n' - cell = cell.encode('utf8', 'replace') - if args.bg: - self.bg_processes.append(p) - self._gc_bg_processes() - to_close = [] - if args.out: - self.shell.user_ns[args.out] = p.stdout - else: - to_close.append(p.stdout) - if args.err: - self.shell.user_ns[args.err] = p.stderr - else: - to_close.append(p.stderr) - self.job_manager.new(self._run_script, p, cell, to_close, daemon=True) - if args.proc: - self.shell.user_ns[args.proc] = p - return - - try: - out, err = p.communicate(cell) - except KeyboardInterrupt: - try: - p.send_signal(signal.SIGINT) - time.sleep(0.1) - if p.poll() is not None: - print("Process is interrupted.") - return - p.terminate() - time.sleep(0.1) - if p.poll() is not None: - print("Process is terminated.") - return - p.kill() - print("Process is killed.") - except OSError: - pass - except Exception as e: - print("Error while terminating subprocess (pid=%i): %s" \ - % (p.pid, e)) - return - out = py3compat.decode(out) - err = py3compat.decode(err) - if args.out: - self.shell.user_ns[args.out] = out - else: - sys.stdout.write(out) - sys.stdout.flush() - if args.err: - self.shell.user_ns[args.err] = err - else: - sys.stderr.write(err) - sys.stderr.flush() - if args.raise_error and p.returncode!=0: - raise CalledProcessError(p.returncode, cell, output=out, stderr=err) - - def _run_script(self, p, cell, to_close): - """callback for running the script in the background""" - p.stdin.write(cell) - p.stdin.close() - for s in to_close: - s.close() - p.wait() - - @line_magic("killbgscripts") - def killbgscripts(self, _nouse_=''): - """Kill all BG processes started by %%script and its family.""" - self.kill_bg_processes() - print("All background processes were killed.") - - def kill_bg_processes(self): - """Kill all BG processes which are still running.""" - if not self.bg_processes: - return - for p in self.bg_processes: - if p.poll() is None: - try: - p.send_signal(signal.SIGINT) - except: - pass - time.sleep(0.1) - self._gc_bg_processes() - if not self.bg_processes: - return - for p in self.bg_processes: - if p.poll() is None: - try: - p.terminate() - except: - pass - time.sleep(0.1) - self._gc_bg_processes() - if not self.bg_processes: - return - for p in self.bg_processes: - if p.poll() is None: - try: - p.kill() - except: - pass - self._gc_bg_processes() - - def _gc_bg_processes(self): - self.bg_processes = [p for p in self.bg_processes if p.poll() is None] +"""Magic functions for running cells in various scripts.""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
+ +import errno +import os +import sys +import signal +import time +from subprocess import Popen, PIPE, CalledProcessError +import atexit + +from IPython.core import magic_arguments +from IPython.core.magic import ( + Magics, magics_class, line_magic, cell_magic +) +from IPython.lib.backgroundjobs import BackgroundJobManager +from IPython.utils import py3compat +from IPython.utils.process import arg_split +from traitlets import List, Dict, default + +#----------------------------------------------------------------------------- +# Magic implementation classes +#----------------------------------------------------------------------------- + +def script_args(f): + """single decorator for adding script args""" + args = [ + magic_arguments.argument( + '--out', type=str, + help="""The variable in which to store stdout from the script. + If the script is backgrounded, this will be the stdout *pipe*, + instead of the stderr text itself and will not be auto closed. + """ + ), + magic_arguments.argument( + '--err', type=str, + help="""The variable in which to store stderr from the script. + If the script is backgrounded, this will be the stderr *pipe*, + instead of the stderr text itself and will not be autoclosed. + """ + ), + magic_arguments.argument( + '--bg', action="store_true", + help="""Whether to run the script in the background. + If given, the only way to see the output of the command is + with --out/err. + """ + ), + magic_arguments.argument( + '--proc', type=str, + help="""The variable in which to store Popen instance. + This is used only when --bg option is given. + """ + ), + magic_arguments.argument( + '--no-raise-error', action="store_false", dest='raise_error', + help="""Whether you should raise an error message in addition to + a stream on stderr if you get a nonzero exit code. + """ + ) + ] + for arg in args: + f = arg(f) + return f + +@magics_class +class ScriptMagics(Magics): + """Magics for talking to scripts + + This defines a base `%%script` cell magic for running a cell + with a program in a subprocess, and registers a few top-level + magics that call %%script with common interpreters. + """ + script_magics = List( + help="""Extra script cell magics to define + + This generates simple wrappers of `%%script foo` as `%%foo`. + + If you want to add script magics that aren't on your path, + specify them in script_paths + """, + ).tag(config=True) + @default('script_magics') + def _script_magics_default(self): + """default to a common list of programs""" + + defaults = [ + 'sh', + 'bash', + 'perl', + 'ruby', + 'python', + 'python2', + 'python3', + 'pypy', + ] + if os.name == 'nt': + defaults.extend([ + 'cmd', + ]) + + return defaults + + script_paths = Dict( + help="""Dict mapping short 'ruby' names to full paths, such as '/opt/secret/bin/ruby' + + Only necessary for items in script_magics where the default path will not + find the right interpreter. 
+ """ + ).tag(config=True) + + def __init__(self, shell=None): + super(ScriptMagics, self).__init__(shell=shell) + self._generate_script_magics() + self.job_manager = BackgroundJobManager() + self.bg_processes = [] + atexit.register(self.kill_bg_processes) + + def __del__(self): + self.kill_bg_processes() + + def _generate_script_magics(self): + cell_magics = self.magics['cell'] + for name in self.script_magics: + cell_magics[name] = self._make_script_magic(name) + + def _make_script_magic(self, name): + """make a named magic, that calls %%script with a particular program""" + # expand to explicit path if necessary: + script = self.script_paths.get(name, name) + + @magic_arguments.magic_arguments() + @script_args + def named_script_magic(line, cell): + # if line, add it as cl-flags + if line: + line = "%s %s" % (script, line) + else: + line = script + return self.shebang(line, cell) + + # write a basic docstring: + named_script_magic.__doc__ = \ + """%%{name} script magic + + Run cells with {script} in a subprocess. + + This is a shortcut for `%%script {script}` + """.format(**locals()) + + return named_script_magic + + @magic_arguments.magic_arguments() + @script_args + @cell_magic("script") + def shebang(self, line, cell): + """Run a cell via a shell command + + The `%%script` line is like the #! line of script, + specifying a program (bash, perl, ruby, etc.) with which to run. + + The rest of the cell is run by that program. + + Examples + -------- + :: + + In [1]: %%script bash + ...: for i in 1 2 3; do + ...: echo $i + ...: done + 1 + 2 + 3 + """ + argv = arg_split(line, posix = not sys.platform.startswith('win')) + args, cmd = self.shebang.parser.parse_known_args(argv) + + try: + p = Popen(cmd, stdout=PIPE, stderr=PIPE, stdin=PIPE) + except OSError as e: + if e.errno == errno.ENOENT: + print("Couldn't find program: %r" % cmd[0]) + return + else: + raise + + if not cell.endswith('\n'): + cell += '\n' + cell = cell.encode('utf8', 'replace') + if args.bg: + self.bg_processes.append(p) + self._gc_bg_processes() + to_close = [] + if args.out: + self.shell.user_ns[args.out] = p.stdout + else: + to_close.append(p.stdout) + if args.err: + self.shell.user_ns[args.err] = p.stderr + else: + to_close.append(p.stderr) + self.job_manager.new(self._run_script, p, cell, to_close, daemon=True) + if args.proc: + self.shell.user_ns[args.proc] = p + return + + try: + out, err = p.communicate(cell) + except KeyboardInterrupt: + try: + p.send_signal(signal.SIGINT) + time.sleep(0.1) + if p.poll() is not None: + print("Process is interrupted.") + return + p.terminate() + time.sleep(0.1) + if p.poll() is not None: + print("Process is terminated.") + return + p.kill() + print("Process is killed.") + except OSError: + pass + except Exception as e: + print("Error while terminating subprocess (pid=%i): %s" \ + % (p.pid, e)) + return + out = py3compat.decode(out) + err = py3compat.decode(err) + if args.out: + self.shell.user_ns[args.out] = out + else: + sys.stdout.write(out) + sys.stdout.flush() + if args.err: + self.shell.user_ns[args.err] = err + else: + sys.stderr.write(err) + sys.stderr.flush() + if args.raise_error and p.returncode!=0: + raise CalledProcessError(p.returncode, cell, output=out, stderr=err) + + def _run_script(self, p, cell, to_close): + """callback for running the script in the background""" + p.stdin.write(cell) + p.stdin.close() + for s in to_close: + s.close() + p.wait() + + @line_magic("killbgscripts") + def killbgscripts(self, _nouse_=''): + """Kill all BG processes started by %%script 
and its family.""" + self.kill_bg_processes() + print("All background processes were killed.") + + def kill_bg_processes(self): + """Kill all BG processes which are still running.""" + if not self.bg_processes: + return + for p in self.bg_processes: + if p.poll() is None: + try: + p.send_signal(signal.SIGINT) + except: + pass + time.sleep(0.1) + self._gc_bg_processes() + if not self.bg_processes: + return + for p in self.bg_processes: + if p.poll() is None: + try: + p.terminate() + except: + pass + time.sleep(0.1) + self._gc_bg_processes() + if not self.bg_processes: + return + for p in self.bg_processes: + if p.poll() is None: + try: + p.kill() + except: + pass + self._gc_bg_processes() + + def _gc_bg_processes(self): + self.bg_processes = [p for p in self.bg_processes if p.poll() is None] diff --git a/contrib/python/ipython/py3/IPython/core/oinspect.py b/contrib/python/ipython/py3/IPython/core/oinspect.py index e6297b98dfa..272916c9663 100644 --- a/contrib/python/ipython/py3/IPython/core/oinspect.py +++ b/contrib/python/ipython/py3/IPython/core/oinspect.py @@ -1,1031 +1,1031 @@ -# -*- coding: utf-8 -*- -"""Tools for inspecting Python objects. - -Uses syntax highlighting for presenting the various information elements. - -Similar in spirit to the inspect module, but all calls take a name argument to -reference the name under which an object is being read. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -__all__ = ['Inspector','InspectColors'] - -# stdlib modules -import ast -import inspect -from inspect import signature -import linecache -import warnings -import os -from textwrap import dedent -import types -import io as stdlib_io - -from typing import Union - -# IPython's own -from IPython.core import page -from IPython.lib.pretty import pretty -from IPython.testing.skipdoctest import skip_doctest -from IPython.utils import PyColorize -from IPython.utils import openpy -from IPython.utils import py3compat -from IPython.utils.dir2 import safe_hasattr -from IPython.utils.path import compress_user -from IPython.utils.text import indent -from IPython.utils.wildcard import list_namespace -from IPython.utils.wildcard import typestr2type -from IPython.utils.coloransi import TermColors, ColorScheme, ColorSchemeTable -from IPython.utils.py3compat import cast_unicode -from IPython.utils.colorable import Colorable -from IPython.utils.decorators import undoc - -from pygments import highlight -from pygments.lexers import PythonLexer -from pygments.formatters import HtmlFormatter - -def pylight(code): - return highlight(code, PythonLexer(), HtmlFormatter(noclasses=True)) - -# builtin docstrings to ignore -_func_call_docstring = types.FunctionType.__call__.__doc__ -_object_init_docstring = object.__init__.__doc__ -_builtin_type_docstrings = { - inspect.getdoc(t) for t in (types.ModuleType, types.MethodType, - types.FunctionType, property) -} - -_builtin_func_type = type(all) -_builtin_meth_type = type(str.upper) # Bound methods have the same type as builtin functions -#**************************************************************************** -# Builtin color schemes - -Colors = TermColors # just a shorthand - -InspectColors = PyColorize.ANSICodeColors - -#**************************************************************************** -# Auxiliary functions and objects - -# See the messaging spec for the definition of all these fields. 
This list -# effectively defines the order of display -info_fields = ['type_name', 'base_class', 'string_form', 'namespace', - 'length', 'file', 'definition', 'docstring', 'source', - 'init_definition', 'class_docstring', 'init_docstring', - 'call_def', 'call_docstring', - # These won't be printed but will be used to determine how to - # format the object - 'ismagic', 'isalias', 'isclass', 'found', 'name' - ] - - -def object_info(**kw): - """Make an object info dict with all fields present.""" - infodict = {k:None for k in info_fields} - infodict.update(kw) - return infodict - - -def get_encoding(obj): - """Get encoding for python source file defining obj - - Returns None if obj is not defined in a sourcefile. - """ - ofile = find_file(obj) - # run contents of file through pager starting at line where the object - # is defined, as long as the file isn't binary and is actually on the - # filesystem. - if ofile is None: - return None - elif ofile.endswith(('.so', '.dll', '.pyd')): - return None - elif not os.path.isfile(ofile): - return None - else: - # Print only text files, not extension binaries. Note that - # getsourcelines returns lineno with 1-offset and page() uses - # 0-offset, so we must adjust. - with stdlib_io.open(ofile, 'rb') as buffer: # Tweaked to use io.open for Python 2 - encoding, lines = openpy.detect_encoding(buffer.readline) - return encoding - -def getdoc(obj) -> Union[str,None]: - """Stable wrapper around inspect.getdoc. - - This can't crash because of attribute problems. - - It also attempts to call a getdoc() method on the given object. This - allows objects which provide their docstrings via non-standard mechanisms - (like Pyro proxies) to still be inspected by ipython's ? system. - """ - # Allow objects to offer customized documentation via a getdoc method: - try: - ds = obj.getdoc() - except Exception: - pass - else: - if isinstance(ds, str): - return inspect.cleandoc(ds) - docstr = inspect.getdoc(obj) - return docstr - - -def getsource(obj, oname='') -> Union[str,None]: - """Wrapper around inspect.getsource. - - This can be modified by other projects to provide customized source - extraction. - - Parameters - ---------- - obj : object - an object whose source code we will attempt to extract - oname : str - (optional) a name under which the object is known - - Returns - ------- - src : unicode or None - - """ - - if isinstance(obj, property): - sources = [] - for attrname in ['fget', 'fset', 'fdel']: - fn = getattr(obj, attrname) - if fn is not None: - encoding = get_encoding(fn) - oname_prefix = ('%s.' % oname) if oname else '' - sources.append(''.join(('# ', oname_prefix, attrname))) - if inspect.isfunction(fn): - sources.append(dedent(getsource(fn))) - else: - # Default str/repr only prints function name, - # pretty.pretty prints module name too. - sources.append( - '%s%s = %s\n' % (oname_prefix, attrname, pretty(fn)) - ) - if sources: - return '\n'.join(sources) - else: - return None - - else: - # Get source for non-property objects. - - obj = _get_wrapped(obj) - - try: - src = inspect.getsource(obj) - except TypeError: - # The object itself provided no meaningful source, try looking for - # its class definition instead. 
- try: - src = inspect.getsource(obj.__class__) - except (OSError, TypeError): - return None - except OSError: - return None - - return src - - -def is_simple_callable(obj): - """True if obj is a function ()""" - return (inspect.isfunction(obj) or inspect.ismethod(obj) or \ - isinstance(obj, _builtin_func_type) or isinstance(obj, _builtin_meth_type)) - -@undoc -def getargspec(obj): - """Wrapper around :func:`inspect.getfullargspec` - - In addition to functions and methods, this can also handle objects with a - ``__call__`` attribute. - - DEPRECATED: Deprecated since 7.10. Do not use, will be removed. - """ - - warnings.warn('`getargspec` function is deprecated as of IPython 7.10' - 'and will be removed in future versions.', DeprecationWarning, stacklevel=2) - - if safe_hasattr(obj, '__call__') and not is_simple_callable(obj): - obj = obj.__call__ - - return inspect.getfullargspec(obj) - -@undoc -def format_argspec(argspec): - """Format argspect, convenience wrapper around inspect's. - - This takes a dict instead of ordered arguments and calls - inspect.format_argspec with the arguments in the necessary order. - - DEPRECATED: Do not use; will be removed in future versions. - """ - - warnings.warn('`format_argspec` function is deprecated as of IPython 7.10' - 'and will be removed in future versions.', DeprecationWarning, stacklevel=2) - - - return inspect.formatargspec(argspec['args'], argspec['varargs'], - argspec['varkw'], argspec['defaults']) - -@undoc -def call_tip(oinfo, format_call=True): - """DEPRECATED. Extract call tip data from an oinfo dict. - """ - warnings.warn('`call_tip` function is deprecated as of IPython 6.0' - 'and will be removed in future versions.', DeprecationWarning, stacklevel=2) - # Get call definition - argspec = oinfo.get('argspec') - if argspec is None: - call_line = None - else: - # Callable objects will have 'self' as their first argument, prune - # it out if it's there for clarity (since users do *not* pass an - # extra first argument explicitly). - try: - has_self = argspec['args'][0] == 'self' - except (KeyError, IndexError): - pass - else: - if has_self: - argspec['args'] = argspec['args'][1:] - - call_line = oinfo['name']+format_argspec(argspec) - - # Now get docstring. - # The priority is: call docstring, constructor docstring, main one. - doc = oinfo.get('call_docstring') - if doc is None: - doc = oinfo.get('init_docstring') - if doc is None: - doc = oinfo.get('docstring','') - - return call_line, doc - - -def _get_wrapped(obj): - """Get the original object if wrapped in one or more @decorators - - Some objects automatically construct similar objects on any unrecognised - attribute access (e.g. unittest.mock.call). To protect against infinite loops, - this will arbitrarily cut off after 100 levels of obj.__wrapped__ - attribute access. --TK, Jan 2016 - """ - orig_obj = obj - i = 0 - while safe_hasattr(obj, '__wrapped__'): - obj = obj.__wrapped__ - i += 1 - if i > 100: - # __wrapped__ is probably a lie, so return the thing we started with - return orig_obj - return obj - -def find_file(obj) -> str: - """Find the absolute path to the file where an object was defined. - - This is essentially a robust wrapper around `inspect.getabsfile`. - - Returns None if no file can be found. - - Parameters - ---------- - obj : any Python object - - Returns - ------- - fname : str - The absolute path to the file where the object was defined. 
- """ - obj = _get_wrapped(obj) - - fname = None - try: - fname = inspect.getabsfile(obj) - except TypeError: - # For an instance, the file that matters is where its class was - # declared. - try: - fname = inspect.getabsfile(obj.__class__) - except (OSError, TypeError): - # Can happen for builtins - pass - except OSError: - pass - - return cast_unicode(fname) - - -def find_source_lines(obj): - """Find the line number in a file where an object was defined. - - This is essentially a robust wrapper around `inspect.getsourcelines`. - - Returns None if no file can be found. - - Parameters - ---------- - obj : any Python object - - Returns - ------- - lineno : int - The line number where the object definition starts. - """ - obj = _get_wrapped(obj) - - try: - lineno = inspect.getsourcelines(obj)[1] - except TypeError: - # For instances, try the class object like getsource() does - try: - lineno = inspect.getsourcelines(obj.__class__)[1] - except (OSError, TypeError): - return None - except OSError: - return None - - return lineno - -class Inspector(Colorable): - - def __init__(self, color_table=InspectColors, - code_color_table=PyColorize.ANSICodeColors, - scheme=None, - str_detail_level=0, - parent=None, config=None): - super(Inspector, self).__init__(parent=parent, config=config) - self.color_table = color_table - self.parser = PyColorize.Parser(out='str', parent=self, style=scheme) - self.format = self.parser.format - self.str_detail_level = str_detail_level - self.set_active_scheme(scheme) - - def _getdef(self,obj,oname='') -> Union[str,None]: - """Return the call signature for any callable object. - - If any exception is generated, None is returned instead and the - exception is suppressed.""" - try: - return _render_signature(signature(obj), oname) - except: - return None - - def __head(self,h) -> str: - """Return a header string with proper colors.""" - return '%s%s%s' % (self.color_table.active_colors.header,h, - self.color_table.active_colors.normal) - - def set_active_scheme(self, scheme): - if scheme is not None: - self.color_table.set_active_scheme(scheme) - self.parser.color_table.set_active_scheme(scheme) - - def noinfo(self, msg, oname): - """Generic message when no information is found.""" - print('No %s found' % msg, end=' ') - if oname: - print('for %s' % oname) - else: - print() - - def pdef(self, obj, oname=''): - """Print the call signature for any callable object. - - If the object is a class, print the constructor information.""" - - if not callable(obj): - print('Object is not callable.') - return - - header = '' - - if inspect.isclass(obj): - header = self.__head('Class constructor information:\n') - - - output = self._getdef(obj,oname) - if output is None: - self.noinfo('definition header',oname) - else: - print(header,self.format(output), end=' ') - - # In Python 3, all classes are new-style, so they all have __init__. - @skip_doctest - def pdoc(self, obj, oname='', formatter=None): - """Print the docstring for any object. - - Optional: - -formatter: a function to run the docstring through for specially - formatted docstrings. 
- - Examples - -------- - - In [1]: class NoInit: - ...: pass - - In [2]: class NoDoc: - ...: def __init__(self): - ...: pass - - In [3]: %pdoc NoDoc - No documentation found for NoDoc - - In [4]: %pdoc NoInit - No documentation found for NoInit - - In [5]: obj = NoInit() - - In [6]: %pdoc obj - No documentation found for obj - - In [5]: obj2 = NoDoc() - - In [6]: %pdoc obj2 - No documentation found for obj2 - """ - - head = self.__head # For convenience - lines = [] - ds = getdoc(obj) - if formatter: - ds = formatter(ds).get('plain/text', ds) - if ds: - lines.append(head("Class docstring:")) - lines.append(indent(ds)) - if inspect.isclass(obj) and hasattr(obj, '__init__'): - init_ds = getdoc(obj.__init__) - if init_ds is not None: - lines.append(head("Init docstring:")) - lines.append(indent(init_ds)) - elif hasattr(obj,'__call__'): - call_ds = getdoc(obj.__call__) - if call_ds: - lines.append(head("Call docstring:")) - lines.append(indent(call_ds)) - - if not lines: - self.noinfo('documentation',oname) - else: - page.page('\n'.join(lines)) - - def psource(self, obj, oname=''): - """Print the source code for an object.""" - - # Flush the source cache because inspect can return out-of-date source - linecache.checkcache() - try: - src = getsource(obj, oname=oname) - except Exception: - src = None - - if src is None: - self.noinfo('source', oname) - else: - page.page(self.format(src)) - - def pfile(self, obj, oname=''): - """Show the whole file where an object was defined.""" - - lineno = find_source_lines(obj) - if lineno is None: - self.noinfo('file', oname) - return - - ofile = find_file(obj) - # run contents of file through pager starting at line where the object - # is defined, as long as the file isn't binary and is actually on the - # filesystem. - if ofile.endswith(('.so', '.dll', '.pyd')): - print('File %r is binary, not printing.' % ofile) - elif not os.path.isfile(ofile): - print('File %r does not exist, not printing.' % ofile) - else: - # Print only text files, not extension binaries. Note that - # getsourcelines returns lineno with 1-offset and page() uses - # 0-offset, so we must adjust. - page.page(self.format(openpy.read_py_file(ofile, skip_encoding_cookie=False)), lineno - 1) - - - def _mime_format(self, text:str, formatter=None) -> dict: - """Return a mime bundle representation of the input text. - - - if `formatter` is None, the returned mime bundle has - a `text/plain` field, with the input text. - a `text/html` field with a `<pre>` tag containing the input text. - - - if `formatter` is not None, it must be a callable transforming the - input text into a mime bundle. Default values for `text/plain` and - `text/html` representations are the ones described above. - - Note: - - Formatters returning strings are supported but this behavior is deprecated. - - """ - defaults = { - 'text/plain': text, - 'text/html': '<pre>' + text + '</pre>' - } - - if formatter is None: - return defaults - else: - formatted = formatter(text) - - if not isinstance(formatted, dict): - # Handle the deprecated behavior of a formatter returning - # a string instead of a mime bundle. 
- return { - 'text/plain': formatted, - 'text/html': '<pre>' + formatted + '</pre>' - } - - else: - return dict(defaults, **formatted) - - - def format_mime(self, bundle): - - text_plain = bundle['text/plain'] - - text = '' - heads, bodies = list(zip(*text_plain)) - _len = max(len(h) for h in heads) - - for head, body in zip(heads, bodies): - body = body.strip('\n') - delim = '\n' if '\n' in body else ' ' - text += self.__head(head+':') + (_len - len(head))*' ' +delim + body +'\n' - - bundle['text/plain'] = text - return bundle - - def _get_info(self, obj, oname='', formatter=None, info=None, detail_level=0): - """Retrieve an info dict and format it. - - Parameters - ========== - - obj: any - Object to inspect and return info from - oname: str (default: ''): - Name of the variable pointing to `obj`. - formatter: callable - info: - already computed information - detail_level: integer - Granularity of detail level, if set to 1, give more information. - """ - - info = self._info(obj, oname=oname, info=info, detail_level=detail_level) - - _mime = { - 'text/plain': [], - 'text/html': '', - } - - def append_field(bundle, title:str, key:str, formatter=None): - field = info[key] - if field is not None: - formatted_field = self._mime_format(field, formatter) - bundle['text/plain'].append((title, formatted_field['text/plain'])) - bundle['text/html'] += '<h1>' + title + '</h1>\n' + formatted_field['text/html'] + '\n' - - def code_formatter(text): - return { - 'text/plain': self.format(text), - 'text/html': pylight(text) - } - - if info['isalias']: - append_field(_mime, 'Repr', 'string_form') - - elif info['ismagic']: - if detail_level > 0: - append_field(_mime, 'Source', 'source', code_formatter) - else: - append_field(_mime, 'Docstring', 'docstring', formatter) - append_field(_mime, 'File', 'file') - - elif info['isclass'] or is_simple_callable(obj): - # Functions, methods, classes - append_field(_mime, 'Signature', 'definition', code_formatter) - append_field(_mime, 'Init signature', 'init_definition', code_formatter) - append_field(_mime, 'Docstring', 'docstring', formatter) - if detail_level > 0 and info['source']: - append_field(_mime, 'Source', 'source', code_formatter) - else: - append_field(_mime, 'Init docstring', 'init_docstring', formatter) - - append_field(_mime, 'File', 'file') - append_field(_mime, 'Type', 'type_name') - append_field(_mime, 'Subclasses', 'subclasses') - - else: - # General Python objects - append_field(_mime, 'Signature', 'definition', code_formatter) - append_field(_mime, 'Call signature', 'call_def', code_formatter) - append_field(_mime, 'Type', 'type_name') - append_field(_mime, 'String form', 'string_form') - - # Namespace - if info['namespace'] != 'Interactive': - append_field(_mime, 'Namespace', 'namespace') - - append_field(_mime, 'Length', 'length') - append_field(_mime, 'File', 'file') - - # Source or docstring, depending on detail level and whether - # source found. - if detail_level > 0 and info['source']: - append_field(_mime, 'Source', 'source', code_formatter) - else: - append_field(_mime, 'Docstring', 'docstring', formatter) - - append_field(_mime, 'Class docstring', 'class_docstring', formatter) - append_field(_mime, 'Init docstring', 'init_docstring', formatter) - append_field(_mime, 'Call docstring', 'call_docstring', formatter) - - - return self.format_mime(_mime) - - def pinfo(self, obj, oname='', formatter=None, info=None, detail_level=0, enable_html_pager=True): - """Show detailed information about an object. 
- - Optional arguments: - - - oname: name of the variable pointing to the object. - - - formatter: callable (optional) - A special formatter for docstrings. - - The formatter is a callable that takes a string as an input - and returns either a formatted string or a mime type bundle - in the form of a dictionary. - - Although the support of custom formatter returning a string - instead of a mime type bundle is deprecated. - - - info: a structure with some information fields which may have been - precomputed already. - - - detail_level: if set to 1, more information is given. - """ - info = self._get_info(obj, oname, formatter, info, detail_level) - if not enable_html_pager: - del info['text/html'] - page.page(info) - - def info(self, obj, oname='', formatter=None, info=None, detail_level=0): - """DEPRECATED. Compute a dict with detailed information about an object. - """ - if formatter is not None: - warnings.warn('The `formatter` keyword argument to `Inspector.info`' - 'is deprecated as of IPython 5.0 and will have no effects.', - DeprecationWarning, stacklevel=2) - return self._info(obj, oname=oname, info=info, detail_level=detail_level) - - def _info(self, obj, oname='', info=None, detail_level=0) -> dict: - """Compute a dict with detailed information about an object. - - Parameters - ========== - - obj: any - An object to find information about - oname: str (default: ''): - Name of the variable pointing to `obj`. - info: (default: None) - A struct (dict like with attr access) with some information fields - which may have been precomputed already. - detail_level: int (default:0) - If set to 1, more information is given. - - Returns - ======= - - An object info dict with known fields from `info_fields`. Keys are - strings, values are string or None. - """ - - if info is None: - ismagic = False - isalias = False - ospace = '' - else: - ismagic = info.ismagic - isalias = info.isalias - ospace = info.namespace - - # Get docstring, special-casing aliases: - if isalias: - if not callable(obj): - try: - ds = "Alias to the system command:\n %s" % obj[1] - except: - ds = "Alias: " + str(obj) - else: - ds = "Alias to " + str(obj) - if obj.__doc__: - ds += "\nDocstring:\n" + obj.__doc__ - else: - ds = getdoc(obj) - if ds is None: - ds = '<no docstring>' - - # store output in a dict, we initialize it here and fill it as we go - out = dict(name=oname, found=True, isalias=isalias, ismagic=ismagic, subclasses=None) - - string_max = 200 # max size of strings to show (snipped if longer) - shalf = int((string_max - 5) / 2) - - if ismagic: - out['type_name'] = 'Magic function' - elif isalias: - out['type_name'] = 'System alias' - else: - out['type_name'] = type(obj).__name__ - - try: - bclass = obj.__class__ - out['base_class'] = str(bclass) - except: - pass - - # String form, but snip if too long in ? form (full in ??) 
- if detail_level >= self.str_detail_level: - try: - ostr = str(obj) - str_head = 'string_form' - if not detail_level and len(ostr)>string_max: - ostr = ostr[:shalf] + ' <...> ' + ostr[-shalf:] - ostr = ("\n" + " " * len(str_head.expandtabs())).\ - join(q.strip() for q in ostr.split("\n")) - out[str_head] = ostr - except: - pass - - if ospace: - out['namespace'] = ospace - - # Length (for strings and lists) - try: - out['length'] = str(len(obj)) - except Exception: - pass - - # Filename where object was defined - binary_file = False - fname = find_file(obj) - if fname is None: - # if anything goes wrong, we don't want to show source, so it's as - # if the file was binary - binary_file = True - else: - if fname.endswith(('.so', '.dll', '.pyd')): - binary_file = True - elif fname.endswith('<string>'): - fname = 'Dynamically generated function. No source code available.' - out['file'] = compress_user(fname) - - # Original source code for a callable, class or property. - if detail_level: - # Flush the source cache because inspect can return out-of-date - # source - linecache.checkcache() - try: - if isinstance(obj, property) or not binary_file: - src = getsource(obj, oname) - if src is not None: - src = src.rstrip() - out['source'] = src - - except Exception: - pass - - # Add docstring only if no source is to be shown (avoid repetitions). - if ds and not self._source_contains_docstring(out.get('source'), ds): - out['docstring'] = ds - - # Constructor docstring for classes - if inspect.isclass(obj): - out['isclass'] = True - - # get the init signature: - try: - init_def = self._getdef(obj, oname) - except AttributeError: - init_def = None - - # get the __init__ docstring - try: - obj_init = obj.__init__ - except AttributeError: - init_ds = None - else: - if init_def is None: - # Get signature from init if top-level sig failed. - # Can happen for built-in types (list, etc.). - try: - init_def = self._getdef(obj_init, oname) - except AttributeError: - pass - init_ds = getdoc(obj_init) - # Skip Python's auto-generated docstrings - if init_ds == _object_init_docstring: - init_ds = None - - if init_def: - out['init_definition'] = init_def - - if init_ds: - out['init_docstring'] = init_ds - - names = [sub.__name__ for sub in type.__subclasses__(obj)] - if len(names) < 10: - all_names = ', '.join(names) - else: - all_names = ', '.join(names[:10]+['...']) - out['subclasses'] = all_names - # and class docstring for instances: - else: - # reconstruct the function definition and print it: - defln = self._getdef(obj, oname) - if defln: - out['definition'] = defln - - # First, check whether the instance docstring is identical to the - # class one, and print it separately if they don't coincide. In - # most cases they will, but it's nice to print all the info for - # objects which use instance-customized docstrings. 
- if ds: - try: - cls = getattr(obj,'__class__') - except: - class_ds = None - else: - class_ds = getdoc(cls) - # Skip Python's auto-generated docstrings - if class_ds in _builtin_type_docstrings: - class_ds = None - if class_ds and ds != class_ds: - out['class_docstring'] = class_ds - - # Next, try to show constructor docstrings - try: - init_ds = getdoc(obj.__init__) - # Skip Python's auto-generated docstrings - if init_ds == _object_init_docstring: - init_ds = None - except AttributeError: - init_ds = None - if init_ds: - out['init_docstring'] = init_ds - - # Call form docstring for callable instances - if safe_hasattr(obj, '__call__') and not is_simple_callable(obj): - call_def = self._getdef(obj.__call__, oname) - if call_def and (call_def != out.get('definition')): - # it may never be the case that call def and definition differ, - # but don't include the same signature twice - out['call_def'] = call_def - call_ds = getdoc(obj.__call__) - # Skip Python's auto-generated docstrings - if call_ds == _func_call_docstring: - call_ds = None - if call_ds: - out['call_docstring'] = call_ds - - return object_info(**out) - - @staticmethod - def _source_contains_docstring(src, doc): - """ - Check whether the source *src* contains the docstring *doc*. - - This is is helper function to skip displaying the docstring if the - source already contains it, avoiding repetition of information. - """ - try: - def_node, = ast.parse(dedent(src)).body - return ast.get_docstring(def_node) == doc - except Exception: - # The source can become invalid or even non-existent (because it - # is re-fetched from the source file) so the above code fail in - # arbitrary ways. - return False - - def psearch(self,pattern,ns_table,ns_search=[], - ignore_case=False,show_all=False, *, list_types=False): - """Search namespaces with wildcards for objects. - - Arguments: - - - pattern: string containing shell-like wildcards to use in namespace - searches and optionally a type specification to narrow the search to - objects of that type. - - - ns_table: dict of name->namespaces for search. - - Optional arguments: - - - ns_search: list of namespace names to include in search. - - - ignore_case(False): make the search case-insensitive. - - - show_all(False): show all names, including those starting with - underscores. - - - list_types(False): list all available object types for object matching. - """ - #print 'ps pattern:<%r>' % pattern # dbg - - # defaults - type_pattern = 'all' - filter = '' - - # list all object types - if list_types: - page.page('\n'.join(sorted(typestr2type))) - return - - cmds = pattern.split() - len_cmds = len(cmds) - if len_cmds == 1: - # Only filter pattern given - filter = cmds[0] - elif len_cmds == 2: - # Both filter and type specified - filter,type_pattern = cmds - else: - raise ValueError('invalid argument string for psearch: <%s>' % - pattern) - - # filter search namespaces - for name in ns_search: - if name not in ns_table: - raise ValueError('invalid namespace <%s>. Valid names: %s' % - (name,ns_table.keys())) - - #print 'type_pattern:',type_pattern # dbg - search_result, namespaces_seen = set(), set() - for ns_name in ns_search: - ns = ns_table[ns_name] - # Normally, locals and globals are the same, so we just check one. 
- if id(ns) in namespaces_seen: - continue - namespaces_seen.add(id(ns)) - tmp_res = list_namespace(ns, type_pattern, filter, - ignore_case=ignore_case, show_all=show_all) - search_result.update(tmp_res) - - page.page('\n'.join(sorted(search_result))) - - -def _render_signature(obj_signature, obj_name) -> str: - """ - This was mostly taken from inspect.Signature.__str__. - Look there for the comments. - The only change is to add linebreaks when this gets too long. - """ - result = [] - pos_only = False - kw_only = True - for param in obj_signature.parameters.values(): - if param.kind == inspect._POSITIONAL_ONLY: - pos_only = True - elif pos_only: - result.append('/') - pos_only = False - - if param.kind == inspect._VAR_POSITIONAL: - kw_only = False - elif param.kind == inspect._KEYWORD_ONLY and kw_only: - result.append('*') - kw_only = False - - result.append(str(param)) - - if pos_only: - result.append('/') - - # add up name, parameters, braces (2), and commas - if len(obj_name) + sum(len(r) + 2 for r in result) > 75: - # This doesn’t fit behind “Signature: ” in an inspect window. - rendered = '{}(\n{})'.format(obj_name, ''.join( - ' {},\n'.format(r) for r in result) - ) - else: - rendered = '{}({})'.format(obj_name, ', '.join(result)) - - if obj_signature.return_annotation is not inspect._empty: - anno = inspect.formatannotation(obj_signature.return_annotation) - rendered += ' -> {}'.format(anno) - - return rendered +# -*- coding: utf-8 -*- +"""Tools for inspecting Python objects. + +Uses syntax highlighting for presenting the various information elements. + +Similar in spirit to the inspect module, but all calls take a name argument to +reference the name under which an object is being read. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
+ +__all__ = ['Inspector','InspectColors'] + +# stdlib modules +import ast +import inspect +from inspect import signature +import linecache +import warnings +import os +from textwrap import dedent +import types +import io as stdlib_io + +from typing import Union + +# IPython's own +from IPython.core import page +from IPython.lib.pretty import pretty +from IPython.testing.skipdoctest import skip_doctest +from IPython.utils import PyColorize +from IPython.utils import openpy +from IPython.utils import py3compat +from IPython.utils.dir2 import safe_hasattr +from IPython.utils.path import compress_user +from IPython.utils.text import indent +from IPython.utils.wildcard import list_namespace +from IPython.utils.wildcard import typestr2type +from IPython.utils.coloransi import TermColors, ColorScheme, ColorSchemeTable +from IPython.utils.py3compat import cast_unicode +from IPython.utils.colorable import Colorable +from IPython.utils.decorators import undoc + +from pygments import highlight +from pygments.lexers import PythonLexer +from pygments.formatters import HtmlFormatter + +def pylight(code): + return highlight(code, PythonLexer(), HtmlFormatter(noclasses=True)) + +# builtin docstrings to ignore +_func_call_docstring = types.FunctionType.__call__.__doc__ +_object_init_docstring = object.__init__.__doc__ +_builtin_type_docstrings = { + inspect.getdoc(t) for t in (types.ModuleType, types.MethodType, + types.FunctionType, property) +} + +_builtin_func_type = type(all) +_builtin_meth_type = type(str.upper) # Bound methods have the same type as builtin functions +#**************************************************************************** +# Builtin color schemes + +Colors = TermColors # just a shorthand + +InspectColors = PyColorize.ANSICodeColors + +#**************************************************************************** +# Auxiliary functions and objects + +# See the messaging spec for the definition of all these fields. This list +# effectively defines the order of display +info_fields = ['type_name', 'base_class', 'string_form', 'namespace', + 'length', 'file', 'definition', 'docstring', 'source', + 'init_definition', 'class_docstring', 'init_docstring', + 'call_def', 'call_docstring', + # These won't be printed but will be used to determine how to + # format the object + 'ismagic', 'isalias', 'isclass', 'found', 'name' + ] + + +def object_info(**kw): + """Make an object info dict with all fields present.""" + infodict = {k:None for k in info_fields} + infodict.update(kw) + return infodict + + +def get_encoding(obj): + """Get encoding for python source file defining obj + + Returns None if obj is not defined in a sourcefile. + """ + ofile = find_file(obj) + # run contents of file through pager starting at line where the object + # is defined, as long as the file isn't binary and is actually on the + # filesystem. + if ofile is None: + return None + elif ofile.endswith(('.so', '.dll', '.pyd')): + return None + elif not os.path.isfile(ofile): + return None + else: + # Print only text files, not extension binaries. Note that + # getsourcelines returns lineno with 1-offset and page() uses + # 0-offset, so we must adjust. + with stdlib_io.open(ofile, 'rb') as buffer: # Tweaked to use io.open for Python 2 + encoding, lines = openpy.detect_encoding(buffer.readline) + return encoding + +def getdoc(obj) -> Union[str,None]: + """Stable wrapper around inspect.getdoc. + + This can't crash because of attribute problems. + + It also attempts to call a getdoc() method on the given object. 
This + allows objects which provide their docstrings via non-standard mechanisms + (like Pyro proxies) to still be inspected by ipython's ? system. + """ + # Allow objects to offer customized documentation via a getdoc method: + try: + ds = obj.getdoc() + except Exception: + pass + else: + if isinstance(ds, str): + return inspect.cleandoc(ds) + docstr = inspect.getdoc(obj) + return docstr + + +def getsource(obj, oname='') -> Union[str,None]: + """Wrapper around inspect.getsource. + + This can be modified by other projects to provide customized source + extraction. + + Parameters + ---------- + obj : object + an object whose source code we will attempt to extract + oname : str + (optional) a name under which the object is known + + Returns + ------- + src : unicode or None + + """ + + if isinstance(obj, property): + sources = [] + for attrname in ['fget', 'fset', 'fdel']: + fn = getattr(obj, attrname) + if fn is not None: + encoding = get_encoding(fn) + oname_prefix = ('%s.' % oname) if oname else '' + sources.append(''.join(('# ', oname_prefix, attrname))) + if inspect.isfunction(fn): + sources.append(dedent(getsource(fn))) + else: + # Default str/repr only prints function name, + # pretty.pretty prints module name too. + sources.append( + '%s%s = %s\n' % (oname_prefix, attrname, pretty(fn)) + ) + if sources: + return '\n'.join(sources) + else: + return None + + else: + # Get source for non-property objects. + + obj = _get_wrapped(obj) + + try: + src = inspect.getsource(obj) + except TypeError: + # The object itself provided no meaningful source, try looking for + # its class definition instead. + try: + src = inspect.getsource(obj.__class__) + except (OSError, TypeError): + return None + except OSError: + return None + + return src + + +def is_simple_callable(obj): + """True if obj is a function ()""" + return (inspect.isfunction(obj) or inspect.ismethod(obj) or \ + isinstance(obj, _builtin_func_type) or isinstance(obj, _builtin_meth_type)) + +@undoc +def getargspec(obj): + """Wrapper around :func:`inspect.getfullargspec` + + In addition to functions and methods, this can also handle objects with a + ``__call__`` attribute. + + DEPRECATED: Deprecated since 7.10. Do not use, will be removed. + """ + + warnings.warn('`getargspec` function is deprecated as of IPython 7.10' + 'and will be removed in future versions.', DeprecationWarning, stacklevel=2) + + if safe_hasattr(obj, '__call__') and not is_simple_callable(obj): + obj = obj.__call__ + + return inspect.getfullargspec(obj) + +@undoc +def format_argspec(argspec): + """Format argspect, convenience wrapper around inspect's. + + This takes a dict instead of ordered arguments and calls + inspect.format_argspec with the arguments in the necessary order. + + DEPRECATED: Do not use; will be removed in future versions. + """ + + warnings.warn('`format_argspec` function is deprecated as of IPython 7.10' + 'and will be removed in future versions.', DeprecationWarning, stacklevel=2) + + + return inspect.formatargspec(argspec['args'], argspec['varargs'], + argspec['varkw'], argspec['defaults']) + +@undoc +def call_tip(oinfo, format_call=True): + """DEPRECATED. Extract call tip data from an oinfo dict. 
+ """ + warnings.warn('`call_tip` function is deprecated as of IPython 6.0' + 'and will be removed in future versions.', DeprecationWarning, stacklevel=2) + # Get call definition + argspec = oinfo.get('argspec') + if argspec is None: + call_line = None + else: + # Callable objects will have 'self' as their first argument, prune + # it out if it's there for clarity (since users do *not* pass an + # extra first argument explicitly). + try: + has_self = argspec['args'][0] == 'self' + except (KeyError, IndexError): + pass + else: + if has_self: + argspec['args'] = argspec['args'][1:] + + call_line = oinfo['name']+format_argspec(argspec) + + # Now get docstring. + # The priority is: call docstring, constructor docstring, main one. + doc = oinfo.get('call_docstring') + if doc is None: + doc = oinfo.get('init_docstring') + if doc is None: + doc = oinfo.get('docstring','') + + return call_line, doc + + +def _get_wrapped(obj): + """Get the original object if wrapped in one or more @decorators + + Some objects automatically construct similar objects on any unrecognised + attribute access (e.g. unittest.mock.call). To protect against infinite loops, + this will arbitrarily cut off after 100 levels of obj.__wrapped__ + attribute access. --TK, Jan 2016 + """ + orig_obj = obj + i = 0 + while safe_hasattr(obj, '__wrapped__'): + obj = obj.__wrapped__ + i += 1 + if i > 100: + # __wrapped__ is probably a lie, so return the thing we started with + return orig_obj + return obj + +def find_file(obj) -> str: + """Find the absolute path to the file where an object was defined. + + This is essentially a robust wrapper around `inspect.getabsfile`. + + Returns None if no file can be found. + + Parameters + ---------- + obj : any Python object + + Returns + ------- + fname : str + The absolute path to the file where the object was defined. + """ + obj = _get_wrapped(obj) + + fname = None + try: + fname = inspect.getabsfile(obj) + except TypeError: + # For an instance, the file that matters is where its class was + # declared. + try: + fname = inspect.getabsfile(obj.__class__) + except (OSError, TypeError): + # Can happen for builtins + pass + except OSError: + pass + + return cast_unicode(fname) + + +def find_source_lines(obj): + """Find the line number in a file where an object was defined. + + This is essentially a robust wrapper around `inspect.getsourcelines`. + + Returns None if no file can be found. + + Parameters + ---------- + obj : any Python object + + Returns + ------- + lineno : int + The line number where the object definition starts. + """ + obj = _get_wrapped(obj) + + try: + lineno = inspect.getsourcelines(obj)[1] + except TypeError: + # For instances, try the class object like getsource() does + try: + lineno = inspect.getsourcelines(obj.__class__)[1] + except (OSError, TypeError): + return None + except OSError: + return None + + return lineno + +class Inspector(Colorable): + + def __init__(self, color_table=InspectColors, + code_color_table=PyColorize.ANSICodeColors, + scheme=None, + str_detail_level=0, + parent=None, config=None): + super(Inspector, self).__init__(parent=parent, config=config) + self.color_table = color_table + self.parser = PyColorize.Parser(out='str', parent=self, style=scheme) + self.format = self.parser.format + self.str_detail_level = str_detail_level + self.set_active_scheme(scheme) + + def _getdef(self,obj,oname='') -> Union[str,None]: + """Return the call signature for any callable object. 
+ + If any exception is generated, None is returned instead and the + exception is suppressed.""" + try: + return _render_signature(signature(obj), oname) + except: + return None + + def __head(self,h) -> str: + """Return a header string with proper colors.""" + return '%s%s%s' % (self.color_table.active_colors.header,h, + self.color_table.active_colors.normal) + + def set_active_scheme(self, scheme): + if scheme is not None: + self.color_table.set_active_scheme(scheme) + self.parser.color_table.set_active_scheme(scheme) + + def noinfo(self, msg, oname): + """Generic message when no information is found.""" + print('No %s found' % msg, end=' ') + if oname: + print('for %s' % oname) + else: + print() + + def pdef(self, obj, oname=''): + """Print the call signature for any callable object. + + If the object is a class, print the constructor information.""" + + if not callable(obj): + print('Object is not callable.') + return + + header = '' + + if inspect.isclass(obj): + header = self.__head('Class constructor information:\n') + + + output = self._getdef(obj,oname) + if output is None: + self.noinfo('definition header',oname) + else: + print(header,self.format(output), end=' ') + + # In Python 3, all classes are new-style, so they all have __init__. + @skip_doctest + def pdoc(self, obj, oname='', formatter=None): + """Print the docstring for any object. + + Optional: + -formatter: a function to run the docstring through for specially + formatted docstrings. + + Examples + -------- + + In [1]: class NoInit: + ...: pass + + In [2]: class NoDoc: + ...: def __init__(self): + ...: pass + + In [3]: %pdoc NoDoc + No documentation found for NoDoc + + In [4]: %pdoc NoInit + No documentation found for NoInit + + In [5]: obj = NoInit() + + In [6]: %pdoc obj + No documentation found for obj + + In [5]: obj2 = NoDoc() + + In [6]: %pdoc obj2 + No documentation found for obj2 + """ + + head = self.__head # For convenience + lines = [] + ds = getdoc(obj) + if formatter: + ds = formatter(ds).get('plain/text', ds) + if ds: + lines.append(head("Class docstring:")) + lines.append(indent(ds)) + if inspect.isclass(obj) and hasattr(obj, '__init__'): + init_ds = getdoc(obj.__init__) + if init_ds is not None: + lines.append(head("Init docstring:")) + lines.append(indent(init_ds)) + elif hasattr(obj,'__call__'): + call_ds = getdoc(obj.__call__) + if call_ds: + lines.append(head("Call docstring:")) + lines.append(indent(call_ds)) + + if not lines: + self.noinfo('documentation',oname) + else: + page.page('\n'.join(lines)) + + def psource(self, obj, oname=''): + """Print the source code for an object.""" + + # Flush the source cache because inspect can return out-of-date source + linecache.checkcache() + try: + src = getsource(obj, oname=oname) + except Exception: + src = None + + if src is None: + self.noinfo('source', oname) + else: + page.page(self.format(src)) + + def pfile(self, obj, oname=''): + """Show the whole file where an object was defined.""" + + lineno = find_source_lines(obj) + if lineno is None: + self.noinfo('file', oname) + return + + ofile = find_file(obj) + # run contents of file through pager starting at line where the object + # is defined, as long as the file isn't binary and is actually on the + # filesystem. + if ofile.endswith(('.so', '.dll', '.pyd')): + print('File %r is binary, not printing.' % ofile) + elif not os.path.isfile(ofile): + print('File %r does not exist, not printing.' % ofile) + else: + # Print only text files, not extension binaries. 
Note that + # getsourcelines returns lineno with 1-offset and page() uses + # 0-offset, so we must adjust. + page.page(self.format(openpy.read_py_file(ofile, skip_encoding_cookie=False)), lineno - 1) + + + def _mime_format(self, text:str, formatter=None) -> dict: + """Return a mime bundle representation of the input text. + + - if `formatter` is None, the returned mime bundle has + a `text/plain` field, with the input text. + a `text/html` field with a `<pre>` tag containing the input text. + + - if `formatter` is not None, it must be a callable transforming the + input text into a mime bundle. Default values for `text/plain` and + `text/html` representations are the ones described above. + + Note: + + Formatters returning strings are supported but this behavior is deprecated. + + """ + defaults = { + 'text/plain': text, + 'text/html': '<pre>' + text + '</pre>' + } + + if formatter is None: + return defaults + else: + formatted = formatter(text) + + if not isinstance(formatted, dict): + # Handle the deprecated behavior of a formatter returning + # a string instead of a mime bundle. + return { + 'text/plain': formatted, + 'text/html': '<pre>' + formatted + '</pre>' + } + + else: + return dict(defaults, **formatted) + + + def format_mime(self, bundle): + + text_plain = bundle['text/plain'] + + text = '' + heads, bodies = list(zip(*text_plain)) + _len = max(len(h) for h in heads) + + for head, body in zip(heads, bodies): + body = body.strip('\n') + delim = '\n' if '\n' in body else ' ' + text += self.__head(head+':') + (_len - len(head))*' ' +delim + body +'\n' + + bundle['text/plain'] = text + return bundle + + def _get_info(self, obj, oname='', formatter=None, info=None, detail_level=0): + """Retrieve an info dict and format it. + + Parameters + ========== + + obj: any + Object to inspect and return info from + oname: str (default: ''): + Name of the variable pointing to `obj`. + formatter: callable + info: + already computed information + detail_level: integer + Granularity of detail level, if set to 1, give more information. 
+ """ + + info = self._info(obj, oname=oname, info=info, detail_level=detail_level) + + _mime = { + 'text/plain': [], + 'text/html': '', + } + + def append_field(bundle, title:str, key:str, formatter=None): + field = info[key] + if field is not None: + formatted_field = self._mime_format(field, formatter) + bundle['text/plain'].append((title, formatted_field['text/plain'])) + bundle['text/html'] += '<h1>' + title + '</h1>\n' + formatted_field['text/html'] + '\n' + + def code_formatter(text): + return { + 'text/plain': self.format(text), + 'text/html': pylight(text) + } + + if info['isalias']: + append_field(_mime, 'Repr', 'string_form') + + elif info['ismagic']: + if detail_level > 0: + append_field(_mime, 'Source', 'source', code_formatter) + else: + append_field(_mime, 'Docstring', 'docstring', formatter) + append_field(_mime, 'File', 'file') + + elif info['isclass'] or is_simple_callable(obj): + # Functions, methods, classes + append_field(_mime, 'Signature', 'definition', code_formatter) + append_field(_mime, 'Init signature', 'init_definition', code_formatter) + append_field(_mime, 'Docstring', 'docstring', formatter) + if detail_level > 0 and info['source']: + append_field(_mime, 'Source', 'source', code_formatter) + else: + append_field(_mime, 'Init docstring', 'init_docstring', formatter) + + append_field(_mime, 'File', 'file') + append_field(_mime, 'Type', 'type_name') + append_field(_mime, 'Subclasses', 'subclasses') + + else: + # General Python objects + append_field(_mime, 'Signature', 'definition', code_formatter) + append_field(_mime, 'Call signature', 'call_def', code_formatter) + append_field(_mime, 'Type', 'type_name') + append_field(_mime, 'String form', 'string_form') + + # Namespace + if info['namespace'] != 'Interactive': + append_field(_mime, 'Namespace', 'namespace') + + append_field(_mime, 'Length', 'length') + append_field(_mime, 'File', 'file') + + # Source or docstring, depending on detail level and whether + # source found. + if detail_level > 0 and info['source']: + append_field(_mime, 'Source', 'source', code_formatter) + else: + append_field(_mime, 'Docstring', 'docstring', formatter) + + append_field(_mime, 'Class docstring', 'class_docstring', formatter) + append_field(_mime, 'Init docstring', 'init_docstring', formatter) + append_field(_mime, 'Call docstring', 'call_docstring', formatter) + + + return self.format_mime(_mime) + + def pinfo(self, obj, oname='', formatter=None, info=None, detail_level=0, enable_html_pager=True): + """Show detailed information about an object. + + Optional arguments: + + - oname: name of the variable pointing to the object. + + - formatter: callable (optional) + A special formatter for docstrings. + + The formatter is a callable that takes a string as an input + and returns either a formatted string or a mime type bundle + in the form of a dictionary. + + Although the support of custom formatter returning a string + instead of a mime type bundle is deprecated. + + - info: a structure with some information fields which may have been + precomputed already. + + - detail_level: if set to 1, more information is given. + """ + info = self._get_info(obj, oname, formatter, info, detail_level) + if not enable_html_pager: + del info['text/html'] + page.page(info) + + def info(self, obj, oname='', formatter=None, info=None, detail_level=0): + """DEPRECATED. Compute a dict with detailed information about an object. 
+ """ + if formatter is not None: + warnings.warn('The `formatter` keyword argument to `Inspector.info`' + 'is deprecated as of IPython 5.0 and will have no effects.', + DeprecationWarning, stacklevel=2) + return self._info(obj, oname=oname, info=info, detail_level=detail_level) + + def _info(self, obj, oname='', info=None, detail_level=0) -> dict: + """Compute a dict with detailed information about an object. + + Parameters + ========== + + obj: any + An object to find information about + oname: str (default: ''): + Name of the variable pointing to `obj`. + info: (default: None) + A struct (dict like with attr access) with some information fields + which may have been precomputed already. + detail_level: int (default:0) + If set to 1, more information is given. + + Returns + ======= + + An object info dict with known fields from `info_fields`. Keys are + strings, values are string or None. + """ + + if info is None: + ismagic = False + isalias = False + ospace = '' + else: + ismagic = info.ismagic + isalias = info.isalias + ospace = info.namespace + + # Get docstring, special-casing aliases: + if isalias: + if not callable(obj): + try: + ds = "Alias to the system command:\n %s" % obj[1] + except: + ds = "Alias: " + str(obj) + else: + ds = "Alias to " + str(obj) + if obj.__doc__: + ds += "\nDocstring:\n" + obj.__doc__ + else: + ds = getdoc(obj) + if ds is None: + ds = '<no docstring>' + + # store output in a dict, we initialize it here and fill it as we go + out = dict(name=oname, found=True, isalias=isalias, ismagic=ismagic, subclasses=None) + + string_max = 200 # max size of strings to show (snipped if longer) + shalf = int((string_max - 5) / 2) + + if ismagic: + out['type_name'] = 'Magic function' + elif isalias: + out['type_name'] = 'System alias' + else: + out['type_name'] = type(obj).__name__ + + try: + bclass = obj.__class__ + out['base_class'] = str(bclass) + except: + pass + + # String form, but snip if too long in ? form (full in ??) + if detail_level >= self.str_detail_level: + try: + ostr = str(obj) + str_head = 'string_form' + if not detail_level and len(ostr)>string_max: + ostr = ostr[:shalf] + ' <...> ' + ostr[-shalf:] + ostr = ("\n" + " " * len(str_head.expandtabs())).\ + join(q.strip() for q in ostr.split("\n")) + out[str_head] = ostr + except: + pass + + if ospace: + out['namespace'] = ospace + + # Length (for strings and lists) + try: + out['length'] = str(len(obj)) + except Exception: + pass + + # Filename where object was defined + binary_file = False + fname = find_file(obj) + if fname is None: + # if anything goes wrong, we don't want to show source, so it's as + # if the file was binary + binary_file = True + else: + if fname.endswith(('.so', '.dll', '.pyd')): + binary_file = True + elif fname.endswith('<string>'): + fname = 'Dynamically generated function. No source code available.' + out['file'] = compress_user(fname) + + # Original source code for a callable, class or property. + if detail_level: + # Flush the source cache because inspect can return out-of-date + # source + linecache.checkcache() + try: + if isinstance(obj, property) or not binary_file: + src = getsource(obj, oname) + if src is not None: + src = src.rstrip() + out['source'] = src + + except Exception: + pass + + # Add docstring only if no source is to be shown (avoid repetitions). 
+ if ds and not self._source_contains_docstring(out.get('source'), ds): + out['docstring'] = ds + + # Constructor docstring for classes + if inspect.isclass(obj): + out['isclass'] = True + + # get the init signature: + try: + init_def = self._getdef(obj, oname) + except AttributeError: + init_def = None + + # get the __init__ docstring + try: + obj_init = obj.__init__ + except AttributeError: + init_ds = None + else: + if init_def is None: + # Get signature from init if top-level sig failed. + # Can happen for built-in types (list, etc.). + try: + init_def = self._getdef(obj_init, oname) + except AttributeError: + pass + init_ds = getdoc(obj_init) + # Skip Python's auto-generated docstrings + if init_ds == _object_init_docstring: + init_ds = None + + if init_def: + out['init_definition'] = init_def + + if init_ds: + out['init_docstring'] = init_ds + + names = [sub.__name__ for sub in type.__subclasses__(obj)] + if len(names) < 10: + all_names = ', '.join(names) + else: + all_names = ', '.join(names[:10]+['...']) + out['subclasses'] = all_names + # and class docstring for instances: + else: + # reconstruct the function definition and print it: + defln = self._getdef(obj, oname) + if defln: + out['definition'] = defln + + # First, check whether the instance docstring is identical to the + # class one, and print it separately if they don't coincide. In + # most cases they will, but it's nice to print all the info for + # objects which use instance-customized docstrings. + if ds: + try: + cls = getattr(obj,'__class__') + except: + class_ds = None + else: + class_ds = getdoc(cls) + # Skip Python's auto-generated docstrings + if class_ds in _builtin_type_docstrings: + class_ds = None + if class_ds and ds != class_ds: + out['class_docstring'] = class_ds + + # Next, try to show constructor docstrings + try: + init_ds = getdoc(obj.__init__) + # Skip Python's auto-generated docstrings + if init_ds == _object_init_docstring: + init_ds = None + except AttributeError: + init_ds = None + if init_ds: + out['init_docstring'] = init_ds + + # Call form docstring for callable instances + if safe_hasattr(obj, '__call__') and not is_simple_callable(obj): + call_def = self._getdef(obj.__call__, oname) + if call_def and (call_def != out.get('definition')): + # it may never be the case that call def and definition differ, + # but don't include the same signature twice + out['call_def'] = call_def + call_ds = getdoc(obj.__call__) + # Skip Python's auto-generated docstrings + if call_ds == _func_call_docstring: + call_ds = None + if call_ds: + out['call_docstring'] = call_ds + + return object_info(**out) + + @staticmethod + def _source_contains_docstring(src, doc): + """ + Check whether the source *src* contains the docstring *doc*. + + This is is helper function to skip displaying the docstring if the + source already contains it, avoiding repetition of information. + """ + try: + def_node, = ast.parse(dedent(src)).body + return ast.get_docstring(def_node) == doc + except Exception: + # The source can become invalid or even non-existent (because it + # is re-fetched from the source file) so the above code fail in + # arbitrary ways. + return False + + def psearch(self,pattern,ns_table,ns_search=[], + ignore_case=False,show_all=False, *, list_types=False): + """Search namespaces with wildcards for objects. + + Arguments: + + - pattern: string containing shell-like wildcards to use in namespace + searches and optionally a type specification to narrow the search to + objects of that type. 
+ + - ns_table: dict of name->namespaces for search. + + Optional arguments: + + - ns_search: list of namespace names to include in search. + + - ignore_case(False): make the search case-insensitive. + + - show_all(False): show all names, including those starting with + underscores. + + - list_types(False): list all available object types for object matching. + """ + #print 'ps pattern:<%r>' % pattern # dbg + + # defaults + type_pattern = 'all' + filter = '' + + # list all object types + if list_types: + page.page('\n'.join(sorted(typestr2type))) + return + + cmds = pattern.split() + len_cmds = len(cmds) + if len_cmds == 1: + # Only filter pattern given + filter = cmds[0] + elif len_cmds == 2: + # Both filter and type specified + filter,type_pattern = cmds + else: + raise ValueError('invalid argument string for psearch: <%s>' % + pattern) + + # filter search namespaces + for name in ns_search: + if name not in ns_table: + raise ValueError('invalid namespace <%s>. Valid names: %s' % + (name,ns_table.keys())) + + #print 'type_pattern:',type_pattern # dbg + search_result, namespaces_seen = set(), set() + for ns_name in ns_search: + ns = ns_table[ns_name] + # Normally, locals and globals are the same, so we just check one. + if id(ns) in namespaces_seen: + continue + namespaces_seen.add(id(ns)) + tmp_res = list_namespace(ns, type_pattern, filter, + ignore_case=ignore_case, show_all=show_all) + search_result.update(tmp_res) + + page.page('\n'.join(sorted(search_result))) + + +def _render_signature(obj_signature, obj_name) -> str: + """ + This was mostly taken from inspect.Signature.__str__. + Look there for the comments. + The only change is to add linebreaks when this gets too long. + """ + result = [] + pos_only = False + kw_only = True + for param in obj_signature.parameters.values(): + if param.kind == inspect._POSITIONAL_ONLY: + pos_only = True + elif pos_only: + result.append('/') + pos_only = False + + if param.kind == inspect._VAR_POSITIONAL: + kw_only = False + elif param.kind == inspect._KEYWORD_ONLY and kw_only: + result.append('*') + kw_only = False + + result.append(str(param)) + + if pos_only: + result.append('/') + + # add up name, parameters, braces (2), and commas + if len(obj_name) + sum(len(r) + 2 for r in result) > 75: + # This doesn’t fit behind “Signature: ” in an inspect window. + rendered = '{}(\n{})'.format(obj_name, ''.join( + ' {},\n'.format(r) for r in result) + ) + else: + rendered = '{}({})'.format(obj_name, ', '.join(result)) + + if obj_signature.return_annotation is not inspect._empty: + anno = inspect.formatannotation(obj_signature.return_annotation) + rendered += ' -> {}'.format(anno) + + return rendered diff --git a/contrib/python/ipython/py3/IPython/core/page.py b/contrib/python/ipython/py3/IPython/core/page.py index 22567265449..ed16b617812 100644 --- a/contrib/python/ipython/py3/IPython/core/page.py +++ b/contrib/python/ipython/py3/IPython/core/page.py @@ -1,343 +1,343 @@ -# encoding: utf-8 -""" -Paging capabilities for IPython.core - -Notes ------ - -For now this uses IPython hooks, so it can't be in IPython.utils. If we can get -rid of that dependency, we could move it there. ------ -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. 
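Stepping back to the `_render_signature` helper that closes the oinspect.py hunk above: a rough stand-alone approximation using only the stdlib. It skips the 75-column wrapping and the '/' and '*' markers the real helper adds, and `long_named_function` is just an invented example:

    import inspect

    def long_named_function(first: int, second: str = "default", *rest,
                            flag: bool = False) -> str:
        return second * first

    sig = inspect.signature(long_named_function)
    rendered = "{}({})".format(
        "long_named_function",
        ", ".join(str(p) for p in sig.parameters.values()))
    if sig.return_annotation is not inspect.Signature.empty:
        rendered += " -> " + inspect.formatannotation(sig.return_annotation)
    print(rendered)
    # long_named_function(first: int, second: str = 'default', *rest, flag: bool = False) -> str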
- - -import os -import io -import re -import sys -import tempfile -import subprocess - -from io import UnsupportedOperation - -from IPython import get_ipython -from IPython.core.display import display -from IPython.core.error import TryNext -from IPython.utils.data import chop -from IPython.utils.process import system -from IPython.utils.terminal import get_terminal_size -from IPython.utils import py3compat - - -def display_page(strng, start=0, screen_lines=25): - """Just display, no paging. screen_lines is ignored.""" - if isinstance(strng, dict): - data = strng - else: - if start: - strng = u'\n'.join(strng.splitlines()[start:]) - data = { 'text/plain': strng } - display(data, raw=True) - - -def as_hook(page_func): - """Wrap a pager func to strip the `self` arg - - so it can be called as a hook. - """ - return lambda self, *args, **kwargs: page_func(*args, **kwargs) - - -esc_re = re.compile(r"(\x1b[^m]+m)") - -def page_dumb(strng, start=0, screen_lines=25): - """Very dumb 'pager' in Python, for when nothing else works. - - Only moves forward, same interface as page(), except for pager_cmd and - mode. - """ - if isinstance(strng, dict): - strng = strng.get('text/plain', '') - out_ln = strng.splitlines()[start:] - screens = chop(out_ln,screen_lines-1) - if len(screens) == 1: - print(os.linesep.join(screens[0])) - else: - last_escape = "" - for scr in screens[0:-1]: - hunk = os.linesep.join(scr) - print(last_escape + hunk) - if not page_more(): - return - esc_list = esc_re.findall(hunk) - if len(esc_list) > 0: - last_escape = esc_list[-1] - print(last_escape + os.linesep.join(screens[-1])) - -def _detect_screen_size(screen_lines_def): - """Attempt to work out the number of lines on the screen. - - This is called by page(). It can raise an error (e.g. when run in the - test suite), so it's separated out so it can easily be called in a try block. - """ - TERM = os.environ.get('TERM',None) - if not((TERM=='xterm' or TERM=='xterm-color') and sys.platform != 'sunos5'): - # curses causes problems on many terminals other than xterm, and - # some termios calls lock up on Sun OS5. - return screen_lines_def - - try: - import termios - import curses - except ImportError: - return screen_lines_def - - # There is a bug in curses, where *sometimes* it fails to properly - # initialize, and then after the endwin() call is made, the - # terminal is left in an unusable state. Rather than trying to - # check every time for this (by requesting and comparing termios - # flags each time), we just save the initial terminal state and - # unconditionally reset it every time. It's cheaper than making - # the checks. - try: - term_flags = termios.tcgetattr(sys.stdout) - except termios.error as err: - # can fail on Linux 2.6, pager_page will catch the TypeError - raise TypeError('termios error: {0}'.format(err)) - - try: - scr = curses.initscr() - except AttributeError: - # Curses on Solaris may not be complete, so we can't use it there - return screen_lines_def - - screen_lines_real,screen_cols = scr.getmaxyx() - curses.endwin() - - # Restore terminal state in case endwin() didn't. - termios.tcsetattr(sys.stdout,termios.TCSANOW,term_flags) - # Now we have what we needed: the screen size in rows/columns - return screen_lines_real - #print '***Screen size:',screen_lines_real,'lines x',\ - #screen_cols,'columns.' # dbg - -def pager_page(strng, start=0, screen_lines=0, pager_cmd=None): - """Display a string, piping through a pager after a certain length. 
- - strng can be a mime-bundle dict, supplying multiple representations, - keyed by mime-type. - - The screen_lines parameter specifies the number of *usable* lines of your - terminal screen (total lines minus lines you need to reserve to show other - information). - - If you set screen_lines to a number <=0, page() will try to auto-determine - your screen size and will only use up to (screen_size+screen_lines) for - printing, paging after that. That is, if you want auto-detection but need - to reserve the bottom 3 lines of the screen, use screen_lines = -3, and for - auto-detection without any lines reserved simply use screen_lines = 0. - - If a string won't fit in the allowed lines, it is sent through the - specified pager command. If none given, look for PAGER in the environment, - and ultimately default to less. - - If no system pager works, the string is sent through a 'dumb pager' - written in python, very simplistic. - """ - - # for compatibility with mime-bundle form: - if isinstance(strng, dict): - strng = strng['text/plain'] - - # Ugly kludge, but calling curses.initscr() flat out crashes in emacs - TERM = os.environ.get('TERM','dumb') - if TERM in ['dumb','emacs'] and os.name != 'nt': - print(strng) - return - # chop off the topmost part of the string we don't want to see - str_lines = strng.splitlines()[start:] - str_toprint = os.linesep.join(str_lines) - num_newlines = len(str_lines) - len_str = len(str_toprint) - - # Dumb heuristics to guesstimate number of on-screen lines the string - # takes. Very basic, but good enough for docstrings in reasonable - # terminals. If someone later feels like refining it, it's not hard. - numlines = max(num_newlines,int(len_str/80)+1) - - screen_lines_def = get_terminal_size()[1] - - # auto-determine screen size - if screen_lines <= 0: - try: - screen_lines += _detect_screen_size(screen_lines_def) - except (TypeError, UnsupportedOperation): - print(str_toprint) - return - - #print 'numlines',numlines,'screenlines',screen_lines # dbg - if numlines <= screen_lines : - #print '*** normal print' # dbg - print(str_toprint) - else: - # Try to open pager and default to internal one if that fails. - # All failure modes are tagged as 'retval=1', to match the return - # value of a failed system command. If any intermediate attempt - # sets retval to 1, at the end we resort to our own page_dumb() pager. - pager_cmd = get_pager_cmd(pager_cmd) - pager_cmd += ' ' + get_pager_start(pager_cmd,start) - if os.name == 'nt': - if pager_cmd.startswith('type'): - # The default WinXP 'type' command is failing on complex strings. 
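The "dumb heuristics" comment above amounts to a one-liner; here is a stand-alone sketch (`guess_screen_lines` is a hypothetical name):

    def guess_screen_lines(text, width=80):
        # At least one row per physical line, and at least one row per
        # `width` characters of total length -- the same guesstimate pager_page uses.
        lines = text.splitlines()
        return max(len(lines), len("\n".join(lines)) // width + 1)

    print(guess_screen_lines("x" * 400))    # 6 rows for one 400-character line
    print(guess_screen_lines("a\nb\nc"))    # 3 rows for three short lines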
- retval = 1 - else: - fd, tmpname = tempfile.mkstemp('.txt') - try: - os.close(fd) - with open(tmpname, 'wt') as tmpfile: - tmpfile.write(strng) - cmd = "%s < %s" % (pager_cmd, tmpname) - # tmpfile needs to be closed for windows - if os.system(cmd): - retval = 1 - else: - retval = None - finally: - os.remove(tmpname) - else: - try: - retval = None - # Emulate os.popen, but redirect stderr - proc = subprocess.Popen(pager_cmd, - shell=True, - stdin=subprocess.PIPE, - stderr=subprocess.DEVNULL - ) - pager = os._wrap_close(io.TextIOWrapper(proc.stdin), proc) - try: - pager_encoding = pager.encoding or sys.stdout.encoding - pager.write(strng) - finally: - retval = pager.close() - except IOError as msg: # broken pipe when user quits - if msg.args == (32, 'Broken pipe'): - retval = None - else: - retval = 1 - except OSError: - # Other strange problems, sometimes seen in Win2k/cygwin - retval = 1 - if retval is not None: - page_dumb(strng,screen_lines=screen_lines) - - -def page(data, start=0, screen_lines=0, pager_cmd=None): - """Display content in a pager, piping through a pager after a certain length. - - data can be a mime-bundle dict, supplying multiple representations, - keyed by mime-type, or text. - - Pager is dispatched via the `show_in_pager` IPython hook. - If no hook is registered, `pager_page` will be used. - """ - # Some routines may auto-compute start offsets incorrectly and pass a - # negative value. Offset to 0 for robustness. - start = max(0, start) - - # first, try the hook - ip = get_ipython() - if ip: - try: - ip.hooks.show_in_pager(data, start=start, screen_lines=screen_lines) - return - except TryNext: - pass - - # fallback on default pager - return pager_page(data, start, screen_lines, pager_cmd) - - -def page_file(fname, start=0, pager_cmd=None): - """Page a file, using an optional pager command and starting line. - """ - - pager_cmd = get_pager_cmd(pager_cmd) - pager_cmd += ' ' + get_pager_start(pager_cmd,start) - - try: - if os.environ['TERM'] in ['emacs','dumb']: - raise EnvironmentError - system(pager_cmd + ' ' + fname) - except: - try: - if start > 0: - start -= 1 - page(open(fname).read(),start) - except: - print('Unable to show file',repr(fname)) - - -def get_pager_cmd(pager_cmd=None): - """Return a pager command. - - Makes some attempts at finding an OS-correct one. - """ - if os.name == 'posix': - default_pager_cmd = 'less -R' # -R for color control sequences - elif os.name in ['nt','dos']: - default_pager_cmd = 'type' - - if pager_cmd is None: - try: - pager_cmd = os.environ['PAGER'] - except: - pager_cmd = default_pager_cmd - - if pager_cmd == 'less' and '-r' not in os.environ.get('LESS', '').lower(): - pager_cmd += ' -R' - - return pager_cmd - - -def get_pager_start(pager, start): - """Return the string for paging files with an offset. - - This is the '+N' argument which less and more (under Unix) accept. 
- """ - - if pager in ['less','more']: - if start: - start_string = '+' + str(start) - else: - start_string = '' - else: - start_string = '' - return start_string - - -# (X)emacs on win32 doesn't like to be bypassed with msvcrt.getch() -if os.name == 'nt' and os.environ.get('TERM','dumb') != 'emacs': - import msvcrt - def page_more(): - """ Smart pausing between pages - - @return: True if need print more lines, False if quit - """ - sys.stdout.write('---Return to continue, q to quit--- ') - ans = msvcrt.getwch() - if ans in ("q", "Q"): - result = False - else: - result = True - sys.stdout.write("\b"*37 + " "*37 + "\b"*37) - return result -else: - def page_more(): - ans = py3compat.input('---Return to continue, q to quit--- ') - if ans.lower().startswith('q'): - return False - else: - return True +# encoding: utf-8 +""" +Paging capabilities for IPython.core + +Notes +----- + +For now this uses IPython hooks, so it can't be in IPython.utils. If we can get +rid of that dependency, we could move it there. +----- +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + + +import os +import io +import re +import sys +import tempfile +import subprocess + +from io import UnsupportedOperation + +from IPython import get_ipython +from IPython.core.display import display +from IPython.core.error import TryNext +from IPython.utils.data import chop +from IPython.utils.process import system +from IPython.utils.terminal import get_terminal_size +from IPython.utils import py3compat + + +def display_page(strng, start=0, screen_lines=25): + """Just display, no paging. screen_lines is ignored.""" + if isinstance(strng, dict): + data = strng + else: + if start: + strng = u'\n'.join(strng.splitlines()[start:]) + data = { 'text/plain': strng } + display(data, raw=True) + + +def as_hook(page_func): + """Wrap a pager func to strip the `self` arg + + so it can be called as a hook. + """ + return lambda self, *args, **kwargs: page_func(*args, **kwargs) + + +esc_re = re.compile(r"(\x1b[^m]+m)") + +def page_dumb(strng, start=0, screen_lines=25): + """Very dumb 'pager' in Python, for when nothing else works. + + Only moves forward, same interface as page(), except for pager_cmd and + mode. + """ + if isinstance(strng, dict): + strng = strng.get('text/plain', '') + out_ln = strng.splitlines()[start:] + screens = chop(out_ln,screen_lines-1) + if len(screens) == 1: + print(os.linesep.join(screens[0])) + else: + last_escape = "" + for scr in screens[0:-1]: + hunk = os.linesep.join(scr) + print(last_escape + hunk) + if not page_more(): + return + esc_list = esc_re.findall(hunk) + if len(esc_list) > 0: + last_escape = esc_list[-1] + print(last_escape + os.linesep.join(screens[-1])) + +def _detect_screen_size(screen_lines_def): + """Attempt to work out the number of lines on the screen. + + This is called by page(). It can raise an error (e.g. when run in the + test suite), so it's separated out so it can easily be called in a try block. + """ + TERM = os.environ.get('TERM',None) + if not((TERM=='xterm' or TERM=='xterm-color') and sys.platform != 'sunos5'): + # curses causes problems on many terminals other than xterm, and + # some termios calls lock up on Sun OS5. + return screen_lines_def + + try: + import termios + import curses + except ImportError: + return screen_lines_def + + # There is a bug in curses, where *sometimes* it fails to properly + # initialize, and then after the endwin() call is made, the + # terminal is left in an unusable state. 
Rather than trying to + # check every time for this (by requesting and comparing termios + # flags each time), we just save the initial terminal state and + # unconditionally reset it every time. It's cheaper than making + # the checks. + try: + term_flags = termios.tcgetattr(sys.stdout) + except termios.error as err: + # can fail on Linux 2.6, pager_page will catch the TypeError + raise TypeError('termios error: {0}'.format(err)) + + try: + scr = curses.initscr() + except AttributeError: + # Curses on Solaris may not be complete, so we can't use it there + return screen_lines_def + + screen_lines_real,screen_cols = scr.getmaxyx() + curses.endwin() + + # Restore terminal state in case endwin() didn't. + termios.tcsetattr(sys.stdout,termios.TCSANOW,term_flags) + # Now we have what we needed: the screen size in rows/columns + return screen_lines_real + #print '***Screen size:',screen_lines_real,'lines x',\ + #screen_cols,'columns.' # dbg + +def pager_page(strng, start=0, screen_lines=0, pager_cmd=None): + """Display a string, piping through a pager after a certain length. + + strng can be a mime-bundle dict, supplying multiple representations, + keyed by mime-type. + + The screen_lines parameter specifies the number of *usable* lines of your + terminal screen (total lines minus lines you need to reserve to show other + information). + + If you set screen_lines to a number <=0, page() will try to auto-determine + your screen size and will only use up to (screen_size+screen_lines) for + printing, paging after that. That is, if you want auto-detection but need + to reserve the bottom 3 lines of the screen, use screen_lines = -3, and for + auto-detection without any lines reserved simply use screen_lines = 0. + + If a string won't fit in the allowed lines, it is sent through the + specified pager command. If none given, look for PAGER in the environment, + and ultimately default to less. + + If no system pager works, the string is sent through a 'dumb pager' + written in python, very simplistic. + """ + + # for compatibility with mime-bundle form: + if isinstance(strng, dict): + strng = strng['text/plain'] + + # Ugly kludge, but calling curses.initscr() flat out crashes in emacs + TERM = os.environ.get('TERM','dumb') + if TERM in ['dumb','emacs'] and os.name != 'nt': + print(strng) + return + # chop off the topmost part of the string we don't want to see + str_lines = strng.splitlines()[start:] + str_toprint = os.linesep.join(str_lines) + num_newlines = len(str_lines) + len_str = len(str_toprint) + + # Dumb heuristics to guesstimate number of on-screen lines the string + # takes. Very basic, but good enough for docstrings in reasonable + # terminals. If someone later feels like refining it, it's not hard. + numlines = max(num_newlines,int(len_str/80)+1) + + screen_lines_def = get_terminal_size()[1] + + # auto-determine screen size + if screen_lines <= 0: + try: + screen_lines += _detect_screen_size(screen_lines_def) + except (TypeError, UnsupportedOperation): + print(str_toprint) + return + + #print 'numlines',numlines,'screenlines',screen_lines # dbg + if numlines <= screen_lines : + #print '*** normal print' # dbg + print(str_toprint) + else: + # Try to open pager and default to internal one if that fails. + # All failure modes are tagged as 'retval=1', to match the return + # value of a failed system command. If any intermediate attempt + # sets retval to 1, at the end we resort to our own page_dumb() pager. 
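The screen_lines convention documented above (0 for plain auto-detection, a negative value to reserve rows at the bottom) is easiest to see in a usage sketch; this only does something useful inside an interactive terminal session:

    from IPython.core import page

    long_text = "\n".join("line %d" % i for i in range(200))
    page.page(long_text)                     # auto-detect the height, page if it does not fit
    page.page(long_text, screen_lines=-3)    # auto-detect, but keep the bottom 3 rows free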
+ pager_cmd = get_pager_cmd(pager_cmd) + pager_cmd += ' ' + get_pager_start(pager_cmd,start) + if os.name == 'nt': + if pager_cmd.startswith('type'): + # The default WinXP 'type' command is failing on complex strings. + retval = 1 + else: + fd, tmpname = tempfile.mkstemp('.txt') + try: + os.close(fd) + with open(tmpname, 'wt') as tmpfile: + tmpfile.write(strng) + cmd = "%s < %s" % (pager_cmd, tmpname) + # tmpfile needs to be closed for windows + if os.system(cmd): + retval = 1 + else: + retval = None + finally: + os.remove(tmpname) + else: + try: + retval = None + # Emulate os.popen, but redirect stderr + proc = subprocess.Popen(pager_cmd, + shell=True, + stdin=subprocess.PIPE, + stderr=subprocess.DEVNULL + ) + pager = os._wrap_close(io.TextIOWrapper(proc.stdin), proc) + try: + pager_encoding = pager.encoding or sys.stdout.encoding + pager.write(strng) + finally: + retval = pager.close() + except IOError as msg: # broken pipe when user quits + if msg.args == (32, 'Broken pipe'): + retval = None + else: + retval = 1 + except OSError: + # Other strange problems, sometimes seen in Win2k/cygwin + retval = 1 + if retval is not None: + page_dumb(strng,screen_lines=screen_lines) + + +def page(data, start=0, screen_lines=0, pager_cmd=None): + """Display content in a pager, piping through a pager after a certain length. + + data can be a mime-bundle dict, supplying multiple representations, + keyed by mime-type, or text. + + Pager is dispatched via the `show_in_pager` IPython hook. + If no hook is registered, `pager_page` will be used. + """ + # Some routines may auto-compute start offsets incorrectly and pass a + # negative value. Offset to 0 for robustness. + start = max(0, start) + + # first, try the hook + ip = get_ipython() + if ip: + try: + ip.hooks.show_in_pager(data, start=start, screen_lines=screen_lines) + return + except TryNext: + pass + + # fallback on default pager + return pager_page(data, start, screen_lines, pager_cmd) + + +def page_file(fname, start=0, pager_cmd=None): + """Page a file, using an optional pager command and starting line. + """ + + pager_cmd = get_pager_cmd(pager_cmd) + pager_cmd += ' ' + get_pager_start(pager_cmd,start) + + try: + if os.environ['TERM'] in ['emacs','dumb']: + raise EnvironmentError + system(pager_cmd + ' ' + fname) + except: + try: + if start > 0: + start -= 1 + page(open(fname).read(),start) + except: + print('Unable to show file',repr(fname)) + + +def get_pager_cmd(pager_cmd=None): + """Return a pager command. + + Makes some attempts at finding an OS-correct one. + """ + if os.name == 'posix': + default_pager_cmd = 'less -R' # -R for color control sequences + elif os.name in ['nt','dos']: + default_pager_cmd = 'type' + + if pager_cmd is None: + try: + pager_cmd = os.environ['PAGER'] + except: + pager_cmd = default_pager_cmd + + if pager_cmd == 'less' and '-r' not in os.environ.get('LESS', '').lower(): + pager_cmd += ' -R' + + return pager_cmd + + +def get_pager_start(pager, start): + """Return the string for paging files with an offset. + + This is the '+N' argument which less and more (under Unix) accept. 
+ """ + + if pager in ['less','more']: + if start: + start_string = '+' + str(start) + else: + start_string = '' + else: + start_string = '' + return start_string + + +# (X)emacs on win32 doesn't like to be bypassed with msvcrt.getch() +if os.name == 'nt' and os.environ.get('TERM','dumb') != 'emacs': + import msvcrt + def page_more(): + """ Smart pausing between pages + + @return: True if need print more lines, False if quit + """ + sys.stdout.write('---Return to continue, q to quit--- ') + ans = msvcrt.getwch() + if ans in ("q", "Q"): + result = False + else: + result = True + sys.stdout.write("\b"*37 + " "*37 + "\b"*37) + return result +else: + def page_more(): + ans = py3compat.input('---Return to continue, q to quit--- ') + if ans.lower().startswith('q'): + return False + else: + return True diff --git a/contrib/python/ipython/py3/IPython/core/payload.py b/contrib/python/ipython/py3/IPython/core/payload.py index caa9268fe26..6818be15372 100644 --- a/contrib/python/ipython/py3/IPython/core/payload.py +++ b/contrib/python/ipython/py3/IPython/core/payload.py @@ -1,55 +1,55 @@ -# -*- coding: utf-8 -*- -"""Payload system for IPython. - -Authors: - -* Fernando Perez -* Brian Granger -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -from traitlets.config.configurable import Configurable -from traitlets import List - -#----------------------------------------------------------------------------- -# Main payload class -#----------------------------------------------------------------------------- - -class PayloadManager(Configurable): - - _payload = List([]) - - def write_payload(self, data, single=True): - """Include or update the specified `data` payload in the PayloadManager. - - If a previous payload with the same source exists and `single` is True, - it will be overwritten with the new one. - """ - - if not isinstance(data, dict): - raise TypeError('Each payload write must be a dict, got: %r' % data) - - if single and 'source' in data: - source = data['source'] - for i, pl in enumerate(self._payload): - if 'source' in pl and pl['source'] == source: - self._payload[i] = data - return - - self._payload.append(data) - - def read_payload(self): - return self._payload - - def clear_payload(self): - self._payload = [] +# -*- coding: utf-8 -*- +"""Payload system for IPython. + +Authors: + +* Fernando Perez +* Brian Granger +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +from traitlets.config.configurable import Configurable +from traitlets import List + +#----------------------------------------------------------------------------- +# Main payload class +#----------------------------------------------------------------------------- + +class PayloadManager(Configurable): + + _payload = List([]) + + def write_payload(self, data, single=True): + """Include or update the specified `data` payload in the PayloadManager. + + If a previous payload with the same source exists and `single` is True, + it will be overwritten with the new one. + """ + + if not isinstance(data, dict): + raise TypeError('Each payload write must be a dict, got: %r' % data) + + if single and 'source' in data: + source = data['source'] + for i, pl in enumerate(self._payload): + if 'source' in pl and pl['source'] == source: + self._payload[i] = data + return + + self._payload.append(data) + + def read_payload(self): + return self._payload + + def clear_payload(self): + self._payload = [] diff --git a/contrib/python/ipython/py3/IPython/core/payloadpage.py b/contrib/python/ipython/py3/IPython/core/payloadpage.py index 43ac441631a..eb613445dd4 100644 --- a/contrib/python/ipython/py3/IPython/core/payloadpage.py +++ b/contrib/python/ipython/py3/IPython/core/payloadpage.py @@ -1,52 +1,52 @@ -# encoding: utf-8 -"""A payload based version of page.""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import warnings -from IPython.core.getipython import get_ipython - - -def page(strng, start=0, screen_lines=0, pager_cmd=None): - """Print a string, piping through a pager. - - This version ignores the screen_lines and pager_cmd arguments and uses - IPython's payload system instead. - - Parameters - ---------- - strng : str or mime-dict - Text to page, or a mime-type keyed dict of already formatted data. - - start : int - Starting line at which to place the display. - """ - - # Some routines may auto-compute start offsets incorrectly and pass a - # negative value. Offset to 0 for robustness. - start = max(0, start) - shell = get_ipython() - - if isinstance(strng, dict): - data = strng - else: - data = {'text/plain' : strng} - payload = dict( - source='page', - data=data, - start=start, - ) - shell.payload_manager.write_payload(payload) - - -def install_payload_page(): - """DEPRECATED, use show_in_pager hook - - Install this version of page as IPython.core.page.page. - """ - warnings.warn("""install_payload_page is deprecated. - Use `ip.set_hook('show_in_pager, page.as_hook(payloadpage.page))` - """) - from IPython.core import page as corepage - corepage.page = page +# encoding: utf-8 +"""A payload based version of page.""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import warnings +from IPython.core.getipython import get_ipython + + +def page(strng, start=0, screen_lines=0, pager_cmd=None): + """Print a string, piping through a pager. + + This version ignores the screen_lines and pager_cmd arguments and uses + IPython's payload system instead. + + Parameters + ---------- + strng : str or mime-dict + Text to page, or a mime-type keyed dict of already formatted data. + + start : int + Starting line at which to place the display. 
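How this payload-based pager differs from the terminal one is easiest to see from what it queues; a sketch assuming a running, frontend-connected IPython kernel (the printed layout follows the write_payload call in the function body below):

    from IPython import get_ipython
    from IPython.core import payloadpage

    payloadpage.page("Some long help text", start=0)
    print(get_ipython().payload_manager.read_payload())
    # roughly: [{'source': 'page', 'data': {'text/plain': 'Some long help text'}, 'start': 0}]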
+ """ + + # Some routines may auto-compute start offsets incorrectly and pass a + # negative value. Offset to 0 for robustness. + start = max(0, start) + shell = get_ipython() + + if isinstance(strng, dict): + data = strng + else: + data = {'text/plain' : strng} + payload = dict( + source='page', + data=data, + start=start, + ) + shell.payload_manager.write_payload(payload) + + +def install_payload_page(): + """DEPRECATED, use show_in_pager hook + + Install this version of page as IPython.core.page.page. + """ + warnings.warn("""install_payload_page is deprecated. + Use `ip.set_hook('show_in_pager, page.as_hook(payloadpage.page))` + """) + from IPython.core import page as corepage + corepage.page = page diff --git a/contrib/python/ipython/py3/IPython/core/prefilter.py b/contrib/python/ipython/py3/IPython/core/prefilter.py index b4cfe813b25..bf801f999c4 100644 --- a/contrib/python/ipython/py3/IPython/core/prefilter.py +++ b/contrib/python/ipython/py3/IPython/core/prefilter.py @@ -1,709 +1,709 @@ -# encoding: utf-8 -""" -Prefiltering components. - -Prefilters transform user input before it is exec'd by Python. These -transforms are used to implement additional syntax such as !ls and %magic. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -from keyword import iskeyword -import re - -from .autocall import IPyAutocall -from traitlets.config.configurable import Configurable -from .inputtransformer2 import ( - ESC_MAGIC, - ESC_QUOTE, - ESC_QUOTE2, - ESC_PAREN, -) -from .macro import Macro -from .splitinput import LineInfo - -from traitlets import ( - List, Integer, Unicode, Bool, Instance, CRegExp -) - -#----------------------------------------------------------------------------- -# Global utilities, errors and constants -#----------------------------------------------------------------------------- - - -class PrefilterError(Exception): - pass - - -# RegExp to identify potential function names -re_fun_name = re.compile(r'[^\W\d]([\w.]*) *$') - -# RegExp to exclude strings with this start from autocalling. In -# particular, all binary operators should be excluded, so that if foo is -# callable, foo OP bar doesn't become foo(OP bar), which is invalid. The -# characters '!=()' don't need to be checked for, as the checkPythonChars -# routine explicitly does so, to catch direct calls and rebindings of -# existing names. - -# Warning: the '-' HAS TO BE AT THE END of the first group, otherwise -# it affects the rest of the group in square brackets. -re_exclude_auto = re.compile(r'^[,&^\|\*/\+-]' - r'|^is |^not |^in |^and |^or ') - -# try to catch also methods for stuff in lists/tuples/dicts: off -# (experimental). For this to work, the line_split regexp would need -# to be modified so it wouldn't break things at '['. That line is -# nasty enough that I shouldn't change it until I can test it _well_. -#self.re_fun_name = re.compile (r'[a-zA-Z_]([a-zA-Z0-9_.\[\]]*) ?$') - - -# Handler Check Utilities -def is_shadowed(identifier, ip): - """Is the given identifier defined in one of the namespaces which shadow - the alias and magic namespaces? Note that an identifier is different - than ifun, because it can not contain a '.' 
character.""" - # This is much safer than calling ofind, which can change state - return (identifier in ip.user_ns \ - or identifier in ip.user_global_ns \ - or identifier in ip.ns_table['builtin']\ - or iskeyword(identifier)) - - -#----------------------------------------------------------------------------- -# Main Prefilter manager -#----------------------------------------------------------------------------- - - -class PrefilterManager(Configurable): - """Main prefilter component. - - The IPython prefilter is run on all user input before it is run. The - prefilter consumes lines of input and produces transformed lines of - input. - - The implementation consists of two phases: - - 1. Transformers - 2. Checkers and handlers - - Over time, we plan on deprecating the checkers and handlers and doing - everything in the transformers. - - The transformers are instances of :class:`PrefilterTransformer` and have - a single method :meth:`transform` that takes a line and returns a - transformed line. The transformation can be accomplished using any - tool, but our current ones use regular expressions for speed. - - After all the transformers have been run, the line is fed to the checkers, - which are instances of :class:`PrefilterChecker`. The line is passed to - the :meth:`check` method, which either returns `None` or a - :class:`PrefilterHandler` instance. If `None` is returned, the other - checkers are tried. If an :class:`PrefilterHandler` instance is returned, - the line is passed to the :meth:`handle` method of the returned - handler and no further checkers are tried. - - Both transformers and checkers have a `priority` attribute, that determines - the order in which they are called. Smaller priorities are tried first. - - Both transformers and checkers also have `enabled` attribute, which is - a boolean that determines if the instance is used. - - Users or developers can change the priority or enabled attribute of - transformers or checkers, but they must call the :meth:`sort_checkers` - or :meth:`sort_transformers` method after changing the priority. - """ - - multi_line_specials = Bool(True).tag(config=True) - shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) - - def __init__(self, shell=None, **kwargs): - super(PrefilterManager, self).__init__(shell=shell, **kwargs) - self.shell = shell - self.init_transformers() - self.init_handlers() - self.init_checkers() - - #------------------------------------------------------------------------- - # API for managing transformers - #------------------------------------------------------------------------- - - def init_transformers(self): - """Create the default transformers.""" - self._transformers = [] - for transformer_cls in _default_transformers: - transformer_cls( - shell=self.shell, prefilter_manager=self, parent=self - ) - - def sort_transformers(self): - """Sort the transformers by priority. - - This must be called after the priority of a transformer is changed. - The :meth:`register_transformer` method calls this automatically. 
- """ - self._transformers.sort(key=lambda x: x.priority) - - @property - def transformers(self): - """Return a list of checkers, sorted by priority.""" - return self._transformers - - def register_transformer(self, transformer): - """Register a transformer instance.""" - if transformer not in self._transformers: - self._transformers.append(transformer) - self.sort_transformers() - - def unregister_transformer(self, transformer): - """Unregister a transformer instance.""" - if transformer in self._transformers: - self._transformers.remove(transformer) - - #------------------------------------------------------------------------- - # API for managing checkers - #------------------------------------------------------------------------- - - def init_checkers(self): - """Create the default checkers.""" - self._checkers = [] - for checker in _default_checkers: - checker( - shell=self.shell, prefilter_manager=self, parent=self - ) - - def sort_checkers(self): - """Sort the checkers by priority. - - This must be called after the priority of a checker is changed. - The :meth:`register_checker` method calls this automatically. - """ - self._checkers.sort(key=lambda x: x.priority) - - @property - def checkers(self): - """Return a list of checkers, sorted by priority.""" - return self._checkers - - def register_checker(self, checker): - """Register a checker instance.""" - if checker not in self._checkers: - self._checkers.append(checker) - self.sort_checkers() - - def unregister_checker(self, checker): - """Unregister a checker instance.""" - if checker in self._checkers: - self._checkers.remove(checker) - - #------------------------------------------------------------------------- - # API for managing handlers - #------------------------------------------------------------------------- - - def init_handlers(self): - """Create the default handlers.""" - self._handlers = {} - self._esc_handlers = {} - for handler in _default_handlers: - handler( - shell=self.shell, prefilter_manager=self, parent=self - ) - - @property - def handlers(self): - """Return a dict of all the handlers.""" - return self._handlers - - def register_handler(self, name, handler, esc_strings): - """Register a handler instance by name with esc_strings.""" - self._handlers[name] = handler - for esc_str in esc_strings: - self._esc_handlers[esc_str] = handler - - def unregister_handler(self, name, handler, esc_strings): - """Unregister a handler instance by name with esc_strings.""" - try: - del self._handlers[name] - except KeyError: - pass - for esc_str in esc_strings: - h = self._esc_handlers.get(esc_str) - if h is handler: - del self._esc_handlers[esc_str] - - def get_handler_by_name(self, name): - """Get a handler by its name.""" - return self._handlers.get(name) - - def get_handler_by_esc(self, esc_str): - """Get a handler by its escape string.""" - return self._esc_handlers.get(esc_str) - - #------------------------------------------------------------------------- - # Main prefiltering API - #------------------------------------------------------------------------- - - def prefilter_line_info(self, line_info): - """Prefilter a line that has been converted to a LineInfo object. - - This implements the checker/handler part of the prefilter pipe. 
- """ - # print "prefilter_line_info: ", line_info - handler = self.find_handler(line_info) - return handler.handle(line_info) - - def find_handler(self, line_info): - """Find a handler for the line_info by trying checkers.""" - for checker in self.checkers: - if checker.enabled: - handler = checker.check(line_info) - if handler: - return handler - return self.get_handler_by_name('normal') - - def transform_line(self, line, continue_prompt): - """Calls the enabled transformers in order of increasing priority.""" - for transformer in self.transformers: - if transformer.enabled: - line = transformer.transform(line, continue_prompt) - return line - - def prefilter_line(self, line, continue_prompt=False): - """Prefilter a single input line as text. - - This method prefilters a single line of text by calling the - transformers and then the checkers/handlers. - """ - - # print "prefilter_line: ", line, continue_prompt - # All handlers *must* return a value, even if it's blank (''). - - # save the line away in case we crash, so the post-mortem handler can - # record it - self.shell._last_input_line = line - - if not line: - # Return immediately on purely empty lines, so that if the user - # previously typed some whitespace that started a continuation - # prompt, he can break out of that loop with just an empty line. - # This is how the default python prompt works. - return '' - - # At this point, we invoke our transformers. - if not continue_prompt or (continue_prompt and self.multi_line_specials): - line = self.transform_line(line, continue_prompt) - - # Now we compute line_info for the checkers and handlers - line_info = LineInfo(line, continue_prompt) - - # the input history needs to track even empty lines - stripped = line.strip() - - normal_handler = self.get_handler_by_name('normal') - if not stripped: - return normal_handler.handle(line_info) - - # special handlers are only allowed for single line statements - if continue_prompt and not self.multi_line_specials: - return normal_handler.handle(line_info) - - prefiltered = self.prefilter_line_info(line_info) - # print "prefiltered line: %r" % prefiltered - return prefiltered - - def prefilter_lines(self, lines, continue_prompt=False): - """Prefilter multiple input lines of text. - - This is the main entry point for prefiltering multiple lines of - input. This simply calls :meth:`prefilter_line` for each line of - input. - - This covers cases where there are multiple lines in the user entry, - which is the case when the user goes back to a multiline history - entry and presses enter. - """ - llines = lines.rstrip('\n').split('\n') - # We can get multiple lines in one shot, where multiline input 'blends' - # into one line, in cases like recalling from the readline history - # buffer. We need to make sure that in such cases, we correctly - # communicate downstream which line is first and which are continuation - # ones. 
- if len(llines) > 1: - out = '\n'.join([self.prefilter_line(line, lnum>0) - for lnum, line in enumerate(llines) ]) - else: - out = self.prefilter_line(llines[0], continue_prompt) - - return out - -#----------------------------------------------------------------------------- -# Prefilter transformers -#----------------------------------------------------------------------------- - - -class PrefilterTransformer(Configurable): - """Transform a line of user input.""" - - priority = Integer(100).tag(config=True) - # Transformers don't currently use shell or prefilter_manager, but as we - # move away from checkers and handlers, they will need them. - shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) - prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True) - enabled = Bool(True).tag(config=True) - - def __init__(self, shell=None, prefilter_manager=None, **kwargs): - super(PrefilterTransformer, self).__init__( - shell=shell, prefilter_manager=prefilter_manager, **kwargs - ) - self.prefilter_manager.register_transformer(self) - - def transform(self, line, continue_prompt): - """Transform a line, returning the new one.""" - return None - - def __repr__(self): - return "<%s(priority=%r, enabled=%r)>" % ( - self.__class__.__name__, self.priority, self.enabled) - - -#----------------------------------------------------------------------------- -# Prefilter checkers -#----------------------------------------------------------------------------- - - -class PrefilterChecker(Configurable): - """Inspect an input line and return a handler for that line.""" - - priority = Integer(100).tag(config=True) - shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) - prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True) - enabled = Bool(True).tag(config=True) - - def __init__(self, shell=None, prefilter_manager=None, **kwargs): - super(PrefilterChecker, self).__init__( - shell=shell, prefilter_manager=prefilter_manager, **kwargs - ) - self.prefilter_manager.register_checker(self) - - def check(self, line_info): - """Inspect line_info and return a handler instance or None.""" - return None - - def __repr__(self): - return "<%s(priority=%r, enabled=%r)>" % ( - self.__class__.__name__, self.priority, self.enabled) - - -class EmacsChecker(PrefilterChecker): - - priority = Integer(100).tag(config=True) - enabled = Bool(False).tag(config=True) - - def check(self, line_info): - "Emacs ipython-mode tags certain input lines." 
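As the PrefilterManager docstring earlier in this hunk notes, transformers are the preferred extension point. A hypothetical subclass (`DollarShellTransformer` is a made-up name; registration happens automatically through the base __init__ shown above, given a shell and its prefilter_manager):

    from traitlets import Integer
    from IPython.core.prefilter import PrefilterTransformer

    class DollarShellTransformer(PrefilterTransformer):
        """Rewrite lines starting with '$' into system calls (illustration only)."""
        priority = Integer(50).tag(config=True)

        def transform(self, line, continue_prompt):
            if line.startswith('$'):
                return 'get_ipython().system(%r)' % line[1:].strip()
            return line

    # Wiring it into a running shell 'ip' (hypothetical):
    #   DollarShellTransformer(shell=ip, prefilter_manager=ip.prefilter_manager, parent=ip)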
- if line_info.line.endswith('# PYTHON-MODE'): - return self.prefilter_manager.get_handler_by_name('emacs') - else: - return None - - -class MacroChecker(PrefilterChecker): - - priority = Integer(250).tag(config=True) - - def check(self, line_info): - obj = self.shell.user_ns.get(line_info.ifun) - if isinstance(obj, Macro): - return self.prefilter_manager.get_handler_by_name('macro') - else: - return None - - -class IPyAutocallChecker(PrefilterChecker): - - priority = Integer(300).tag(config=True) - - def check(self, line_info): - "Instances of IPyAutocall in user_ns get autocalled immediately" - obj = self.shell.user_ns.get(line_info.ifun, None) - if isinstance(obj, IPyAutocall): - obj.set_ip(self.shell) - return self.prefilter_manager.get_handler_by_name('auto') - else: - return None - - -class AssignmentChecker(PrefilterChecker): - - priority = Integer(600).tag(config=True) - - def check(self, line_info): - """Check to see if user is assigning to a var for the first time, in - which case we want to avoid any sort of automagic / autocall games. - - This allows users to assign to either alias or magic names true python - variables (the magic/alias systems always take second seat to true - python code). E.g. ls='hi', or ls,that=1,2""" - if line_info.the_rest: - if line_info.the_rest[0] in '=,': - return self.prefilter_manager.get_handler_by_name('normal') - else: - return None - - -class AutoMagicChecker(PrefilterChecker): - - priority = Integer(700).tag(config=True) - - def check(self, line_info): - """If the ifun is magic, and automagic is on, run it. Note: normal, - non-auto magic would already have been triggered via '%' in - check_esc_chars. This just checks for automagic. Also, before - triggering the magic handler, make sure that there is nothing in the - user namespace which could shadow it.""" - if not self.shell.automagic or not self.shell.find_magic(line_info.ifun): - return None - - # We have a likely magic method. Make sure we should actually call it. - if line_info.continue_prompt and not self.prefilter_manager.multi_line_specials: - return None - - head = line_info.ifun.split('.',1)[0] - if is_shadowed(head, self.shell): - return None - - return self.prefilter_manager.get_handler_by_name('magic') - - -class PythonOpsChecker(PrefilterChecker): - - priority = Integer(900).tag(config=True) - - def check(self, line_info): - """If the 'rest' of the line begins with a function call or pretty much - any python operator, we should simply execute the line (regardless of - whether or not there's a possible autocall expansion). This avoids - spurious (and very confusing) geattr() accesses.""" - if line_info.the_rest and line_info.the_rest[0] in '!=()<>,+*/%^&|': - return self.prefilter_manager.get_handler_by_name('normal') - else: - return None - - -class AutocallChecker(PrefilterChecker): - - priority = Integer(1000).tag(config=True) - - function_name_regexp = CRegExp(re_fun_name, - help="RegExp to identify potential function names." - ).tag(config=True) - exclude_regexp = CRegExp(re_exclude_auto, - help="RegExp to exclude strings with this start from autocalling." - ).tag(config=True) - - def check(self, line_info): - "Check if the initial word/function is callable and autocall is on." 
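A compact summary of how the checkers above divide the work, with default automagic and autocall settings; the lines and outcomes are illustrative, not exhaustive:

    examples = [
        ("ls = 5", "AssignmentChecker -> 'normal': assignment wins over automagic"),
        ("foo(3)", "PythonOpsChecker  -> 'normal': explicit Python syntax, no autocall"),
        ("ls",     "AutoMagicChecker  -> 'magic' handler, if ls is a magic and not shadowed"),
        ("f 1 2",  "AutocallChecker   -> 'auto' handler, if f is callable and autocall is on"),
    ]
    for line, outcome in examples:
        print("%-8s %s" % (line, outcome))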
- if not self.shell.autocall: - return None - - oinfo = line_info.ofind(self.shell) # This can mutate state via getattr - if not oinfo['found']: - return None - - ignored_funs = ['b', 'f', 'r', 'u', 'br', 'rb', 'fr', 'rf'] - ifun = line_info.ifun - line = line_info.line - if ifun.lower() in ignored_funs and (line.startswith(ifun + "'") or line.startswith(ifun + '"')): - return None - - if callable(oinfo['obj']) \ - and (not self.exclude_regexp.match(line_info.the_rest)) \ - and self.function_name_regexp.match(line_info.ifun): - return self.prefilter_manager.get_handler_by_name('auto') - else: - return None - - -#----------------------------------------------------------------------------- -# Prefilter handlers -#----------------------------------------------------------------------------- - - -class PrefilterHandler(Configurable): - - handler_name = Unicode('normal') - esc_strings = List([]) - shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) - prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True) - - def __init__(self, shell=None, prefilter_manager=None, **kwargs): - super(PrefilterHandler, self).__init__( - shell=shell, prefilter_manager=prefilter_manager, **kwargs - ) - self.prefilter_manager.register_handler( - self.handler_name, - self, - self.esc_strings - ) - - def handle(self, line_info): - # print "normal: ", line_info - """Handle normal input lines. Use as a template for handlers.""" - - # With autoindent on, we need some way to exit the input loop, and I - # don't want to force the user to have to backspace all the way to - # clear the line. The rule will be in this case, that either two - # lines of pure whitespace in a row, or a line of pure whitespace but - # of a size different to the indent level, will exit the input loop. - line = line_info.line - continue_prompt = line_info.continue_prompt - - if (continue_prompt and - self.shell.autoindent and - line.isspace() and - 0 < abs(len(line) - self.shell.indent_current_nsp) <= 2): - line = '' - - return line - - def __str__(self): - return "<%s(name=%s)>" % (self.__class__.__name__, self.handler_name) - - -class MacroHandler(PrefilterHandler): - handler_name = Unicode("macro") - - def handle(self, line_info): - obj = self.shell.user_ns.get(line_info.ifun) - pre_space = line_info.pre_whitespace - line_sep = "\n" + pre_space - return pre_space + line_sep.join(obj.value.splitlines()) - - -class MagicHandler(PrefilterHandler): - - handler_name = Unicode('magic') - esc_strings = List([ESC_MAGIC]) - - def handle(self, line_info): - """Execute magic functions.""" - ifun = line_info.ifun - the_rest = line_info.the_rest - #Prepare arguments for get_ipython().run_line_magic(magic_name, magic_args) - t_arg_s = ifun + " " + the_rest - t_magic_name, _, t_magic_arg_s = t_arg_s.partition(' ') - t_magic_name = t_magic_name.lstrip(ESC_MAGIC) - cmd = '%sget_ipython().run_line_magic(%r, %r)' % (line_info.pre_whitespace, t_magic_name, t_magic_arg_s) - return cmd - - -class AutoHandler(PrefilterHandler): - - handler_name = Unicode('auto') - esc_strings = List([ESC_PAREN, ESC_QUOTE, ESC_QUOTE2]) - - def handle(self, line_info): - """Handle lines which can be auto-executed, quoting if requested.""" - line = line_info.line - ifun = line_info.ifun - the_rest = line_info.the_rest - esc = line_info.esc - continue_prompt = line_info.continue_prompt - obj = line_info.ofind(self.shell)['obj'] - - # This should only be active for single-line input! 
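Stepping back to the MagicHandler shown just above: the command it emits is easy to reproduce by hand. The pieces below are made-up LineInfo fields for a line like '%timeit x = 1'; the lstrip makes the leading % escape optional:

    ifun, the_rest, pre_whitespace = '%timeit', 'x = 1', ''
    t_arg_s = ifun + " " + the_rest
    t_magic_name, _, t_magic_arg_s = t_arg_s.partition(' ')
    t_magic_name = t_magic_name.lstrip('%')    # ESC_MAGIC is '%'
    print('%sget_ipython().run_line_magic(%r, %r)' % (pre_whitespace, t_magic_name, t_magic_arg_s))
    # -> get_ipython().run_line_magic('timeit', 'x = 1')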
- if continue_prompt: - return line - - force_auto = isinstance(obj, IPyAutocall) - - # User objects sometimes raise exceptions on attribute access other - # than AttributeError (we've seen it in the past), so it's safest to be - # ultra-conservative here and catch all. - try: - auto_rewrite = obj.rewrite - except Exception: - auto_rewrite = True - - if esc == ESC_QUOTE: - # Auto-quote splitting on whitespace - newcmd = '%s("%s")' % (ifun,'", "'.join(the_rest.split()) ) - elif esc == ESC_QUOTE2: - # Auto-quote whole string - newcmd = '%s("%s")' % (ifun,the_rest) - elif esc == ESC_PAREN: - newcmd = '%s(%s)' % (ifun,",".join(the_rest.split())) - else: - # Auto-paren. - if force_auto: - # Don't rewrite if it is already a call. - do_rewrite = not the_rest.startswith('(') - else: - if not the_rest: - # We only apply it to argument-less calls if the autocall - # parameter is set to 2. - do_rewrite = (self.shell.autocall >= 2) - elif the_rest.startswith('[') and hasattr(obj, '__getitem__'): - # Don't autocall in this case: item access for an object - # which is BOTH callable and implements __getitem__. - do_rewrite = False - else: - do_rewrite = True - - # Figure out the rewritten command - if do_rewrite: - if the_rest.endswith(';'): - newcmd = '%s(%s);' % (ifun.rstrip(),the_rest[:-1]) - else: - newcmd = '%s(%s)' % (ifun.rstrip(), the_rest) - else: - normal_handler = self.prefilter_manager.get_handler_by_name('normal') - return normal_handler.handle(line_info) - - # Display the rewritten call - if auto_rewrite: - self.shell.auto_rewrite_input(newcmd) - - return newcmd - - -class EmacsHandler(PrefilterHandler): - - handler_name = Unicode('emacs') - esc_strings = List([]) - - def handle(self, line_info): - """Handle input lines marked by python-mode.""" - - # Currently, nothing is done. Later more functionality can be added - # here if needed. - - # The input cache shouldn't be updated - return line_info.line - - -#----------------------------------------------------------------------------- -# Defaults -#----------------------------------------------------------------------------- - - -_default_transformers = [ -] - -_default_checkers = [ - EmacsChecker, - MacroChecker, - IPyAutocallChecker, - AssignmentChecker, - AutoMagicChecker, - PythonOpsChecker, - AutocallChecker -] - -_default_handlers = [ - PrefilterHandler, - MacroHandler, - MagicHandler, - AutoHandler, - EmacsHandler -] +# encoding: utf-8 +""" +Prefiltering components. + +Prefilters transform user input before it is exec'd by Python. These +transforms are used to implement additional syntax such as !ls and %magic. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +from keyword import iskeyword +import re + +from .autocall import IPyAutocall +from traitlets.config.configurable import Configurable +from .inputtransformer2 import ( + ESC_MAGIC, + ESC_QUOTE, + ESC_QUOTE2, + ESC_PAREN, +) +from .macro import Macro +from .splitinput import LineInfo + +from traitlets import ( + List, Integer, Unicode, Bool, Instance, CRegExp +) + +#----------------------------------------------------------------------------- +# Global utilities, errors and constants +#----------------------------------------------------------------------------- + + +class PrefilterError(Exception): + pass + + +# RegExp to identify potential function names +re_fun_name = re.compile(r'[^\W\d]([\w.]*) *$') + +# RegExp to exclude strings with this start from autocalling. 
In +# particular, all binary operators should be excluded, so that if foo is +# callable, foo OP bar doesn't become foo(OP bar), which is invalid. The +# characters '!=()' don't need to be checked for, as the checkPythonChars +# routine explicitly does so, to catch direct calls and rebindings of +# existing names. + +# Warning: the '-' HAS TO BE AT THE END of the first group, otherwise +# it affects the rest of the group in square brackets. +re_exclude_auto = re.compile(r'^[,&^\|\*/\+-]' + r'|^is |^not |^in |^and |^or ') + +# try to catch also methods for stuff in lists/tuples/dicts: off +# (experimental). For this to work, the line_split regexp would need +# to be modified so it wouldn't break things at '['. That line is +# nasty enough that I shouldn't change it until I can test it _well_. +#self.re_fun_name = re.compile (r'[a-zA-Z_]([a-zA-Z0-9_.\[\]]*) ?$') + + +# Handler Check Utilities +def is_shadowed(identifier, ip): + """Is the given identifier defined in one of the namespaces which shadow + the alias and magic namespaces? Note that an identifier is different + than ifun, because it can not contain a '.' character.""" + # This is much safer than calling ofind, which can change state + return (identifier in ip.user_ns \ + or identifier in ip.user_global_ns \ + or identifier in ip.ns_table['builtin']\ + or iskeyword(identifier)) + + +#----------------------------------------------------------------------------- +# Main Prefilter manager +#----------------------------------------------------------------------------- + + +class PrefilterManager(Configurable): + """Main prefilter component. + + The IPython prefilter is run on all user input before it is run. The + prefilter consumes lines of input and produces transformed lines of + input. + + The implementation consists of two phases: + + 1. Transformers + 2. Checkers and handlers + + Over time, we plan on deprecating the checkers and handlers and doing + everything in the transformers. + + The transformers are instances of :class:`PrefilterTransformer` and have + a single method :meth:`transform` that takes a line and returns a + transformed line. The transformation can be accomplished using any + tool, but our current ones use regular expressions for speed. + + After all the transformers have been run, the line is fed to the checkers, + which are instances of :class:`PrefilterChecker`. The line is passed to + the :meth:`check` method, which either returns `None` or a + :class:`PrefilterHandler` instance. If `None` is returned, the other + checkers are tried. If an :class:`PrefilterHandler` instance is returned, + the line is passed to the :meth:`handle` method of the returned + handler and no further checkers are tried. + + Both transformers and checkers have a `priority` attribute, that determines + the order in which they are called. Smaller priorities are tried first. + + Both transformers and checkers also have `enabled` attribute, which is + a boolean that determines if the instance is used. + + Users or developers can change the priority or enabled attribute of + transformers or checkers, but they must call the :meth:`sort_checkers` + or :meth:`sort_transformers` method after changing the priority. 
+ """ + + multi_line_specials = Bool(True).tag(config=True) + shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) + + def __init__(self, shell=None, **kwargs): + super(PrefilterManager, self).__init__(shell=shell, **kwargs) + self.shell = shell + self.init_transformers() + self.init_handlers() + self.init_checkers() + + #------------------------------------------------------------------------- + # API for managing transformers + #------------------------------------------------------------------------- + + def init_transformers(self): + """Create the default transformers.""" + self._transformers = [] + for transformer_cls in _default_transformers: + transformer_cls( + shell=self.shell, prefilter_manager=self, parent=self + ) + + def sort_transformers(self): + """Sort the transformers by priority. + + This must be called after the priority of a transformer is changed. + The :meth:`register_transformer` method calls this automatically. + """ + self._transformers.sort(key=lambda x: x.priority) + + @property + def transformers(self): + """Return a list of checkers, sorted by priority.""" + return self._transformers + + def register_transformer(self, transformer): + """Register a transformer instance.""" + if transformer not in self._transformers: + self._transformers.append(transformer) + self.sort_transformers() + + def unregister_transformer(self, transformer): + """Unregister a transformer instance.""" + if transformer in self._transformers: + self._transformers.remove(transformer) + + #------------------------------------------------------------------------- + # API for managing checkers + #------------------------------------------------------------------------- + + def init_checkers(self): + """Create the default checkers.""" + self._checkers = [] + for checker in _default_checkers: + checker( + shell=self.shell, prefilter_manager=self, parent=self + ) + + def sort_checkers(self): + """Sort the checkers by priority. + + This must be called after the priority of a checker is changed. + The :meth:`register_checker` method calls this automatically. 
+ """ + self._checkers.sort(key=lambda x: x.priority) + + @property + def checkers(self): + """Return a list of checkers, sorted by priority.""" + return self._checkers + + def register_checker(self, checker): + """Register a checker instance.""" + if checker not in self._checkers: + self._checkers.append(checker) + self.sort_checkers() + + def unregister_checker(self, checker): + """Unregister a checker instance.""" + if checker in self._checkers: + self._checkers.remove(checker) + + #------------------------------------------------------------------------- + # API for managing handlers + #------------------------------------------------------------------------- + + def init_handlers(self): + """Create the default handlers.""" + self._handlers = {} + self._esc_handlers = {} + for handler in _default_handlers: + handler( + shell=self.shell, prefilter_manager=self, parent=self + ) + + @property + def handlers(self): + """Return a dict of all the handlers.""" + return self._handlers + + def register_handler(self, name, handler, esc_strings): + """Register a handler instance by name with esc_strings.""" + self._handlers[name] = handler + for esc_str in esc_strings: + self._esc_handlers[esc_str] = handler + + def unregister_handler(self, name, handler, esc_strings): + """Unregister a handler instance by name with esc_strings.""" + try: + del self._handlers[name] + except KeyError: + pass + for esc_str in esc_strings: + h = self._esc_handlers.get(esc_str) + if h is handler: + del self._esc_handlers[esc_str] + + def get_handler_by_name(self, name): + """Get a handler by its name.""" + return self._handlers.get(name) + + def get_handler_by_esc(self, esc_str): + """Get a handler by its escape string.""" + return self._esc_handlers.get(esc_str) + + #------------------------------------------------------------------------- + # Main prefiltering API + #------------------------------------------------------------------------- + + def prefilter_line_info(self, line_info): + """Prefilter a line that has been converted to a LineInfo object. + + This implements the checker/handler part of the prefilter pipe. + """ + # print "prefilter_line_info: ", line_info + handler = self.find_handler(line_info) + return handler.handle(line_info) + + def find_handler(self, line_info): + """Find a handler for the line_info by trying checkers.""" + for checker in self.checkers: + if checker.enabled: + handler = checker.check(line_info) + if handler: + return handler + return self.get_handler_by_name('normal') + + def transform_line(self, line, continue_prompt): + """Calls the enabled transformers in order of increasing priority.""" + for transformer in self.transformers: + if transformer.enabled: + line = transformer.transform(line, continue_prompt) + return line + + def prefilter_line(self, line, continue_prompt=False): + """Prefilter a single input line as text. + + This method prefilters a single line of text by calling the + transformers and then the checkers/handlers. + """ + + # print "prefilter_line: ", line, continue_prompt + # All handlers *must* return a value, even if it's blank (''). + + # save the line away in case we crash, so the post-mortem handler can + # record it + self.shell._last_input_line = line + + if not line: + # Return immediately on purely empty lines, so that if the user + # previously typed some whitespace that started a continuation + # prompt, he can break out of that loop with just an empty line. + # This is how the default python prompt works. 
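The registration API laid out in this hunk (register_checker, register_handler, get_handler_by_name, find_handler) is easiest to see end to end with a small sketch. Only the class and method names come from the code above; ReverseChecker, ReverseHandler, the '~~' trigger and the priority value are hypothetical, this is not code shipped with IPython, and it assumes the running shell still exposes its manager as ip.prefilter_manager:

    from traitlets import Integer, Unicode
    from IPython.core.prefilter import PrefilterChecker, PrefilterHandler

    class ReverseHandler(PrefilterHandler):
        handler_name = Unicode('reverse')        # key later used by get_handler_by_name

        def handle(self, line_info):
            # A handler returns the source string IPython should actually run.
            return line_info.line.lstrip('~')[::-1]

    class ReverseChecker(PrefilterChecker):
        priority = Integer(50).tag(config=True)  # low value -> consulted before the defaults

        def check(self, line_info):
            if line_info.line.startswith('~~'):
                return self.prefilter_manager.get_handler_by_name('reverse')
            return None                          # fall through to the remaining checkers

    # Instantiating against the running shell is enough: the base-class
    # constructors call register_checker()/register_handler() themselves.
    ip = get_ipython()                           # available inside an IPython session
    ReverseHandler(shell=ip, prefilter_manager=ip.prefilter_manager)
    ReverseChecker(shell=ip, prefilter_manager=ip.prefilter_manager)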
+ return '' + + # At this point, we invoke our transformers. + if not continue_prompt or (continue_prompt and self.multi_line_specials): + line = self.transform_line(line, continue_prompt) + + # Now we compute line_info for the checkers and handlers + line_info = LineInfo(line, continue_prompt) + + # the input history needs to track even empty lines + stripped = line.strip() + + normal_handler = self.get_handler_by_name('normal') + if not stripped: + return normal_handler.handle(line_info) + + # special handlers are only allowed for single line statements + if continue_prompt and not self.multi_line_specials: + return normal_handler.handle(line_info) + + prefiltered = self.prefilter_line_info(line_info) + # print "prefiltered line: %r" % prefiltered + return prefiltered + + def prefilter_lines(self, lines, continue_prompt=False): + """Prefilter multiple input lines of text. + + This is the main entry point for prefiltering multiple lines of + input. This simply calls :meth:`prefilter_line` for each line of + input. + + This covers cases where there are multiple lines in the user entry, + which is the case when the user goes back to a multiline history + entry and presses enter. + """ + llines = lines.rstrip('\n').split('\n') + # We can get multiple lines in one shot, where multiline input 'blends' + # into one line, in cases like recalling from the readline history + # buffer. We need to make sure that in such cases, we correctly + # communicate downstream which line is first and which are continuation + # ones. + if len(llines) > 1: + out = '\n'.join([self.prefilter_line(line, lnum>0) + for lnum, line in enumerate(llines) ]) + else: + out = self.prefilter_line(llines[0], continue_prompt) + + return out + +#----------------------------------------------------------------------------- +# Prefilter transformers +#----------------------------------------------------------------------------- + + +class PrefilterTransformer(Configurable): + """Transform a line of user input.""" + + priority = Integer(100).tag(config=True) + # Transformers don't currently use shell or prefilter_manager, but as we + # move away from checkers and handlers, they will need them. 
+ shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) + prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True) + enabled = Bool(True).tag(config=True) + + def __init__(self, shell=None, prefilter_manager=None, **kwargs): + super(PrefilterTransformer, self).__init__( + shell=shell, prefilter_manager=prefilter_manager, **kwargs + ) + self.prefilter_manager.register_transformer(self) + + def transform(self, line, continue_prompt): + """Transform a line, returning the new one.""" + return None + + def __repr__(self): + return "<%s(priority=%r, enabled=%r)>" % ( + self.__class__.__name__, self.priority, self.enabled) + + +#----------------------------------------------------------------------------- +# Prefilter checkers +#----------------------------------------------------------------------------- + + +class PrefilterChecker(Configurable): + """Inspect an input line and return a handler for that line.""" + + priority = Integer(100).tag(config=True) + shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) + prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True) + enabled = Bool(True).tag(config=True) + + def __init__(self, shell=None, prefilter_manager=None, **kwargs): + super(PrefilterChecker, self).__init__( + shell=shell, prefilter_manager=prefilter_manager, **kwargs + ) + self.prefilter_manager.register_checker(self) + + def check(self, line_info): + """Inspect line_info and return a handler instance or None.""" + return None + + def __repr__(self): + return "<%s(priority=%r, enabled=%r)>" % ( + self.__class__.__name__, self.priority, self.enabled) + + +class EmacsChecker(PrefilterChecker): + + priority = Integer(100).tag(config=True) + enabled = Bool(False).tag(config=True) + + def check(self, line_info): + "Emacs ipython-mode tags certain input lines." + if line_info.line.endswith('# PYTHON-MODE'): + return self.prefilter_manager.get_handler_by_name('emacs') + else: + return None + + +class MacroChecker(PrefilterChecker): + + priority = Integer(250).tag(config=True) + + def check(self, line_info): + obj = self.shell.user_ns.get(line_info.ifun) + if isinstance(obj, Macro): + return self.prefilter_manager.get_handler_by_name('macro') + else: + return None + + +class IPyAutocallChecker(PrefilterChecker): + + priority = Integer(300).tag(config=True) + + def check(self, line_info): + "Instances of IPyAutocall in user_ns get autocalled immediately" + obj = self.shell.user_ns.get(line_info.ifun, None) + if isinstance(obj, IPyAutocall): + obj.set_ip(self.shell) + return self.prefilter_manager.get_handler_by_name('auto') + else: + return None + + +class AssignmentChecker(PrefilterChecker): + + priority = Integer(600).tag(config=True) + + def check(self, line_info): + """Check to see if user is assigning to a var for the first time, in + which case we want to avoid any sort of automagic / autocall games. + + This allows users to assign to either alias or magic names true python + variables (the magic/alias systems always take second seat to true + python code). E.g. ls='hi', or ls,that=1,2""" + if line_info.the_rest: + if line_info.the_rest[0] in '=,': + return self.prefilter_manager.get_handler_by_name('normal') + else: + return None + + +class AutoMagicChecker(PrefilterChecker): + + priority = Integer(700).tag(config=True) + + def check(self, line_info): + """If the ifun is magic, and automagic is on, run it. 
Note: normal, + non-auto magic would already have been triggered via '%' in + check_esc_chars. This just checks for automagic. Also, before + triggering the magic handler, make sure that there is nothing in the + user namespace which could shadow it.""" + if not self.shell.automagic or not self.shell.find_magic(line_info.ifun): + return None + + # We have a likely magic method. Make sure we should actually call it. + if line_info.continue_prompt and not self.prefilter_manager.multi_line_specials: + return None + + head = line_info.ifun.split('.',1)[0] + if is_shadowed(head, self.shell): + return None + + return self.prefilter_manager.get_handler_by_name('magic') + + +class PythonOpsChecker(PrefilterChecker): + + priority = Integer(900).tag(config=True) + + def check(self, line_info): + """If the 'rest' of the line begins with a function call or pretty much + any python operator, we should simply execute the line (regardless of + whether or not there's a possible autocall expansion). This avoids + spurious (and very confusing) geattr() accesses.""" + if line_info.the_rest and line_info.the_rest[0] in '!=()<>,+*/%^&|': + return self.prefilter_manager.get_handler_by_name('normal') + else: + return None + + +class AutocallChecker(PrefilterChecker): + + priority = Integer(1000).tag(config=True) + + function_name_regexp = CRegExp(re_fun_name, + help="RegExp to identify potential function names." + ).tag(config=True) + exclude_regexp = CRegExp(re_exclude_auto, + help="RegExp to exclude strings with this start from autocalling." + ).tag(config=True) + + def check(self, line_info): + "Check if the initial word/function is callable and autocall is on." + if not self.shell.autocall: + return None + + oinfo = line_info.ofind(self.shell) # This can mutate state via getattr + if not oinfo['found']: + return None + + ignored_funs = ['b', 'f', 'r', 'u', 'br', 'rb', 'fr', 'rf'] + ifun = line_info.ifun + line = line_info.line + if ifun.lower() in ignored_funs and (line.startswith(ifun + "'") or line.startswith(ifun + '"')): + return None + + if callable(oinfo['obj']) \ + and (not self.exclude_regexp.match(line_info.the_rest)) \ + and self.function_name_regexp.match(line_info.ifun): + return self.prefilter_manager.get_handler_by_name('auto') + else: + return None + + +#----------------------------------------------------------------------------- +# Prefilter handlers +#----------------------------------------------------------------------------- + + +class PrefilterHandler(Configurable): + + handler_name = Unicode('normal') + esc_strings = List([]) + shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) + prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True) + + def __init__(self, shell=None, prefilter_manager=None, **kwargs): + super(PrefilterHandler, self).__init__( + shell=shell, prefilter_manager=prefilter_manager, **kwargs + ) + self.prefilter_manager.register_handler( + self.handler_name, + self, + self.esc_strings + ) + + def handle(self, line_info): + # print "normal: ", line_info + """Handle normal input lines. Use as a template for handlers.""" + + # With autoindent on, we need some way to exit the input loop, and I + # don't want to force the user to have to backspace all the way to + # clear the line. The rule will be in this case, that either two + # lines of pure whitespace in a row, or a line of pure whitespace but + # of a size different to the indent level, will exit the input loop. 
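To get a concrete feel for how find_handler walks these checkers in ascending priority order (the first non-None result wins), a small probe is sketched below. It assumes an interactive session where automagic is on, `ls` resolves to a line magic (as it does via the alias system on POSIX), and `f` is a plain callable; it also assumes the legacy prefilter pipeline is still reachable through ip.prefilter_manager, which only exercises this module, not the inputtransformer2 path that run_cell uses in current IPython:

    ip = get_ipython()
    pm = ip.prefilter_manager

    f = lambda x: x   # at the prompt this lands in user_ns, which AutocallChecker inspects

    # AssignmentChecker: a plain assignment always routes to the 'normal' handler.
    print(pm.prefilter_line("ls = 'hi'"))   # -> "ls = 'hi'", unchanged
    # AutoMagicChecker: bare magic name with automagic on -> 'magic' handler.
    print(pm.prefilter_line("ls -la"))      # -> "get_ipython().run_line_magic('ls', '-la')"
    # PythonOpsChecker: an explicit call is left alone ('normal' handler).
    print(pm.prefilter_line("f(3)"))        # -> "f(3)"
    # AutocallChecker: callable followed by an argument -> 'auto' handler.
    print(pm.prefilter_line("f 3"))         # -> "f(3)" when %autocall is enabled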
+ line = line_info.line + continue_prompt = line_info.continue_prompt + + if (continue_prompt and + self.shell.autoindent and + line.isspace() and + 0 < abs(len(line) - self.shell.indent_current_nsp) <= 2): + line = '' + + return line + + def __str__(self): + return "<%s(name=%s)>" % (self.__class__.__name__, self.handler_name) + + +class MacroHandler(PrefilterHandler): + handler_name = Unicode("macro") + + def handle(self, line_info): + obj = self.shell.user_ns.get(line_info.ifun) + pre_space = line_info.pre_whitespace + line_sep = "\n" + pre_space + return pre_space + line_sep.join(obj.value.splitlines()) + + +class MagicHandler(PrefilterHandler): + + handler_name = Unicode('magic') + esc_strings = List([ESC_MAGIC]) + + def handle(self, line_info): + """Execute magic functions.""" + ifun = line_info.ifun + the_rest = line_info.the_rest + #Prepare arguments for get_ipython().run_line_magic(magic_name, magic_args) + t_arg_s = ifun + " " + the_rest + t_magic_name, _, t_magic_arg_s = t_arg_s.partition(' ') + t_magic_name = t_magic_name.lstrip(ESC_MAGIC) + cmd = '%sget_ipython().run_line_magic(%r, %r)' % (line_info.pre_whitespace, t_magic_name, t_magic_arg_s) + return cmd + + +class AutoHandler(PrefilterHandler): + + handler_name = Unicode('auto') + esc_strings = List([ESC_PAREN, ESC_QUOTE, ESC_QUOTE2]) + + def handle(self, line_info): + """Handle lines which can be auto-executed, quoting if requested.""" + line = line_info.line + ifun = line_info.ifun + the_rest = line_info.the_rest + esc = line_info.esc + continue_prompt = line_info.continue_prompt + obj = line_info.ofind(self.shell)['obj'] + + # This should only be active for single-line input! + if continue_prompt: + return line + + force_auto = isinstance(obj, IPyAutocall) + + # User objects sometimes raise exceptions on attribute access other + # than AttributeError (we've seen it in the past), so it's safest to be + # ultra-conservative here and catch all. + try: + auto_rewrite = obj.rewrite + except Exception: + auto_rewrite = True + + if esc == ESC_QUOTE: + # Auto-quote splitting on whitespace + newcmd = '%s("%s")' % (ifun,'", "'.join(the_rest.split()) ) + elif esc == ESC_QUOTE2: + # Auto-quote whole string + newcmd = '%s("%s")' % (ifun,the_rest) + elif esc == ESC_PAREN: + newcmd = '%s(%s)' % (ifun,",".join(the_rest.split())) + else: + # Auto-paren. + if force_auto: + # Don't rewrite if it is already a call. + do_rewrite = not the_rest.startswith('(') + else: + if not the_rest: + # We only apply it to argument-less calls if the autocall + # parameter is set to 2. + do_rewrite = (self.shell.autocall >= 2) + elif the_rest.startswith('[') and hasattr(obj, '__getitem__'): + # Don't autocall in this case: item access for an object + # which is BOTH callable and implements __getitem__. + do_rewrite = False + else: + do_rewrite = True + + # Figure out the rewritten command + if do_rewrite: + if the_rest.endswith(';'): + newcmd = '%s(%s);' % (ifun.rstrip(),the_rest[:-1]) + else: + newcmd = '%s(%s)' % (ifun.rstrip(), the_rest) + else: + normal_handler = self.prefilter_manager.get_handler_by_name('normal') + return normal_handler.handle(line_info) + + # Display the rewritten call + if auto_rewrite: + self.shell.auto_rewrite_input(newcmd) + + return newcmd + + +class EmacsHandler(PrefilterHandler): + + handler_name = Unicode('emacs') + esc_strings = List([]) + + def handle(self, line_info): + """Handle input lines marked by python-mode.""" + + # Currently, nothing is done. Later more functionality can be added + # here if needed. 
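The three escape branches in AutoHandler.handle above produce the rewrites summarized below. The escape characters themselves (ESC_QUOTE, ESC_QUOTE2, ESC_PAREN) are imported from inputtransformer2 and are not shown in this hunk; in stock IPython they are ',', ';' and '/', which is what the sample prompt lines assume:

    # For any callable f in the user namespace:
    #
    #   ,f a b   ->  f("a", "b")   # ESC_QUOTE: rest split on whitespace, each word quoted
    #   ;f a b   ->  f("a b")      # ESC_QUOTE2: the whole rest quoted as one string
    #   /f 1 2   ->  f(1,2)        # ESC_PAREN: words joined with commas inside parens
    #   f 3      ->  f(3)          # no escape: plain auto-paren, only when %autocall is on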
+ + # The input cache shouldn't be updated + return line_info.line + + +#----------------------------------------------------------------------------- +# Defaults +#----------------------------------------------------------------------------- + + +_default_transformers = [ +] + +_default_checkers = [ + EmacsChecker, + MacroChecker, + IPyAutocallChecker, + AssignmentChecker, + AutoMagicChecker, + PythonOpsChecker, + AutocallChecker +] + +_default_handlers = [ + PrefilterHandler, + MacroHandler, + MagicHandler, + AutoHandler, + EmacsHandler +] diff --git a/contrib/python/ipython/py3/IPython/core/profile/README_STARTUP b/contrib/python/ipython/py3/IPython/core/profile/README_STARTUP index 051134cfc31..61d47000421 100644 --- a/contrib/python/ipython/py3/IPython/core/profile/README_STARTUP +++ b/contrib/python/ipython/py3/IPython/core/profile/README_STARTUP @@ -1,11 +1,11 @@ -This is the IPython startup directory - -.py and .ipy files in this directory will be run *prior* to any code or files specified -via the exec_lines or exec_files configurables whenever you load this profile. - -Files will be run in lexicographical order, so you can control the execution order of files -with a prefix, e.g.:: - - 00-first.py - 50-middle.py - 99-last.ipy +This is the IPython startup directory + +.py and .ipy files in this directory will be run *prior* to any code or files specified +via the exec_lines or exec_files configurables whenever you load this profile. + +Files will be run in lexicographical order, so you can control the execution order of files +with a prefix, e.g.:: + + 00-first.py + 50-middle.py + 99-last.ipy diff --git a/contrib/python/ipython/py3/IPython/core/profileapp.py b/contrib/python/ipython/py3/IPython/core/profileapp.py index 92a10edb7c6..9a1bae55ac5 100644 --- a/contrib/python/ipython/py3/IPython/core/profileapp.py +++ b/contrib/python/ipython/py3/IPython/core/profileapp.py @@ -1,312 +1,312 @@ -# encoding: utf-8 -""" -An application for managing IPython profiles. - -To be invoked as the `ipython profile` subcommand. - -Authors: - -* Min RK - -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import os - -from traitlets.config.application import Application -from IPython.core.application import ( - BaseIPythonApplication, base_flags -) -from IPython.core.profiledir import ProfileDir -from IPython.utils.importstring import import_item -from IPython.paths import get_ipython_dir, get_ipython_package_dir -from traitlets import Unicode, Bool, Dict, observe - -#----------------------------------------------------------------------------- -# Constants -#----------------------------------------------------------------------------- - -create_help = """Create an IPython profile by name - -Create an ipython profile directory by its name or -profile directory path. Profile directories contain -configuration, log and security related files and are named -using the convention 'profile_<name>'. By default they are -located in your ipython directory. 
Once created, you will -can edit the configuration files in the profile -directory to configure IPython. Most users will create a -profile directory by name, -`ipython profile create myprofile`, which will put the directory -in `<ipython_dir>/profile_myprofile`. -""" -list_help = """List available IPython profiles - -List all available profiles, by profile location, that can -be found in the current working directly or in the ipython -directory. Profile directories are named using the convention -'profile_<profile>'. -""" -profile_help = """Manage IPython profiles - -Profile directories contain -configuration, log and security related files and are named -using the convention 'profile_<name>'. By default they are -located in your ipython directory. You can create profiles -with `ipython profile create <name>`, or see the profiles you -already have with `ipython profile list` - -To get started configuring IPython, simply do: - -$> ipython profile create - -and IPython will create the default profile in <ipython_dir>/profile_default, -where you can edit ipython_config.py to start configuring IPython. - -""" - -_list_examples = "ipython profile list # list all profiles" - -_create_examples = """ -ipython profile create foo # create profile foo w/ default config files -ipython profile create foo --reset # restage default config files over current -ipython profile create foo --parallel # also stage parallel config files -""" - -_main_examples = """ -ipython profile create -h # show the help string for the create subcommand -ipython profile list -h # show the help string for the list subcommand - -ipython locate profile foo # print the path to the directory for profile 'foo' -""" - -#----------------------------------------------------------------------------- -# Profile Application Class (for `ipython profile` subcommand) -#----------------------------------------------------------------------------- - - -def list_profiles_in(path): - """list profiles in a given root directory""" - profiles = [] - - # for python 3.6+ rewrite to: with os.scandir(path) as dirlist: - files = os.scandir(path) - for f in files: - if f.is_dir() and f.name.startswith('profile_'): - profiles.append(f.name.split('_', 1)[-1]) - return profiles - - -def list_bundled_profiles(): - """list profiles that are bundled with IPython.""" - path = os.path.join(get_ipython_package_dir(), u'core', u'profile') - profiles = [] - - # for python 3.6+ rewrite to: with os.scandir(path) as dirlist: - files = os.scandir(path) - for profile in files: - if profile.is_dir() and profile.name != "__pycache__": - profiles.append(profile.name) - return profiles - - -class ProfileLocate(BaseIPythonApplication): - description = """print the path to an IPython profile dir""" - - def parse_command_line(self, argv=None): - super(ProfileLocate, self).parse_command_line(argv) - if self.extra_args: - self.profile = self.extra_args[0] - - def start(self): - print(self.profile_dir.location) - - -class ProfileList(Application): - name = u'ipython-profile' - description = list_help - examples = _list_examples - - aliases = Dict({ - 'ipython-dir' : 'ProfileList.ipython_dir', - 'log-level' : 'Application.log_level', - }) - flags = Dict(dict( - debug = ({'Application' : {'log_level' : 0}}, - "Set Application.log_level to 0, maximizing log output." - ) - )) - - ipython_dir = Unicode(get_ipython_dir(), - help=""" - The name of the IPython directory. This directory is used for logging - configuration (through profiles), history storage, etc. 
The default - is usually $HOME/.ipython. This options can also be specified through - the environment variable IPYTHONDIR. - """ - ).tag(config=True) - - - def _print_profiles(self, profiles): - """print list of profiles, indented.""" - for profile in profiles: - print(' %s' % profile) - - def list_profile_dirs(self): - profiles = list_bundled_profiles() - if profiles: - print() - print("Available profiles in IPython:") - self._print_profiles(profiles) - print() - print(" The first request for a bundled profile will copy it") - print(" into your IPython directory (%s)," % self.ipython_dir) - print(" where you can customize it.") - - profiles = list_profiles_in(self.ipython_dir) - if profiles: - print() - print("Available profiles in %s:" % self.ipython_dir) - self._print_profiles(profiles) - - profiles = list_profiles_in(os.getcwd()) - if profiles: - print() - print( - "Profiles from CWD have been removed for security reason, see CVE-2022-21699:" - ) - - print() - print("To use any of the above profiles, start IPython with:") - print(" ipython --profile=<name>") - print() - - def start(self): - self.list_profile_dirs() - - -create_flags = {} -create_flags.update(base_flags) -# don't include '--init' flag, which implies running profile create in other apps -create_flags.pop('init') -create_flags['reset'] = ({'ProfileCreate': {'overwrite' : True}}, - "reset config files in this profile to the defaults.") -create_flags['parallel'] = ({'ProfileCreate': {'parallel' : True}}, - "Include the config files for parallel " - "computing apps (ipengine, ipcontroller, etc.)") - - -class ProfileCreate(BaseIPythonApplication): - name = u'ipython-profile' - description = create_help - examples = _create_examples - auto_create = Bool(True) - def _log_format_default(self): - return "[%(name)s] %(message)s" - - def _copy_config_files_default(self): - return True - - parallel = Bool(False, - help="whether to include parallel computing config files" - ).tag(config=True) - - @observe('parallel') - def _parallel_changed(self, change): - parallel_files = [ 'ipcontroller_config.py', - 'ipengine_config.py', - 'ipcluster_config.py' - ] - if change['new']: - for cf in parallel_files: - self.config_files.append(cf) - else: - for cf in parallel_files: - if cf in self.config_files: - self.config_files.remove(cf) - - def parse_command_line(self, argv): - super(ProfileCreate, self).parse_command_line(argv) - # accept positional arg as profile name - if self.extra_args: - self.profile = self.extra_args[0] - - flags = Dict(create_flags) - - classes = [ProfileDir] - - def _import_app(self, app_path): - """import an app class""" - app = None - name = app_path.rsplit('.', 1)[-1] - try: - app = import_item(app_path) - except ImportError: - self.log.info("Couldn't import %s, config file will be excluded", name) - except Exception: - self.log.warning('Unexpected error importing %s', name, exc_info=True) - return app - - def init_config_files(self): - super(ProfileCreate, self).init_config_files() - # use local imports, since these classes may import from here - from IPython.terminal.ipapp import TerminalIPythonApp - apps = [TerminalIPythonApp] - for app_path in ( - 'ipykernel.kernelapp.IPKernelApp', - ): - app = self._import_app(app_path) - if app is not None: - apps.append(app) - if self.parallel: - from ipyparallel.apps.ipcontrollerapp import IPControllerApp - from ipyparallel.apps.ipengineapp import IPEngineApp - from ipyparallel.apps.ipclusterapp import IPClusterStart - apps.extend([ - IPControllerApp, - IPEngineApp, - 
IPClusterStart, - ]) - for App in apps: - app = App() - app.config.update(self.config) - app.log = self.log - app.overwrite = self.overwrite - app.copy_config_files=True - app.ipython_dir=self.ipython_dir - app.profile_dir=self.profile_dir - app.init_config_files() - - def stage_default_config_file(self): - pass - - -class ProfileApp(Application): - name = u'ipython profile' - description = profile_help - examples = _main_examples - - subcommands = Dict(dict( - create = (ProfileCreate, ProfileCreate.description.splitlines()[0]), - list = (ProfileList, ProfileList.description.splitlines()[0]), - locate = (ProfileLocate, ProfileLocate.description.splitlines()[0]), - )) - - def start(self): - if self.subapp is None: - print("No subcommand specified. Must specify one of: %s"%(self.subcommands.keys())) - print() - self.print_description() - self.print_subcommands() - self.exit(1) - else: - return self.subapp.start() +# encoding: utf-8 +""" +An application for managing IPython profiles. + +To be invoked as the `ipython profile` subcommand. + +Authors: + +* Min RK + +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import os + +from traitlets.config.application import Application +from IPython.core.application import ( + BaseIPythonApplication, base_flags +) +from IPython.core.profiledir import ProfileDir +from IPython.utils.importstring import import_item +from IPython.paths import get_ipython_dir, get_ipython_package_dir +from traitlets import Unicode, Bool, Dict, observe + +#----------------------------------------------------------------------------- +# Constants +#----------------------------------------------------------------------------- + +create_help = """Create an IPython profile by name + +Create an ipython profile directory by its name or +profile directory path. Profile directories contain +configuration, log and security related files and are named +using the convention 'profile_<name>'. By default they are +located in your ipython directory. Once created, you will +can edit the configuration files in the profile +directory to configure IPython. Most users will create a +profile directory by name, +`ipython profile create myprofile`, which will put the directory +in `<ipython_dir>/profile_myprofile`. +""" +list_help = """List available IPython profiles + +List all available profiles, by profile location, that can +be found in the current working directly or in the ipython +directory. Profile directories are named using the convention +'profile_<profile>'. +""" +profile_help = """Manage IPython profiles + +Profile directories contain +configuration, log and security related files and are named +using the convention 'profile_<name>'. By default they are +located in your ipython directory. 
You can create profiles +with `ipython profile create <name>`, or see the profiles you +already have with `ipython profile list` + +To get started configuring IPython, simply do: + +$> ipython profile create + +and IPython will create the default profile in <ipython_dir>/profile_default, +where you can edit ipython_config.py to start configuring IPython. + +""" + +_list_examples = "ipython profile list # list all profiles" + +_create_examples = """ +ipython profile create foo # create profile foo w/ default config files +ipython profile create foo --reset # restage default config files over current +ipython profile create foo --parallel # also stage parallel config files +""" + +_main_examples = """ +ipython profile create -h # show the help string for the create subcommand +ipython profile list -h # show the help string for the list subcommand + +ipython locate profile foo # print the path to the directory for profile 'foo' +""" + +#----------------------------------------------------------------------------- +# Profile Application Class (for `ipython profile` subcommand) +#----------------------------------------------------------------------------- + + +def list_profiles_in(path): + """list profiles in a given root directory""" + profiles = [] + + # for python 3.6+ rewrite to: with os.scandir(path) as dirlist: + files = os.scandir(path) + for f in files: + if f.is_dir() and f.name.startswith('profile_'): + profiles.append(f.name.split('_', 1)[-1]) + return profiles + + +def list_bundled_profiles(): + """list profiles that are bundled with IPython.""" + path = os.path.join(get_ipython_package_dir(), u'core', u'profile') + profiles = [] + + # for python 3.6+ rewrite to: with os.scandir(path) as dirlist: + files = os.scandir(path) + for profile in files: + if profile.is_dir() and profile.name != "__pycache__": + profiles.append(profile.name) + return profiles + + +class ProfileLocate(BaseIPythonApplication): + description = """print the path to an IPython profile dir""" + + def parse_command_line(self, argv=None): + super(ProfileLocate, self).parse_command_line(argv) + if self.extra_args: + self.profile = self.extra_args[0] + + def start(self): + print(self.profile_dir.location) + + +class ProfileList(Application): + name = u'ipython-profile' + description = list_help + examples = _list_examples + + aliases = Dict({ + 'ipython-dir' : 'ProfileList.ipython_dir', + 'log-level' : 'Application.log_level', + }) + flags = Dict(dict( + debug = ({'Application' : {'log_level' : 0}}, + "Set Application.log_level to 0, maximizing log output." + ) + )) + + ipython_dir = Unicode(get_ipython_dir(), + help=""" + The name of the IPython directory. This directory is used for logging + configuration (through profiles), history storage, etc. The default + is usually $HOME/.ipython. This options can also be specified through + the environment variable IPYTHONDIR. 
+ """ + ).tag(config=True) + + + def _print_profiles(self, profiles): + """print list of profiles, indented.""" + for profile in profiles: + print(' %s' % profile) + + def list_profile_dirs(self): + profiles = list_bundled_profiles() + if profiles: + print() + print("Available profiles in IPython:") + self._print_profiles(profiles) + print() + print(" The first request for a bundled profile will copy it") + print(" into your IPython directory (%s)," % self.ipython_dir) + print(" where you can customize it.") + + profiles = list_profiles_in(self.ipython_dir) + if profiles: + print() + print("Available profiles in %s:" % self.ipython_dir) + self._print_profiles(profiles) + + profiles = list_profiles_in(os.getcwd()) + if profiles: + print() + print( + "Profiles from CWD have been removed for security reason, see CVE-2022-21699:" + ) + + print() + print("To use any of the above profiles, start IPython with:") + print(" ipython --profile=<name>") + print() + + def start(self): + self.list_profile_dirs() + + +create_flags = {} +create_flags.update(base_flags) +# don't include '--init' flag, which implies running profile create in other apps +create_flags.pop('init') +create_flags['reset'] = ({'ProfileCreate': {'overwrite' : True}}, + "reset config files in this profile to the defaults.") +create_flags['parallel'] = ({'ProfileCreate': {'parallel' : True}}, + "Include the config files for parallel " + "computing apps (ipengine, ipcontroller, etc.)") + + +class ProfileCreate(BaseIPythonApplication): + name = u'ipython-profile' + description = create_help + examples = _create_examples + auto_create = Bool(True) + def _log_format_default(self): + return "[%(name)s] %(message)s" + + def _copy_config_files_default(self): + return True + + parallel = Bool(False, + help="whether to include parallel computing config files" + ).tag(config=True) + + @observe('parallel') + def _parallel_changed(self, change): + parallel_files = [ 'ipcontroller_config.py', + 'ipengine_config.py', + 'ipcluster_config.py' + ] + if change['new']: + for cf in parallel_files: + self.config_files.append(cf) + else: + for cf in parallel_files: + if cf in self.config_files: + self.config_files.remove(cf) + + def parse_command_line(self, argv): + super(ProfileCreate, self).parse_command_line(argv) + # accept positional arg as profile name + if self.extra_args: + self.profile = self.extra_args[0] + + flags = Dict(create_flags) + + classes = [ProfileDir] + + def _import_app(self, app_path): + """import an app class""" + app = None + name = app_path.rsplit('.', 1)[-1] + try: + app = import_item(app_path) + except ImportError: + self.log.info("Couldn't import %s, config file will be excluded", name) + except Exception: + self.log.warning('Unexpected error importing %s', name, exc_info=True) + return app + + def init_config_files(self): + super(ProfileCreate, self).init_config_files() + # use local imports, since these classes may import from here + from IPython.terminal.ipapp import TerminalIPythonApp + apps = [TerminalIPythonApp] + for app_path in ( + 'ipykernel.kernelapp.IPKernelApp', + ): + app = self._import_app(app_path) + if app is not None: + apps.append(app) + if self.parallel: + from ipyparallel.apps.ipcontrollerapp import IPControllerApp + from ipyparallel.apps.ipengineapp import IPEngineApp + from ipyparallel.apps.ipclusterapp import IPClusterStart + apps.extend([ + IPControllerApp, + IPEngineApp, + IPClusterStart, + ]) + for App in apps: + app = App() + app.config.update(self.config) + app.log = self.log + app.overwrite = 
self.overwrite + app.copy_config_files=True + app.ipython_dir=self.ipython_dir + app.profile_dir=self.profile_dir + app.init_config_files() + + def stage_default_config_file(self): + pass + + +class ProfileApp(Application): + name = u'ipython profile' + description = profile_help + examples = _main_examples + + subcommands = Dict(dict( + create = (ProfileCreate, ProfileCreate.description.splitlines()[0]), + list = (ProfileList, ProfileList.description.splitlines()[0]), + locate = (ProfileLocate, ProfileLocate.description.splitlines()[0]), + )) + + def start(self): + if self.subapp is None: + print("No subcommand specified. Must specify one of: %s"%(self.subcommands.keys())) + print() + self.print_description() + self.print_subcommands() + self.exit(1) + else: + return self.subapp.start() diff --git a/contrib/python/ipython/py3/IPython/core/profiledir.py b/contrib/python/ipython/py3/IPython/core/profiledir.py index 3eaf691e690..ba8f82b7d98 100644 --- a/contrib/python/ipython/py3/IPython/core/profiledir.py +++ b/contrib/python/ipython/py3/IPython/core/profiledir.py @@ -1,221 +1,221 @@ -# encoding: utf-8 -"""An object for managing IPython profile directories.""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import os -import shutil -import errno - -from traitlets.config.configurable import LoggingConfigurable -from ..paths import get_ipython_package_dir -from ..utils.path import expand_path, ensure_dir_exists -from traitlets import Unicode, Bool, observe - -#----------------------------------------------------------------------------- -# Module errors -#----------------------------------------------------------------------------- - -class ProfileDirError(Exception): - pass - - -#----------------------------------------------------------------------------- -# Class for managing profile directories -#----------------------------------------------------------------------------- - -class ProfileDir(LoggingConfigurable): - """An object to manage the profile directory and its resources. - - The profile directory is used by all IPython applications, to manage - configuration, logging and security. - - This object knows how to find, create and manage these directories. This - should be used by any code that wants to handle profiles. - """ - - security_dir_name = Unicode('security') - log_dir_name = Unicode('log') - startup_dir_name = Unicode('startup') - pid_dir_name = Unicode('pid') - static_dir_name = Unicode('static') - security_dir = Unicode(u'') - log_dir = Unicode(u'') - startup_dir = Unicode(u'') - pid_dir = Unicode(u'') - static_dir = Unicode(u'') - - location = Unicode(u'', - help="""Set the profile location directly. 
This overrides the logic used by the - `profile` option.""", - ).tag(config=True) - - _location_isset = Bool(False) # flag for detecting multiply set location - @observe('location') - def _location_changed(self, change): - if self._location_isset: - raise RuntimeError("Cannot set profile location more than once.") - self._location_isset = True - new = change['new'] - ensure_dir_exists(new) - - # ensure config files exist: - self.security_dir = os.path.join(new, self.security_dir_name) - self.log_dir = os.path.join(new, self.log_dir_name) - self.startup_dir = os.path.join(new, self.startup_dir_name) - self.pid_dir = os.path.join(new, self.pid_dir_name) - self.static_dir = os.path.join(new, self.static_dir_name) - self.check_dirs() - - def _mkdir(self, path, mode=None): - """ensure a directory exists at a given path - - This is a version of os.mkdir, with the following differences: - - - returns True if it created the directory, False otherwise - - ignores EEXIST, protecting against race conditions where - the dir may have been created in between the check and - the creation - - sets permissions if requested and the dir already exists - """ - if os.path.exists(path): - if mode and os.stat(path).st_mode != mode: - try: - os.chmod(path, mode) - except OSError: - self.log.warning( - "Could not set permissions on %s", - path - ) - return False - try: - if mode: - os.mkdir(path, mode) - else: - os.mkdir(path) - except OSError as e: - if e.errno == errno.EEXIST: - return False - else: - raise - - return True - - @observe('log_dir') - def check_log_dir(self, change=None): - self._mkdir(self.log_dir) - - @observe('startup_dir') - def check_startup_dir(self, change=None): - self._mkdir(self.startup_dir) - - readme = os.path.join(self.startup_dir, 'README') - - if not os.path.exists(readme): - import pkgutil - with open(readme, 'wb') as f: - f.write(pkgutil.get_data(__name__, 'profile/README_STARTUP')) - - @observe('security_dir') - def check_security_dir(self, change=None): - self._mkdir(self.security_dir, 0o40700) - - @observe('pid_dir') - def check_pid_dir(self, change=None): - self._mkdir(self.pid_dir, 0o40700) - - def check_dirs(self): - self.check_security_dir() - self.check_log_dir() - self.check_pid_dir() - self.check_startup_dir() - - def copy_config_file(self, config_file, path=None, overwrite=False): - """Copy a default config file into the active profile directory. - - Default configuration files are kept in :mod:`IPython.core.profile`. - This function moves these from that location to the working profile - directory. - """ - dst = os.path.join(self.location, config_file) - if os.path.isfile(dst) and not overwrite: - return False - if path is None: - path = os.path.join(get_ipython_package_dir(), u'core', u'profile', u'default') - src = os.path.join(path, config_file) - shutil.copy(src, dst) - return True - - @classmethod - def create_profile_dir(cls, profile_dir, config=None): - """Create a new profile directory given a full path. - - Parameters - ---------- - profile_dir : str - The full path to the profile directory. If it does exist, it will - be used. If not, it will be created. - """ - return cls(location=profile_dir, config=config) - - @classmethod - def create_profile_dir_by_name(cls, path, name=u'default', config=None): - """Create a profile dir by profile name and path. - - Parameters - ---------- - path : unicode - The path (directory) to put the profile directory in. - name : unicode - The name of the profile. The name of the profile directory will - be "profile_<profile>". 
- """ - if not os.path.isdir(path): - raise ProfileDirError('Directory not found: %s' % path) - profile_dir = os.path.join(path, u'profile_' + name) - return cls(location=profile_dir, config=config) - - @classmethod - def find_profile_dir_by_name(cls, ipython_dir, name=u'default', config=None): - """Find an existing profile dir by profile name, return its ProfileDir. - - This searches through a sequence of paths for a profile dir. If it - is not found, a :class:`ProfileDirError` exception will be raised. - - The search path algorithm is: - 1. ``os.getcwd()`` # removed for security reason. - 2. ``ipython_dir`` - - Parameters - ---------- - ipython_dir : unicode or str - The IPython directory to use. - name : unicode or str - The name of the profile. The name of the profile directory - will be "profile_<profile>". - """ - dirname = u'profile_' + name - paths = [ipython_dir] - for p in paths: - profile_dir = os.path.join(p, dirname) - if os.path.isdir(profile_dir): - return cls(location=profile_dir, config=config) - else: - raise ProfileDirError('Profile directory not found in paths: %s' % dirname) - - @classmethod - def find_profile_dir(cls, profile_dir, config=None): - """Find/create a profile dir and return its ProfileDir. - - This will create the profile directory if it doesn't exist. - - Parameters - ---------- - profile_dir : unicode or str - The path of the profile directory. - """ - profile_dir = expand_path(profile_dir) - if not os.path.isdir(profile_dir): - raise ProfileDirError('Profile directory not found: %s' % profile_dir) - return cls(location=profile_dir, config=config) +# encoding: utf-8 +"""An object for managing IPython profile directories.""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import os +import shutil +import errno + +from traitlets.config.configurable import LoggingConfigurable +from ..paths import get_ipython_package_dir +from ..utils.path import expand_path, ensure_dir_exists +from traitlets import Unicode, Bool, observe + +#----------------------------------------------------------------------------- +# Module errors +#----------------------------------------------------------------------------- + +class ProfileDirError(Exception): + pass + + +#----------------------------------------------------------------------------- +# Class for managing profile directories +#----------------------------------------------------------------------------- + +class ProfileDir(LoggingConfigurable): + """An object to manage the profile directory and its resources. + + The profile directory is used by all IPython applications, to manage + configuration, logging and security. + + This object knows how to find, create and manage these directories. This + should be used by any code that wants to handle profiles. + """ + + security_dir_name = Unicode('security') + log_dir_name = Unicode('log') + startup_dir_name = Unicode('startup') + pid_dir_name = Unicode('pid') + static_dir_name = Unicode('static') + security_dir = Unicode(u'') + log_dir = Unicode(u'') + startup_dir = Unicode(u'') + pid_dir = Unicode(u'') + static_dir = Unicode(u'') + + location = Unicode(u'', + help="""Set the profile location directly. 
This overrides the logic used by the + `profile` option.""", + ).tag(config=True) + + _location_isset = Bool(False) # flag for detecting multiply set location + @observe('location') + def _location_changed(self, change): + if self._location_isset: + raise RuntimeError("Cannot set profile location more than once.") + self._location_isset = True + new = change['new'] + ensure_dir_exists(new) + + # ensure config files exist: + self.security_dir = os.path.join(new, self.security_dir_name) + self.log_dir = os.path.join(new, self.log_dir_name) + self.startup_dir = os.path.join(new, self.startup_dir_name) + self.pid_dir = os.path.join(new, self.pid_dir_name) + self.static_dir = os.path.join(new, self.static_dir_name) + self.check_dirs() + + def _mkdir(self, path, mode=None): + """ensure a directory exists at a given path + + This is a version of os.mkdir, with the following differences: + + - returns True if it created the directory, False otherwise + - ignores EEXIST, protecting against race conditions where + the dir may have been created in between the check and + the creation + - sets permissions if requested and the dir already exists + """ + if os.path.exists(path): + if mode and os.stat(path).st_mode != mode: + try: + os.chmod(path, mode) + except OSError: + self.log.warning( + "Could not set permissions on %s", + path + ) + return False + try: + if mode: + os.mkdir(path, mode) + else: + os.mkdir(path) + except OSError as e: + if e.errno == errno.EEXIST: + return False + else: + raise + + return True + + @observe('log_dir') + def check_log_dir(self, change=None): + self._mkdir(self.log_dir) + + @observe('startup_dir') + def check_startup_dir(self, change=None): + self._mkdir(self.startup_dir) + + readme = os.path.join(self.startup_dir, 'README') + + if not os.path.exists(readme): + import pkgutil + with open(readme, 'wb') as f: + f.write(pkgutil.get_data(__name__, 'profile/README_STARTUP')) + + @observe('security_dir') + def check_security_dir(self, change=None): + self._mkdir(self.security_dir, 0o40700) + + @observe('pid_dir') + def check_pid_dir(self, change=None): + self._mkdir(self.pid_dir, 0o40700) + + def check_dirs(self): + self.check_security_dir() + self.check_log_dir() + self.check_pid_dir() + self.check_startup_dir() + + def copy_config_file(self, config_file, path=None, overwrite=False): + """Copy a default config file into the active profile directory. + + Default configuration files are kept in :mod:`IPython.core.profile`. + This function moves these from that location to the working profile + directory. + """ + dst = os.path.join(self.location, config_file) + if os.path.isfile(dst) and not overwrite: + return False + if path is None: + path = os.path.join(get_ipython_package_dir(), u'core', u'profile', u'default') + src = os.path.join(path, config_file) + shutil.copy(src, dst) + return True + + @classmethod + def create_profile_dir(cls, profile_dir, config=None): + """Create a new profile directory given a full path. + + Parameters + ---------- + profile_dir : str + The full path to the profile directory. If it does exist, it will + be used. If not, it will be created. + """ + return cls(location=profile_dir, config=config) + + @classmethod + def create_profile_dir_by_name(cls, path, name=u'default', config=None): + """Create a profile dir by profile name and path. + + Parameters + ---------- + path : unicode + The path (directory) to put the profile directory in. + name : unicode + The name of the profile. The name of the profile directory will + be "profile_<profile>". 
+ """ + if not os.path.isdir(path): + raise ProfileDirError('Directory not found: %s' % path) + profile_dir = os.path.join(path, u'profile_' + name) + return cls(location=profile_dir, config=config) + + @classmethod + def find_profile_dir_by_name(cls, ipython_dir, name=u'default', config=None): + """Find an existing profile dir by profile name, return its ProfileDir. + + This searches through a sequence of paths for a profile dir. If it + is not found, a :class:`ProfileDirError` exception will be raised. + + The search path algorithm is: + 1. ``os.getcwd()`` # removed for security reason. + 2. ``ipython_dir`` + + Parameters + ---------- + ipython_dir : unicode or str + The IPython directory to use. + name : unicode or str + The name of the profile. The name of the profile directory + will be "profile_<profile>". + """ + dirname = u'profile_' + name + paths = [ipython_dir] + for p in paths: + profile_dir = os.path.join(p, dirname) + if os.path.isdir(profile_dir): + return cls(location=profile_dir, config=config) + else: + raise ProfileDirError('Profile directory not found in paths: %s' % dirname) + + @classmethod + def find_profile_dir(cls, profile_dir, config=None): + """Find/create a profile dir and return its ProfileDir. + + This will create the profile directory if it doesn't exist. + + Parameters + ---------- + profile_dir : unicode or str + The path of the profile directory. + """ + profile_dir = expand_path(profile_dir) + if not os.path.isdir(profile_dir): + raise ProfileDirError('Profile directory not found: %s' % profile_dir) + return cls(location=profile_dir, config=config) diff --git a/contrib/python/ipython/py3/IPython/core/prompts.py b/contrib/python/ipython/py3/IPython/core/prompts.py index 708656b0110..7fd218d37ae 100644 --- a/contrib/python/ipython/py3/IPython/core/prompts.py +++ b/contrib/python/ipython/py3/IPython/core/prompts.py @@ -1,21 +1,21 @@ -# -*- coding: utf-8 -*- -"""Being removed -""" - -class LazyEvaluate(object): - """This is used for formatting strings with values that need to be updated - at that time, such as the current time or working directory.""" - def __init__(self, func, *args, **kwargs): - self.func = func - self.args = args - self.kwargs = kwargs - - def __call__(self, **kwargs): - self.kwargs.update(kwargs) - return self.func(*self.args, **self.kwargs) - - def __str__(self): - return str(self()) - - def __format__(self, format_spec): - return format(self(), format_spec) +# -*- coding: utf-8 -*- +"""Being removed +""" + +class LazyEvaluate(object): + """This is used for formatting strings with values that need to be updated + at that time, such as the current time or working directory.""" + def __init__(self, func, *args, **kwargs): + self.func = func + self.args = args + self.kwargs = kwargs + + def __call__(self, **kwargs): + self.kwargs.update(kwargs) + return self.func(*self.args, **self.kwargs) + + def __str__(self): + return str(self()) + + def __format__(self, format_spec): + return format(self(), format_spec) diff --git a/contrib/python/ipython/py3/IPython/core/pylabtools.py b/contrib/python/ipython/py3/IPython/core/pylabtools.py index 78fbd945128..c9c8e14aa28 100644 --- a/contrib/python/ipython/py3/IPython/core/pylabtools.py +++ b/contrib/python/ipython/py3/IPython/core/pylabtools.py @@ -1,423 +1,423 @@ -# -*- coding: utf-8 -*- -"""Pylab (matplotlib) support utilities.""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. 
- -from io import BytesIO -from binascii import b2a_base64 -from functools import partial -import warnings - -from IPython.core.display import _pngxy -from IPython.utils.decorators import flag_calls - -# If user specifies a GUI, that dictates the backend, otherwise we read the -# user's mpl default from the mpl rc structure -backends = { - "tk": "TkAgg", - "gtk": "GTKAgg", - "gtk3": "GTK3Agg", - "gtk4": "GTK4Agg", - "wx": "WXAgg", - "qt4": "Qt4Agg", - "qt5": "Qt5Agg", - "qt6": "QtAgg", - "qt": "Qt5Agg", - "osx": "MacOSX", - "nbagg": "nbAgg", - "notebook": "nbAgg", - "agg": "agg", - "svg": "svg", - "pdf": "pdf", - "ps": "ps", - "inline": "module://matplotlib_inline.backend_inline", - "ipympl": "module://ipympl.backend_nbagg", - "widget": "module://ipympl.backend_nbagg", -} - -# We also need a reverse backends2guis mapping that will properly choose which -# GUI support to activate based on the desired matplotlib backend. For the -# most part it's just a reverse of the above dict, but we also need to add a -# few others that map to the same GUI manually: -backend2gui = dict(zip(backends.values(), backends.keys())) -# In the reverse mapping, there are a few extra valid matplotlib backends that -# map to the same GUI support -backend2gui["GTK"] = backend2gui["GTKCairo"] = "gtk" -backend2gui["GTK3Cairo"] = "gtk3" -backend2gui["GTK4Cairo"] = "gtk4" -backend2gui["WX"] = "wx" -backend2gui["CocoaAgg"] = "osx" -# There needs to be a hysteresis here as the new QtAgg Matplotlib backend -# supports either Qt5 or Qt6 and the IPython qt event loop support Qt4, Qt5, -# and Qt6. -backend2gui["QtAgg"] = "qt" -backend2gui["Qt4Agg"] = "qt" -backend2gui["Qt5Agg"] = "qt" - -# And some backends that don't need GUI integration -del backend2gui["nbAgg"] -del backend2gui["agg"] -del backend2gui["svg"] -del backend2gui["pdf"] -del backend2gui["ps"] -del backend2gui["module://matplotlib_inline.backend_inline"] -del backend2gui["module://ipympl.backend_nbagg"] - -#----------------------------------------------------------------------------- -# Matplotlib utilities -#----------------------------------------------------------------------------- - - -def getfigs(*fig_nums): - """Get a list of matplotlib figures by figure numbers. - - If no arguments are given, all available figures are returned. If the - argument list contains references to invalid figures, a warning is printed - but the function continues pasting further figures. - - Parameters - ---------- - figs : tuple - A tuple of ints giving the figure numbers of the figures to return. - """ - from matplotlib._pylab_helpers import Gcf - if not fig_nums: - fig_managers = Gcf.get_all_fig_managers() - return [fm.canvas.figure for fm in fig_managers] - else: - figs = [] - for num in fig_nums: - f = Gcf.figs.get(num) - if f is None: - print('Warning: figure %s not available.' % num) - else: - figs.append(f.canvas.figure) - return figs - - -def figsize(sizex, sizey): - """Set the default figure size to be [sizex, sizey]. - - This is just an easy to remember, convenience wrapper that sets:: - - matplotlib.rcParams['figure.figsize'] = [sizex, sizey] - """ - import matplotlib - matplotlib.rcParams['figure.figsize'] = [sizex, sizey] - - -def print_figure(fig, fmt="png", bbox_inches="tight", base64=False, **kwargs): - """Print a figure to an image, and return the resulting file data - - Returned data will be bytes unless ``fmt='svg'``, - in which case it will be unicode. - - Any keyword args are passed to fig.canvas.print_figure, - such as ``quality`` or ``bbox_inches``. 
- - If `base64` is True, return base64-encoded str instead of raw bytes - for binary-encoded image formats - - .. versionadded:: 7.29 - base64 argument - """ - # When there's an empty figure, we shouldn't return anything, otherwise we - # get big blank areas in the qt console. - if not fig.axes and not fig.lines: - return - - dpi = fig.dpi - if fmt == 'retina': - dpi = dpi * 2 - fmt = 'png' - - # build keyword args - kw = { - "format":fmt, - "facecolor":fig.get_facecolor(), - "edgecolor":fig.get_edgecolor(), - "dpi":dpi, - "bbox_inches":bbox_inches, - } - # **kwargs get higher priority - kw.update(kwargs) - - bytes_io = BytesIO() - if fig.canvas is None: - from matplotlib.backend_bases import FigureCanvasBase - FigureCanvasBase(fig) - - fig.canvas.print_figure(bytes_io, **kw) - data = bytes_io.getvalue() - if fmt == 'svg': - data = data.decode('utf-8') - elif base64: - data = b2a_base64(data).decode("ascii") - return data - -def retina_figure(fig, base64=False, **kwargs): - """format a figure as a pixel-doubled (retina) PNG - - If `base64` is True, return base64-encoded str instead of raw bytes - for binary-encoded image formats - - .. versionadded:: 7.29 - base64 argument - """ - pngdata = print_figure(fig, fmt="retina", base64=False, **kwargs) - # Make sure that retina_figure acts just like print_figure and returns - # None when the figure is empty. - if pngdata is None: - return - w, h = _pngxy(pngdata) - metadata = {"width": w//2, "height":h//2} - if base64: - pngdata = b2a_base64(pngdata).decode("ascii") - return pngdata, metadata - - -# We need a little factory function here to create the closure where -# safe_execfile can live. -def mpl_runner(safe_execfile): - """Factory to return a matplotlib-enabled runner for %run. - - Parameters - ---------- - safe_execfile : function - This must be a function with the same interface as the - :meth:`safe_execfile` method of IPython. - - Returns - ------- - A function suitable for use as the ``runner`` argument of the %run magic - function. - """ - - def mpl_execfile(fname,*where,**kw): - """matplotlib-aware wrapper around safe_execfile. - - Its interface is identical to that of the :func:`execfile` builtin. - - This is ultimately a call to execfile(), but wrapped in safeties to - properly handle interactive rendering.""" - - import matplotlib - import matplotlib.pyplot as plt - - #print '*** Matplotlib runner ***' # dbg - # turn off rendering until end of script - is_interactive = matplotlib.rcParams['interactive'] - matplotlib.interactive(False) - safe_execfile(fname,*where,**kw) - matplotlib.interactive(is_interactive) - # make rendering call now, if the user tried to do it - if plt.draw_if_interactive.called: - plt.draw() - plt.draw_if_interactive.called = False - - # re-draw everything that is stale - try: - da = plt.draw_all - except AttributeError: - pass - else: - da() - - return mpl_execfile - - -def _reshow_nbagg_figure(fig): - """reshow an nbagg figure""" - try: - reshow = fig.canvas.manager.reshow - except AttributeError: - raise NotImplementedError() - else: - reshow() - - -def select_figure_formats(shell, formats, **kwargs): - """Select figure formats for the inline backend. - - Parameters - ========== - shell : InteractiveShell - The main IPython instance. - formats : str or set - One or a set of figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'. - **kwargs : any - Extra keyword arguments to be passed to fig.canvas.print_figure. 
- """ - import matplotlib - from matplotlib.figure import Figure - - svg_formatter = shell.display_formatter.formatters['image/svg+xml'] - png_formatter = shell.display_formatter.formatters['image/png'] - jpg_formatter = shell.display_formatter.formatters['image/jpeg'] - pdf_formatter = shell.display_formatter.formatters['application/pdf'] - - if isinstance(formats, str): - formats = {formats} - # cast in case of list / tuple - formats = set(formats) - - [ f.pop(Figure, None) for f in shell.display_formatter.formatters.values() ] - mplbackend = matplotlib.get_backend().lower() - if mplbackend == 'nbagg' or mplbackend == 'module://ipympl.backend_nbagg': - formatter = shell.display_formatter.ipython_display_formatter - formatter.for_type(Figure, _reshow_nbagg_figure) - - supported = {'png', 'png2x', 'retina', 'jpg', 'jpeg', 'svg', 'pdf'} - bad = formats.difference(supported) - if bad: - bs = "%s" % ','.join([repr(f) for f in bad]) - gs = "%s" % ','.join([repr(f) for f in supported]) - raise ValueError("supported formats are: %s not %s" % (gs, bs)) - - if "png" in formats: - png_formatter.for_type( - Figure, partial(print_figure, fmt="png", base64=True, **kwargs) - ) - if "retina" in formats or "png2x" in formats: - png_formatter.for_type(Figure, partial(retina_figure, base64=True, **kwargs)) - if "jpg" in formats or "jpeg" in formats: - jpg_formatter.for_type( - Figure, partial(print_figure, fmt="jpg", base64=True, **kwargs) - ) - if "svg" in formats: - svg_formatter.for_type(Figure, partial(print_figure, fmt="svg", **kwargs)) - if "pdf" in formats: - pdf_formatter.for_type( - Figure, partial(print_figure, fmt="pdf", base64=True, **kwargs) - ) - -#----------------------------------------------------------------------------- -# Code for initializing matplotlib and importing pylab -#----------------------------------------------------------------------------- - - -def find_gui_and_backend(gui=None, gui_select=None): - """Given a gui string return the gui and mpl backend. - - Parameters - ---------- - gui : str - Can be one of ('tk','gtk','wx','qt','qt4','inline','agg'). - gui_select : str - Can be one of ('tk','gtk','wx','qt','qt4','inline'). - This is any gui already selected by the shell. - - Returns - ------- - A tuple of (gui, backend) where backend is one of ('TkAgg','GTKAgg', - 'WXAgg','Qt4Agg','module://matplotlib_inline.backend_inline','agg'). - """ - - import matplotlib - - if gui and gui != 'auto': - # select backend based on requested gui - backend = backends[gui] - if gui == 'agg': - gui = None - else: - # We need to read the backend from the original data structure, *not* - # from mpl.rcParams, since a prior invocation of %matplotlib may have - # overwritten that. - # WARNING: this assumes matplotlib 1.1 or newer!! - backend = matplotlib.rcParamsOrig['backend'] - # In this case, we need to find what the appropriate gui selection call - # should be for IPython, so we can activate inputhook accordingly - gui = backend2gui.get(backend, None) - - # If we have already had a gui active, we need it and inline are the - # ones allowed. - if gui_select and gui != gui_select: - gui = gui_select - backend = backends[gui] - - return gui, backend - - -def activate_matplotlib(backend): - """Activate the given backend and set interactive to True.""" - - import matplotlib - matplotlib.interactive(True) - - # Matplotlib had a bug where even switch_backend could not force - # the rcParam to update. This needs to be set *before* the module - # magic of switch_backend(). 
- matplotlib.rcParams['backend'] = backend - - # Due to circular imports, pyplot may be only partially initialised - # when this function runs. - # So avoid needing matplotlib attribute-lookup to access pyplot. - from matplotlib import pyplot as plt - - plt.switch_backend(backend) - - plt.show._needmain = False - # We need to detect at runtime whether show() is called by the user. - # For this, we wrap it into a decorator which adds a 'called' flag. - plt.draw_if_interactive = flag_calls(plt.draw_if_interactive) - - -def import_pylab(user_ns, import_all=True): - """Populate the namespace with pylab-related values. - - Imports matplotlib, pylab, numpy, and everything from pylab and numpy. - - Also imports a few names from IPython (figsize, display, getfigs) - - """ - - # Import numpy as np/pyplot as plt are conventions we're trying to - # somewhat standardize on. Making them available to users by default - # will greatly help this. - s = ("import numpy\n" - "import matplotlib\n" - "from matplotlib import pylab, mlab, pyplot\n" - "np = numpy\n" - "plt = pyplot\n" - ) - exec(s, user_ns) - - if import_all: - s = ("from matplotlib.pylab import *\n" - "from numpy import *\n") - exec(s, user_ns) - - # IPython symbols to add - user_ns['figsize'] = figsize - from IPython.core.display import display - # Add display and getfigs to the user's namespace - user_ns['display'] = display - user_ns['getfigs'] = getfigs - - -def configure_inline_support(shell, backend): - """ - .. deprecated:: 7.23 - - use `matplotlib_inline.backend_inline.configure_inline_support()` - - Configure an IPython shell object for matplotlib use. - - Parameters - ---------- - shell : InteractiveShell instance - - backend : matplotlib backend - """ - warnings.warn( - "`configure_inline_support` is deprecated since IPython 7.23, directly " - "use `matplotlib_inline.backend_inline.configure_inline_support()`", - DeprecationWarning, - stacklevel=2, - ) - - from matplotlib_inline.backend_inline import configure_inline_support as configure_inline_support_orig - - configure_inline_support_orig(shell, backend) +# -*- coding: utf-8 -*- +"""Pylab (matplotlib) support utilities.""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +from io import BytesIO +from binascii import b2a_base64 +from functools import partial +import warnings + +from IPython.core.display import _pngxy +from IPython.utils.decorators import flag_calls + +# If user specifies a GUI, that dictates the backend, otherwise we read the +# user's mpl default from the mpl rc structure +backends = { + "tk": "TkAgg", + "gtk": "GTKAgg", + "gtk3": "GTK3Agg", + "gtk4": "GTK4Agg", + "wx": "WXAgg", + "qt4": "Qt4Agg", + "qt5": "Qt5Agg", + "qt6": "QtAgg", + "qt": "Qt5Agg", + "osx": "MacOSX", + "nbagg": "nbAgg", + "notebook": "nbAgg", + "agg": "agg", + "svg": "svg", + "pdf": "pdf", + "ps": "ps", + "inline": "module://matplotlib_inline.backend_inline", + "ipympl": "module://ipympl.backend_nbagg", + "widget": "module://ipympl.backend_nbagg", +} + +# We also need a reverse backends2guis mapping that will properly choose which +# GUI support to activate based on the desired matplotlib backend. 
For the +# most part it's just a reverse of the above dict, but we also need to add a +# few others that map to the same GUI manually: +backend2gui = dict(zip(backends.values(), backends.keys())) +# In the reverse mapping, there are a few extra valid matplotlib backends that +# map to the same GUI support +backend2gui["GTK"] = backend2gui["GTKCairo"] = "gtk" +backend2gui["GTK3Cairo"] = "gtk3" +backend2gui["GTK4Cairo"] = "gtk4" +backend2gui["WX"] = "wx" +backend2gui["CocoaAgg"] = "osx" +# There needs to be a hysteresis here as the new QtAgg Matplotlib backend +# supports either Qt5 or Qt6 and the IPython qt event loop support Qt4, Qt5, +# and Qt6. +backend2gui["QtAgg"] = "qt" +backend2gui["Qt4Agg"] = "qt" +backend2gui["Qt5Agg"] = "qt" + +# And some backends that don't need GUI integration +del backend2gui["nbAgg"] +del backend2gui["agg"] +del backend2gui["svg"] +del backend2gui["pdf"] +del backend2gui["ps"] +del backend2gui["module://matplotlib_inline.backend_inline"] +del backend2gui["module://ipympl.backend_nbagg"] + +#----------------------------------------------------------------------------- +# Matplotlib utilities +#----------------------------------------------------------------------------- + + +def getfigs(*fig_nums): + """Get a list of matplotlib figures by figure numbers. + + If no arguments are given, all available figures are returned. If the + argument list contains references to invalid figures, a warning is printed + but the function continues pasting further figures. + + Parameters + ---------- + figs : tuple + A tuple of ints giving the figure numbers of the figures to return. + """ + from matplotlib._pylab_helpers import Gcf + if not fig_nums: + fig_managers = Gcf.get_all_fig_managers() + return [fm.canvas.figure for fm in fig_managers] + else: + figs = [] + for num in fig_nums: + f = Gcf.figs.get(num) + if f is None: + print('Warning: figure %s not available.' % num) + else: + figs.append(f.canvas.figure) + return figs + + +def figsize(sizex, sizey): + """Set the default figure size to be [sizex, sizey]. + + This is just an easy to remember, convenience wrapper that sets:: + + matplotlib.rcParams['figure.figsize'] = [sizex, sizey] + """ + import matplotlib + matplotlib.rcParams['figure.figsize'] = [sizex, sizey] + + +def print_figure(fig, fmt="png", bbox_inches="tight", base64=False, **kwargs): + """Print a figure to an image, and return the resulting file data + + Returned data will be bytes unless ``fmt='svg'``, + in which case it will be unicode. + + Any keyword args are passed to fig.canvas.print_figure, + such as ``quality`` or ``bbox_inches``. + + If `base64` is True, return base64-encoded str instead of raw bytes + for binary-encoded image formats + + .. versionadded:: 7.29 + base64 argument + """ + # When there's an empty figure, we shouldn't return anything, otherwise we + # get big blank areas in the qt console. 
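As an aside to the figure helpers re-added above (figsize, print_figure, retina_figure), here is a minimal usage sketch; it assumes matplotlib is installed and uses the non-interactive agg backend so no GUI is required:

import matplotlib
matplotlib.use("agg")                                   # headless backend for the sketch
import matplotlib.pyplot as plt
from IPython.core.pylabtools import figsize, print_figure, retina_figure

figsize(6, 4)                                           # shorthand for rcParams['figure.figsize'] = [6, 4]
fig, ax = plt.subplots()
ax.plot([0, 1, 2], [0, 1, 4])

png_bytes = print_figure(fig, fmt="png")                # raw PNG bytes
svg_text = print_figure(fig, fmt="svg")                 # decoded to str for SVG
png2x, meta = retina_figure(fig)                        # pixel-doubled PNG plus {"width": ..., "height": ...}
print(len(png_bytes), meta)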
+ if not fig.axes and not fig.lines: + return + + dpi = fig.dpi + if fmt == 'retina': + dpi = dpi * 2 + fmt = 'png' + + # build keyword args + kw = { + "format":fmt, + "facecolor":fig.get_facecolor(), + "edgecolor":fig.get_edgecolor(), + "dpi":dpi, + "bbox_inches":bbox_inches, + } + # **kwargs get higher priority + kw.update(kwargs) + + bytes_io = BytesIO() + if fig.canvas is None: + from matplotlib.backend_bases import FigureCanvasBase + FigureCanvasBase(fig) + + fig.canvas.print_figure(bytes_io, **kw) + data = bytes_io.getvalue() + if fmt == 'svg': + data = data.decode('utf-8') + elif base64: + data = b2a_base64(data).decode("ascii") + return data + +def retina_figure(fig, base64=False, **kwargs): + """format a figure as a pixel-doubled (retina) PNG + + If `base64` is True, return base64-encoded str instead of raw bytes + for binary-encoded image formats + + .. versionadded:: 7.29 + base64 argument + """ + pngdata = print_figure(fig, fmt="retina", base64=False, **kwargs) + # Make sure that retina_figure acts just like print_figure and returns + # None when the figure is empty. + if pngdata is None: + return + w, h = _pngxy(pngdata) + metadata = {"width": w//2, "height":h//2} + if base64: + pngdata = b2a_base64(pngdata).decode("ascii") + return pngdata, metadata + + +# We need a little factory function here to create the closure where +# safe_execfile can live. +def mpl_runner(safe_execfile): + """Factory to return a matplotlib-enabled runner for %run. + + Parameters + ---------- + safe_execfile : function + This must be a function with the same interface as the + :meth:`safe_execfile` method of IPython. + + Returns + ------- + A function suitable for use as the ``runner`` argument of the %run magic + function. + """ + + def mpl_execfile(fname,*where,**kw): + """matplotlib-aware wrapper around safe_execfile. + + Its interface is identical to that of the :func:`execfile` builtin. + + This is ultimately a call to execfile(), but wrapped in safeties to + properly handle interactive rendering.""" + + import matplotlib + import matplotlib.pyplot as plt + + #print '*** Matplotlib runner ***' # dbg + # turn off rendering until end of script + is_interactive = matplotlib.rcParams['interactive'] + matplotlib.interactive(False) + safe_execfile(fname,*where,**kw) + matplotlib.interactive(is_interactive) + # make rendering call now, if the user tried to do it + if plt.draw_if_interactive.called: + plt.draw() + plt.draw_if_interactive.called = False + + # re-draw everything that is stale + try: + da = plt.draw_all + except AttributeError: + pass + else: + da() + + return mpl_execfile + + +def _reshow_nbagg_figure(fig): + """reshow an nbagg figure""" + try: + reshow = fig.canvas.manager.reshow + except AttributeError: + raise NotImplementedError() + else: + reshow() + + +def select_figure_formats(shell, formats, **kwargs): + """Select figure formats for the inline backend. + + Parameters + ========== + shell : InteractiveShell + The main IPython instance. + formats : str or set + One or a set of figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'. + **kwargs : any + Extra keyword arguments to be passed to fig.canvas.print_figure. 
+ """ + import matplotlib + from matplotlib.figure import Figure + + svg_formatter = shell.display_formatter.formatters['image/svg+xml'] + png_formatter = shell.display_formatter.formatters['image/png'] + jpg_formatter = shell.display_formatter.formatters['image/jpeg'] + pdf_formatter = shell.display_formatter.formatters['application/pdf'] + + if isinstance(formats, str): + formats = {formats} + # cast in case of list / tuple + formats = set(formats) + + [ f.pop(Figure, None) for f in shell.display_formatter.formatters.values() ] + mplbackend = matplotlib.get_backend().lower() + if mplbackend == 'nbagg' or mplbackend == 'module://ipympl.backend_nbagg': + formatter = shell.display_formatter.ipython_display_formatter + formatter.for_type(Figure, _reshow_nbagg_figure) + + supported = {'png', 'png2x', 'retina', 'jpg', 'jpeg', 'svg', 'pdf'} + bad = formats.difference(supported) + if bad: + bs = "%s" % ','.join([repr(f) for f in bad]) + gs = "%s" % ','.join([repr(f) for f in supported]) + raise ValueError("supported formats are: %s not %s" % (gs, bs)) + + if "png" in formats: + png_formatter.for_type( + Figure, partial(print_figure, fmt="png", base64=True, **kwargs) + ) + if "retina" in formats or "png2x" in formats: + png_formatter.for_type(Figure, partial(retina_figure, base64=True, **kwargs)) + if "jpg" in formats or "jpeg" in formats: + jpg_formatter.for_type( + Figure, partial(print_figure, fmt="jpg", base64=True, **kwargs) + ) + if "svg" in formats: + svg_formatter.for_type(Figure, partial(print_figure, fmt="svg", **kwargs)) + if "pdf" in formats: + pdf_formatter.for_type( + Figure, partial(print_figure, fmt="pdf", base64=True, **kwargs) + ) + +#----------------------------------------------------------------------------- +# Code for initializing matplotlib and importing pylab +#----------------------------------------------------------------------------- + + +def find_gui_and_backend(gui=None, gui_select=None): + """Given a gui string return the gui and mpl backend. + + Parameters + ---------- + gui : str + Can be one of ('tk','gtk','wx','qt','qt4','inline','agg'). + gui_select : str + Can be one of ('tk','gtk','wx','qt','qt4','inline'). + This is any gui already selected by the shell. + + Returns + ------- + A tuple of (gui, backend) where backend is one of ('TkAgg','GTKAgg', + 'WXAgg','Qt4Agg','module://matplotlib_inline.backend_inline','agg'). + """ + + import matplotlib + + if gui and gui != 'auto': + # select backend based on requested gui + backend = backends[gui] + if gui == 'agg': + gui = None + else: + # We need to read the backend from the original data structure, *not* + # from mpl.rcParams, since a prior invocation of %matplotlib may have + # overwritten that. + # WARNING: this assumes matplotlib 1.1 or newer!! + backend = matplotlib.rcParamsOrig['backend'] + # In this case, we need to find what the appropriate gui selection call + # should be for IPython, so we can activate inputhook accordingly + gui = backend2gui.get(backend, None) + + # If we have already had a gui active, we need it and inline are the + # ones allowed. + if gui_select and gui != gui_select: + gui = gui_select + backend = backends[gui] + + return gui, backend + + +def activate_matplotlib(backend): + """Activate the given backend and set interactive to True.""" + + import matplotlib + matplotlib.interactive(True) + + # Matplotlib had a bug where even switch_backend could not force + # the rcParam to update. This needs to be set *before* the module + # magic of switch_backend(). 
+ matplotlib.rcParams['backend'] = backend + + # Due to circular imports, pyplot may be only partially initialised + # when this function runs. + # So avoid needing matplotlib attribute-lookup to access pyplot. + from matplotlib import pyplot as plt + + plt.switch_backend(backend) + + plt.show._needmain = False + # We need to detect at runtime whether show() is called by the user. + # For this, we wrap it into a decorator which adds a 'called' flag. + plt.draw_if_interactive = flag_calls(plt.draw_if_interactive) + + +def import_pylab(user_ns, import_all=True): + """Populate the namespace with pylab-related values. + + Imports matplotlib, pylab, numpy, and everything from pylab and numpy. + + Also imports a few names from IPython (figsize, display, getfigs) + + """ + + # Import numpy as np/pyplot as plt are conventions we're trying to + # somewhat standardize on. Making them available to users by default + # will greatly help this. + s = ("import numpy\n" + "import matplotlib\n" + "from matplotlib import pylab, mlab, pyplot\n" + "np = numpy\n" + "plt = pyplot\n" + ) + exec(s, user_ns) + + if import_all: + s = ("from matplotlib.pylab import *\n" + "from numpy import *\n") + exec(s, user_ns) + + # IPython symbols to add + user_ns['figsize'] = figsize + from IPython.core.display import display + # Add display and getfigs to the user's namespace + user_ns['display'] = display + user_ns['getfigs'] = getfigs + + +def configure_inline_support(shell, backend): + """ + .. deprecated:: 7.23 + + use `matplotlib_inline.backend_inline.configure_inline_support()` + + Configure an IPython shell object for matplotlib use. + + Parameters + ---------- + shell : InteractiveShell instance + + backend : matplotlib backend + """ + warnings.warn( + "`configure_inline_support` is deprecated since IPython 7.23, directly " + "use `matplotlib_inline.backend_inline.configure_inline_support()`", + DeprecationWarning, + stacklevel=2, + ) + + from matplotlib_inline.backend_inline import configure_inline_support as configure_inline_support_orig + + configure_inline_support_orig(shell, backend) diff --git a/contrib/python/ipython/py3/IPython/core/release.py b/contrib/python/ipython/py3/IPython/core/release.py index 3375cd69ad4..90d9e6caf59 100644 --- a/contrib/python/ipython/py3/IPython/core/release.py +++ b/contrib/python/ipython/py3/IPython/core/release.py @@ -1,119 +1,119 @@ -# -*- coding: utf-8 -*- -"""Release data for the IPython project.""" - -#----------------------------------------------------------------------------- -# Copyright (c) 2008, IPython Development Team. -# Copyright (c) 2001, Fernando Perez <fernando.perez@colorado.edu> -# Copyright (c) 2001, Janko Hauser <jhauser@zscout.de> -# Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu> -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. -#----------------------------------------------------------------------------- - -# Name of the package for release purposes. This is the name which labels -# the tarballs and RPMs made by distutils, so it's best to lowercase it. -name = 'ipython' - -# IPython version information. An empty _version_extra corresponds to a full -# release. 'dev' as a _version_extra string means this is a development -# version -_version_major = 7 -_version_minor = 31 -_version_patch = 1 -_version_extra = '.dev' -# _version_extra = 'b1' -_version_extra = "" # Uncomment this for full releases - -# Construct full version string from these. 
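The version-string construction in this release.py hunk can be checked directly; a small worked example, not part of the patch, using the values shown above (7, 31, 1 and an empty _version_extra):

_version_major, _version_minor, _version_patch, _version_extra = 7, 31, 1, ""
_ver = [_version_major, _version_minor, _version_patch]
__version__ = ".".join(map(str, _ver))
if _version_extra:
    __version__ = __version__ + _version_extra
assert __version__ == "7.31.1"

kernel_protocol_version = "%i.%i" % (5, 0)
assert kernel_protocol_version == "5.0"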
-_ver = [_version_major, _version_minor, _version_patch] - -__version__ = '.'.join(map(str, _ver)) -if _version_extra: - __version__ = __version__ + _version_extra - -version = __version__ # backwards compatibility name -version_info = (_version_major, _version_minor, _version_patch, _version_extra) - -# Change this when incrementing the kernel protocol version -kernel_protocol_version_info = (5, 0) -kernel_protocol_version = "%i.%i" % kernel_protocol_version_info - -description = "IPython: Productive Interactive Computing" - -long_description = \ -""" -IPython provides a rich toolkit to help you make the most out of using Python -interactively. Its main components are: - -* A powerful interactive Python shell -* A `Jupyter <https://jupyter.org/>`_ kernel to work with Python code in Jupyter - notebooks and other interactive frontends. - -The enhanced interactive Python shells have the following main features: - -* Comprehensive object introspection. - -* Input history, persistent across sessions. - -* Caching of output results during a session with automatically generated - references. - -* Extensible tab completion, with support by default for completion of python - variables and keywords, filenames and function keywords. - -* Extensible system of 'magic' commands for controlling the environment and - performing many tasks related either to IPython or the operating system. - -* A rich configuration system with easy switching between different setups - (simpler than changing $PYTHONSTARTUP environment variables every time). - -* Session logging and reloading. - -* Extensible syntax processing for special purpose situations. - -* Access to the system shell with user-extensible alias system. - -* Easily embeddable in other Python programs and GUIs. - -* Integrated access to the pdb debugger and the Python profiler. - -The latest development version is always available from IPython's `GitHub -site <http://github.com/ipython>`_. -""" - -license = 'BSD' - -authors = {'Fernando' : ('Fernando Perez','fperez.net@gmail.com'), - 'Janko' : ('Janko Hauser','jhauser@zscout.de'), - 'Nathan' : ('Nathaniel Gray','n8gray@caltech.edu'), - 'Ville' : ('Ville Vainio','vivainio@gmail.com'), - 'Brian' : ('Brian E Granger', 'ellisonbg@gmail.com'), - 'Min' : ('Min Ragan-Kelley', 'benjaminrk@gmail.com'), - 'Thomas' : ('Thomas A. Kluyver', 'takowl@gmail.com'), - 'Jorgen' : ('Jorgen Stenarson', 'jorgen.stenarson@bostream.nu'), - 'Matthias' : ('Matthias Bussonnier', 'bussonniermatthias@gmail.com'), - } - -author = 'The IPython Development Team' - -author_email = 'ipython-dev@python.org' - -url = 'https://ipython.org' - - -platforms = ['Linux','Mac OSX','Windows'] - -keywords = ['Interactive','Interpreter','Shell', 'Embedding'] - -classifiers = [ - 'Framework :: IPython', - 'Intended Audience :: Developers', - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3 :: Only', - 'Topic :: System :: Shells' - ] +# -*- coding: utf-8 -*- +"""Release data for the IPython project.""" + +#----------------------------------------------------------------------------- +# Copyright (c) 2008, IPython Development Team. +# Copyright (c) 2001, Fernando Perez <fernando.perez@colorado.edu> +# Copyright (c) 2001, Janko Hauser <jhauser@zscout.de> +# Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu> +# +# Distributed under the terms of the Modified BSD License. 
+# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +# Name of the package for release purposes. This is the name which labels +# the tarballs and RPMs made by distutils, so it's best to lowercase it. +name = 'ipython' + +# IPython version information. An empty _version_extra corresponds to a full +# release. 'dev' as a _version_extra string means this is a development +# version +_version_major = 7 +_version_minor = 31 +_version_patch = 1 +_version_extra = '.dev' +# _version_extra = 'b1' +_version_extra = "" # Uncomment this for full releases + +# Construct full version string from these. +_ver = [_version_major, _version_minor, _version_patch] + +__version__ = '.'.join(map(str, _ver)) +if _version_extra: + __version__ = __version__ + _version_extra + +version = __version__ # backwards compatibility name +version_info = (_version_major, _version_minor, _version_patch, _version_extra) + +# Change this when incrementing the kernel protocol version +kernel_protocol_version_info = (5, 0) +kernel_protocol_version = "%i.%i" % kernel_protocol_version_info + +description = "IPython: Productive Interactive Computing" + +long_description = \ +""" +IPython provides a rich toolkit to help you make the most out of using Python +interactively. Its main components are: + +* A powerful interactive Python shell +* A `Jupyter <https://jupyter.org/>`_ kernel to work with Python code in Jupyter + notebooks and other interactive frontends. + +The enhanced interactive Python shells have the following main features: + +* Comprehensive object introspection. + +* Input history, persistent across sessions. + +* Caching of output results during a session with automatically generated + references. + +* Extensible tab completion, with support by default for completion of python + variables and keywords, filenames and function keywords. + +* Extensible system of 'magic' commands for controlling the environment and + performing many tasks related either to IPython or the operating system. + +* A rich configuration system with easy switching between different setups + (simpler than changing $PYTHONSTARTUP environment variables every time). + +* Session logging and reloading. + +* Extensible syntax processing for special purpose situations. + +* Access to the system shell with user-extensible alias system. + +* Easily embeddable in other Python programs and GUIs. + +* Integrated access to the pdb debugger and the Python profiler. + +The latest development version is always available from IPython's `GitHub +site <http://github.com/ipython>`_. +""" + +license = 'BSD' + +authors = {'Fernando' : ('Fernando Perez','fperez.net@gmail.com'), + 'Janko' : ('Janko Hauser','jhauser@zscout.de'), + 'Nathan' : ('Nathaniel Gray','n8gray@caltech.edu'), + 'Ville' : ('Ville Vainio','vivainio@gmail.com'), + 'Brian' : ('Brian E Granger', 'ellisonbg@gmail.com'), + 'Min' : ('Min Ragan-Kelley', 'benjaminrk@gmail.com'), + 'Thomas' : ('Thomas A. 
Kluyver', 'takowl@gmail.com'), + 'Jorgen' : ('Jorgen Stenarson', 'jorgen.stenarson@bostream.nu'), + 'Matthias' : ('Matthias Bussonnier', 'bussonniermatthias@gmail.com'), + } + +author = 'The IPython Development Team' + +author_email = 'ipython-dev@python.org' + +url = 'https://ipython.org' + + +platforms = ['Linux','Mac OSX','Windows'] + +keywords = ['Interactive','Interpreter','Shell', 'Embedding'] + +classifiers = [ + 'Framework :: IPython', + 'Intended Audience :: Developers', + 'Intended Audience :: Science/Research', + 'License :: OSI Approved :: BSD License', + 'Programming Language :: Python', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3 :: Only', + 'Topic :: System :: Shells' + ] diff --git a/contrib/python/ipython/py3/IPython/core/shellapp.py b/contrib/python/ipython/py3/IPython/core/shellapp.py index 415e44fe6d6..c442658ae70 100644 --- a/contrib/python/ipython/py3/IPython/core/shellapp.py +++ b/contrib/python/ipython/py3/IPython/core/shellapp.py @@ -1,470 +1,470 @@ -# encoding: utf-8 -""" -A mixin for :class:`~IPython.core.application.Application` classes that -launch InteractiveShell instances, load extensions, etc. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import glob -from itertools import chain -import os -import sys - -from traitlets.config.application import boolean_flag -from traitlets.config.configurable import Configurable -from traitlets.config.loader import Config -from IPython.core.application import SYSTEM_CONFIG_DIRS, ENV_CONFIG_DIRS -from IPython.core import pylabtools -from IPython.utils.contexts import preserve_keys -from IPython.utils.path import filefind -import traitlets -from traitlets import ( - Unicode, Instance, List, Bool, CaselessStrEnum, observe, - DottedObjectName, -) -from IPython.terminal import pt_inputhooks - -#----------------------------------------------------------------------------- -# Aliases and Flags -#----------------------------------------------------------------------------- - -gui_keys = tuple(sorted(pt_inputhooks.backends) + sorted(pt_inputhooks.aliases)) - -backend_keys = sorted(pylabtools.backends.keys()) -backend_keys.insert(0, 'auto') - -shell_flags = {} - -addflag = lambda *args: shell_flags.update(boolean_flag(*args)) -addflag('autoindent', 'InteractiveShell.autoindent', - 'Turn on autoindenting.', 'Turn off autoindenting.' -) -addflag('automagic', 'InteractiveShell.automagic', - """Turn on the auto calling of magic commands. Type %%magic at the - IPython prompt for more information.""", - 'Turn off the auto calling of magic commands.' -) -addflag('pdb', 'InteractiveShell.pdb', - "Enable auto calling the pdb debugger after every exception.", - "Disable auto calling the pdb debugger after every exception." -) -addflag('pprint', 'PlainTextFormatter.pprint', - "Enable auto pretty printing of results.", - "Disable auto pretty printing of results." -) -addflag('color-info', 'InteractiveShell.color_info', - """IPython can display information about objects via a set of functions, - and optionally can use colors for this, syntax highlighting - source code and various other elements. This is on by default, but can cause - problems with some pagers. If you see such problems, you can disable the - colours.""", - "Disable using colors for info related things." 
-) -addflag('ignore-cwd', 'InteractiveShellApp.ignore_cwd', - "Exclude the current working directory from sys.path", - "Include the current working directory in sys.path", -) -nosep_config = Config() -nosep_config.InteractiveShell.separate_in = '' -nosep_config.InteractiveShell.separate_out = '' -nosep_config.InteractiveShell.separate_out2 = '' - -shell_flags['nosep']=(nosep_config, "Eliminate all spacing between prompts.") -shell_flags['pylab'] = ( - {'InteractiveShellApp' : {'pylab' : 'auto'}}, - """Pre-load matplotlib and numpy for interactive use with - the default matplotlib backend.""" -) -shell_flags['matplotlib'] = ( - {'InteractiveShellApp' : {'matplotlib' : 'auto'}}, - """Configure matplotlib for interactive use with - the default matplotlib backend.""" -) - -# it's possible we don't want short aliases for *all* of these: -shell_aliases = dict( - autocall='InteractiveShell.autocall', - colors='InteractiveShell.colors', - logfile='InteractiveShell.logfile', - logappend='InteractiveShell.logappend', - c='InteractiveShellApp.code_to_run', - m='InteractiveShellApp.module_to_run', - ext="InteractiveShellApp.extra_extensions", - gui='InteractiveShellApp.gui', - pylab='InteractiveShellApp.pylab', - matplotlib='InteractiveShellApp.matplotlib', -) -shell_aliases['cache-size'] = 'InteractiveShell.cache_size' - -if traitlets.version_info < (5, 0): - # traitlets 4 doesn't handle lists on CLI - shell_aliases["ext"] = "InteractiveShellApp.extra_extension" - - -#----------------------------------------------------------------------------- -# Main classes and functions -#----------------------------------------------------------------------------- - -class InteractiveShellApp(Configurable): - """A Mixin for applications that start InteractiveShell instances. - - Provides configurables for loading extensions and executing files - as part of configuring a Shell environment. - - The following methods should be called by the :meth:`initialize` method - of the subclass: - - - :meth:`init_path` - - :meth:`init_shell` (to be implemented by the subclass) - - :meth:`init_gui_pylab` - - :meth:`init_extensions` - - :meth:`init_code` - """ - extensions = List(Unicode(), - help="A list of dotted module names of IPython extensions to load." - ).tag(config=True) - - extra_extension = Unicode( - "", - help=""" - DEPRECATED. Dotted module name of a single extra IPython extension to load. - - Only one extension can be added this way. - - Only used with traitlets < 5.0, plural extra_extensions list is used in traitlets 5. - """, - ).tag(config=True) - - extra_extensions = List( - DottedObjectName(), - help=""" - Dotted module name(s) of one or more IPython extensions to load. - - For specifying extra extensions to load on the command-line. - - .. versionadded:: 7.10 - """, - ).tag(config=True) - - reraise_ipython_extension_failures = Bool(False, - help="Reraise exceptions encountered loading IPython extensions?", - ).tag(config=True) - - # Extensions that are always loaded (not configurable) - default_extensions = List(Unicode(), [u'storemagic']).tag(config=False) - - hide_initial_ns = Bool(True, - help="""Should variables loaded at startup (by startup files, exec_lines, etc.) 
- be hidden from tools like %who?""" - ).tag(config=True) - - exec_files = List(Unicode(), - help="""List of files to run at IPython startup.""" - ).tag(config=True) - exec_PYTHONSTARTUP = Bool(True, - help="""Run the file referenced by the PYTHONSTARTUP environment - variable at IPython startup.""" - ).tag(config=True) - file_to_run = Unicode('', - help="""A file to be run""").tag(config=True) - - exec_lines = List(Unicode(), - help="""lines of code to run at IPython startup.""" - ).tag(config=True) - code_to_run = Unicode('', - help="Execute the given command string." - ).tag(config=True) - module_to_run = Unicode('', - help="Run the module as a script." - ).tag(config=True) - gui = CaselessStrEnum(gui_keys, allow_none=True, - help="Enable GUI event loop integration with any of {0}.".format(gui_keys) - ).tag(config=True) - matplotlib = CaselessStrEnum(backend_keys, allow_none=True, - help="""Configure matplotlib for interactive use with - the default matplotlib backend.""" - ).tag(config=True) - pylab = CaselessStrEnum(backend_keys, allow_none=True, - help="""Pre-load matplotlib and numpy for interactive use, - selecting a particular matplotlib backend and loop integration. - """ - ).tag(config=True) - pylab_import_all = Bool(True, - help="""If true, IPython will populate the user namespace with numpy, pylab, etc. - and an ``import *`` is done from numpy and pylab, when using pylab mode. - - When False, pylab mode should not import any names into the user namespace. - """ - ).tag(config=True) - ignore_cwd = Bool( - False, - help="""If True, IPython will not add the current working directory to sys.path. - When False, the current working directory is added to sys.path, allowing imports - of modules defined in the current directory.""" - ).tag(config=True) - shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', - allow_none=True) - # whether interact-loop should start - interact = Bool(True) - - user_ns = Instance(dict, args=None, allow_none=True) - @observe('user_ns') - def _user_ns_changed(self, change): - if self.shell is not None: - self.shell.user_ns = change['new'] - self.shell.init_user_ns() - - def init_path(self): - """Add current working directory, '', to sys.path - - Unlike Python's default, we insert before the first `site-packages` - or `dist-packages` directory, - so that it is after the standard library. - - .. versionchanged:: 7.2 - Try to insert after the standard library, instead of first. - .. versionchanged:: 8.0 - Allow optionally not including the current directory in sys.path - """ - if '' in sys.path or self.ignore_cwd: - return - for idx, path in enumerate(sys.path): - parent, last_part = os.path.split(path) - if last_part in {'site-packages', 'dist-packages'}: - break - else: - # no site-packages or dist-packages found (?!) - # back to original behavior of inserting at the front - idx = 0 - sys.path.insert(idx, '') - - def init_shell(self): - raise NotImplementedError("Override in subclasses") - - def init_gui_pylab(self): - """Enable GUI event loop integration, taking pylab into account.""" - enable = False - shell = self.shell - if self.pylab: - enable = lambda key: shell.enable_pylab(key, import_all=self.pylab_import_all) - key = self.pylab - elif self.matplotlib: - enable = shell.enable_matplotlib - key = self.matplotlib - elif self.gui: - enable = shell.enable_gui - key = self.gui - - if not enable: - return - - try: - r = enable(key) - except ImportError: - self.log.warning("Eventloop or matplotlib integration failed. 
Is matplotlib installed?") - self.shell.showtraceback() - return - except Exception: - self.log.warning("GUI event loop or pylab initialization failed") - self.shell.showtraceback() - return - - if isinstance(r, tuple): - gui, backend = r[:2] - self.log.info("Enabling GUI event loop integration, " - "eventloop=%s, matplotlib=%s", gui, backend) - if key == "auto": - print("Using matplotlib backend: %s" % backend) - else: - gui = r - self.log.info("Enabling GUI event loop integration, " - "eventloop=%s", gui) - - def init_extensions(self): - """Load all IPython extensions in IPythonApp.extensions. - - This uses the :meth:`ExtensionManager.load_extensions` to load all - the extensions listed in ``self.extensions``. - """ - try: - self.log.debug("Loading IPython extensions...") - extensions = ( - self.default_extensions + self.extensions + self.extra_extensions - ) - if self.extra_extension: - extensions.append(self.extra_extension) - for ext in extensions: - try: - self.log.info("Loading IPython extension: %s" % ext) - self.shell.extension_manager.load_extension(ext) - except: - if self.reraise_ipython_extension_failures: - raise - msg = ("Error in loading extension: {ext}\n" - "Check your config files in {location}".format( - ext=ext, - location=self.profile_dir.location - )) - self.log.warning(msg, exc_info=True) - except: - if self.reraise_ipython_extension_failures: - raise - self.log.warning("Unknown error in loading extensions:", exc_info=True) - - def init_code(self): - """run the pre-flight code, specified via exec_lines""" - self._run_startup_files() - self._run_exec_lines() - self._run_exec_files() - - # Hide variables defined here from %who etc. - if self.hide_initial_ns: - self.shell.user_ns_hidden.update(self.shell.user_ns) - - # command-line execution (ipython -i script.py, ipython -m module) - # should *not* be excluded from %whos - self._run_cmd_line_code() - self._run_module() - - # flush output, so itwon't be attached to the first cell - sys.stdout.flush() - sys.stderr.flush() - self.shell._sys_modules_keys = set(sys.modules.keys()) - - def _run_exec_lines(self): - """Run lines of code in IPythonApp.exec_lines in the user's namespace.""" - if not self.exec_lines: - return - try: - self.log.debug("Running code from IPythonApp.exec_lines...") - for line in self.exec_lines: - try: - self.log.info("Running code in user namespace: %s" % - line) - self.shell.run_cell(line, store_history=False) - except: - self.log.warning("Error in executing line in user " - "namespace: %s" % line) - self.shell.showtraceback() - except: - self.log.warning("Unknown error in handling IPythonApp.exec_lines:") - self.shell.showtraceback() - - def _exec_file(self, fname, shell_futures=False): - try: - full_filename = filefind(fname, [u'.', self.ipython_dir]) - except IOError: - self.log.warning("File not found: %r"%fname) - return - # Make sure that the running script gets a proper sys.argv as if it - # were run from a system shell. - save_argv = sys.argv - sys.argv = [full_filename] + self.extra_args[1:] - try: - if os.path.isfile(full_filename): - self.log.info("Running file in user namespace: %s" % - full_filename) - # Ensure that __file__ is always defined to match Python - # behavior. 
- with preserve_keys(self.shell.user_ns, '__file__'): - self.shell.user_ns['__file__'] = fname - if full_filename.endswith('.ipy') or full_filename.endswith('.ipynb'): - self.shell.safe_execfile_ipy(full_filename, - shell_futures=shell_futures) - else: - # default to python, even without extension - self.shell.safe_execfile(full_filename, - self.shell.user_ns, - shell_futures=shell_futures, - raise_exceptions=True) - finally: - sys.argv = save_argv - - def _run_startup_files(self): - """Run files from profile startup directory""" - startup_dirs = [self.profile_dir.startup_dir] + [ - os.path.join(p, 'startup') for p in chain(ENV_CONFIG_DIRS, SYSTEM_CONFIG_DIRS) - ] - startup_files = [] - - if self.exec_PYTHONSTARTUP and os.environ.get('PYTHONSTARTUP', False) and \ - not (self.file_to_run or self.code_to_run or self.module_to_run): - python_startup = os.environ['PYTHONSTARTUP'] - self.log.debug("Running PYTHONSTARTUP file %s...", python_startup) - try: - self._exec_file(python_startup) - except: - self.log.warning("Unknown error in handling PYTHONSTARTUP file %s:", python_startup) - self.shell.showtraceback() - for startup_dir in startup_dirs[::-1]: - startup_files += glob.glob(os.path.join(startup_dir, '*.py')) - startup_files += glob.glob(os.path.join(startup_dir, '*.ipy')) - if not startup_files: - return - - self.log.debug("Running startup files from %s...", startup_dir) - try: - for fname in sorted(startup_files): - self._exec_file(fname) - except: - self.log.warning("Unknown error in handling startup files:") - self.shell.showtraceback() - - def _run_exec_files(self): - """Run files from IPythonApp.exec_files""" - if not self.exec_files: - return - - self.log.debug("Running files in IPythonApp.exec_files...") - try: - for fname in self.exec_files: - self._exec_file(fname) - except: - self.log.warning("Unknown error in handling IPythonApp.exec_files:") - self.shell.showtraceback() - - def _run_cmd_line_code(self): - """Run code or file specified at the command-line""" - if self.code_to_run: - line = self.code_to_run - try: - self.log.info("Running code given at command line (c=): %s" % - line) - self.shell.run_cell(line, store_history=False) - except: - self.log.warning("Error in executing line in user namespace: %s" % - line) - self.shell.showtraceback() - if not self.interact: - self.exit(1) - - # Like Python itself, ignore the second if the first of these is present - elif self.file_to_run: - fname = self.file_to_run - if os.path.isdir(fname): - fname = os.path.join(fname, "__main__.py") - if not os.path.exists(fname): - self.log.warning("File '%s' doesn't exist", fname) - if not self.interact: - self.exit(2) - try: - self._exec_file(fname, shell_futures=True) - except: - self.shell.showtraceback(tb_offset=4) - if not self.interact: - self.exit(1) - - def _run_module(self): - """Run module specified at the command-line.""" - if self.module_to_run: - # Make sure that the module gets a proper sys.argv as if it were - # run using `python -m`. - save_argv = sys.argv - sys.argv = [sys.executable] + self.extra_args - try: - self.shell.safe_run_module(self.module_to_run, - self.shell.user_ns) - finally: - sys.argv = save_argv +# encoding: utf-8 +""" +A mixin for :class:`~IPython.core.application.Application` classes that +launch InteractiveShell instances, load extensions, etc. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
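The InteractiveShellApp configurables and aliases shown in this hunk are normally driven through a traitlets Config object or the matching command-line flags. A hedged sketch (the extension name and exec line below are arbitrary illustrations, not defaults):

from traitlets.config import Config

c = Config()
c.InteractiveShellApp.exec_lines = ["import numpy as np"]   # run by _run_exec_lines() at startup
c.InteractiveShellApp.extensions = ["autoreload"]           # loaded by init_extensions()
c.InteractiveShellApp.matplotlib = "auto"                   # init_gui_pylab() picks the backend
c.InteractiveShell.autoindent = True                        # target of the --autoindent flag above

# Roughly the same settings via the aliases defined above, e.g.:
#   ipython --matplotlib=auto -c "print(42)"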
+ +import glob +from itertools import chain +import os +import sys + +from traitlets.config.application import boolean_flag +from traitlets.config.configurable import Configurable +from traitlets.config.loader import Config +from IPython.core.application import SYSTEM_CONFIG_DIRS, ENV_CONFIG_DIRS +from IPython.core import pylabtools +from IPython.utils.contexts import preserve_keys +from IPython.utils.path import filefind +import traitlets +from traitlets import ( + Unicode, Instance, List, Bool, CaselessStrEnum, observe, + DottedObjectName, +) +from IPython.terminal import pt_inputhooks + +#----------------------------------------------------------------------------- +# Aliases and Flags +#----------------------------------------------------------------------------- + +gui_keys = tuple(sorted(pt_inputhooks.backends) + sorted(pt_inputhooks.aliases)) + +backend_keys = sorted(pylabtools.backends.keys()) +backend_keys.insert(0, 'auto') + +shell_flags = {} + +addflag = lambda *args: shell_flags.update(boolean_flag(*args)) +addflag('autoindent', 'InteractiveShell.autoindent', + 'Turn on autoindenting.', 'Turn off autoindenting.' +) +addflag('automagic', 'InteractiveShell.automagic', + """Turn on the auto calling of magic commands. Type %%magic at the + IPython prompt for more information.""", + 'Turn off the auto calling of magic commands.' +) +addflag('pdb', 'InteractiveShell.pdb', + "Enable auto calling the pdb debugger after every exception.", + "Disable auto calling the pdb debugger after every exception." +) +addflag('pprint', 'PlainTextFormatter.pprint', + "Enable auto pretty printing of results.", + "Disable auto pretty printing of results." +) +addflag('color-info', 'InteractiveShell.color_info', + """IPython can display information about objects via a set of functions, + and optionally can use colors for this, syntax highlighting + source code and various other elements. This is on by default, but can cause + problems with some pagers. If you see such problems, you can disable the + colours.""", + "Disable using colors for info related things." 
+) +addflag('ignore-cwd', 'InteractiveShellApp.ignore_cwd', + "Exclude the current working directory from sys.path", + "Include the current working directory in sys.path", +) +nosep_config = Config() +nosep_config.InteractiveShell.separate_in = '' +nosep_config.InteractiveShell.separate_out = '' +nosep_config.InteractiveShell.separate_out2 = '' + +shell_flags['nosep']=(nosep_config, "Eliminate all spacing between prompts.") +shell_flags['pylab'] = ( + {'InteractiveShellApp' : {'pylab' : 'auto'}}, + """Pre-load matplotlib and numpy for interactive use with + the default matplotlib backend.""" +) +shell_flags['matplotlib'] = ( + {'InteractiveShellApp' : {'matplotlib' : 'auto'}}, + """Configure matplotlib for interactive use with + the default matplotlib backend.""" +) + +# it's possible we don't want short aliases for *all* of these: +shell_aliases = dict( + autocall='InteractiveShell.autocall', + colors='InteractiveShell.colors', + logfile='InteractiveShell.logfile', + logappend='InteractiveShell.logappend', + c='InteractiveShellApp.code_to_run', + m='InteractiveShellApp.module_to_run', + ext="InteractiveShellApp.extra_extensions", + gui='InteractiveShellApp.gui', + pylab='InteractiveShellApp.pylab', + matplotlib='InteractiveShellApp.matplotlib', +) +shell_aliases['cache-size'] = 'InteractiveShell.cache_size' + +if traitlets.version_info < (5, 0): + # traitlets 4 doesn't handle lists on CLI + shell_aliases["ext"] = "InteractiveShellApp.extra_extension" + + +#----------------------------------------------------------------------------- +# Main classes and functions +#----------------------------------------------------------------------------- + +class InteractiveShellApp(Configurable): + """A Mixin for applications that start InteractiveShell instances. + + Provides configurables for loading extensions and executing files + as part of configuring a Shell environment. + + The following methods should be called by the :meth:`initialize` method + of the subclass: + + - :meth:`init_path` + - :meth:`init_shell` (to be implemented by the subclass) + - :meth:`init_gui_pylab` + - :meth:`init_extensions` + - :meth:`init_code` + """ + extensions = List(Unicode(), + help="A list of dotted module names of IPython extensions to load." + ).tag(config=True) + + extra_extension = Unicode( + "", + help=""" + DEPRECATED. Dotted module name of a single extra IPython extension to load. + + Only one extension can be added this way. + + Only used with traitlets < 5.0, plural extra_extensions list is used in traitlets 5. + """, + ).tag(config=True) + + extra_extensions = List( + DottedObjectName(), + help=""" + Dotted module name(s) of one or more IPython extensions to load. + + For specifying extra extensions to load on the command-line. + + .. versionadded:: 7.10 + """, + ).tag(config=True) + + reraise_ipython_extension_failures = Bool(False, + help="Reraise exceptions encountered loading IPython extensions?", + ).tag(config=True) + + # Extensions that are always loaded (not configurable) + default_extensions = List(Unicode(), [u'storemagic']).tag(config=False) + + hide_initial_ns = Bool(True, + help="""Should variables loaded at startup (by startup files, exec_lines, etc.) 
+ be hidden from tools like %who?""" + ).tag(config=True) + + exec_files = List(Unicode(), + help="""List of files to run at IPython startup.""" + ).tag(config=True) + exec_PYTHONSTARTUP = Bool(True, + help="""Run the file referenced by the PYTHONSTARTUP environment + variable at IPython startup.""" + ).tag(config=True) + file_to_run = Unicode('', + help="""A file to be run""").tag(config=True) + + exec_lines = List(Unicode(), + help="""lines of code to run at IPython startup.""" + ).tag(config=True) + code_to_run = Unicode('', + help="Execute the given command string." + ).tag(config=True) + module_to_run = Unicode('', + help="Run the module as a script." + ).tag(config=True) + gui = CaselessStrEnum(gui_keys, allow_none=True, + help="Enable GUI event loop integration with any of {0}.".format(gui_keys) + ).tag(config=True) + matplotlib = CaselessStrEnum(backend_keys, allow_none=True, + help="""Configure matplotlib for interactive use with + the default matplotlib backend.""" + ).tag(config=True) + pylab = CaselessStrEnum(backend_keys, allow_none=True, + help="""Pre-load matplotlib and numpy for interactive use, + selecting a particular matplotlib backend and loop integration. + """ + ).tag(config=True) + pylab_import_all = Bool(True, + help="""If true, IPython will populate the user namespace with numpy, pylab, etc. + and an ``import *`` is done from numpy and pylab, when using pylab mode. + + When False, pylab mode should not import any names into the user namespace. + """ + ).tag(config=True) + ignore_cwd = Bool( + False, + help="""If True, IPython will not add the current working directory to sys.path. + When False, the current working directory is added to sys.path, allowing imports + of modules defined in the current directory.""" + ).tag(config=True) + shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', + allow_none=True) + # whether interact-loop should start + interact = Bool(True) + + user_ns = Instance(dict, args=None, allow_none=True) + @observe('user_ns') + def _user_ns_changed(self, change): + if self.shell is not None: + self.shell.user_ns = change['new'] + self.shell.init_user_ns() + + def init_path(self): + """Add current working directory, '', to sys.path + + Unlike Python's default, we insert before the first `site-packages` + or `dist-packages` directory, + so that it is after the standard library. + + .. versionchanged:: 7.2 + Try to insert after the standard library, instead of first. + .. versionchanged:: 8.0 + Allow optionally not including the current directory in sys.path + """ + if '' in sys.path or self.ignore_cwd: + return + for idx, path in enumerate(sys.path): + parent, last_part = os.path.split(path) + if last_part in {'site-packages', 'dist-packages'}: + break + else: + # no site-packages or dist-packages found (?!) + # back to original behavior of inserting at the front + idx = 0 + sys.path.insert(idx, '') + + def init_shell(self): + raise NotImplementedError("Override in subclasses") + + def init_gui_pylab(self): + """Enable GUI event loop integration, taking pylab into account.""" + enable = False + shell = self.shell + if self.pylab: + enable = lambda key: shell.enable_pylab(key, import_all=self.pylab_import_all) + key = self.pylab + elif self.matplotlib: + enable = shell.enable_matplotlib + key = self.matplotlib + elif self.gui: + enable = shell.enable_gui + key = self.gui + + if not enable: + return + + try: + r = enable(key) + except ImportError: + self.log.warning("Eventloop or matplotlib integration failed. 
Is matplotlib installed?") + self.shell.showtraceback() + return + except Exception: + self.log.warning("GUI event loop or pylab initialization failed") + self.shell.showtraceback() + return + + if isinstance(r, tuple): + gui, backend = r[:2] + self.log.info("Enabling GUI event loop integration, " + "eventloop=%s, matplotlib=%s", gui, backend) + if key == "auto": + print("Using matplotlib backend: %s" % backend) + else: + gui = r + self.log.info("Enabling GUI event loop integration, " + "eventloop=%s", gui) + + def init_extensions(self): + """Load all IPython extensions in IPythonApp.extensions. + + This uses the :meth:`ExtensionManager.load_extensions` to load all + the extensions listed in ``self.extensions``. + """ + try: + self.log.debug("Loading IPython extensions...") + extensions = ( + self.default_extensions + self.extensions + self.extra_extensions + ) + if self.extra_extension: + extensions.append(self.extra_extension) + for ext in extensions: + try: + self.log.info("Loading IPython extension: %s" % ext) + self.shell.extension_manager.load_extension(ext) + except: + if self.reraise_ipython_extension_failures: + raise + msg = ("Error in loading extension: {ext}\n" + "Check your config files in {location}".format( + ext=ext, + location=self.profile_dir.location + )) + self.log.warning(msg, exc_info=True) + except: + if self.reraise_ipython_extension_failures: + raise + self.log.warning("Unknown error in loading extensions:", exc_info=True) + + def init_code(self): + """run the pre-flight code, specified via exec_lines""" + self._run_startup_files() + self._run_exec_lines() + self._run_exec_files() + + # Hide variables defined here from %who etc. + if self.hide_initial_ns: + self.shell.user_ns_hidden.update(self.shell.user_ns) + + # command-line execution (ipython -i script.py, ipython -m module) + # should *not* be excluded from %whos + self._run_cmd_line_code() + self._run_module() + + # flush output, so itwon't be attached to the first cell + sys.stdout.flush() + sys.stderr.flush() + self.shell._sys_modules_keys = set(sys.modules.keys()) + + def _run_exec_lines(self): + """Run lines of code in IPythonApp.exec_lines in the user's namespace.""" + if not self.exec_lines: + return + try: + self.log.debug("Running code from IPythonApp.exec_lines...") + for line in self.exec_lines: + try: + self.log.info("Running code in user namespace: %s" % + line) + self.shell.run_cell(line, store_history=False) + except: + self.log.warning("Error in executing line in user " + "namespace: %s" % line) + self.shell.showtraceback() + except: + self.log.warning("Unknown error in handling IPythonApp.exec_lines:") + self.shell.showtraceback() + + def _exec_file(self, fname, shell_futures=False): + try: + full_filename = filefind(fname, [u'.', self.ipython_dir]) + except IOError: + self.log.warning("File not found: %r"%fname) + return + # Make sure that the running script gets a proper sys.argv as if it + # were run from a system shell. + save_argv = sys.argv + sys.argv = [full_filename] + self.extra_args[1:] + try: + if os.path.isfile(full_filename): + self.log.info("Running file in user namespace: %s" % + full_filename) + # Ensure that __file__ is always defined to match Python + # behavior. 
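
# A self-contained sketch of the pattern used in _exec_file above: give the
# executed script a shell-like sys.argv and a matching __file__, and restore
# the caller's argv afterwards.  `run_one_file` is a hypothetical helper used
# only for illustration, not an IPython API.
import sys

def run_one_file(full_filename, extra_args=()):
    save_argv = sys.argv
    sys.argv = [full_filename] + list(extra_args)   # argv[0] is the script path
    ns = {'__file__': full_filename, '__name__': '__main__'}
    try:
        with open(full_filename) as f:
            exec(compile(f.read(), full_filename, 'exec'), ns)
    finally:
        sys.argv = save_argv                        # always restore the caller's argv

# e.g. run_one_file('script.py', ['--flag'])
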
+ with preserve_keys(self.shell.user_ns, '__file__'): + self.shell.user_ns['__file__'] = fname + if full_filename.endswith('.ipy') or full_filename.endswith('.ipynb'): + self.shell.safe_execfile_ipy(full_filename, + shell_futures=shell_futures) + else: + # default to python, even without extension + self.shell.safe_execfile(full_filename, + self.shell.user_ns, + shell_futures=shell_futures, + raise_exceptions=True) + finally: + sys.argv = save_argv + + def _run_startup_files(self): + """Run files from profile startup directory""" + startup_dirs = [self.profile_dir.startup_dir] + [ + os.path.join(p, 'startup') for p in chain(ENV_CONFIG_DIRS, SYSTEM_CONFIG_DIRS) + ] + startup_files = [] + + if self.exec_PYTHONSTARTUP and os.environ.get('PYTHONSTARTUP', False) and \ + not (self.file_to_run or self.code_to_run or self.module_to_run): + python_startup = os.environ['PYTHONSTARTUP'] + self.log.debug("Running PYTHONSTARTUP file %s...", python_startup) + try: + self._exec_file(python_startup) + except: + self.log.warning("Unknown error in handling PYTHONSTARTUP file %s:", python_startup) + self.shell.showtraceback() + for startup_dir in startup_dirs[::-1]: + startup_files += glob.glob(os.path.join(startup_dir, '*.py')) + startup_files += glob.glob(os.path.join(startup_dir, '*.ipy')) + if not startup_files: + return + + self.log.debug("Running startup files from %s...", startup_dir) + try: + for fname in sorted(startup_files): + self._exec_file(fname) + except: + self.log.warning("Unknown error in handling startup files:") + self.shell.showtraceback() + + def _run_exec_files(self): + """Run files from IPythonApp.exec_files""" + if not self.exec_files: + return + + self.log.debug("Running files in IPythonApp.exec_files...") + try: + for fname in self.exec_files: + self._exec_file(fname) + except: + self.log.warning("Unknown error in handling IPythonApp.exec_files:") + self.shell.showtraceback() + + def _run_cmd_line_code(self): + """Run code or file specified at the command-line""" + if self.code_to_run: + line = self.code_to_run + try: + self.log.info("Running code given at command line (c=): %s" % + line) + self.shell.run_cell(line, store_history=False) + except: + self.log.warning("Error in executing line in user namespace: %s" % + line) + self.shell.showtraceback() + if not self.interact: + self.exit(1) + + # Like Python itself, ignore the second if the first of these is present + elif self.file_to_run: + fname = self.file_to_run + if os.path.isdir(fname): + fname = os.path.join(fname, "__main__.py") + if not os.path.exists(fname): + self.log.warning("File '%s' doesn't exist", fname) + if not self.interact: + self.exit(2) + try: + self._exec_file(fname, shell_futures=True) + except: + self.shell.showtraceback(tb_offset=4) + if not self.interact: + self.exit(1) + + def _run_module(self): + """Run module specified at the command-line.""" + if self.module_to_run: + # Make sure that the module gets a proper sys.argv as if it were + # run using `python -m`. + save_argv = sys.argv + sys.argv = [sys.executable] + self.extra_args + try: + self.shell.safe_run_module(self.module_to_run, + self.shell.user_ns) + finally: + sys.argv = save_argv diff --git a/contrib/python/ipython/py3/IPython/core/splitinput.py b/contrib/python/ipython/py3/IPython/core/splitinput.py index 925c1f8f21e..63cdce79558 100644 --- a/contrib/python/ipython/py3/IPython/core/splitinput.py +++ b/contrib/python/ipython/py3/IPython/core/splitinput.py @@ -1,137 +1,137 @@ -# encoding: utf-8 -""" -Simple utility for splitting user input. 
This is used by both inputsplitter and -prefilter. - -Authors: - -* Brian Granger -* Fernando Perez -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import re -import sys - -from IPython.utils import py3compat -from IPython.utils.encoding import get_stream_enc - -#----------------------------------------------------------------------------- -# Main function -#----------------------------------------------------------------------------- - -# RegExp for splitting line contents into pre-char//first word-method//rest. -# For clarity, each group in on one line. - -# WARNING: update the regexp if the escapes in interactiveshell are changed, as -# they are hardwired in. - -# Although it's not solely driven by the regex, note that: -# ,;/% only trigger if they are the first character on the line -# ! and !! trigger if they are first char(s) *or* follow an indent -# ? triggers as first or last char. - -line_split = re.compile(r""" - ^(\s*) # any leading space - ([,;/%]|!!?|\?\??)? # escape character or characters - \s*(%{0,2}[\w\.\*]*) # function/method, possibly with leading % - # to correctly treat things like '?%magic' - (.*?$|$) # rest of line - """, re.VERBOSE) - - -def split_user_input(line, pattern=None): - """Split user input into initial whitespace, escape character, function part - and the rest. - """ - # We need to ensure that the rest of this routine deals only with unicode - encoding = get_stream_enc(sys.stdin, 'utf-8') - line = py3compat.cast_unicode(line, encoding) - - if pattern is None: - pattern = line_split - match = pattern.match(line) - if not match: - # print "match failed for line '%s'" % line - try: - ifun, the_rest = line.split(None,1) - except ValueError: - # print "split failed for line '%s'" % line - ifun, the_rest = line, u'' - pre = re.match(r'^(\s*)(.*)',line).groups()[0] - esc = "" - else: - pre, esc, ifun, the_rest = match.groups() - - #print 'line:<%s>' % line # dbg - #print 'pre <%s> ifun <%s> rest <%s>' % (pre,ifun.strip(),the_rest) # dbg - return pre, esc or '', ifun.strip(), the_rest.lstrip() - - -class LineInfo(object): - """A single line of input and associated info. - - Includes the following as properties: - - line - The original, raw line - - continue_prompt - Is this line a continuation in a sequence of multiline input? - - pre - Any leading whitespace. - - esc - The escape character(s) in pre or the empty string if there isn't one. - Note that '!!' and '??' are possible values for esc. Otherwise it will - always be a single character. - - ifun - The 'function part', which is basically the maximal initial sequence - of valid python identifiers and the '.' character. This is what is - checked for alias and magic transformations, used for auto-calling, - etc. In contrast to Python identifiers, it may start with "%" and contain - "*". - - the_rest - Everything else on the line. 
- """ - def __init__(self, line, continue_prompt=False): - self.line = line - self.continue_prompt = continue_prompt - self.pre, self.esc, self.ifun, self.the_rest = split_user_input(line) - - self.pre_char = self.pre.strip() - if self.pre_char: - self.pre_whitespace = '' # No whitespace allowed before esc chars - else: - self.pre_whitespace = self.pre - - def ofind(self, ip): - """Do a full, attribute-walking lookup of the ifun in the various - namespaces for the given IPython InteractiveShell instance. - - Return a dict with keys: {found, obj, ospace, ismagic} - - Note: can cause state changes because of calling getattr, but should - only be run if autocall is on and if the line hasn't matched any - other, less dangerous handlers. - - Does cache the results of the call, so can be called multiple times - without worrying about *further* damaging state. - """ - return ip._ofind(self.ifun) - - def __str__(self): - return "LineInfo [%s|%s|%s|%s]" %(self.pre, self.esc, self.ifun, self.the_rest) +# encoding: utf-8 +""" +Simple utility for splitting user input. This is used by both inputsplitter and +prefilter. + +Authors: + +* Brian Granger +* Fernando Perez +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import re +import sys + +from IPython.utils import py3compat +from IPython.utils.encoding import get_stream_enc + +#----------------------------------------------------------------------------- +# Main function +#----------------------------------------------------------------------------- + +# RegExp for splitting line contents into pre-char//first word-method//rest. +# For clarity, each group in on one line. + +# WARNING: update the regexp if the escapes in interactiveshell are changed, as +# they are hardwired in. + +# Although it's not solely driven by the regex, note that: +# ,;/% only trigger if they are the first character on the line +# ! and !! trigger if they are first char(s) *or* follow an indent +# ? triggers as first or last char. + +line_split = re.compile(r""" + ^(\s*) # any leading space + ([,;/%]|!!?|\?\??)? # escape character or characters + \s*(%{0,2}[\w\.\*]*) # function/method, possibly with leading % + # to correctly treat things like '?%magic' + (.*?$|$) # rest of line + """, re.VERBOSE) + + +def split_user_input(line, pattern=None): + """Split user input into initial whitespace, escape character, function part + and the rest. 
+ """ + # We need to ensure that the rest of this routine deals only with unicode + encoding = get_stream_enc(sys.stdin, 'utf-8') + line = py3compat.cast_unicode(line, encoding) + + if pattern is None: + pattern = line_split + match = pattern.match(line) + if not match: + # print "match failed for line '%s'" % line + try: + ifun, the_rest = line.split(None,1) + except ValueError: + # print "split failed for line '%s'" % line + ifun, the_rest = line, u'' + pre = re.match(r'^(\s*)(.*)',line).groups()[0] + esc = "" + else: + pre, esc, ifun, the_rest = match.groups() + + #print 'line:<%s>' % line # dbg + #print 'pre <%s> ifun <%s> rest <%s>' % (pre,ifun.strip(),the_rest) # dbg + return pre, esc or '', ifun.strip(), the_rest.lstrip() + + +class LineInfo(object): + """A single line of input and associated info. + + Includes the following as properties: + + line + The original, raw line + + continue_prompt + Is this line a continuation in a sequence of multiline input? + + pre + Any leading whitespace. + + esc + The escape character(s) in pre or the empty string if there isn't one. + Note that '!!' and '??' are possible values for esc. Otherwise it will + always be a single character. + + ifun + The 'function part', which is basically the maximal initial sequence + of valid python identifiers and the '.' character. This is what is + checked for alias and magic transformations, used for auto-calling, + etc. In contrast to Python identifiers, it may start with "%" and contain + "*". + + the_rest + Everything else on the line. + """ + def __init__(self, line, continue_prompt=False): + self.line = line + self.continue_prompt = continue_prompt + self.pre, self.esc, self.ifun, self.the_rest = split_user_input(line) + + self.pre_char = self.pre.strip() + if self.pre_char: + self.pre_whitespace = '' # No whitespace allowed before esc chars + else: + self.pre_whitespace = self.pre + + def ofind(self, ip): + """Do a full, attribute-walking lookup of the ifun in the various + namespaces for the given IPython InteractiveShell instance. + + Return a dict with keys: {found, obj, ospace, ismagic} + + Note: can cause state changes because of calling getattr, but should + only be run if autocall is on and if the line hasn't matched any + other, less dangerous handlers. + + Does cache the results of the call, so can be called multiple times + without worrying about *further* damaging state. + """ + return ip._ofind(self.ifun) + + def __str__(self): + return "LineInfo [%s|%s|%s|%s]" %(self.pre, self.esc, self.ifun, self.the_rest) diff --git a/contrib/python/ipython/py3/IPython/core/ultratb.py b/contrib/python/ipython/py3/IPython/core/ultratb.py index 88f1ec280ae..de85a1f8ea6 100644 --- a/contrib/python/ipython/py3/IPython/core/ultratb.py +++ b/contrib/python/ipython/py3/IPython/core/ultratb.py @@ -1,1456 +1,1456 @@ -# -*- coding: utf-8 -*- -""" -Verbose and colourful traceback formatting. - -**ColorTB** - -I've always found it a bit hard to visually parse tracebacks in Python. The -ColorTB class is a solution to that problem. It colors the different parts of a -traceback in a manner similar to what you would expect from a syntax-highlighting -text editor. - -Installation instructions for ColorTB:: - - import sys,ultratb - sys.excepthook = ultratb.ColorTB() - -**VerboseTB** - -I've also included a port of Ka-Ping Yee's "cgitb.py" that produces all kinds -of useful info when a traceback occurs. Ping originally had it spit out HTML -and intended it for CGI programmers, but why should they have all the fun? 
I -altered it to spit out colored text to the terminal. It's a bit overwhelming, -but kind of neat, and maybe useful for long-running programs that you believe -are bug-free. If a crash *does* occur in that type of program you want details. -Give it a shot--you'll love it or you'll hate it. - -.. note:: - - The Verbose mode prints the variables currently visible where the exception - happened (shortening their strings if too long). This can potentially be - very slow, if you happen to have a huge data structure whose string - representation is complex to compute. Your computer may appear to freeze for - a while with cpu usage at 100%. If this occurs, you can cancel the traceback - with Ctrl-C (maybe hitting it more than once). - - If you encounter this kind of situation often, you may want to use the - Verbose_novars mode instead of the regular Verbose, which avoids formatting - variables (but otherwise includes the information and context given by - Verbose). - -.. note:: - - The verbose mode print all variables in the stack, which means it can - potentially leak sensitive information like access keys, or unencrypted - password. - -Installation instructions for VerboseTB:: - - import sys,ultratb - sys.excepthook = ultratb.VerboseTB() - -Note: Much of the code in this module was lifted verbatim from the standard -library module 'traceback.py' and Ka-Ping Yee's 'cgitb.py'. - -Color schemes -------------- - -The colors are defined in the class TBTools through the use of the -ColorSchemeTable class. Currently the following exist: - - - NoColor: allows all of this module to be used in any terminal (the color - escapes are just dummy blank strings). - - - Linux: is meant to look good in a terminal like the Linux console (black - or very dark background). - - - LightBG: similar to Linux but swaps dark/light colors to be more readable - in light background terminals. - - - Neutral: a neutral color scheme that should be readable on both light and - dark background - -You can implement other color schemes easily, the syntax is fairly -self-explanatory. Please send back new schemes you develop to the author for -possible inclusion in future releases. - -Inheritance diagram: - -.. inheritance-diagram:: IPython.core.ultratb - :parts: 3 -""" - -#***************************************************************************** -# Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu> -# Copyright (C) 2001-2004 Fernando Perez <fperez@colorado.edu> -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#***************************************************************************** - - -import dis -import inspect -import keyword -import linecache -import os -import pydoc -import re -import sys -import time -import tokenize -import traceback - -from tokenize import generate_tokens - -# For purposes of monkeypatching inspect to fix a bug in it. 
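
# A short usage sketch of the installation recipe given in the module
# docstring above, spelled with the module's full import path; it assumes an
# environment where IPython itself is importable.
import sys
from IPython.core import ultratb

# Colourful, compact tracebacks for an ordinary Python program:
sys.excepthook = ultratb.ColorTB()

# Or the verbose variant, which also prints the local variables of each frame:
# sys.excepthook = ultratb.VerboseTB(color_scheme='Linux')
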
-from inspect import getsourcefile, getfile, getmodule, \ - ismodule, isclass, ismethod, isfunction, istraceback, isframe, iscode - -# IPython's own modules -from IPython import get_ipython -from IPython.core import debugger -from IPython.core.display_trap import DisplayTrap -from IPython.core.excolors import exception_colors -from IPython.utils import PyColorize -from IPython.utils import path as util_path -from IPython.utils import py3compat -from IPython.utils.data import uniq_stable -from IPython.utils.terminal import get_terminal_size - -from logging import info, error, debug - -from importlib.util import source_from_cache - -import IPython.utils.colorable as colorable - -# Globals -# amount of space to put line numbers before verbose tracebacks -INDENT_SIZE = 8 - -# Default color scheme. This is used, for example, by the traceback -# formatter. When running in an actual IPython instance, the user's rc.colors -# value is used, but having a module global makes this functionality available -# to users of ultratb who are NOT running inside ipython. -DEFAULT_SCHEME = 'NoColor' - - -# Number of frame above which we are likely to have a recursion and will -# **attempt** to detect it. Made modifiable mostly to speedup test suite -# as detecting recursion is one of our slowest test -_FRAME_RECURSION_LIMIT = 500 - -# --------------------------------------------------------------------------- -# Code begins - -# Utility functions -def inspect_error(): - """Print a message about internal inspect errors. - - These are unfortunately quite common.""" - - error('Internal Python error in the inspect module.\n' - 'Below is the traceback from this internal error.\n') - - -# This function is a monkeypatch we apply to the Python inspect module. We have -# now found when it's needed (see discussion on issue gh-1456), and we have a -# test case (IPython.core.tests.test_ultratb.ChangedPyFileTest) that fails if -# the monkeypatch is not applied. TK, Aug 2012. -def findsource(object): - """Return the entire source file and starting line number for an object. - - The argument may be a module, class, method, function, traceback, frame, - or code object. The source code is returned as a list of all the lines - in the file and the line number indexes a line in that list. An IOError - is raised if the source code cannot be retrieved. - - FIXED version with which we monkeypatch the stdlib to work around a bug.""" - - file = getsourcefile(object) or getfile(object) - # If the object is a frame, then trying to get the globals dict from its - # module won't work. Instead, the frame object itself has the globals - # dictionary. - globals_dict = None - if inspect.isframe(object): - # XXX: can this ever be false? - globals_dict = object.f_globals - else: - module = getmodule(object, file) - if module: - globals_dict = module.__dict__ - lines = linecache.getlines(file, globals_dict) - if not lines: - raise IOError('could not get source code') - - if ismodule(object): - return lines, 0 - - if isclass(object): - name = object.__name__ - pat = re.compile(r'^(\s*)class\s*' + name + r'\b') - # make some effort to find the best matching class definition: - # use the one with the least indentation, which is the one - # that's most probably not inside a function definition. 
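
# A small, runnable illustration of the selection strategy described in the
# comments above: gather (indentation, line number) pairs for every line that
# matches the class pattern and sort, so the least-indented (and then earliest)
# definition wins.  The names and sample source here are illustrative only.
import re

src_lines = [
    "def factory():\n",
    "    class Widget:   # nested definition\n",
    "        pass\n",
    "class Widget:       # top-level definition\n",
    "    pass\n",
]
class_pat = re.compile(r'^(\s*)class\s*Widget\b')
cands = []
for i, src in enumerate(src_lines):
    m = class_pat.match(src)
    if m:
        cands.append((m.group(1), i))
cands.sort()         # least leading whitespace first, then lowest line number
print(cands[0][1])   # -> 3, the top-level definition
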
- candidates = [] - for i, line in enumerate(lines): - match = pat.match(line) - if match: - # if it's at toplevel, it's already the best one - if line[0] == 'c': - return lines, i - # else add whitespace to candidate list - candidates.append((match.group(1), i)) - if candidates: - # this will sort by whitespace, and by line number, - # less whitespace first - candidates.sort() - return lines, candidates[0][1] - else: - raise IOError('could not find class definition') - - if ismethod(object): - object = object.__func__ - if isfunction(object): - object = object.__code__ - if istraceback(object): - object = object.tb_frame - if isframe(object): - object = object.f_code - if iscode(object): - if not hasattr(object, 'co_firstlineno'): - raise IOError('could not find function definition') - pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)') - pmatch = pat.match - # fperez - fix: sometimes, co_firstlineno can give a number larger than - # the length of lines, which causes an error. Safeguard against that. - lnum = min(object.co_firstlineno, len(lines)) - 1 - while lnum > 0: - if pmatch(lines[lnum]): - break - lnum -= 1 - - return lines, lnum - raise IOError('could not find code object') - - -# Monkeypatch inspect to apply our bugfix. -def with_patch_inspect(f): - """ - Deprecated since IPython 6.0 - decorator for monkeypatching inspect.findsource - """ - - def wrapped(*args, **kwargs): - save_findsource = inspect.findsource - inspect.findsource = findsource - try: - return f(*args, **kwargs) - finally: - inspect.findsource = save_findsource - - return wrapped - - -def fix_frame_records_filenames(records): - """Try to fix the filenames in each record from inspect.getinnerframes(). - - Particularly, modules loaded from within zip files have useless filenames - attached to their code object, and inspect.getinnerframes() just uses it. - """ - fixed_records = [] - for frame, filename, line_no, func_name, lines, index in records: - # Look inside the frame's globals dictionary for __file__, - # which should be better. However, keep Cython filenames since - # we prefer the source filenames over the compiled .so file. - if not filename.endswith(('.pyx', '.pxd', '.pxi')): - better_fn = frame.f_globals.get('__file__', None) - if isinstance(better_fn, str): - # Check the type just in case someone did something weird with - # __file__. It might also be None if the error occurred during - # import. 
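
# A compact sketch of the fix-up performed here: prefer the frame's __file__
# global over the code object's filename (useful for modules loaded from zip
# archives), while leaving Cython source names untouched.  `better_filename`
# and the sample paths are illustrative, not part of IPython's API.
def better_filename(filename, frame_globals):
    if filename.endswith(('.pyx', '.pxd', '.pxi')):
        return filename                       # keep Cython source names as-is
    candidate = frame_globals.get('__file__')
    return candidate if isinstance(candidate, str) else filename

print(better_filename('lib.zip/pkg/mod.py', {'__file__': '/src/pkg/mod.py'}))
# -> /src/pkg/mod.py
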
- filename = better_fn - fixed_records.append((frame, filename, line_no, func_name, lines, index)) - return fixed_records - - -@with_patch_inspect -def _fixed_getinnerframes(etb, context=1, tb_offset=0): - LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5 - - records = fix_frame_records_filenames(inspect.getinnerframes(etb, context)) - # If the error is at the console, don't build any context, since it would - # otherwise produce 5 blank lines printed out (there is no file at the - # console) - rec_check = records[tb_offset:] - try: - rname = rec_check[0][1] - if rname == '<ipython console>' or rname.endswith('<string>'): - return rec_check - except IndexError: - pass - - aux = traceback.extract_tb(etb) - assert len(records) == len(aux) - for i, (file, lnum, _, _) in enumerate(aux): - maybeStart = lnum - 1 - context // 2 - start = max(maybeStart, 0) - end = start + context - lines = linecache.getlines(file)[start:end] - buf = list(records[i]) - buf[LNUM_POS] = lnum - buf[INDEX_POS] = lnum - 1 - start - buf[LINES_POS] = lines - records[i] = tuple(buf) - return records[tb_offset:] - -# Helper function -- largely belongs to VerboseTB, but we need the same -# functionality to produce a pseudo verbose TB for SyntaxErrors, so that they -# can be recognized properly by ipython.el's py-traceback-line-re -# (SyntaxErrors have to be treated specially because they have no traceback) - - -def _format_traceback_lines(lnum, index, lines, Colors, lvals, _line_format): - """ - Format tracebacks lines with pointing arrow, leading numbers... - - Parameters - ========== - - lnum: int - index: int - lines: list[string] - Colors: - ColorScheme used. - lvals: bytes - Values of local variables, already colored, to inject just after the error line. - _line_format: f (str) -> (str, bool) - return (colorized version of str, failure to do so) - """ - numbers_width = INDENT_SIZE - 1 - res = [] - - for i,line in enumerate(lines, lnum-index): - line = py3compat.cast_unicode(line) - - new_line, err = _line_format(line, 'str') - if not err: - line = new_line - - if i == lnum: - # This is the line with the error - pad = numbers_width - len(str(i)) - num = '%s%s' % (debugger.make_arrow(pad), str(lnum)) - line = '%s%s%s %s%s' % (Colors.linenoEm, num, - Colors.line, line, Colors.Normal) - else: - num = '%*s' % (numbers_width, i) - line = '%s%s%s %s' % (Colors.lineno, num, - Colors.Normal, line) - - res.append(line) - if lvals and i == lnum: - res.append(lvals + '\n') - return res - -def is_recursion_error(etype, value, records): - try: - # RecursionError is new in Python 3.5 - recursion_error_type = RecursionError - except NameError: - recursion_error_type = RuntimeError - - # The default recursion limit is 1000, but some of that will be taken up - # by stack frames in IPython itself. >500 frames probably indicates - # a recursion error. - return (etype is recursion_error_type) \ - and "recursion" in str(value).lower() \ - and len(records) > _FRAME_RECURSION_LIMIT - -def find_recursion(etype, value, records): - """Identify the repeating stack frames from a RecursionError traceback - - 'records' is a list as returned by VerboseTB.get_records() - - Returns (last_unique, repeat_length) - """ - # This involves a bit of guesswork - we want to show enough of the traceback - # to indicate where the recursion is occurring. We guess that the innermost - # quarter of the traceback (250 frames by default) is repeats, and find the - # first frame (from in to out) that looks different. 
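
# A toy, runnable illustration of the repeat-detection heuristic described in
# the comments above, using plain tuples as stand-ins for the
# (filename, lineno, func_name) triples extracted from the traceback records.
records = [("app.py", 10, "main")] + [("app.py", 42, "recurse")] * 8
inner_frames = records[-(len(records) // 4):]   # innermost quarter of the stack
frames_repeated = set(inner_frames)

last_seen_at = {}
longest_repeat = 0
last_unique = 0
i = len(records)
for frame in reversed(records):
    i -= 1
    if frame not in frames_repeated:
        last_unique = i          # first non-repeating frame, seen from the inside out
        break
    if frame in last_seen_at:
        longest_repeat = max(longest_repeat, last_seen_at[frame] - i)
    last_seen_at[frame] = i

print(last_unique, longest_repeat)   # -> 0 1: frames 1..8 repeat with period 1
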
- if not is_recursion_error(etype, value, records): - return len(records), 0 - - # Select filename, lineno, func_name to track frames with - records = [r[1:4] for r in records] - inner_frames = records[-(len(records)//4):] - frames_repeated = set(inner_frames) - - last_seen_at = {} - longest_repeat = 0 - i = len(records) - for frame in reversed(records): - i -= 1 - if frame not in frames_repeated: - last_unique = i - break - - if frame in last_seen_at: - distance = last_seen_at[frame] - i - longest_repeat = max(longest_repeat, distance) - - last_seen_at[frame] = i - else: - last_unique = 0 # The whole traceback was recursion - - return last_unique, longest_repeat - -#--------------------------------------------------------------------------- -# Module classes -class TBTools(colorable.Colorable): - """Basic tools used by all traceback printer classes.""" - - # Number of frames to skip when reporting tracebacks - tb_offset = 0 - - def __init__(self, color_scheme='NoColor', call_pdb=False, ostream=None, parent=None, config=None): - # Whether to call the interactive pdb debugger after printing - # tracebacks or not - super(TBTools, self).__init__(parent=parent, config=config) - self.call_pdb = call_pdb - - # Output stream to write to. Note that we store the original value in - # a private attribute and then make the public ostream a property, so - # that we can delay accessing sys.stdout until runtime. The way - # things are written now, the sys.stdout object is dynamically managed - # so a reference to it should NEVER be stored statically. This - # property approach confines this detail to a single location, and all - # subclasses can simply access self.ostream for writing. - self._ostream = ostream - - # Create color table - self.color_scheme_table = exception_colors() - - self.set_colors(color_scheme) - self.old_scheme = color_scheme # save initial value for toggles - - if call_pdb: - self.pdb = debugger.Pdb() - else: - self.pdb = None - - def _get_ostream(self): - """Output stream that exceptions are written to. - - Valid values are: - - - None: the default, which means that IPython will dynamically resolve - to sys.stdout. This ensures compatibility with most tools, including - Windows (where plain stdout doesn't recognize ANSI escapes). - - - Any object with 'write' and 'flush' attributes. 
- """ - return sys.stdout if self._ostream is None else self._ostream - - def _set_ostream(self, val): - assert val is None or (hasattr(val, 'write') and hasattr(val, 'flush')) - self._ostream = val - - ostream = property(_get_ostream, _set_ostream) - - def get_parts_of_chained_exception(self, evalue): - def get_chained_exception(exception_value): - cause = getattr(exception_value, '__cause__', None) - if cause: - return cause - if getattr(exception_value, '__suppress_context__', False): - return None - return getattr(exception_value, '__context__', None) - - chained_evalue = get_chained_exception(evalue) - - if chained_evalue: - return chained_evalue.__class__, chained_evalue, chained_evalue.__traceback__ - - def prepare_chained_exception_message(self, cause): - direct_cause = "\nThe above exception was the direct cause of the following exception:\n" - exception_during_handling = "\nDuring handling of the above exception, another exception occurred:\n" - - if cause: - message = [[direct_cause]] - else: - message = [[exception_during_handling]] - return message - - def set_colors(self, *args, **kw): - """Shorthand access to the color table scheme selector method.""" - - # Set own color table - self.color_scheme_table.set_active_scheme(*args, **kw) - # for convenience, set Colors to the active scheme - self.Colors = self.color_scheme_table.active_colors - # Also set colors of debugger - if hasattr(self, 'pdb') and self.pdb is not None: - self.pdb.set_colors(*args, **kw) - - def color_toggle(self): - """Toggle between the currently active color scheme and NoColor.""" - - if self.color_scheme_table.active_scheme_name == 'NoColor': - self.color_scheme_table.set_active_scheme(self.old_scheme) - self.Colors = self.color_scheme_table.active_colors - else: - self.old_scheme = self.color_scheme_table.active_scheme_name - self.color_scheme_table.set_active_scheme('NoColor') - self.Colors = self.color_scheme_table.active_colors - - def stb2text(self, stb): - """Convert a structured traceback (a list) to a string.""" - return '\n'.join(stb) - - def text(self, etype, value, tb, tb_offset=None, context=5): - """Return formatted traceback. - - Subclasses may override this if they add extra arguments. - """ - tb_list = self.structured_traceback(etype, value, tb, - tb_offset, context) - return self.stb2text(tb_list) - - def structured_traceback(self, etype, evalue, tb, tb_offset=None, - context=5, mode=None): - """Return a list of traceback frames. - - Must be implemented by each class. - """ - raise NotImplementedError() - - -#--------------------------------------------------------------------------- -class ListTB(TBTools): - """Print traceback information from a traceback list, with optional color. - - Calling requires 3 arguments: (etype, evalue, elist) - as would be obtained by:: - - etype, evalue, tb = sys.exc_info() - if tb: - elist = traceback.extract_tb(tb) - else: - elist = None - - It can thus be used by programs which need to process the traceback before - printing (such as console replacements based on the code module from the - standard library). 
- - Because they are meant to be called without a full traceback (only a - list), instances of this class can't call the interactive pdb debugger.""" - - def __init__(self, color_scheme='NoColor', call_pdb=False, ostream=None, parent=None, config=None): - TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb, - ostream=ostream, parent=parent,config=config) - - def __call__(self, etype, value, elist): - self.ostream.flush() - self.ostream.write(self.text(etype, value, elist)) - self.ostream.write('\n') - - def _extract_tb(self, tb): - if tb: - return traceback.extract_tb(tb) - else: - return None - - def structured_traceback(self, etype, evalue, etb=None, tb_offset=None, - context=5): - """Return a color formatted string with the traceback info. - - Parameters - ---------- - etype : exception type - Type of the exception raised. - - evalue : object - Data stored in the exception - - etb : object - If list: List of frames, see class docstring for details. - If Traceback: Traceback of the exception. - - tb_offset : int, optional - Number of frames in the traceback to skip. If not given, the - instance evalue is used (set in constructor). - - context : int, optional - Number of lines of context information to print. - - Returns - ------- - String with formatted exception. - """ - # This is a workaround to get chained_exc_ids in recursive calls - # etb should not be a tuple if structured_traceback is not recursive - if isinstance(etb, tuple): - etb, chained_exc_ids = etb - else: - chained_exc_ids = set() - - if isinstance(etb, list): - elist = etb - elif etb is not None: - elist = self._extract_tb(etb) - else: - elist = [] - tb_offset = self.tb_offset if tb_offset is None else tb_offset - Colors = self.Colors - out_list = [] - if elist: - - if tb_offset and len(elist) > tb_offset: - elist = elist[tb_offset:] - - out_list.append('Traceback %s(most recent call last)%s:' % - (Colors.normalEm, Colors.Normal) + '\n') - out_list.extend(self._format_list(elist)) - # The exception info should be a single entry in the list. - lines = ''.join(self._format_exception_only(etype, evalue)) - out_list.append(lines) - - exception = self.get_parts_of_chained_exception(evalue) - - if exception and not id(exception[1]) in chained_exc_ids: - chained_exception_message = self.prepare_chained_exception_message( - evalue.__cause__)[0] - etype, evalue, etb = exception - # Trace exception to avoid infinite 'cause' loop - chained_exc_ids.add(id(exception[1])) - chained_exceptions_tb_offset = 0 - out_list = ( - self.structured_traceback( - etype, evalue, (etb, chained_exc_ids), - chained_exceptions_tb_offset, context) - + chained_exception_message - + out_list) - - return out_list - - def _format_list(self, extracted_list): - """Format a list of traceback entry tuples for printing. - - Given a list of tuples as returned by extract_tb() or - extract_stack(), return a list of strings ready for printing. - Each string in the resulting list corresponds to the item with the - same index in the argument list. Each string ends in a newline; - the strings may contain internal newlines as well, for those items - whose source text line is not None. 
- - Lifted almost verbatim from traceback.py - """ - - Colors = self.Colors - list = [] - for filename, lineno, name, line in extracted_list[:-1]: - item = ' File %s"%s"%s, line %s%d%s, in %s%s%s\n' % \ - (Colors.filename, filename, Colors.Normal, - Colors.lineno, lineno, Colors.Normal, - Colors.name, name, Colors.Normal) - if line: - item += ' %s\n' % line.strip() - list.append(item) - # Emphasize the last entry - filename, lineno, name, line = extracted_list[-1] - item = '%s File %s"%s"%s, line %s%d%s, in %s%s%s%s\n' % \ - (Colors.normalEm, - Colors.filenameEm, filename, Colors.normalEm, - Colors.linenoEm, lineno, Colors.normalEm, - Colors.nameEm, name, Colors.normalEm, - Colors.Normal) - if line: - item += '%s %s%s\n' % (Colors.line, line.strip(), - Colors.Normal) - list.append(item) - return list - - def _format_exception_only(self, etype, value): - """Format the exception part of a traceback. - - The arguments are the exception type and value such as given by - sys.exc_info()[:2]. The return value is a list of strings, each ending - in a newline. Normally, the list contains a single string; however, - for SyntaxError exceptions, it contains several lines that (when - printed) display detailed information about where the syntax error - occurred. The message indicating which exception occurred is the - always last string in the list. - - Also lifted nearly verbatim from traceback.py - """ - have_filedata = False - Colors = self.Colors - list = [] - stype = py3compat.cast_unicode(Colors.excName + etype.__name__ + Colors.Normal) - if value is None: - # Not sure if this can still happen in Python 2.6 and above - list.append(stype + '\n') - else: - if issubclass(etype, SyntaxError): - have_filedata = True - if not value.filename: value.filename = "<string>" - if value.lineno: - lineno = value.lineno - textline = linecache.getline(value.filename, value.lineno) - else: - lineno = 'unknown' - textline = '' - list.append('%s File %s"%s"%s, line %s%s%s\n' % \ - (Colors.normalEm, - Colors.filenameEm, py3compat.cast_unicode(value.filename), Colors.normalEm, - Colors.linenoEm, lineno, Colors.Normal )) - if textline == '': - textline = py3compat.cast_unicode(value.text, "utf-8") - - if textline is not None: - i = 0 - while i < len(textline) and textline[i].isspace(): - i += 1 - list.append('%s %s%s\n' % (Colors.line, - textline.strip(), - Colors.Normal)) - if value.offset is not None: - s = ' ' - for c in textline[i:value.offset - 1]: - if c.isspace(): - s += c - else: - s += ' ' - list.append('%s%s^%s\n' % (Colors.caret, s, - Colors.Normal)) - - try: - s = value.msg - except Exception: - s = self._some_str(value) - if s: - list.append('%s%s:%s %s\n' % (stype, Colors.excName, - Colors.Normal, s)) - else: - list.append('%s\n' % stype) - - # sync with user hooks - if have_filedata: - ipinst = get_ipython() - if ipinst is not None: - ipinst.hooks.synchronize_with_editor(value.filename, value.lineno, 0) - - return list - - def get_exception_only(self, etype, value): - """Only print the exception type and message, without a traceback. - - Parameters - ---------- - etype : exception type - value : exception value - """ - return ListTB.structured_traceback(self, etype, value) - - def show_exception_only(self, etype, evalue): - """Only print the exception type and message, without a traceback. 
- - Parameters - ---------- - etype : exception type - value : exception value - """ - # This method needs to use __call__ from *this* class, not the one from - # a subclass whose signature or behavior may be different - ostream = self.ostream - ostream.flush() - ostream.write('\n'.join(self.get_exception_only(etype, evalue))) - ostream.flush() - - def _some_str(self, value): - # Lifted from traceback.py - try: - return py3compat.cast_unicode(str(value)) - except: - return u'<unprintable %s object>' % type(value).__name__ - - -#---------------------------------------------------------------------------- -class VerboseTB(TBTools): - """A port of Ka-Ping Yee's cgitb.py module that outputs color text instead - of HTML. Requires inspect and pydoc. Crazy, man. - - Modified version which optionally strips the topmost entries from the - traceback, to be used with alternate interpreters (because their own code - would appear in the traceback).""" - - def __init__(self, color_scheme='Linux', call_pdb=False, ostream=None, - tb_offset=0, long_header=False, include_vars=True, - check_cache=None, debugger_cls = None, - parent=None, config=None): - """Specify traceback offset, headers and color scheme. - - Define how many frames to drop from the tracebacks. Calling it with - tb_offset=1 allows use of this handler in interpreters which will have - their own code at the top of the traceback (VerboseTB will first - remove that frame before printing the traceback info).""" - TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb, - ostream=ostream, parent=parent, config=config) - self.tb_offset = tb_offset - self.long_header = long_header - self.include_vars = include_vars - # By default we use linecache.checkcache, but the user can provide a - # different check_cache implementation. This is used by the IPython - # kernel to provide tracebacks for interactive code that is cached, - # by a compiler instance that flushes the linecache but preserves its - # own code cache. - if check_cache is None: - check_cache = linecache.checkcache - self.check_cache = check_cache - - self.debugger_cls = debugger_cls or debugger.Pdb - self.skip_hidden = True - - def format_records(self, records, last_unique, recursion_repeat): - """Format the stack frames of the traceback""" - frames = [] - - skipped = 0 - lastrecord = len(records) - 1 - for i, r in enumerate(records[: last_unique + recursion_repeat + 1]): - if self.skip_hidden: - if r[0].f_locals.get("__tracebackhide__", 0) and i != lastrecord: - skipped += 1 - continue - if skipped: - Colors = self.Colors # just a shorthand + quicker name lookup - ColorsNormal = Colors.Normal # used a lot - frames.append( - " %s[... skipping hidden %s frame]%s\n" - % (Colors.excName, skipped, ColorsNormal) - ) - skipped = 0 - - frames.append(self.format_record(*r)) - - if skipped: - Colors = self.Colors # just a shorthand + quicker name lookup - ColorsNormal = Colors.Normal # used a lot - frames.append( - " %s[... skipping hidden %s frame]%s\n" - % (Colors.excName, skipped, ColorsNormal) - ) - - if recursion_repeat: - frames.append('... 
last %d frames repeated, from the frame below ...\n' % recursion_repeat) - frames.append(self.format_record(*records[last_unique+recursion_repeat+1])) - - return frames - - def format_record(self, frame, file, lnum, func, lines, index): - """Format a single stack frame""" - Colors = self.Colors # just a shorthand + quicker name lookup - ColorsNormal = Colors.Normal # used a lot - col_scheme = self.color_scheme_table.active_scheme_name - indent = ' ' * INDENT_SIZE - em_normal = '%s\n%s%s' % (Colors.valEm, indent, ColorsNormal) - undefined = '%sundefined%s' % (Colors.em, ColorsNormal) - tpl_link = '%s%%s%s' % (Colors.filenameEm, ColorsNormal) - tpl_call = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm, - ColorsNormal) - tpl_call_fail = 'in %s%%s%s(***failed resolving arguments***)%s' % \ - (Colors.vName, Colors.valEm, ColorsNormal) - tpl_local_var = '%s%%s%s' % (Colors.vName, ColorsNormal) - tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal, - Colors.vName, ColorsNormal) - tpl_name_val = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal) - - if not file: - file = '?' - elif file.startswith(str("<")) and file.endswith(str(">")): - # Not a real filename, no problem... - pass - elif not os.path.isabs(file): - # Try to make the filename absolute by trying all - # sys.path entries (which is also what linecache does) - for dirname in sys.path: - try: - fullname = os.path.join(dirname, file) - if os.path.isfile(fullname): - file = os.path.abspath(fullname) - break - except Exception: - # Just in case that sys.path contains very - # strange entries... - pass - - file = py3compat.cast_unicode(file, util_path.fs_encoding) - link = tpl_link % util_path.compress_user(file) - args, varargs, varkw, locals_ = inspect.getargvalues(frame) - - if func == '?': - call = '' - elif func == '<module>': - call = tpl_call % (func, '') - else: - # Decide whether to include variable details or not - var_repr = eqrepr if self.include_vars else nullrepr - try: - call = tpl_call % (func, inspect.formatargvalues(args, - varargs, varkw, - locals_, formatvalue=var_repr)) - except KeyError: - # This happens in situations like errors inside generator - # expressions, where local variables are listed in the - # line, but can't be extracted from the frame. I'm not - # 100% sure this isn't actually a bug in inspect itself, - # but since there's no info for us to compute with, the - # best we can do is report the failure and move on. Here - # we must *not* call any traceback construction again, - # because that would mess up use of %debug later on. So we - # simply report the failure and move on. The only - # limitation will be that this frame won't have locals - # listed in the call signature. Quite subtle problem... - # I can't think of a good way to validate this in a unit - # test, but running a script consisting of: - # dict( (k,v.strip()) for (k,v) in range(10) ) - # will illustrate the error, if this exception catch is - # disabled. - call = tpl_call_fail % func - - # Don't attempt to tokenize binary files. - if file.endswith(('.so', '.pyd', '.dll')): - return '%s %s\n' % (link, call) - - elif file.endswith(('.pyc', '.pyo')): - # Look up the corresponding source file. - try: - file = source_from_cache(file) - except ValueError: - # Failed to get the source file for some reason - # E.g. 
https://github.com/ipython/ipython/issues/9486 - return '%s %s\n' % (link, call) - - def linereader(file=file, lnum=[lnum], getline=linecache.getline): - line = getline(file, lnum[0]) - lnum[0] += 1 - return line - - # Build the list of names on this line of code where the exception - # occurred. - try: - names = [] - name_cont = False - - for token_type, token, start, end, line in generate_tokens(linereader): - # build composite names - if token_type == tokenize.NAME and token not in keyword.kwlist: - if name_cont: - # Continuation of a dotted name - try: - names[-1].append(token) - except IndexError: - names.append([token]) - name_cont = False - else: - # Regular new names. We append everything, the caller - # will be responsible for pruning the list later. It's - # very tricky to try to prune as we go, b/c composite - # names can fool us. The pruning at the end is easy - # to do (or the caller can print a list with repeated - # names if so desired. - names.append([token]) - elif token == '.': - name_cont = True - elif token_type == tokenize.NEWLINE: - break - - except (IndexError, UnicodeDecodeError, SyntaxError): - # signals exit of tokenizer - # SyntaxError can occur if the file is not actually Python - # - see gh-6300 - pass - except tokenize.TokenError as msg: - # Tokenizing may fail for various reasons, many of which are - # harmless. (A good example is when the line in question is the - # close of a triple-quoted string, cf gh-6864). We don't want to - # show this to users, but want make it available for debugging - # purposes. - _m = ("An unexpected error occurred while tokenizing input\n" - "The following traceback may be corrupted or invalid\n" - "The error message is: %s\n" % msg) - debug(_m) - - # Join composite names (e.g. "dict.fromkeys") - names = ['.'.join(n) for n in names] - # prune names list of duplicates, but keep the right order - unique_names = uniq_stable(names) - - # Start loop over vars - lvals = '' - lvals_list = [] - if self.include_vars: - for name_full in unique_names: - name_base = name_full.split('.', 1)[0] - if name_base in frame.f_code.co_varnames: - if name_base in locals_: - try: - value = repr(eval(name_full, locals_)) - except: - value = undefined - else: - value = undefined - name = tpl_local_var % name_full - else: - if name_base in frame.f_globals: - try: - value = repr(eval(name_full, frame.f_globals)) - except: - value = undefined - else: - value = undefined - name = tpl_global_var % name_full - lvals_list.append(tpl_name_val % (name, value)) - if lvals_list: - lvals = '%s%s' % (indent, em_normal.join(lvals_list)) - - level = '%s %s\n' % (link, call) - - if index is None: - return level - else: - _line_format = PyColorize.Parser(style=col_scheme, parent=self).format2 - return '%s%s' % (level, ''.join( - _format_traceback_lines(lnum, index, lines, Colors, lvals, - _line_format))) - - def prepare_header(self, etype, long_version=False): - colors = self.Colors # just a shorthand + quicker name lookup - colorsnormal = colors.Normal # used a lot - exc = '%s%s%s' % (colors.excName, etype, colorsnormal) - width = min(75, get_terminal_size()[0]) - if long_version: - # Header with the exception type, python version, and date - pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable - date = time.ctime(time.time()) - - head = '%s%s%s\n%s%s%s\n%s' % (colors.topline, '-' * width, colorsnormal, - exc, ' ' * (width - len(str(etype)) - len(pyver)), - pyver, date.rjust(width) ) - head += "\nA problem occurred executing Python code. 
Here is the sequence of function" \ - "\ncalls leading up to the error, with the most recent (innermost) call last." - else: - # Simplified header - head = '%s%s' % (exc, 'Traceback (most recent call last)'. \ - rjust(width - len(str(etype))) ) - - return head - - def format_exception(self, etype, evalue): - colors = self.Colors # just a shorthand + quicker name lookup - colorsnormal = colors.Normal # used a lot - # Get (safely) a string form of the exception info - try: - etype_str, evalue_str = map(str, (etype, evalue)) - except: - # User exception is improperly defined. - etype, evalue = str, sys.exc_info()[:2] - etype_str, evalue_str = map(str, (etype, evalue)) - # ... and format it - return ['%s%s%s: %s' % (colors.excName, etype_str, - colorsnormal, py3compat.cast_unicode(evalue_str))] - - def format_exception_as_a_whole(self, etype, evalue, etb, number_of_lines_of_context, tb_offset): - """Formats the header, traceback and exception message for a single exception. - - This may be called multiple times by Python 3 exception chaining - (PEP 3134). - """ - # some locals - orig_etype = etype - try: - etype = etype.__name__ - except AttributeError: - pass - - tb_offset = self.tb_offset if tb_offset is None else tb_offset - head = self.prepare_header(etype, self.long_header) - records = self.get_records(etb, number_of_lines_of_context, tb_offset) - - - last_unique, recursion_repeat = find_recursion(orig_etype, evalue, records) - - frames = self.format_records(records, last_unique, recursion_repeat) - - formatted_exception = self.format_exception(etype, evalue) - if records: - filepath, lnum = records[-1][1:3] - filepath = os.path.abspath(filepath) - ipinst = get_ipython() - if ipinst is not None: - ipinst.hooks.synchronize_with_editor(filepath, lnum, 0) - - return [[head] + frames + [''.join(formatted_exception[0])]] - - def get_records(self, etb, number_of_lines_of_context, tb_offset): - try: - # Try the default getinnerframes and Alex's: Alex's fixes some - # problems, but it generates empty tracebacks for console errors - # (5 blanks lines) where none should be returned. - return _fixed_getinnerframes(etb, number_of_lines_of_context, tb_offset) - except UnicodeDecodeError: - # This can occur if a file's encoding magic comment is wrong. - # I can't see a way to recover without duplicating a bunch of code - # from the stdlib traceback module. --TK - error('\nUnicodeDecodeError while processing traceback.\n') - return None - except: - # FIXME: I've been getting many crash reports from python 2.3 - # users, traceable to inspect.py. If I can find a small test-case - # to reproduce this, I should either write a better workaround or - # file a bug report against inspect (if that's the real problem). - # So far, I haven't been able to find an isolated example to - # reproduce the problem. 
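
# A standalone sketch of the __cause__ / __context__ walk that
# get_parts_of_chained_exception (defined earlier) performs and that the
# chaining loop below relies on: an explicit cause wins, a suppressed context
# is ignored, and an implicit context is used otherwise.  `next_in_chain` is
# an illustrative helper, not IPython code.
def next_in_chain(exc):
    if exc.__cause__ is not None:
        return exc.__cause__                  # raise ... from cause
    if exc.__suppress_context__:
        return None                           # raise ... from None
    return exc.__context__                    # implicit chaining

try:
    try:
        1 / 0
    except ZeroDivisionError as e:
        raise ValueError("bad input") from e
except ValueError as err:
    chain = []
    node = err
    while node is not None:
        chain.append(type(node).__name__)
        node = next_in_chain(node)
    print(chain)                              # ['ValueError', 'ZeroDivisionError']
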
- inspect_error() - traceback.print_exc(file=self.ostream) - info('\nUnfortunately, your original traceback can not be constructed.\n') - return None - - def structured_traceback(self, etype, evalue, etb, tb_offset=None, - number_of_lines_of_context=5): - """Return a nice text document describing the traceback.""" - - formatted_exception = self.format_exception_as_a_whole(etype, evalue, etb, number_of_lines_of_context, - tb_offset) - - colors = self.Colors # just a shorthand + quicker name lookup - colorsnormal = colors.Normal # used a lot - head = '%s%s%s' % (colors.topline, '-' * min(75, get_terminal_size()[0]), colorsnormal) - structured_traceback_parts = [head] - chained_exceptions_tb_offset = 0 - lines_of_context = 3 - formatted_exceptions = formatted_exception - exception = self.get_parts_of_chained_exception(evalue) - if exception: - formatted_exceptions += self.prepare_chained_exception_message(evalue.__cause__) - etype, evalue, etb = exception - else: - evalue = None - chained_exc_ids = set() - while evalue: - formatted_exceptions += self.format_exception_as_a_whole(etype, evalue, etb, lines_of_context, - chained_exceptions_tb_offset) - exception = self.get_parts_of_chained_exception(evalue) - - if exception and not id(exception[1]) in chained_exc_ids: - chained_exc_ids.add(id(exception[1])) # trace exception to avoid infinite 'cause' loop - formatted_exceptions += self.prepare_chained_exception_message(evalue.__cause__) - etype, evalue, etb = exception - else: - evalue = None - - # we want to see exceptions in a reversed order: - # the first exception should be on top - for formatted_exception in reversed(formatted_exceptions): - structured_traceback_parts += formatted_exception - - return structured_traceback_parts - - def debugger(self, force=False): - """Call up the pdb debugger if desired, always clean up the tb - reference. - - Keywords: - - - force(False): by default, this routine checks the instance call_pdb - flag and does not actually invoke the debugger if the flag is false. - The 'force' option forces the debugger to activate even if the flag - is false. - - If the call_pdb flag is set, the pdb interactive debugger is - invoked. In all cases, the self.tb reference to the current traceback - is deleted to prevent lingering references which hamper memory - management. 
- - Note that each call to pdb() does an 'import readline', so if your app - requires a special setup for the readline completers, you'll have to - fix that by hand after invoking the exception handler.""" - - if force or self.call_pdb: - if self.pdb is None: - self.pdb = self.debugger_cls() - # the system displayhook may have changed, restore the original - # for pdb - display_trap = DisplayTrap(hook=sys.__displayhook__) - with display_trap: - self.pdb.reset() - # Find the right frame so we don't pop up inside ipython itself - if hasattr(self, 'tb') and self.tb is not None: - etb = self.tb - else: - etb = self.tb = sys.last_traceback - while self.tb is not None and self.tb.tb_next is not None: - self.tb = self.tb.tb_next - if etb and etb.tb_next: - etb = etb.tb_next - self.pdb.botframe = etb.tb_frame - self.pdb.interaction(None, etb) - - if hasattr(self, 'tb'): - del self.tb - - def handler(self, info=None): - (etype, evalue, etb) = info or sys.exc_info() - self.tb = etb - ostream = self.ostream - ostream.flush() - ostream.write(self.text(etype, evalue, etb)) - ostream.write('\n') - ostream.flush() - - # Changed so an instance can just be called as VerboseTB_inst() and print - # out the right info on its own. - def __call__(self, etype=None, evalue=None, etb=None): - """This hook can replace sys.excepthook (for Python 2.1 or higher).""" - if etb is None: - self.handler() - else: - self.handler((etype, evalue, etb)) - try: - self.debugger() - except KeyboardInterrupt: - print("\nKeyboardInterrupt") - - -#---------------------------------------------------------------------------- -class FormattedTB(VerboseTB, ListTB): - """Subclass ListTB but allow calling with a traceback. - - It can thus be used as a sys.excepthook for Python > 2.1. - - Also adds 'Context' and 'Verbose' modes, not available in ListTB. - - Allows a tb_offset to be specified. This is useful for situations where - one needs to remove a number of topmost frames from the traceback (such as - occurs with python programs that themselves execute other python code, - like Python shells). """ - - def __init__(self, mode='Plain', color_scheme='Linux', call_pdb=False, - ostream=None, - tb_offset=0, long_header=False, include_vars=False, - check_cache=None, debugger_cls=None, - parent=None, config=None): - - # NEVER change the order of this list. Put new modes at the end: - self.valid_modes = ['Plain', 'Context', 'Verbose', 'Minimal'] - self.verbose_modes = self.valid_modes[1:3] - - VerboseTB.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb, - ostream=ostream, tb_offset=tb_offset, - long_header=long_header, include_vars=include_vars, - check_cache=check_cache, debugger_cls=debugger_cls, - parent=parent, config=config) - - # Different types of tracebacks are joined with different separators to - # form a single string. 
They are taken from this dict - self._join_chars = dict(Plain='', Context='\n', Verbose='\n', - Minimal='') - # set_mode also sets the tb_join_char attribute - self.set_mode(mode) - - def structured_traceback(self, etype, value, tb, tb_offset=None, number_of_lines_of_context=5): - tb_offset = self.tb_offset if tb_offset is None else tb_offset - mode = self.mode - if mode in self.verbose_modes: - # Verbose modes need a full traceback - return VerboseTB.structured_traceback( - self, etype, value, tb, tb_offset, number_of_lines_of_context - ) - elif mode == 'Minimal': - return ListTB.get_exception_only(self, etype, value) - else: - # We must check the source cache because otherwise we can print - # out-of-date source code. - self.check_cache() - # Now we can extract and format the exception - return ListTB.structured_traceback( - self, etype, value, tb, tb_offset, number_of_lines_of_context - ) - - def stb2text(self, stb): - """Convert a structured traceback (a list) to a string.""" - return self.tb_join_char.join(stb) - - - def set_mode(self, mode=None): - """Switch to the desired mode. - - If mode is not specified, cycles through the available modes.""" - - if not mode: - new_idx = (self.valid_modes.index(self.mode) + 1 ) % \ - len(self.valid_modes) - self.mode = self.valid_modes[new_idx] - elif mode not in self.valid_modes: - raise ValueError('Unrecognized mode in FormattedTB: <' + mode + '>\n' - 'Valid modes: ' + str(self.valid_modes)) - else: - self.mode = mode - # include variable details only in 'Verbose' mode - self.include_vars = (self.mode == self.valid_modes[2]) - # Set the join character for generating text tracebacks - self.tb_join_char = self._join_chars[self.mode] - - # some convenient shortcuts - def plain(self): - self.set_mode(self.valid_modes[0]) - - def context(self): - self.set_mode(self.valid_modes[1]) - - def verbose(self): - self.set_mode(self.valid_modes[2]) - - def minimal(self): - self.set_mode(self.valid_modes[3]) - - -#---------------------------------------------------------------------------- -class AutoFormattedTB(FormattedTB): - """A traceback printer which can be called on the fly. - - It will find out about exceptions by itself. - - A brief example:: - - AutoTB = AutoFormattedTB(mode = 'Verbose',color_scheme='Linux') - try: - ... - except: - AutoTB() # or AutoTB(out=logfile) where logfile is an open file object - """ - - def __call__(self, etype=None, evalue=None, etb=None, - out=None, tb_offset=None): - """Print out a formatted exception traceback. - - Optional arguments: - - out: an open file-like object to direct output to. - - - tb_offset: the number of frames to skip over in the stack, on a - per-call basis (this overrides temporarily the instance's tb_offset - given at initialization time. """ - - if out is None: - out = self.ostream - out.flush() - out.write(self.text(etype, evalue, etb, tb_offset)) - out.write('\n') - out.flush() - # FIXME: we should remove the auto pdb behavior from here and leave - # that to the clients. - try: - self.debugger() - except KeyboardInterrupt: - print("\nKeyboardInterrupt") - - def structured_traceback(self, etype=None, value=None, tb=None, - tb_offset=None, number_of_lines_of_context=5): - if etype is None: - etype, value, tb = sys.exc_info() - if isinstance(tb, tuple): - # tb is a tuple if this is a chained exception. 
- self.tb = tb[0] - else: - self.tb = tb - return FormattedTB.structured_traceback( - self, etype, value, tb, tb_offset, number_of_lines_of_context) - - -#--------------------------------------------------------------------------- - -# A simple class to preserve Nathan's original functionality. -class ColorTB(FormattedTB): - """Shorthand to initialize a FormattedTB in Linux colors mode.""" - - def __init__(self, color_scheme='Linux', call_pdb=0, **kwargs): - FormattedTB.__init__(self, color_scheme=color_scheme, - call_pdb=call_pdb, **kwargs) - - -class SyntaxTB(ListTB): - """Extension which holds some state: the last exception value""" - - def __init__(self, color_scheme='NoColor', parent=None, config=None): - ListTB.__init__(self, color_scheme, parent=parent, config=config) - self.last_syntax_error = None - - def __call__(self, etype, value, elist): - self.last_syntax_error = value - - ListTB.__call__(self, etype, value, elist) - - def structured_traceback(self, etype, value, elist, tb_offset=None, - context=5): - # If the source file has been edited, the line in the syntax error can - # be wrong (retrieved from an outdated cache). This replaces it with - # the current value. - if isinstance(value, SyntaxError) \ - and isinstance(value.filename, str) \ - and isinstance(value.lineno, int): - linecache.checkcache(value.filename) - newtext = linecache.getline(value.filename, value.lineno) - if newtext: - value.text = newtext - self.last_syntax_error = value - return super(SyntaxTB, self).structured_traceback(etype, value, elist, - tb_offset=tb_offset, context=context) - - def clear_err_state(self): - """Return the current error state and clear it""" - e = self.last_syntax_error - self.last_syntax_error = None - return e - - def stb2text(self, stb): - """Convert a structured traceback (a list) to a string.""" - return ''.join(stb) - - -# some internal-use functions -def text_repr(value): - """Hopefully pretty robust repr equivalent.""" - # this is pretty horrible but should always return *something* - try: - return pydoc.text.repr(value) - except KeyboardInterrupt: - raise - except: - try: - return repr(value) - except KeyboardInterrupt: - raise - except: - try: - # all still in an except block so we catch - # getattr raising - name = getattr(value, '__name__', None) - if name: - # ick, recursion - return text_repr(name) - klass = getattr(value, '__class__', None) - if klass: - return '%s instance' % text_repr(klass) - except KeyboardInterrupt: - raise - except: - return 'UNRECOVERABLE REPR FAILURE' - - -def eqrepr(value, repr=text_repr): - return '=%s' % repr(value) - - -def nullrepr(value, repr=text_repr): - return '' +# -*- coding: utf-8 -*- +""" +Verbose and colourful traceback formatting. + +**ColorTB** + +I've always found it a bit hard to visually parse tracebacks in Python. The +ColorTB class is a solution to that problem. It colors the different parts of a +traceback in a manner similar to what you would expect from a syntax-highlighting +text editor. + +Installation instructions for ColorTB:: + + import sys,ultratb + sys.excepthook = ultratb.ColorTB() + +**VerboseTB** + +I've also included a port of Ka-Ping Yee's "cgitb.py" that produces all kinds +of useful info when a traceback occurs. Ping originally had it spit out HTML +and intended it for CGI programmers, but why should they have all the fun? I +altered it to spit out colored text to the terminal. It's a bit overwhelming, +but kind of neat, and maybe useful for long-running programs that you believe +are bug-free. 
If a crash *does* occur in that type of program you want details. +Give it a shot--you'll love it or you'll hate it. + +.. note:: + + The Verbose mode prints the variables currently visible where the exception + happened (shortening their strings if too long). This can potentially be + very slow, if you happen to have a huge data structure whose string + representation is complex to compute. Your computer may appear to freeze for + a while with cpu usage at 100%. If this occurs, you can cancel the traceback + with Ctrl-C (maybe hitting it more than once). + + If you encounter this kind of situation often, you may want to use the + Verbose_novars mode instead of the regular Verbose, which avoids formatting + variables (but otherwise includes the information and context given by + Verbose). + +.. note:: + + The verbose mode print all variables in the stack, which means it can + potentially leak sensitive information like access keys, or unencrypted + password. + +Installation instructions for VerboseTB:: + + import sys,ultratb + sys.excepthook = ultratb.VerboseTB() + +Note: Much of the code in this module was lifted verbatim from the standard +library module 'traceback.py' and Ka-Ping Yee's 'cgitb.py'. + +Color schemes +------------- + +The colors are defined in the class TBTools through the use of the +ColorSchemeTable class. Currently the following exist: + + - NoColor: allows all of this module to be used in any terminal (the color + escapes are just dummy blank strings). + + - Linux: is meant to look good in a terminal like the Linux console (black + or very dark background). + + - LightBG: similar to Linux but swaps dark/light colors to be more readable + in light background terminals. + + - Neutral: a neutral color scheme that should be readable on both light and + dark background + +You can implement other color schemes easily, the syntax is fairly +self-explanatory. Please send back new schemes you develop to the author for +possible inclusion in future releases. + +Inheritance diagram: + +.. inheritance-diagram:: IPython.core.ultratb + :parts: 3 +""" + +#***************************************************************************** +# Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu> +# Copyright (C) 2001-2004 Fernando Perez <fperez@colorado.edu> +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#***************************************************************************** + + +import dis +import inspect +import keyword +import linecache +import os +import pydoc +import re +import sys +import time +import tokenize +import traceback + +from tokenize import generate_tokens + +# For purposes of monkeypatching inspect to fix a bug in it. 
+from inspect import getsourcefile, getfile, getmodule, \ + ismodule, isclass, ismethod, isfunction, istraceback, isframe, iscode + +# IPython's own modules +from IPython import get_ipython +from IPython.core import debugger +from IPython.core.display_trap import DisplayTrap +from IPython.core.excolors import exception_colors +from IPython.utils import PyColorize +from IPython.utils import path as util_path +from IPython.utils import py3compat +from IPython.utils.data import uniq_stable +from IPython.utils.terminal import get_terminal_size + +from logging import info, error, debug + +from importlib.util import source_from_cache + +import IPython.utils.colorable as colorable + +# Globals +# amount of space to put line numbers before verbose tracebacks +INDENT_SIZE = 8 + +# Default color scheme. This is used, for example, by the traceback +# formatter. When running in an actual IPython instance, the user's rc.colors +# value is used, but having a module global makes this functionality available +# to users of ultratb who are NOT running inside ipython. +DEFAULT_SCHEME = 'NoColor' + + +# Number of frame above which we are likely to have a recursion and will +# **attempt** to detect it. Made modifiable mostly to speedup test suite +# as detecting recursion is one of our slowest test +_FRAME_RECURSION_LIMIT = 500 + +# --------------------------------------------------------------------------- +# Code begins + +# Utility functions +def inspect_error(): + """Print a message about internal inspect errors. + + These are unfortunately quite common.""" + + error('Internal Python error in the inspect module.\n' + 'Below is the traceback from this internal error.\n') + + +# This function is a monkeypatch we apply to the Python inspect module. We have +# now found when it's needed (see discussion on issue gh-1456), and we have a +# test case (IPython.core.tests.test_ultratb.ChangedPyFileTest) that fails if +# the monkeypatch is not applied. TK, Aug 2012. +def findsource(object): + """Return the entire source file and starting line number for an object. + + The argument may be a module, class, method, function, traceback, frame, + or code object. The source code is returned as a list of all the lines + in the file and the line number indexes a line in that list. An IOError + is raised if the source code cannot be retrieved. + + FIXED version with which we monkeypatch the stdlib to work around a bug.""" + + file = getsourcefile(object) or getfile(object) + # If the object is a frame, then trying to get the globals dict from its + # module won't work. Instead, the frame object itself has the globals + # dictionary. + globals_dict = None + if inspect.isframe(object): + # XXX: can this ever be false? + globals_dict = object.f_globals + else: + module = getmodule(object, file) + if module: + globals_dict = module.__dict__ + lines = linecache.getlines(file, globals_dict) + if not lines: + raise IOError('could not get source code') + + if ismodule(object): + return lines, 0 + + if isclass(object): + name = object.__name__ + pat = re.compile(r'^(\s*)class\s*' + name + r'\b') + # make some effort to find the best matching class definition: + # use the one with the least indentation, which is the one + # that's most probably not inside a function definition. 
+ candidates = [] + for i, line in enumerate(lines): + match = pat.match(line) + if match: + # if it's at toplevel, it's already the best one + if line[0] == 'c': + return lines, i + # else add whitespace to candidate list + candidates.append((match.group(1), i)) + if candidates: + # this will sort by whitespace, and by line number, + # less whitespace first + candidates.sort() + return lines, candidates[0][1] + else: + raise IOError('could not find class definition') + + if ismethod(object): + object = object.__func__ + if isfunction(object): + object = object.__code__ + if istraceback(object): + object = object.tb_frame + if isframe(object): + object = object.f_code + if iscode(object): + if not hasattr(object, 'co_firstlineno'): + raise IOError('could not find function definition') + pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)') + pmatch = pat.match + # fperez - fix: sometimes, co_firstlineno can give a number larger than + # the length of lines, which causes an error. Safeguard against that. + lnum = min(object.co_firstlineno, len(lines)) - 1 + while lnum > 0: + if pmatch(lines[lnum]): + break + lnum -= 1 + + return lines, lnum + raise IOError('could not find code object') + + +# Monkeypatch inspect to apply our bugfix. +def with_patch_inspect(f): + """ + Deprecated since IPython 6.0 + decorator for monkeypatching inspect.findsource + """ + + def wrapped(*args, **kwargs): + save_findsource = inspect.findsource + inspect.findsource = findsource + try: + return f(*args, **kwargs) + finally: + inspect.findsource = save_findsource + + return wrapped + + +def fix_frame_records_filenames(records): + """Try to fix the filenames in each record from inspect.getinnerframes(). + + Particularly, modules loaded from within zip files have useless filenames + attached to their code object, and inspect.getinnerframes() just uses it. + """ + fixed_records = [] + for frame, filename, line_no, func_name, lines, index in records: + # Look inside the frame's globals dictionary for __file__, + # which should be better. However, keep Cython filenames since + # we prefer the source filenames over the compiled .so file. + if not filename.endswith(('.pyx', '.pxd', '.pxi')): + better_fn = frame.f_globals.get('__file__', None) + if isinstance(better_fn, str): + # Check the type just in case someone did something weird with + # __file__. It might also be None if the error occurred during + # import. 
+ filename = better_fn + fixed_records.append((frame, filename, line_no, func_name, lines, index)) + return fixed_records + + +@with_patch_inspect +def _fixed_getinnerframes(etb, context=1, tb_offset=0): + LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5 + + records = fix_frame_records_filenames(inspect.getinnerframes(etb, context)) + # If the error is at the console, don't build any context, since it would + # otherwise produce 5 blank lines printed out (there is no file at the + # console) + rec_check = records[tb_offset:] + try: + rname = rec_check[0][1] + if rname == '<ipython console>' or rname.endswith('<string>'): + return rec_check + except IndexError: + pass + + aux = traceback.extract_tb(etb) + assert len(records) == len(aux) + for i, (file, lnum, _, _) in enumerate(aux): + maybeStart = lnum - 1 - context // 2 + start = max(maybeStart, 0) + end = start + context + lines = linecache.getlines(file)[start:end] + buf = list(records[i]) + buf[LNUM_POS] = lnum + buf[INDEX_POS] = lnum - 1 - start + buf[LINES_POS] = lines + records[i] = tuple(buf) + return records[tb_offset:] + +# Helper function -- largely belongs to VerboseTB, but we need the same +# functionality to produce a pseudo verbose TB for SyntaxErrors, so that they +# can be recognized properly by ipython.el's py-traceback-line-re +# (SyntaxErrors have to be treated specially because they have no traceback) + + +def _format_traceback_lines(lnum, index, lines, Colors, lvals, _line_format): + """ + Format tracebacks lines with pointing arrow, leading numbers... + + Parameters + ========== + + lnum: int + index: int + lines: list[string] + Colors: + ColorScheme used. + lvals: bytes + Values of local variables, already colored, to inject just after the error line. + _line_format: f (str) -> (str, bool) + return (colorized version of str, failure to do so) + """ + numbers_width = INDENT_SIZE - 1 + res = [] + + for i,line in enumerate(lines, lnum-index): + line = py3compat.cast_unicode(line) + + new_line, err = _line_format(line, 'str') + if not err: + line = new_line + + if i == lnum: + # This is the line with the error + pad = numbers_width - len(str(i)) + num = '%s%s' % (debugger.make_arrow(pad), str(lnum)) + line = '%s%s%s %s%s' % (Colors.linenoEm, num, + Colors.line, line, Colors.Normal) + else: + num = '%*s' % (numbers_width, i) + line = '%s%s%s %s' % (Colors.lineno, num, + Colors.Normal, line) + + res.append(line) + if lvals and i == lnum: + res.append(lvals + '\n') + return res + +def is_recursion_error(etype, value, records): + try: + # RecursionError is new in Python 3.5 + recursion_error_type = RecursionError + except NameError: + recursion_error_type = RuntimeError + + # The default recursion limit is 1000, but some of that will be taken up + # by stack frames in IPython itself. >500 frames probably indicates + # a recursion error. + return (etype is recursion_error_type) \ + and "recursion" in str(value).lower() \ + and len(records) > _FRAME_RECURSION_LIMIT + +def find_recursion(etype, value, records): + """Identify the repeating stack frames from a RecursionError traceback + + 'records' is a list as returned by VerboseTB.get_records() + + Returns (last_unique, repeat_length) + """ + # This involves a bit of guesswork - we want to show enough of the traceback + # to indicate where the recursion is occurring. We guess that the innermost + # quarter of the traceback (250 frames by default) is repeats, and find the + # first frame (from in to out) that looks different. 
+ if not is_recursion_error(etype, value, records): + return len(records), 0 + + # Select filename, lineno, func_name to track frames with + records = [r[1:4] for r in records] + inner_frames = records[-(len(records)//4):] + frames_repeated = set(inner_frames) + + last_seen_at = {} + longest_repeat = 0 + i = len(records) + for frame in reversed(records): + i -= 1 + if frame not in frames_repeated: + last_unique = i + break + + if frame in last_seen_at: + distance = last_seen_at[frame] - i + longest_repeat = max(longest_repeat, distance) + + last_seen_at[frame] = i + else: + last_unique = 0 # The whole traceback was recursion + + return last_unique, longest_repeat + +#--------------------------------------------------------------------------- +# Module classes +class TBTools(colorable.Colorable): + """Basic tools used by all traceback printer classes.""" + + # Number of frames to skip when reporting tracebacks + tb_offset = 0 + + def __init__(self, color_scheme='NoColor', call_pdb=False, ostream=None, parent=None, config=None): + # Whether to call the interactive pdb debugger after printing + # tracebacks or not + super(TBTools, self).__init__(parent=parent, config=config) + self.call_pdb = call_pdb + + # Output stream to write to. Note that we store the original value in + # a private attribute and then make the public ostream a property, so + # that we can delay accessing sys.stdout until runtime. The way + # things are written now, the sys.stdout object is dynamically managed + # so a reference to it should NEVER be stored statically. This + # property approach confines this detail to a single location, and all + # subclasses can simply access self.ostream for writing. + self._ostream = ostream + + # Create color table + self.color_scheme_table = exception_colors() + + self.set_colors(color_scheme) + self.old_scheme = color_scheme # save initial value for toggles + + if call_pdb: + self.pdb = debugger.Pdb() + else: + self.pdb = None + + def _get_ostream(self): + """Output stream that exceptions are written to. + + Valid values are: + + - None: the default, which means that IPython will dynamically resolve + to sys.stdout. This ensures compatibility with most tools, including + Windows (where plain stdout doesn't recognize ANSI escapes). + + - Any object with 'write' and 'flush' attributes. 
+ """ + return sys.stdout if self._ostream is None else self._ostream + + def _set_ostream(self, val): + assert val is None or (hasattr(val, 'write') and hasattr(val, 'flush')) + self._ostream = val + + ostream = property(_get_ostream, _set_ostream) + + def get_parts_of_chained_exception(self, evalue): + def get_chained_exception(exception_value): + cause = getattr(exception_value, '__cause__', None) + if cause: + return cause + if getattr(exception_value, '__suppress_context__', False): + return None + return getattr(exception_value, '__context__', None) + + chained_evalue = get_chained_exception(evalue) + + if chained_evalue: + return chained_evalue.__class__, chained_evalue, chained_evalue.__traceback__ + + def prepare_chained_exception_message(self, cause): + direct_cause = "\nThe above exception was the direct cause of the following exception:\n" + exception_during_handling = "\nDuring handling of the above exception, another exception occurred:\n" + + if cause: + message = [[direct_cause]] + else: + message = [[exception_during_handling]] + return message + + def set_colors(self, *args, **kw): + """Shorthand access to the color table scheme selector method.""" + + # Set own color table + self.color_scheme_table.set_active_scheme(*args, **kw) + # for convenience, set Colors to the active scheme + self.Colors = self.color_scheme_table.active_colors + # Also set colors of debugger + if hasattr(self, 'pdb') and self.pdb is not None: + self.pdb.set_colors(*args, **kw) + + def color_toggle(self): + """Toggle between the currently active color scheme and NoColor.""" + + if self.color_scheme_table.active_scheme_name == 'NoColor': + self.color_scheme_table.set_active_scheme(self.old_scheme) + self.Colors = self.color_scheme_table.active_colors + else: + self.old_scheme = self.color_scheme_table.active_scheme_name + self.color_scheme_table.set_active_scheme('NoColor') + self.Colors = self.color_scheme_table.active_colors + + def stb2text(self, stb): + """Convert a structured traceback (a list) to a string.""" + return '\n'.join(stb) + + def text(self, etype, value, tb, tb_offset=None, context=5): + """Return formatted traceback. + + Subclasses may override this if they add extra arguments. + """ + tb_list = self.structured_traceback(etype, value, tb, + tb_offset, context) + return self.stb2text(tb_list) + + def structured_traceback(self, etype, evalue, tb, tb_offset=None, + context=5, mode=None): + """Return a list of traceback frames. + + Must be implemented by each class. + """ + raise NotImplementedError() + + +#--------------------------------------------------------------------------- +class ListTB(TBTools): + """Print traceback information from a traceback list, with optional color. + + Calling requires 3 arguments: (etype, evalue, elist) + as would be obtained by:: + + etype, evalue, tb = sys.exc_info() + if tb: + elist = traceback.extract_tb(tb) + else: + elist = None + + It can thus be used by programs which need to process the traceback before + printing (such as console replacements based on the code module from the + standard library). 
+ + Because they are meant to be called without a full traceback (only a + list), instances of this class can't call the interactive pdb debugger.""" + + def __init__(self, color_scheme='NoColor', call_pdb=False, ostream=None, parent=None, config=None): + TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb, + ostream=ostream, parent=parent,config=config) + + def __call__(self, etype, value, elist): + self.ostream.flush() + self.ostream.write(self.text(etype, value, elist)) + self.ostream.write('\n') + + def _extract_tb(self, tb): + if tb: + return traceback.extract_tb(tb) + else: + return None + + def structured_traceback(self, etype, evalue, etb=None, tb_offset=None, + context=5): + """Return a color formatted string with the traceback info. + + Parameters + ---------- + etype : exception type + Type of the exception raised. + + evalue : object + Data stored in the exception + + etb : object + If list: List of frames, see class docstring for details. + If Traceback: Traceback of the exception. + + tb_offset : int, optional + Number of frames in the traceback to skip. If not given, the + instance evalue is used (set in constructor). + + context : int, optional + Number of lines of context information to print. + + Returns + ------- + String with formatted exception. + """ + # This is a workaround to get chained_exc_ids in recursive calls + # etb should not be a tuple if structured_traceback is not recursive + if isinstance(etb, tuple): + etb, chained_exc_ids = etb + else: + chained_exc_ids = set() + + if isinstance(etb, list): + elist = etb + elif etb is not None: + elist = self._extract_tb(etb) + else: + elist = [] + tb_offset = self.tb_offset if tb_offset is None else tb_offset + Colors = self.Colors + out_list = [] + if elist: + + if tb_offset and len(elist) > tb_offset: + elist = elist[tb_offset:] + + out_list.append('Traceback %s(most recent call last)%s:' % + (Colors.normalEm, Colors.Normal) + '\n') + out_list.extend(self._format_list(elist)) + # The exception info should be a single entry in the list. + lines = ''.join(self._format_exception_only(etype, evalue)) + out_list.append(lines) + + exception = self.get_parts_of_chained_exception(evalue) + + if exception and not id(exception[1]) in chained_exc_ids: + chained_exception_message = self.prepare_chained_exception_message( + evalue.__cause__)[0] + etype, evalue, etb = exception + # Trace exception to avoid infinite 'cause' loop + chained_exc_ids.add(id(exception[1])) + chained_exceptions_tb_offset = 0 + out_list = ( + self.structured_traceback( + etype, evalue, (etb, chained_exc_ids), + chained_exceptions_tb_offset, context) + + chained_exception_message + + out_list) + + return out_list + + def _format_list(self, extracted_list): + """Format a list of traceback entry tuples for printing. + + Given a list of tuples as returned by extract_tb() or + extract_stack(), return a list of strings ready for printing. + Each string in the resulting list corresponds to the item with the + same index in the argument list. Each string ends in a newline; + the strings may contain internal newlines as well, for those items + whose source text line is not None. 
+ + Lifted almost verbatim from traceback.py + """ + + Colors = self.Colors + list = [] + for filename, lineno, name, line in extracted_list[:-1]: + item = ' File %s"%s"%s, line %s%d%s, in %s%s%s\n' % \ + (Colors.filename, filename, Colors.Normal, + Colors.lineno, lineno, Colors.Normal, + Colors.name, name, Colors.Normal) + if line: + item += ' %s\n' % line.strip() + list.append(item) + # Emphasize the last entry + filename, lineno, name, line = extracted_list[-1] + item = '%s File %s"%s"%s, line %s%d%s, in %s%s%s%s\n' % \ + (Colors.normalEm, + Colors.filenameEm, filename, Colors.normalEm, + Colors.linenoEm, lineno, Colors.normalEm, + Colors.nameEm, name, Colors.normalEm, + Colors.Normal) + if line: + item += '%s %s%s\n' % (Colors.line, line.strip(), + Colors.Normal) + list.append(item) + return list + + def _format_exception_only(self, etype, value): + """Format the exception part of a traceback. + + The arguments are the exception type and value such as given by + sys.exc_info()[:2]. The return value is a list of strings, each ending + in a newline. Normally, the list contains a single string; however, + for SyntaxError exceptions, it contains several lines that (when + printed) display detailed information about where the syntax error + occurred. The message indicating which exception occurred is the + always last string in the list. + + Also lifted nearly verbatim from traceback.py + """ + have_filedata = False + Colors = self.Colors + list = [] + stype = py3compat.cast_unicode(Colors.excName + etype.__name__ + Colors.Normal) + if value is None: + # Not sure if this can still happen in Python 2.6 and above + list.append(stype + '\n') + else: + if issubclass(etype, SyntaxError): + have_filedata = True + if not value.filename: value.filename = "<string>" + if value.lineno: + lineno = value.lineno + textline = linecache.getline(value.filename, value.lineno) + else: + lineno = 'unknown' + textline = '' + list.append('%s File %s"%s"%s, line %s%s%s\n' % \ + (Colors.normalEm, + Colors.filenameEm, py3compat.cast_unicode(value.filename), Colors.normalEm, + Colors.linenoEm, lineno, Colors.Normal )) + if textline == '': + textline = py3compat.cast_unicode(value.text, "utf-8") + + if textline is not None: + i = 0 + while i < len(textline) and textline[i].isspace(): + i += 1 + list.append('%s %s%s\n' % (Colors.line, + textline.strip(), + Colors.Normal)) + if value.offset is not None: + s = ' ' + for c in textline[i:value.offset - 1]: + if c.isspace(): + s += c + else: + s += ' ' + list.append('%s%s^%s\n' % (Colors.caret, s, + Colors.Normal)) + + try: + s = value.msg + except Exception: + s = self._some_str(value) + if s: + list.append('%s%s:%s %s\n' % (stype, Colors.excName, + Colors.Normal, s)) + else: + list.append('%s\n' % stype) + + # sync with user hooks + if have_filedata: + ipinst = get_ipython() + if ipinst is not None: + ipinst.hooks.synchronize_with_editor(value.filename, value.lineno, 0) + + return list + + def get_exception_only(self, etype, value): + """Only print the exception type and message, without a traceback. + + Parameters + ---------- + etype : exception type + value : exception value + """ + return ListTB.structured_traceback(self, etype, value) + + def show_exception_only(self, etype, evalue): + """Only print the exception type and message, without a traceback. 
+ + Parameters + ---------- + etype : exception type + value : exception value + """ + # This method needs to use __call__ from *this* class, not the one from + # a subclass whose signature or behavior may be different + ostream = self.ostream + ostream.flush() + ostream.write('\n'.join(self.get_exception_only(etype, evalue))) + ostream.flush() + + def _some_str(self, value): + # Lifted from traceback.py + try: + return py3compat.cast_unicode(str(value)) + except: + return u'<unprintable %s object>' % type(value).__name__ + + +#---------------------------------------------------------------------------- +class VerboseTB(TBTools): + """A port of Ka-Ping Yee's cgitb.py module that outputs color text instead + of HTML. Requires inspect and pydoc. Crazy, man. + + Modified version which optionally strips the topmost entries from the + traceback, to be used with alternate interpreters (because their own code + would appear in the traceback).""" + + def __init__(self, color_scheme='Linux', call_pdb=False, ostream=None, + tb_offset=0, long_header=False, include_vars=True, + check_cache=None, debugger_cls = None, + parent=None, config=None): + """Specify traceback offset, headers and color scheme. + + Define how many frames to drop from the tracebacks. Calling it with + tb_offset=1 allows use of this handler in interpreters which will have + their own code at the top of the traceback (VerboseTB will first + remove that frame before printing the traceback info).""" + TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb, + ostream=ostream, parent=parent, config=config) + self.tb_offset = tb_offset + self.long_header = long_header + self.include_vars = include_vars + # By default we use linecache.checkcache, but the user can provide a + # different check_cache implementation. This is used by the IPython + # kernel to provide tracebacks for interactive code that is cached, + # by a compiler instance that flushes the linecache but preserves its + # own code cache. + if check_cache is None: + check_cache = linecache.checkcache + self.check_cache = check_cache + + self.debugger_cls = debugger_cls or debugger.Pdb + self.skip_hidden = True + + def format_records(self, records, last_unique, recursion_repeat): + """Format the stack frames of the traceback""" + frames = [] + + skipped = 0 + lastrecord = len(records) - 1 + for i, r in enumerate(records[: last_unique + recursion_repeat + 1]): + if self.skip_hidden: + if r[0].f_locals.get("__tracebackhide__", 0) and i != lastrecord: + skipped += 1 + continue + if skipped: + Colors = self.Colors # just a shorthand + quicker name lookup + ColorsNormal = Colors.Normal # used a lot + frames.append( + " %s[... skipping hidden %s frame]%s\n" + % (Colors.excName, skipped, ColorsNormal) + ) + skipped = 0 + + frames.append(self.format_record(*r)) + + if skipped: + Colors = self.Colors # just a shorthand + quicker name lookup + ColorsNormal = Colors.Normal # used a lot + frames.append( + " %s[... skipping hidden %s frame]%s\n" + % (Colors.excName, skipped, ColorsNormal) + ) + + if recursion_repeat: + frames.append('... 
last %d frames repeated, from the frame below ...\n' % recursion_repeat) + frames.append(self.format_record(*records[last_unique+recursion_repeat+1])) + + return frames + + def format_record(self, frame, file, lnum, func, lines, index): + """Format a single stack frame""" + Colors = self.Colors # just a shorthand + quicker name lookup + ColorsNormal = Colors.Normal # used a lot + col_scheme = self.color_scheme_table.active_scheme_name + indent = ' ' * INDENT_SIZE + em_normal = '%s\n%s%s' % (Colors.valEm, indent, ColorsNormal) + undefined = '%sundefined%s' % (Colors.em, ColorsNormal) + tpl_link = '%s%%s%s' % (Colors.filenameEm, ColorsNormal) + tpl_call = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm, + ColorsNormal) + tpl_call_fail = 'in %s%%s%s(***failed resolving arguments***)%s' % \ + (Colors.vName, Colors.valEm, ColorsNormal) + tpl_local_var = '%s%%s%s' % (Colors.vName, ColorsNormal) + tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal, + Colors.vName, ColorsNormal) + tpl_name_val = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal) + + if not file: + file = '?' + elif file.startswith(str("<")) and file.endswith(str(">")): + # Not a real filename, no problem... + pass + elif not os.path.isabs(file): + # Try to make the filename absolute by trying all + # sys.path entries (which is also what linecache does) + for dirname in sys.path: + try: + fullname = os.path.join(dirname, file) + if os.path.isfile(fullname): + file = os.path.abspath(fullname) + break + except Exception: + # Just in case that sys.path contains very + # strange entries... + pass + + file = py3compat.cast_unicode(file, util_path.fs_encoding) + link = tpl_link % util_path.compress_user(file) + args, varargs, varkw, locals_ = inspect.getargvalues(frame) + + if func == '?': + call = '' + elif func == '<module>': + call = tpl_call % (func, '') + else: + # Decide whether to include variable details or not + var_repr = eqrepr if self.include_vars else nullrepr + try: + call = tpl_call % (func, inspect.formatargvalues(args, + varargs, varkw, + locals_, formatvalue=var_repr)) + except KeyError: + # This happens in situations like errors inside generator + # expressions, where local variables are listed in the + # line, but can't be extracted from the frame. I'm not + # 100% sure this isn't actually a bug in inspect itself, + # but since there's no info for us to compute with, the + # best we can do is report the failure and move on. Here + # we must *not* call any traceback construction again, + # because that would mess up use of %debug later on. So we + # simply report the failure and move on. The only + # limitation will be that this frame won't have locals + # listed in the call signature. Quite subtle problem... + # I can't think of a good way to validate this in a unit + # test, but running a script consisting of: + # dict( (k,v.strip()) for (k,v) in range(10) ) + # will illustrate the error, if this exception catch is + # disabled. + call = tpl_call_fail % func + + # Don't attempt to tokenize binary files. + if file.endswith(('.so', '.pyd', '.dll')): + return '%s %s\n' % (link, call) + + elif file.endswith(('.pyc', '.pyo')): + # Look up the corresponding source file. + try: + file = source_from_cache(file) + except ValueError: + # Failed to get the source file for some reason + # E.g. 
https://github.com/ipython/ipython/issues/9486 + return '%s %s\n' % (link, call) + + def linereader(file=file, lnum=[lnum], getline=linecache.getline): + line = getline(file, lnum[0]) + lnum[0] += 1 + return line + + # Build the list of names on this line of code where the exception + # occurred. + try: + names = [] + name_cont = False + + for token_type, token, start, end, line in generate_tokens(linereader): + # build composite names + if token_type == tokenize.NAME and token not in keyword.kwlist: + if name_cont: + # Continuation of a dotted name + try: + names[-1].append(token) + except IndexError: + names.append([token]) + name_cont = False + else: + # Regular new names. We append everything, the caller + # will be responsible for pruning the list later. It's + # very tricky to try to prune as we go, b/c composite + # names can fool us. The pruning at the end is easy + # to do (or the caller can print a list with repeated + # names if so desired. + names.append([token]) + elif token == '.': + name_cont = True + elif token_type == tokenize.NEWLINE: + break + + except (IndexError, UnicodeDecodeError, SyntaxError): + # signals exit of tokenizer + # SyntaxError can occur if the file is not actually Python + # - see gh-6300 + pass + except tokenize.TokenError as msg: + # Tokenizing may fail for various reasons, many of which are + # harmless. (A good example is when the line in question is the + # close of a triple-quoted string, cf gh-6864). We don't want to + # show this to users, but want make it available for debugging + # purposes. + _m = ("An unexpected error occurred while tokenizing input\n" + "The following traceback may be corrupted or invalid\n" + "The error message is: %s\n" % msg) + debug(_m) + + # Join composite names (e.g. "dict.fromkeys") + names = ['.'.join(n) for n in names] + # prune names list of duplicates, but keep the right order + unique_names = uniq_stable(names) + + # Start loop over vars + lvals = '' + lvals_list = [] + if self.include_vars: + for name_full in unique_names: + name_base = name_full.split('.', 1)[0] + if name_base in frame.f_code.co_varnames: + if name_base in locals_: + try: + value = repr(eval(name_full, locals_)) + except: + value = undefined + else: + value = undefined + name = tpl_local_var % name_full + else: + if name_base in frame.f_globals: + try: + value = repr(eval(name_full, frame.f_globals)) + except: + value = undefined + else: + value = undefined + name = tpl_global_var % name_full + lvals_list.append(tpl_name_val % (name, value)) + if lvals_list: + lvals = '%s%s' % (indent, em_normal.join(lvals_list)) + + level = '%s %s\n' % (link, call) + + if index is None: + return level + else: + _line_format = PyColorize.Parser(style=col_scheme, parent=self).format2 + return '%s%s' % (level, ''.join( + _format_traceback_lines(lnum, index, lines, Colors, lvals, + _line_format))) + + def prepare_header(self, etype, long_version=False): + colors = self.Colors # just a shorthand + quicker name lookup + colorsnormal = colors.Normal # used a lot + exc = '%s%s%s' % (colors.excName, etype, colorsnormal) + width = min(75, get_terminal_size()[0]) + if long_version: + # Header with the exception type, python version, and date + pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable + date = time.ctime(time.time()) + + head = '%s%s%s\n%s%s%s\n%s' % (colors.topline, '-' * width, colorsnormal, + exc, ' ' * (width - len(str(etype)) - len(pyver)), + pyver, date.rjust(width) ) + head += "\nA problem occurred executing Python code. 
Here is the sequence of function" \ + "\ncalls leading up to the error, with the most recent (innermost) call last." + else: + # Simplified header + head = '%s%s' % (exc, 'Traceback (most recent call last)'. \ + rjust(width - len(str(etype))) ) + + return head + + def format_exception(self, etype, evalue): + colors = self.Colors # just a shorthand + quicker name lookup + colorsnormal = colors.Normal # used a lot + # Get (safely) a string form of the exception info + try: + etype_str, evalue_str = map(str, (etype, evalue)) + except: + # User exception is improperly defined. + etype, evalue = str, sys.exc_info()[:2] + etype_str, evalue_str = map(str, (etype, evalue)) + # ... and format it + return ['%s%s%s: %s' % (colors.excName, etype_str, + colorsnormal, py3compat.cast_unicode(evalue_str))] + + def format_exception_as_a_whole(self, etype, evalue, etb, number_of_lines_of_context, tb_offset): + """Formats the header, traceback and exception message for a single exception. + + This may be called multiple times by Python 3 exception chaining + (PEP 3134). + """ + # some locals + orig_etype = etype + try: + etype = etype.__name__ + except AttributeError: + pass + + tb_offset = self.tb_offset if tb_offset is None else tb_offset + head = self.prepare_header(etype, self.long_header) + records = self.get_records(etb, number_of_lines_of_context, tb_offset) + + + last_unique, recursion_repeat = find_recursion(orig_etype, evalue, records) + + frames = self.format_records(records, last_unique, recursion_repeat) + + formatted_exception = self.format_exception(etype, evalue) + if records: + filepath, lnum = records[-1][1:3] + filepath = os.path.abspath(filepath) + ipinst = get_ipython() + if ipinst is not None: + ipinst.hooks.synchronize_with_editor(filepath, lnum, 0) + + return [[head] + frames + [''.join(formatted_exception[0])]] + + def get_records(self, etb, number_of_lines_of_context, tb_offset): + try: + # Try the default getinnerframes and Alex's: Alex's fixes some + # problems, but it generates empty tracebacks for console errors + # (5 blanks lines) where none should be returned. + return _fixed_getinnerframes(etb, number_of_lines_of_context, tb_offset) + except UnicodeDecodeError: + # This can occur if a file's encoding magic comment is wrong. + # I can't see a way to recover without duplicating a bunch of code + # from the stdlib traceback module. --TK + error('\nUnicodeDecodeError while processing traceback.\n') + return None + except: + # FIXME: I've been getting many crash reports from python 2.3 + # users, traceable to inspect.py. If I can find a small test-case + # to reproduce this, I should either write a better workaround or + # file a bug report against inspect (if that's the real problem). + # So far, I haven't been able to find an isolated example to + # reproduce the problem. 
+ inspect_error() + traceback.print_exc(file=self.ostream) + info('\nUnfortunately, your original traceback can not be constructed.\n') + return None + + def structured_traceback(self, etype, evalue, etb, tb_offset=None, + number_of_lines_of_context=5): + """Return a nice text document describing the traceback.""" + + formatted_exception = self.format_exception_as_a_whole(etype, evalue, etb, number_of_lines_of_context, + tb_offset) + + colors = self.Colors # just a shorthand + quicker name lookup + colorsnormal = colors.Normal # used a lot + head = '%s%s%s' % (colors.topline, '-' * min(75, get_terminal_size()[0]), colorsnormal) + structured_traceback_parts = [head] + chained_exceptions_tb_offset = 0 + lines_of_context = 3 + formatted_exceptions = formatted_exception + exception = self.get_parts_of_chained_exception(evalue) + if exception: + formatted_exceptions += self.prepare_chained_exception_message(evalue.__cause__) + etype, evalue, etb = exception + else: + evalue = None + chained_exc_ids = set() + while evalue: + formatted_exceptions += self.format_exception_as_a_whole(etype, evalue, etb, lines_of_context, + chained_exceptions_tb_offset) + exception = self.get_parts_of_chained_exception(evalue) + + if exception and not id(exception[1]) in chained_exc_ids: + chained_exc_ids.add(id(exception[1])) # trace exception to avoid infinite 'cause' loop + formatted_exceptions += self.prepare_chained_exception_message(evalue.__cause__) + etype, evalue, etb = exception + else: + evalue = None + + # we want to see exceptions in a reversed order: + # the first exception should be on top + for formatted_exception in reversed(formatted_exceptions): + structured_traceback_parts += formatted_exception + + return structured_traceback_parts + + def debugger(self, force=False): + """Call up the pdb debugger if desired, always clean up the tb + reference. + + Keywords: + + - force(False): by default, this routine checks the instance call_pdb + flag and does not actually invoke the debugger if the flag is false. + The 'force' option forces the debugger to activate even if the flag + is false. + + If the call_pdb flag is set, the pdb interactive debugger is + invoked. In all cases, the self.tb reference to the current traceback + is deleted to prevent lingering references which hamper memory + management. 
+ + Note that each call to pdb() does an 'import readline', so if your app + requires a special setup for the readline completers, you'll have to + fix that by hand after invoking the exception handler.""" + + if force or self.call_pdb: + if self.pdb is None: + self.pdb = self.debugger_cls() + # the system displayhook may have changed, restore the original + # for pdb + display_trap = DisplayTrap(hook=sys.__displayhook__) + with display_trap: + self.pdb.reset() + # Find the right frame so we don't pop up inside ipython itself + if hasattr(self, 'tb') and self.tb is not None: + etb = self.tb + else: + etb = self.tb = sys.last_traceback + while self.tb is not None and self.tb.tb_next is not None: + self.tb = self.tb.tb_next + if etb and etb.tb_next: + etb = etb.tb_next + self.pdb.botframe = etb.tb_frame + self.pdb.interaction(None, etb) + + if hasattr(self, 'tb'): + del self.tb + + def handler(self, info=None): + (etype, evalue, etb) = info or sys.exc_info() + self.tb = etb + ostream = self.ostream + ostream.flush() + ostream.write(self.text(etype, evalue, etb)) + ostream.write('\n') + ostream.flush() + + # Changed so an instance can just be called as VerboseTB_inst() and print + # out the right info on its own. + def __call__(self, etype=None, evalue=None, etb=None): + """This hook can replace sys.excepthook (for Python 2.1 or higher).""" + if etb is None: + self.handler() + else: + self.handler((etype, evalue, etb)) + try: + self.debugger() + except KeyboardInterrupt: + print("\nKeyboardInterrupt") + + +#---------------------------------------------------------------------------- +class FormattedTB(VerboseTB, ListTB): + """Subclass ListTB but allow calling with a traceback. + + It can thus be used as a sys.excepthook for Python > 2.1. + + Also adds 'Context' and 'Verbose' modes, not available in ListTB. + + Allows a tb_offset to be specified. This is useful for situations where + one needs to remove a number of topmost frames from the traceback (such as + occurs with python programs that themselves execute other python code, + like Python shells). """ + + def __init__(self, mode='Plain', color_scheme='Linux', call_pdb=False, + ostream=None, + tb_offset=0, long_header=False, include_vars=False, + check_cache=None, debugger_cls=None, + parent=None, config=None): + + # NEVER change the order of this list. Put new modes at the end: + self.valid_modes = ['Plain', 'Context', 'Verbose', 'Minimal'] + self.verbose_modes = self.valid_modes[1:3] + + VerboseTB.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb, + ostream=ostream, tb_offset=tb_offset, + long_header=long_header, include_vars=include_vars, + check_cache=check_cache, debugger_cls=debugger_cls, + parent=parent, config=config) + + # Different types of tracebacks are joined with different separators to + # form a single string. 
They are taken from this dict + self._join_chars = dict(Plain='', Context='\n', Verbose='\n', + Minimal='') + # set_mode also sets the tb_join_char attribute + self.set_mode(mode) + + def structured_traceback(self, etype, value, tb, tb_offset=None, number_of_lines_of_context=5): + tb_offset = self.tb_offset if tb_offset is None else tb_offset + mode = self.mode + if mode in self.verbose_modes: + # Verbose modes need a full traceback + return VerboseTB.structured_traceback( + self, etype, value, tb, tb_offset, number_of_lines_of_context + ) + elif mode == 'Minimal': + return ListTB.get_exception_only(self, etype, value) + else: + # We must check the source cache because otherwise we can print + # out-of-date source code. + self.check_cache() + # Now we can extract and format the exception + return ListTB.structured_traceback( + self, etype, value, tb, tb_offset, number_of_lines_of_context + ) + + def stb2text(self, stb): + """Convert a structured traceback (a list) to a string.""" + return self.tb_join_char.join(stb) + + + def set_mode(self, mode=None): + """Switch to the desired mode. + + If mode is not specified, cycles through the available modes.""" + + if not mode: + new_idx = (self.valid_modes.index(self.mode) + 1 ) % \ + len(self.valid_modes) + self.mode = self.valid_modes[new_idx] + elif mode not in self.valid_modes: + raise ValueError('Unrecognized mode in FormattedTB: <' + mode + '>\n' + 'Valid modes: ' + str(self.valid_modes)) + else: + self.mode = mode + # include variable details only in 'Verbose' mode + self.include_vars = (self.mode == self.valid_modes[2]) + # Set the join character for generating text tracebacks + self.tb_join_char = self._join_chars[self.mode] + + # some convenient shortcuts + def plain(self): + self.set_mode(self.valid_modes[0]) + + def context(self): + self.set_mode(self.valid_modes[1]) + + def verbose(self): + self.set_mode(self.valid_modes[2]) + + def minimal(self): + self.set_mode(self.valid_modes[3]) + + +#---------------------------------------------------------------------------- +class AutoFormattedTB(FormattedTB): + """A traceback printer which can be called on the fly. + + It will find out about exceptions by itself. + + A brief example:: + + AutoTB = AutoFormattedTB(mode = 'Verbose',color_scheme='Linux') + try: + ... + except: + AutoTB() # or AutoTB(out=logfile) where logfile is an open file object + """ + + def __call__(self, etype=None, evalue=None, etb=None, + out=None, tb_offset=None): + """Print out a formatted exception traceback. + + Optional arguments: + - out: an open file-like object to direct output to. + + - tb_offset: the number of frames to skip over in the stack, on a + per-call basis (this overrides temporarily the instance's tb_offset + given at initialization time. """ + + if out is None: + out = self.ostream + out.flush() + out.write(self.text(etype, evalue, etb, tb_offset)) + out.write('\n') + out.flush() + # FIXME: we should remove the auto pdb behavior from here and leave + # that to the clients. + try: + self.debugger() + except KeyboardInterrupt: + print("\nKeyboardInterrupt") + + def structured_traceback(self, etype=None, value=None, tb=None, + tb_offset=None, number_of_lines_of_context=5): + if etype is None: + etype, value, tb = sys.exc_info() + if isinstance(tb, tuple): + # tb is a tuple if this is a chained exception. 
+ self.tb = tb[0] + else: + self.tb = tb + return FormattedTB.structured_traceback( + self, etype, value, tb, tb_offset, number_of_lines_of_context) + + +#--------------------------------------------------------------------------- + +# A simple class to preserve Nathan's original functionality. +class ColorTB(FormattedTB): + """Shorthand to initialize a FormattedTB in Linux colors mode.""" + + def __init__(self, color_scheme='Linux', call_pdb=0, **kwargs): + FormattedTB.__init__(self, color_scheme=color_scheme, + call_pdb=call_pdb, **kwargs) + + +class SyntaxTB(ListTB): + """Extension which holds some state: the last exception value""" + + def __init__(self, color_scheme='NoColor', parent=None, config=None): + ListTB.__init__(self, color_scheme, parent=parent, config=config) + self.last_syntax_error = None + + def __call__(self, etype, value, elist): + self.last_syntax_error = value + + ListTB.__call__(self, etype, value, elist) + + def structured_traceback(self, etype, value, elist, tb_offset=None, + context=5): + # If the source file has been edited, the line in the syntax error can + # be wrong (retrieved from an outdated cache). This replaces it with + # the current value. + if isinstance(value, SyntaxError) \ + and isinstance(value.filename, str) \ + and isinstance(value.lineno, int): + linecache.checkcache(value.filename) + newtext = linecache.getline(value.filename, value.lineno) + if newtext: + value.text = newtext + self.last_syntax_error = value + return super(SyntaxTB, self).structured_traceback(etype, value, elist, + tb_offset=tb_offset, context=context) + + def clear_err_state(self): + """Return the current error state and clear it""" + e = self.last_syntax_error + self.last_syntax_error = None + return e + + def stb2text(self, stb): + """Convert a structured traceback (a list) to a string.""" + return ''.join(stb) + + +# some internal-use functions +def text_repr(value): + """Hopefully pretty robust repr equivalent.""" + # this is pretty horrible but should always return *something* + try: + return pydoc.text.repr(value) + except KeyboardInterrupt: + raise + except: + try: + return repr(value) + except KeyboardInterrupt: + raise + except: + try: + # all still in an except block so we catch + # getattr raising + name = getattr(value, '__name__', None) + if name: + # ick, recursion + return text_repr(name) + klass = getattr(value, '__class__', None) + if klass: + return '%s instance' % text_repr(klass) + except KeyboardInterrupt: + raise + except: + return 'UNRECOVERABLE REPR FAILURE' + + +def eqrepr(value, repr=text_repr): + return '=%s' % repr(value) + + +def nullrepr(value, repr=text_repr): + return '' diff --git a/contrib/python/ipython/py3/IPython/core/usage.py b/contrib/python/ipython/py3/IPython/core/usage.py index 06c4aaa947c..53219bceb25 100644 --- a/contrib/python/ipython/py3/IPython/core/usage.py +++ b/contrib/python/ipython/py3/IPython/core/usage.py @@ -1,341 +1,341 @@ -# -*- coding: utf-8 -*- -"""Usage information for the main IPython applications. -""" -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu> -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. 
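Expanding slightly on the example in the ``AutoFormattedTB`` docstring above, a sketch of rendering a caught exception with the formatter (illustrative, not part of this diff)::

    from IPython.core.ultratb import AutoFormattedTB

    auto_tb = AutoFormattedTB(mode='Context', color_scheme='Linux')

    try:
        1 / 0
    except ZeroDivisionError:
        auto_tb()                             # finds the active exception by itself
        stb = auto_tb.structured_traceback()  # the same traceback as a list of strings
        text = auto_tb.stb2text(stb)          # joined with the mode's join character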
-#----------------------------------------------------------------------------- - -import sys -from IPython.core import release - -cl_usage = """\ -========= - IPython -========= - -Tools for Interactive Computing in Python -========================================= - - A Python shell with automatic history (input and output), dynamic object - introspection, easier configuration, command completion, access to the - system shell and more. IPython can also be embedded in running programs. - - -Usage - - ipython [subcommand] [options] [-c cmd | -m mod | file] [--] [arg] ... - - If invoked with no options, it executes the file and exits, passing the - remaining arguments to the script, just as if you had specified the same - command with python. You may need to specify `--` before args to be passed - to the script, to prevent IPython from attempting to parse them. If you - specify the option `-i` before the filename, it will enter an interactive - IPython session after running the script, rather than exiting. Files ending - in .py will be treated as normal Python, but files ending in .ipy can - contain special IPython syntax (magic commands, shell expansions, etc.). - - Almost all configuration in IPython is available via the command-line. Do - `ipython --help-all` to see all available options. For persistent - configuration, look into your `ipython_config.py` configuration file for - details. - - This file is typically installed in the `IPYTHONDIR` directory, and there - is a separate configuration directory for each profile. The default profile - directory will be located in $IPYTHONDIR/profile_default. IPYTHONDIR - defaults to to `$HOME/.ipython`. For Windows users, $HOME resolves to - C:\\Users\\YourUserName in most instances. - - To initialize a profile with the default configuration file, do:: - - $> ipython profile create - - and start editing `IPYTHONDIR/profile_default/ipython_config.py` - - In IPython's documentation, we will refer to this directory as - `IPYTHONDIR`, you can change its default location by creating an - environment variable with this name and setting it to the desired path. - - For more information, see the manual available in HTML and PDF in your - installation, or online at https://ipython.org/documentation.html. -""" - -interactive_usage = """ -IPython -- An enhanced Interactive Python -========================================= - -IPython offers a fully compatible replacement for the standard Python -interpreter, with convenient shell features, special commands, command -history mechanism and output results caching. - -At your system command line, type 'ipython -h' to see the command line -options available. This document only describes interactive features. - -GETTING HELP ------------- - -Within IPython you have various way to access help: - - ? -> Introduction and overview of IPython's features (this screen). - object? -> Details about 'object'. - object?? -> More detailed, verbose information about 'object'. - %quickref -> Quick reference of all IPython specific syntax and magics. - help -> Access Python's own help system. - -If you are in terminal IPython you can quit this screen by pressing `q`. - - -MAIN FEATURES -------------- - -* Access to the standard Python help with object docstrings and the Python - manuals. Simply type 'help' (no quotes) to invoke it. - -* Magic commands: type %magic for information on the magic subsystem. - -* System command aliases, via the %alias command or the configuration file(s). 
- -* Dynamic object information: - - Typing ?word or word? prints detailed information about an object. Certain - long strings (code, etc.) get snipped in the center for brevity. - - Typing ??word or word?? gives access to the full information without - snipping long strings. Strings that are longer than the screen are printed - through the less pager. - - The ?/?? system gives access to the full source code for any object (if - available), shows function prototypes and other useful information. - - If you just want to see an object's docstring, type '%pdoc object' (without - quotes, and without % if you have automagic on). - -* Tab completion in the local namespace: - - At any time, hitting tab will complete any available python commands or - variable names, and show you a list of the possible completions if there's - no unambiguous one. It will also complete filenames in the current directory. - -* Search previous command history in multiple ways: - - - Start typing, and then use arrow keys up/down or (Ctrl-p/Ctrl-n) to search - through the history items that match what you've typed so far. - - - Hit Ctrl-r: opens a search prompt. Begin typing and the system searches - your history for lines that match what you've typed so far, completing as - much as it can. - - - %hist: search history by index. - -* Persistent command history across sessions. - -* Logging of input with the ability to save and restore a working session. - -* System shell with !. Typing !ls will run 'ls' in the current directory. - -* The reload command does a 'deep' reload of a module: changes made to the - module since you imported will actually be available without having to exit. - -* Verbose and colored exception traceback printouts. See the magic xmode and - xcolor functions for details (just type %magic). - -* Input caching system: - - IPython offers numbered prompts (In/Out) with input and output caching. All - input is saved and can be retrieved as variables (besides the usual arrow - key recall). - - The following GLOBAL variables always exist (so don't overwrite them!): - _i: stores previous input. - _ii: next previous. - _iii: next-next previous. - _ih : a list of all input _ih[n] is the input from line n. - - Additionally, global variables named _i<n> are dynamically created (<n> - being the prompt counter), such that _i<n> == _ih[<n>] - - For example, what you typed at prompt 14 is available as _i14 and _ih[14]. - - You can create macros which contain multiple input lines from this history, - for later re-execution, with the %macro function. - - The history function %hist allows you to see any part of your input history - by printing a range of the _i variables. Note that inputs which contain - magic functions (%) appear in the history with a prepended comment. This is - because they aren't really valid Python code, so you can't exec them. - -* Output caching system: - - For output that is returned from actions, a system similar to the input - cache exists but using _ instead of _i. Only actions that produce a result - (NOT assignments, for example) are cached. If you are familiar with - Mathematica, IPython's _ variables behave exactly like Mathematica's % - variables. - - The following GLOBAL variables always exist (so don't overwrite them!): - _ (one underscore): previous output. - __ (two underscores): next previous. - ___ (three underscores): next-next previous. 
- - Global variables named _<n> are dynamically created (<n> being the prompt - counter), such that the result of output <n> is always available as _<n>. - - Finally, a global dictionary named _oh exists with entries for all lines - which generated output. - -* Directory history: - - Your history of visited directories is kept in the global list _dh, and the - magic %cd command can be used to go to any entry in that list. - -* Auto-parentheses and auto-quotes (adapted from Nathan Gray's LazyPython) - - 1. Auto-parentheses - - Callable objects (i.e. functions, methods, etc) can be invoked like - this (notice the commas between the arguments):: +# -*- coding: utf-8 -*- +"""Usage information for the main IPython applications. +""" +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu> +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +import sys +from IPython.core import release + +cl_usage = """\ +========= + IPython +========= + +Tools for Interactive Computing in Python +========================================= + + A Python shell with automatic history (input and output), dynamic object + introspection, easier configuration, command completion, access to the + system shell and more. IPython can also be embedded in running programs. + + +Usage + + ipython [subcommand] [options] [-c cmd | -m mod | file] [--] [arg] ... + + If invoked with no options, it executes the file and exits, passing the + remaining arguments to the script, just as if you had specified the same + command with python. You may need to specify `--` before args to be passed + to the script, to prevent IPython from attempting to parse them. If you + specify the option `-i` before the filename, it will enter an interactive + IPython session after running the script, rather than exiting. Files ending + in .py will be treated as normal Python, but files ending in .ipy can + contain special IPython syntax (magic commands, shell expansions, etc.). + + Almost all configuration in IPython is available via the command-line. Do + `ipython --help-all` to see all available options. For persistent + configuration, look into your `ipython_config.py` configuration file for + details. + + This file is typically installed in the `IPYTHONDIR` directory, and there + is a separate configuration directory for each profile. The default profile + directory will be located in $IPYTHONDIR/profile_default. IPYTHONDIR + defaults to to `$HOME/.ipython`. For Windows users, $HOME resolves to + C:\\Users\\YourUserName in most instances. + + To initialize a profile with the default configuration file, do:: + + $> ipython profile create + + and start editing `IPYTHONDIR/profile_default/ipython_config.py` + + In IPython's documentation, we will refer to this directory as + `IPYTHONDIR`, you can change its default location by creating an + environment variable with this name and setting it to the desired path. + + For more information, see the manual available in HTML and PDF in your + installation, or online at https://ipython.org/documentation.html. 
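As a small aside (not part of this diff), the ``IPYTHONDIR`` location described above can also be inspected programmatically, assuming IPython is installed::

    from IPython.paths import get_ipython_dir

    print(get_ipython_dir())   # honors the IPYTHONDIR environment variable, else ~/.ipython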
+""" + +interactive_usage = """ +IPython -- An enhanced Interactive Python +========================================= + +IPython offers a fully compatible replacement for the standard Python +interpreter, with convenient shell features, special commands, command +history mechanism and output results caching. + +At your system command line, type 'ipython -h' to see the command line +options available. This document only describes interactive features. + +GETTING HELP +------------ + +Within IPython you have various way to access help: + + ? -> Introduction and overview of IPython's features (this screen). + object? -> Details about 'object'. + object?? -> More detailed, verbose information about 'object'. + %quickref -> Quick reference of all IPython specific syntax and magics. + help -> Access Python's own help system. + +If you are in terminal IPython you can quit this screen by pressing `q`. + + +MAIN FEATURES +------------- + +* Access to the standard Python help with object docstrings and the Python + manuals. Simply type 'help' (no quotes) to invoke it. + +* Magic commands: type %magic for information on the magic subsystem. + +* System command aliases, via the %alias command or the configuration file(s). + +* Dynamic object information: + + Typing ?word or word? prints detailed information about an object. Certain + long strings (code, etc.) get snipped in the center for brevity. + + Typing ??word or word?? gives access to the full information without + snipping long strings. Strings that are longer than the screen are printed + through the less pager. + + The ?/?? system gives access to the full source code for any object (if + available), shows function prototypes and other useful information. + + If you just want to see an object's docstring, type '%pdoc object' (without + quotes, and without % if you have automagic on). + +* Tab completion in the local namespace: + + At any time, hitting tab will complete any available python commands or + variable names, and show you a list of the possible completions if there's + no unambiguous one. It will also complete filenames in the current directory. + +* Search previous command history in multiple ways: + + - Start typing, and then use arrow keys up/down or (Ctrl-p/Ctrl-n) to search + through the history items that match what you've typed so far. + + - Hit Ctrl-r: opens a search prompt. Begin typing and the system searches + your history for lines that match what you've typed so far, completing as + much as it can. + + - %hist: search history by index. + +* Persistent command history across sessions. + +* Logging of input with the ability to save and restore a working session. + +* System shell with !. Typing !ls will run 'ls' in the current directory. + +* The reload command does a 'deep' reload of a module: changes made to the + module since you imported will actually be available without having to exit. + +* Verbose and colored exception traceback printouts. See the magic xmode and + xcolor functions for details (just type %magic). + +* Input caching system: + + IPython offers numbered prompts (In/Out) with input and output caching. All + input is saved and can be retrieved as variables (besides the usual arrow + key recall). + + The following GLOBAL variables always exist (so don't overwrite them!): + _i: stores previous input. + _ii: next previous. + _iii: next-next previous. + _ih : a list of all input _ih[n] is the input from line n. 
+ + Additionally, global variables named _i<n> are dynamically created (<n> + being the prompt counter), such that _i<n> == _ih[<n>] + + For example, what you typed at prompt 14 is available as _i14 and _ih[14]. + + You can create macros which contain multiple input lines from this history, + for later re-execution, with the %macro function. + + The history function %hist allows you to see any part of your input history + by printing a range of the _i variables. Note that inputs which contain + magic functions (%) appear in the history with a prepended comment. This is + because they aren't really valid Python code, so you can't exec them. + +* Output caching system: + + For output that is returned from actions, a system similar to the input + cache exists but using _ instead of _i. Only actions that produce a result + (NOT assignments, for example) are cached. If you are familiar with + Mathematica, IPython's _ variables behave exactly like Mathematica's % + variables. + + The following GLOBAL variables always exist (so don't overwrite them!): + _ (one underscore): previous output. + __ (two underscores): next previous. + ___ (three underscores): next-next previous. + + Global variables named _<n> are dynamically created (<n> being the prompt + counter), such that the result of output <n> is always available as _<n>. + + Finally, a global dictionary named _oh exists with entries for all lines + which generated output. + +* Directory history: + + Your history of visited directories is kept in the global list _dh, and the + magic %cd command can be used to go to any entry in that list. + +* Auto-parentheses and auto-quotes (adapted from Nathan Gray's LazyPython) + + 1. Auto-parentheses - In [1]: callable_ob arg1, arg2, arg3 - - and the input will be translated to this:: - - callable_ob(arg1, arg2, arg3) - - This feature is off by default (in rare cases it can produce - undesirable side-effects), but you can activate it at the command-line - by starting IPython with `--autocall 1`, set it permanently in your - configuration file, or turn on at runtime with `%autocall 1`. - - You can force auto-parentheses by using '/' as the first character - of a line. For example:: - - In [1]: /globals # becomes 'globals()' - - Note that the '/' MUST be the first character on the line! This - won't work:: - - In [2]: print /globals # syntax error - - In most cases the automatic algorithm should work, so you should - rarely need to explicitly invoke /. One notable exception is if you - are trying to call a function with a list of tuples as arguments (the - parenthesis will confuse IPython):: - - In [1]: zip (1,2,3),(4,5,6) # won't work - - but this will work:: - - In [2]: /zip (1,2,3),(4,5,6) - ------> zip ((1,2,3),(4,5,6)) - Out[2]= [(1, 4), (2, 5), (3, 6)] - - IPython tells you that it has altered your command line by - displaying the new command line preceded by -->. e.g.:: - - In [18]: callable list - -------> callable (list) - - 2. Auto-Quoting - - You can force auto-quoting of a function's arguments by using ',' as - the first character of a line. For example:: - - In [1]: ,my_function /home/me # becomes my_function("/home/me") - - If you use ';' instead, the whole argument is quoted as a single - string (while ',' splits on whitespace):: - - In [2]: ,my_function a b c # becomes my_function("a","b","c") - In [3]: ;my_function a b c # becomes my_function("a b c") - - Note that the ',' MUST be the first character on the line! 
This - won't work:: - - In [4]: x = ,my_function /home/me # syntax error -""" - -interactive_usage_min = """\ -An enhanced console for Python. -Some of its features are: -- Tab completion in the local namespace. -- Logging of input, see command-line options. -- System shell escape via ! , eg !ls. -- Magic commands, starting with a % (like %ls, %pwd, %cd, etc.) -- Keeps track of locally defined variables via %who, %whos. -- Show object information with a ? eg ?x or x? (use ?? for more info). -""" - -quick_reference = r""" -IPython -- An enhanced Interactive Python - Quick Reference Card -================================================================ - -obj?, obj?? : Get help, or more help for object (also works as - ?obj, ??obj). -?foo.*abc* : List names in 'foo' containing 'abc' in them. -%magic : Information about IPython's 'magic' % functions. - -Magic functions are prefixed by % or %%, and typically take their arguments -without parentheses, quotes or even commas for convenience. Line magics take a -single % and cell magics are prefixed with two %%. - -Example magic function calls: - -%alias d ls -F : 'd' is now an alias for 'ls -F' -alias d ls -F : Works if 'alias' not a python name -alist = %alias : Get list of aliases to 'alist' -cd /usr/share : Obvious. cd -<tab> to choose from visited dirs. -%cd?? : See help AND source for magic %cd -%timeit x=10 : time the 'x=10' statement with high precision. -%%timeit x=2**100 -x**100 : time 'x**100' with a setup of 'x=2**100'; setup code is not - counted. This is an example of a cell magic. - -System commands: - -!cp a.txt b/ : System command escape, calls os.system() -cp a.txt b/ : after %rehashx, most system commands work without ! -cp ${f}.txt $bar : Variable expansion in magics and system commands -files = !ls /usr : Capture system command output -files.s, files.l, files.n: "a b c", ['a','b','c'], 'a\nb\nc' - -History: - -_i, _ii, _iii : Previous, next previous, next next previous input -_i4, _ih[2:5] : Input history line 4, lines 2-4 -exec(_i81) : Execute input history line #81 again -%rep 81 : Edit input history line #81 -_, __, ___ : previous, next previous, next next previous output -_dh : Directory history -_oh : Output history -%hist : Command history of current session. -%hist -g foo : Search command history of (almost) all sessions for 'foo'. -%hist -g : Command history of (almost) all sessions. -%hist 1/2-8 : Command history containing lines 2-8 of session 1. -%hist 1/ ~2/ : Command history of session 1 and 2 sessions before current. -%hist ~8/1-~6/5 : Command history from line 1 of 8 sessions ago to - line 5 of 6 sessions ago. -%edit 0/ : Open editor to execute code with history of current session. - -Autocall: - -f 1,2 : f(1,2) # Off by default, enable with %autocall magic. -/f 1,2 : f(1,2) (forced autoparen) -,f 1 2 : f("1","2") -;f 1 2 : f("1 2") - -Remember: TAB completion works in many contexts, not just file names -or python names. - -The following magic functions are currently available: - -""" - -default_banner_parts = ["Python %s\n"%sys.version.split("\n")[0], - "Type 'copyright', 'credits' or 'license' for more information\n" , - "IPython {version} -- An enhanced Interactive Python. Type '?' for help.\n".format(version=release.version), -] - -default_banner = ''.join(default_banner_parts) + Callable objects (i.e. 
functions, methods, etc) can be invoked like + this (notice the commas between the arguments):: + + In [1]: callable_ob arg1, arg2, arg3 + + and the input will be translated to this:: + + callable_ob(arg1, arg2, arg3) + + This feature is off by default (in rare cases it can produce + undesirable side-effects), but you can activate it at the command-line + by starting IPython with `--autocall 1`, set it permanently in your + configuration file, or turn on at runtime with `%autocall 1`. + + You can force auto-parentheses by using '/' as the first character + of a line. For example:: + + In [1]: /globals # becomes 'globals()' + + Note that the '/' MUST be the first character on the line! This + won't work:: + + In [2]: print /globals # syntax error + + In most cases the automatic algorithm should work, so you should + rarely need to explicitly invoke /. One notable exception is if you + are trying to call a function with a list of tuples as arguments (the + parenthesis will confuse IPython):: + + In [1]: zip (1,2,3),(4,5,6) # won't work + + but this will work:: + + In [2]: /zip (1,2,3),(4,5,6) + ------> zip ((1,2,3),(4,5,6)) + Out[2]= [(1, 4), (2, 5), (3, 6)] + + IPython tells you that it has altered your command line by + displaying the new command line preceded by -->. e.g.:: + + In [18]: callable list + -------> callable (list) + + 2. Auto-Quoting + + You can force auto-quoting of a function's arguments by using ',' as + the first character of a line. For example:: + + In [1]: ,my_function /home/me # becomes my_function("/home/me") + + If you use ';' instead, the whole argument is quoted as a single + string (while ',' splits on whitespace):: + + In [2]: ,my_function a b c # becomes my_function("a","b","c") + In [3]: ;my_function a b c # becomes my_function("a b c") + + Note that the ',' MUST be the first character on the line! This + won't work:: + + In [4]: x = ,my_function /home/me # syntax error +""" + +interactive_usage_min = """\ +An enhanced console for Python. +Some of its features are: +- Tab completion in the local namespace. +- Logging of input, see command-line options. +- System shell escape via ! , eg !ls. +- Magic commands, starting with a % (like %ls, %pwd, %cd, etc.) +- Keeps track of locally defined variables via %who, %whos. +- Show object information with a ? eg ?x or x? (use ?? for more info). +""" + +quick_reference = r""" +IPython -- An enhanced Interactive Python - Quick Reference Card +================================================================ + +obj?, obj?? : Get help, or more help for object (also works as + ?obj, ??obj). +?foo.*abc* : List names in 'foo' containing 'abc' in them. +%magic : Information about IPython's 'magic' % functions. + +Magic functions are prefixed by % or %%, and typically take their arguments +without parentheses, quotes or even commas for convenience. Line magics take a +single % and cell magics are prefixed with two %%. + +Example magic function calls: + +%alias d ls -F : 'd' is now an alias for 'ls -F' +alias d ls -F : Works if 'alias' not a python name +alist = %alias : Get list of aliases to 'alist' +cd /usr/share : Obvious. cd -<tab> to choose from visited dirs. +%cd?? : See help AND source for magic %cd +%timeit x=10 : time the 'x=10' statement with high precision. +%%timeit x=2**100 +x**100 : time 'x**100' with a setup of 'x=2**100'; setup code is not + counted. This is an example of a cell magic. 
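As the autocall description above mentions, the feature can also be enabled permanently in the configuration file. A possible ``ipython_config.py`` snippet (illustrative)::

    c = get_config()

    # 0 = off, 1 = 'smart' autocall, 2 = full autocall
    c.InteractiveShell.autocall = 1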
+ +System commands: + +!cp a.txt b/ : System command escape, calls os.system() +cp a.txt b/ : after %rehashx, most system commands work without ! +cp ${f}.txt $bar : Variable expansion in magics and system commands +files = !ls /usr : Capture system command output +files.s, files.l, files.n: "a b c", ['a','b','c'], 'a\nb\nc' + +History: + +_i, _ii, _iii : Previous, next previous, next next previous input +_i4, _ih[2:5] : Input history line 4, lines 2-4 +exec(_i81) : Execute input history line #81 again +%rep 81 : Edit input history line #81 +_, __, ___ : previous, next previous, next next previous output +_dh : Directory history +_oh : Output history +%hist : Command history of current session. +%hist -g foo : Search command history of (almost) all sessions for 'foo'. +%hist -g : Command history of (almost) all sessions. +%hist 1/2-8 : Command history containing lines 2-8 of session 1. +%hist 1/ ~2/ : Command history of session 1 and 2 sessions before current. +%hist ~8/1-~6/5 : Command history from line 1 of 8 sessions ago to + line 5 of 6 sessions ago. +%edit 0/ : Open editor to execute code with history of current session. + +Autocall: + +f 1,2 : f(1,2) # Off by default, enable with %autocall magic. +/f 1,2 : f(1,2) (forced autoparen) +,f 1 2 : f("1","2") +;f 1 2 : f("1 2") + +Remember: TAB completion works in many contexts, not just file names +or python names. + +The following magic functions are currently available: + +""" + +default_banner_parts = ["Python %s\n"%sys.version.split("\n")[0], + "Type 'copyright', 'credits' or 'license' for more information\n" , + "IPython {version} -- An enhanced Interactive Python. Type '?' for help.\n".format(version=release.version), +] + +default_banner = ''.join(default_banner_parts) diff --git a/contrib/python/ipython/py3/IPython/display.py b/contrib/python/ipython/py3/IPython/display.py index 872b93e92b8..7d248ba023d 100644 --- a/contrib/python/ipython/py3/IPython/display.py +++ b/contrib/python/ipython/py3/IPython/display.py @@ -1,16 +1,16 @@ -"""Public API for display tools in IPython. -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2012 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -from IPython.core.display import * -from IPython.lib.display import * +"""Public API for display tools in IPython. +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2012 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +from IPython.core.display import * +from IPython.lib.display import * diff --git a/contrib/python/ipython/py3/IPython/extensions/__init__.py b/contrib/python/ipython/py3/IPython/extensions/__init__.py index acc4dc76012..db7f79fca6a 100644 --- a/contrib/python/ipython/py3/IPython/extensions/__init__.py +++ b/contrib/python/ipython/py3/IPython/extensions/__init__.py @@ -1,2 +1,2 @@ -# -*- coding: utf-8 -*- -"""This directory is meant for IPython extensions.""" +# -*- coding: utf-8 -*- +"""This directory is meant for IPython extensions.""" diff --git a/contrib/python/ipython/py3/IPython/extensions/autoreload.py b/contrib/python/ipython/py3/IPython/extensions/autoreload.py index 274b8bfd886..ada680fcf08 100644 --- a/contrib/python/ipython/py3/IPython/extensions/autoreload.py +++ b/contrib/python/ipython/py3/IPython/extensions/autoreload.py @@ -1,550 +1,550 @@ -"""IPython extension to reload modules before executing user code. - -``autoreload`` reloads modules automatically before entering the execution of -code typed at the IPython prompt. - -This makes for example the following workflow possible: - -.. sourcecode:: ipython - - In [1]: %load_ext autoreload - - In [2]: %autoreload 2 - - In [3]: from foo import some_function - - In [4]: some_function() - Out[4]: 42 - - In [5]: # open foo.py in an editor and change some_function to return 43 - - In [6]: some_function() - Out[6]: 43 - -The module was reloaded without reloading it explicitly, and the object -imported with ``from foo import ...`` was also updated. - -Usage -===== - -The following magic commands are provided: - -``%autoreload`` - - Reload all modules (except those excluded by ``%aimport``) - automatically now. - -``%autoreload 0`` - - Disable automatic reloading. - -``%autoreload 1`` - - Reload all modules imported with ``%aimport`` every time before - executing the Python code typed. - -``%autoreload 2`` - - Reload all modules (except those excluded by ``%aimport``) every - time before executing the Python code typed. - -``%aimport`` - - List modules which are to be automatically imported or not to be imported. - -``%aimport foo`` - - Import module 'foo' and mark it to be autoreloaded for ``%autoreload 1`` - -``%aimport foo, bar`` - - Import modules 'foo', 'bar' and mark them to be autoreloaded for ``%autoreload 1`` - -``%aimport -foo`` - - Mark module 'foo' to not be autoreloaded. - -Caveats -======= - -Reloading Python modules in a reliable way is in general difficult, -and unexpected things may occur. ``%autoreload`` tries to work around -common pitfalls by replacing function code objects and parts of -classes previously in the module with new versions. This makes the -following things to work: - -- Functions and classes imported via 'from xxx import foo' are upgraded - to new versions when 'xxx' is reloaded. - -- Methods and properties of classes are upgraded on reload, so that - calling 'c.foo()' on an object 'c' created before the reload causes - the new code for 'foo' to be executed. - -Some of the known remaining caveats are: - -- Replacing code objects does not always succeed: changing a @property - in a class to an ordinary method or a method to a member variable - can cause problems (but in old objects only). - -- Functions that are removed (eg. 
via monkey-patching) from a module - before it is reloaded are not upgraded. - -- C extension modules cannot be reloaded, and so cannot be autoreloaded. -""" - -skip_doctest = True - -#----------------------------------------------------------------------------- -# Copyright (C) 2000 Thomas Heller -# Copyright (C) 2008 Pauli Virtanen <pav@iki.fi> -# Copyright (C) 2012 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- -# -# This IPython module is written by Pauli Virtanen, based on the autoreload -# code by Thomas Heller. - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import os -import sys -import traceback -import types -import weakref -import gc -from importlib import import_module -from importlib.util import source_from_cache -from imp import reload - -#------------------------------------------------------------------------------ -# Autoreload functionality -#------------------------------------------------------------------------------ - -class ModuleReloader(object): - enabled = False - """Whether this reloader is enabled""" - - check_all = True - """Autoreload all modules, not just those listed in 'modules'""" - - def __init__(self): - # Modules that failed to reload: {module: mtime-on-failed-reload, ...} - self.failed = {} - # Modules specially marked as autoreloadable. - self.modules = {} - # Modules specially marked as not autoreloadable. - self.skip_modules = {} - # (module-name, name) -> weakref, for replacing old code objects - self.old_objects = {} - # Module modification timestamps - self.modules_mtimes = {} - - # Cache module modification times - self.check(check_all=True, do_reload=False) - - def mark_module_skipped(self, module_name): - """Skip reloading the named module in the future""" - try: - del self.modules[module_name] - except KeyError: - pass - self.skip_modules[module_name] = True - - def mark_module_reloadable(self, module_name): - """Reload the named module in the future (if it is imported)""" - try: - del self.skip_modules[module_name] - except KeyError: - pass - self.modules[module_name] = True - - def aimport_module(self, module_name): - """Import a module, and mark it reloadable - - Returns - ------- - top_module : module - The imported module if it is top-level, or the top-level - top_name : module - Name of top_module - - """ - self.mark_module_reloadable(module_name) - - import_module(module_name) - top_name = module_name.split('.')[0] - top_module = sys.modules[top_name] - return top_module, top_name - - def filename_and_mtime(self, module): - if not hasattr(module, '__file__') or module.__file__ is None: - return None, None - - if getattr(module, '__name__', None) in [None, '__mp_main__', '__main__']: - # we cannot reload(__main__) or reload(__mp_main__) - return None, None - - filename = module.__file__ - path, ext = os.path.splitext(filename) - - if ext.lower() == '.py': - py_filename = filename - else: - try: - py_filename = source_from_cache(filename) - except ValueError: - return None, None - - try: - pymtime = os.stat(py_filename).st_mtime - except OSError: - return None, None - - return py_filename, pymtime - - def check(self, check_all=False, do_reload=True): - """Check whether some modules need to be reloaded.""" - - if not 
self.enabled and not check_all: - return - - if check_all or self.check_all: - modules = list(sys.modules.keys()) - else: - modules = list(self.modules.keys()) - - for modname in modules: - m = sys.modules.get(modname, None) - - if modname in self.skip_modules: - continue - - py_filename, pymtime = self.filename_and_mtime(m) - if py_filename is None: - continue - - try: - if pymtime <= self.modules_mtimes[modname]: - continue - except KeyError: - self.modules_mtimes[modname] = pymtime - continue - else: - if self.failed.get(py_filename, None) == pymtime: - continue - - self.modules_mtimes[modname] = pymtime - - # If we've reached this point, we should try to reload the module - if do_reload: - try: - superreload(m, reload, self.old_objects) - if py_filename in self.failed: - del self.failed[py_filename] - except: - print("[autoreload of %s failed: %s]" % ( - modname, traceback.format_exc(10)), file=sys.stderr) - self.failed[py_filename] = pymtime - -#------------------------------------------------------------------------------ -# superreload -#------------------------------------------------------------------------------ - - -func_attrs = ['__code__', '__defaults__', '__doc__', - '__closure__', '__globals__', '__dict__'] - - -def update_function(old, new): - """Upgrade the code object of a function""" - for name in func_attrs: - try: - setattr(old, name, getattr(new, name)) - except (AttributeError, TypeError): - pass - - -def update_instances(old, new): - """Use garbage collector to find all instances that refer to the old - class definition and update their __class__ to point to the new class - definition""" - - refs = gc.get_referrers(old) - - for ref in refs: - if type(ref) is old: - ref.__class__ = new - - -def update_class(old, new): - """Replace stuff in the __dict__ of a class, and upgrade - method code objects, and add new methods, if any""" - for key in list(old.__dict__.keys()): - old_obj = getattr(old, key) - try: - new_obj = getattr(new, key) - # explicitly checking that comparison returns True to handle - # cases where `==` doesn't return a boolean. 
- if (old_obj == new_obj) is True: - continue - except AttributeError: - # obsolete attribute: remove it - try: - delattr(old, key) - except (AttributeError, TypeError): - pass - continue - - if update_generic(old_obj, new_obj): continue - - try: - setattr(old, key, getattr(new, key)) - except (AttributeError, TypeError): - pass # skip non-writable attributes - - for key in list(new.__dict__.keys()): - if key not in list(old.__dict__.keys()): - try: - setattr(old, key, getattr(new, key)) - except (AttributeError, TypeError): - pass # skip non-writable attributes - - # update all instances of class - update_instances(old, new) - - -def update_property(old, new): - """Replace get/set/del functions of a property""" - update_generic(old.fdel, new.fdel) - update_generic(old.fget, new.fget) - update_generic(old.fset, new.fset) - - -def isinstance2(a, b, typ): - return isinstance(a, typ) and isinstance(b, typ) - - -UPDATE_RULES = [ - (lambda a, b: isinstance2(a, b, type), - update_class), - (lambda a, b: isinstance2(a, b, types.FunctionType), - update_function), - (lambda a, b: isinstance2(a, b, property), - update_property), -] -UPDATE_RULES.extend([(lambda a, b: isinstance2(a, b, types.MethodType), - lambda a, b: update_function(a.__func__, b.__func__)), -]) - - -def update_generic(a, b): - for type_check, update in UPDATE_RULES: - if type_check(a, b): - update(a, b) - return True - return False - - -class StrongRef(object): - def __init__(self, obj): - self.obj = obj - def __call__(self): - return self.obj - - -def superreload(module, reload=reload, old_objects=None): - """Enhanced version of the builtin reload function. - - superreload remembers objects previously in the module, and - - - upgrades the class dictionary of every old class in the module - - upgrades the code object of every old function and method - - clears the module's namespace before reloading - - """ - if old_objects is None: - old_objects = {} - - # collect old objects in the module - for name, obj in list(module.__dict__.items()): - if not hasattr(obj, '__module__') or obj.__module__ != module.__name__: - continue - key = (module.__name__, name) - try: - old_objects.setdefault(key, []).append(weakref.ref(obj)) - except TypeError: - pass - - # reload module - try: - # clear namespace first from old cruft - old_dict = module.__dict__.copy() - old_name = module.__name__ - module.__dict__.clear() - module.__dict__['__name__'] = old_name - module.__dict__['__loader__'] = old_dict['__loader__'] - except (TypeError, AttributeError, KeyError): - pass - - try: - module = reload(module) - except: - # restore module dictionary on failed reload - module.__dict__.update(old_dict) - raise - - # iterate over all objects and update functions & classes - for name, new_obj in list(module.__dict__.items()): - key = (module.__name__, name) - if key not in old_objects: continue - - new_refs = [] - for old_ref in old_objects[key]: - old_obj = old_ref() - if old_obj is None: continue - new_refs.append(old_ref) - update_generic(old_obj, new_obj) - - if new_refs: - old_objects[key] = new_refs - else: - del old_objects[key] - - return module - -#------------------------------------------------------------------------------ -# IPython connectivity -#------------------------------------------------------------------------------ - -from IPython.core.magic import Magics, magics_class, line_magic - -@magics_class -class AutoreloadMagics(Magics): - def __init__(self, *a, **kw): - super(AutoreloadMagics, self).__init__(*a, **kw) - self._reloader = 
ModuleReloader() - self._reloader.check_all = False - self.loaded_modules = set(sys.modules) - - @line_magic - def autoreload(self, parameter_s=''): - r"""%autoreload => Reload modules automatically - - %autoreload - Reload all modules (except those excluded by %aimport) automatically - now. - - %autoreload 0 - Disable automatic reloading. - - %autoreload 1 - Reload all modules imported with %aimport every time before executing - the Python code typed. - - %autoreload 2 - Reload all modules (except those excluded by %aimport) every time - before executing the Python code typed. - - Reloading Python modules in a reliable way is in general - difficult, and unexpected things may occur. %autoreload tries to - work around common pitfalls by replacing function code objects and - parts of classes previously in the module with new versions. This - makes the following things to work: - - - Functions and classes imported via 'from xxx import foo' are upgraded - to new versions when 'xxx' is reloaded. - - - Methods and properties of classes are upgraded on reload, so that - calling 'c.foo()' on an object 'c' created before the reload causes - the new code for 'foo' to be executed. - - Some of the known remaining caveats are: - - - Replacing code objects does not always succeed: changing a @property - in a class to an ordinary method or a method to a member variable - can cause problems (but in old objects only). - - - Functions that are removed (eg. via monkey-patching) from a module - before it is reloaded are not upgraded. - - - C extension modules cannot be reloaded, and so cannot be - autoreloaded. - - """ - if parameter_s == '': - self._reloader.check(True) - elif parameter_s == '0': - self._reloader.enabled = False - elif parameter_s == '1': - self._reloader.check_all = False - self._reloader.enabled = True - elif parameter_s == '2': - self._reloader.check_all = True - self._reloader.enabled = True - - @line_magic - def aimport(self, parameter_s='', stream=None): - """%aimport => Import modules for automatic reloading. - - %aimport - List modules to automatically import and not to import. 
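An illustrative session combining these magics (``mypkg.utils`` is a made-up module name)::

    In [1]: %load_ext autoreload

    In [2]: %autoreload 1            # only reload modules registered with %aimport

    In [3]: %aimport mypkg.utils     # import it and mark it for autoreloading

    In [4]: %aimport                 # list the modules to reload and to skip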
- - %aimport foo - Import module 'foo' and mark it to be autoreloaded for %autoreload 1 - - %aimport foo, bar - Import modules 'foo', 'bar' and mark them to be autoreloaded for %autoreload 1 - - %aimport -foo - Mark module 'foo' to not be autoreloaded for %autoreload 1 - """ - modname = parameter_s - if not modname: - to_reload = sorted(self._reloader.modules.keys()) - to_skip = sorted(self._reloader.skip_modules.keys()) - if stream is None: - stream = sys.stdout - if self._reloader.check_all: - stream.write("Modules to reload:\nall-except-skipped\n") - else: - stream.write("Modules to reload:\n%s\n" % ' '.join(to_reload)) - stream.write("\nModules to skip:\n%s\n" % ' '.join(to_skip)) - elif modname.startswith('-'): - modname = modname[1:] - self._reloader.mark_module_skipped(modname) - else: - for _module in ([_.strip() for _ in modname.split(',')]): - top_module, top_name = self._reloader.aimport_module(_module) - - # Inject module to user namespace - self.shell.push({top_name: top_module}) - - def pre_run_cell(self): - if self._reloader.enabled: - try: - self._reloader.check() - except: - pass - - def post_execute_hook(self): - """Cache the modification times of any modules imported in this execution - """ - newly_loaded_modules = set(sys.modules) - self.loaded_modules - for modname in newly_loaded_modules: - _, pymtime = self._reloader.filename_and_mtime(sys.modules[modname]) - if pymtime is not None: - self._reloader.modules_mtimes[modname] = pymtime - - self.loaded_modules.update(newly_loaded_modules) - - -def load_ipython_extension(ip): - """Load the extension in IPython.""" - auto_reload = AutoreloadMagics(ip) - ip.register_magics(auto_reload) - ip.events.register('pre_run_cell', auto_reload.pre_run_cell) - ip.events.register('post_execute', auto_reload.post_execute_hook) +"""IPython extension to reload modules before executing user code. + +``autoreload`` reloads modules automatically before entering the execution of +code typed at the IPython prompt. + +This makes for example the following workflow possible: + +.. sourcecode:: ipython + + In [1]: %load_ext autoreload + + In [2]: %autoreload 2 + + In [3]: from foo import some_function + + In [4]: some_function() + Out[4]: 42 + + In [5]: # open foo.py in an editor and change some_function to return 43 + + In [6]: some_function() + Out[6]: 43 + +The module was reloaded without reloading it explicitly, and the object +imported with ``from foo import ...`` was also updated. + +Usage +===== + +The following magic commands are provided: + +``%autoreload`` + + Reload all modules (except those excluded by ``%aimport``) + automatically now. + +``%autoreload 0`` + + Disable automatic reloading. + +``%autoreload 1`` + + Reload all modules imported with ``%aimport`` every time before + executing the Python code typed. + +``%autoreload 2`` + + Reload all modules (except those excluded by ``%aimport``) every + time before executing the Python code typed. + +``%aimport`` + + List modules which are to be automatically imported or not to be imported. + +``%aimport foo`` + + Import module 'foo' and mark it to be autoreloaded for ``%autoreload 1`` + +``%aimport foo, bar`` + + Import modules 'foo', 'bar' and mark them to be autoreloaded for ``%autoreload 1`` + +``%aimport -foo`` + + Mark module 'foo' to not be autoreloaded. + +Caveats +======= + +Reloading Python modules in a reliable way is in general difficult, +and unexpected things may occur. 
``%autoreload`` tries to work around +common pitfalls by replacing function code objects and parts of +classes previously in the module with new versions. This makes the +following things to work: + +- Functions and classes imported via 'from xxx import foo' are upgraded + to new versions when 'xxx' is reloaded. + +- Methods and properties of classes are upgraded on reload, so that + calling 'c.foo()' on an object 'c' created before the reload causes + the new code for 'foo' to be executed. + +Some of the known remaining caveats are: + +- Replacing code objects does not always succeed: changing a @property + in a class to an ordinary method or a method to a member variable + can cause problems (but in old objects only). + +- Functions that are removed (eg. via monkey-patching) from a module + before it is reloaded are not upgraded. + +- C extension modules cannot be reloaded, and so cannot be autoreloaded. +""" + +skip_doctest = True + +#----------------------------------------------------------------------------- +# Copyright (C) 2000 Thomas Heller +# Copyright (C) 2008 Pauli Virtanen <pav@iki.fi> +# Copyright (C) 2012 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- +# +# This IPython module is written by Pauli Virtanen, based on the autoreload +# code by Thomas Heller. + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import os +import sys +import traceback +import types +import weakref +import gc +from importlib import import_module +from importlib.util import source_from_cache +from imp import reload + +#------------------------------------------------------------------------------ +# Autoreload functionality +#------------------------------------------------------------------------------ + +class ModuleReloader(object): + enabled = False + """Whether this reloader is enabled""" + + check_all = True + """Autoreload all modules, not just those listed in 'modules'""" + + def __init__(self): + # Modules that failed to reload: {module: mtime-on-failed-reload, ...} + self.failed = {} + # Modules specially marked as autoreloadable. + self.modules = {} + # Modules specially marked as not autoreloadable. 
+ self.skip_modules = {} + # (module-name, name) -> weakref, for replacing old code objects + self.old_objects = {} + # Module modification timestamps + self.modules_mtimes = {} + + # Cache module modification times + self.check(check_all=True, do_reload=False) + + def mark_module_skipped(self, module_name): + """Skip reloading the named module in the future""" + try: + del self.modules[module_name] + except KeyError: + pass + self.skip_modules[module_name] = True + + def mark_module_reloadable(self, module_name): + """Reload the named module in the future (if it is imported)""" + try: + del self.skip_modules[module_name] + except KeyError: + pass + self.modules[module_name] = True + + def aimport_module(self, module_name): + """Import a module, and mark it reloadable + + Returns + ------- + top_module : module + The imported module if it is top-level, or the top-level + top_name : module + Name of top_module + + """ + self.mark_module_reloadable(module_name) + + import_module(module_name) + top_name = module_name.split('.')[0] + top_module = sys.modules[top_name] + return top_module, top_name + + def filename_and_mtime(self, module): + if not hasattr(module, '__file__') or module.__file__ is None: + return None, None + + if getattr(module, '__name__', None) in [None, '__mp_main__', '__main__']: + # we cannot reload(__main__) or reload(__mp_main__) + return None, None + + filename = module.__file__ + path, ext = os.path.splitext(filename) + + if ext.lower() == '.py': + py_filename = filename + else: + try: + py_filename = source_from_cache(filename) + except ValueError: + return None, None + + try: + pymtime = os.stat(py_filename).st_mtime + except OSError: + return None, None + + return py_filename, pymtime + + def check(self, check_all=False, do_reload=True): + """Check whether some modules need to be reloaded.""" + + if not self.enabled and not check_all: + return + + if check_all or self.check_all: + modules = list(sys.modules.keys()) + else: + modules = list(self.modules.keys()) + + for modname in modules: + m = sys.modules.get(modname, None) + + if modname in self.skip_modules: + continue + + py_filename, pymtime = self.filename_and_mtime(m) + if py_filename is None: + continue + + try: + if pymtime <= self.modules_mtimes[modname]: + continue + except KeyError: + self.modules_mtimes[modname] = pymtime + continue + else: + if self.failed.get(py_filename, None) == pymtime: + continue + + self.modules_mtimes[modname] = pymtime + + # If we've reached this point, we should try to reload the module + if do_reload: + try: + superreload(m, reload, self.old_objects) + if py_filename in self.failed: + del self.failed[py_filename] + except: + print("[autoreload of %s failed: %s]" % ( + modname, traceback.format_exc(10)), file=sys.stderr) + self.failed[py_filename] = pymtime + +#------------------------------------------------------------------------------ +# superreload +#------------------------------------------------------------------------------ + + +func_attrs = ['__code__', '__defaults__', '__doc__', + '__closure__', '__globals__', '__dict__'] + + +def update_function(old, new): + """Upgrade the code object of a function""" + for name in func_attrs: + try: + setattr(old, name, getattr(new, name)) + except (AttributeError, TypeError): + pass + + +def update_instances(old, new): + """Use garbage collector to find all instances that refer to the old + class definition and update their __class__ to point to the new class + definition""" + + refs = gc.get_referrers(old) + + for ref in refs: + 
if type(ref) is old: + ref.__class__ = new + + +def update_class(old, new): + """Replace stuff in the __dict__ of a class, and upgrade + method code objects, and add new methods, if any""" + for key in list(old.__dict__.keys()): + old_obj = getattr(old, key) + try: + new_obj = getattr(new, key) + # explicitly checking that comparison returns True to handle + # cases where `==` doesn't return a boolean. + if (old_obj == new_obj) is True: + continue + except AttributeError: + # obsolete attribute: remove it + try: + delattr(old, key) + except (AttributeError, TypeError): + pass + continue + + if update_generic(old_obj, new_obj): continue + + try: + setattr(old, key, getattr(new, key)) + except (AttributeError, TypeError): + pass # skip non-writable attributes + + for key in list(new.__dict__.keys()): + if key not in list(old.__dict__.keys()): + try: + setattr(old, key, getattr(new, key)) + except (AttributeError, TypeError): + pass # skip non-writable attributes + + # update all instances of class + update_instances(old, new) + + +def update_property(old, new): + """Replace get/set/del functions of a property""" + update_generic(old.fdel, new.fdel) + update_generic(old.fget, new.fget) + update_generic(old.fset, new.fset) + + +def isinstance2(a, b, typ): + return isinstance(a, typ) and isinstance(b, typ) + + +UPDATE_RULES = [ + (lambda a, b: isinstance2(a, b, type), + update_class), + (lambda a, b: isinstance2(a, b, types.FunctionType), + update_function), + (lambda a, b: isinstance2(a, b, property), + update_property), +] +UPDATE_RULES.extend([(lambda a, b: isinstance2(a, b, types.MethodType), + lambda a, b: update_function(a.__func__, b.__func__)), +]) + + +def update_generic(a, b): + for type_check, update in UPDATE_RULES: + if type_check(a, b): + update(a, b) + return True + return False + + +class StrongRef(object): + def __init__(self, obj): + self.obj = obj + def __call__(self): + return self.obj + + +def superreload(module, reload=reload, old_objects=None): + """Enhanced version of the builtin reload function. 
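As background for the update rules above, a tiny standalone sketch (not part of the module) of the code-object swap that ``update_function`` performs, which is what lets existing references pick up new behaviour::

    def old_version():
        return 1

    def new_version():
        return 2

    alias = old_version                           # simulates 'from mod import old_version'
    old_version.__code__ = new_version.__code__   # the '__code__' step of update_function
    assert alias() == 2                           # the old reference now runs the new code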
+ + superreload remembers objects previously in the module, and + + - upgrades the class dictionary of every old class in the module + - upgrades the code object of every old function and method + - clears the module's namespace before reloading + + """ + if old_objects is None: + old_objects = {} + + # collect old objects in the module + for name, obj in list(module.__dict__.items()): + if not hasattr(obj, '__module__') or obj.__module__ != module.__name__: + continue + key = (module.__name__, name) + try: + old_objects.setdefault(key, []).append(weakref.ref(obj)) + except TypeError: + pass + + # reload module + try: + # clear namespace first from old cruft + old_dict = module.__dict__.copy() + old_name = module.__name__ + module.__dict__.clear() + module.__dict__['__name__'] = old_name + module.__dict__['__loader__'] = old_dict['__loader__'] + except (TypeError, AttributeError, KeyError): + pass + + try: + module = reload(module) + except: + # restore module dictionary on failed reload + module.__dict__.update(old_dict) + raise + + # iterate over all objects and update functions & classes + for name, new_obj in list(module.__dict__.items()): + key = (module.__name__, name) + if key not in old_objects: continue + + new_refs = [] + for old_ref in old_objects[key]: + old_obj = old_ref() + if old_obj is None: continue + new_refs.append(old_ref) + update_generic(old_obj, new_obj) + + if new_refs: + old_objects[key] = new_refs + else: + del old_objects[key] + + return module + +#------------------------------------------------------------------------------ +# IPython connectivity +#------------------------------------------------------------------------------ + +from IPython.core.magic import Magics, magics_class, line_magic + +@magics_class +class AutoreloadMagics(Magics): + def __init__(self, *a, **kw): + super(AutoreloadMagics, self).__init__(*a, **kw) + self._reloader = ModuleReloader() + self._reloader.check_all = False + self.loaded_modules = set(sys.modules) + + @line_magic + def autoreload(self, parameter_s=''): + r"""%autoreload => Reload modules automatically + + %autoreload + Reload all modules (except those excluded by %aimport) automatically + now. + + %autoreload 0 + Disable automatic reloading. + + %autoreload 1 + Reload all modules imported with %aimport every time before executing + the Python code typed. + + %autoreload 2 + Reload all modules (except those excluded by %aimport) every time + before executing the Python code typed. + + Reloading Python modules in a reliable way is in general + difficult, and unexpected things may occur. %autoreload tries to + work around common pitfalls by replacing function code objects and + parts of classes previously in the module with new versions. This + makes the following things to work: + + - Functions and classes imported via 'from xxx import foo' are upgraded + to new versions when 'xxx' is reloaded. + + - Methods and properties of classes are upgraded on reload, so that + calling 'c.foo()' on an object 'c' created before the reload causes + the new code for 'foo' to be executed. + + Some of the known remaining caveats are: + + - Replacing code objects does not always succeed: changing a @property + in a class to an ordinary method or a method to a member variable + can cause problems (but in old objects only). + + - Functions that are removed (eg. via monkey-patching) from a module + before it is reloaded are not upgraded. + + - C extension modules cannot be reloaded, and so cannot be + autoreloaded. 
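To get this behaviour from the start of every session, a possible ``ipython_config.py`` snippet (illustrative; uses the standard ``InteractiveShellApp`` options)::

    c = get_config()

    c.InteractiveShellApp.extensions = ['autoreload']
    c.InteractiveShellApp.exec_lines = ['%autoreload 2']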
+ + """ + if parameter_s == '': + self._reloader.check(True) + elif parameter_s == '0': + self._reloader.enabled = False + elif parameter_s == '1': + self._reloader.check_all = False + self._reloader.enabled = True + elif parameter_s == '2': + self._reloader.check_all = True + self._reloader.enabled = True + + @line_magic + def aimport(self, parameter_s='', stream=None): + """%aimport => Import modules for automatic reloading. + + %aimport + List modules to automatically import and not to import. + + %aimport foo + Import module 'foo' and mark it to be autoreloaded for %autoreload 1 + + %aimport foo, bar + Import modules 'foo', 'bar' and mark them to be autoreloaded for %autoreload 1 + + %aimport -foo + Mark module 'foo' to not be autoreloaded for %autoreload 1 + """ + modname = parameter_s + if not modname: + to_reload = sorted(self._reloader.modules.keys()) + to_skip = sorted(self._reloader.skip_modules.keys()) + if stream is None: + stream = sys.stdout + if self._reloader.check_all: + stream.write("Modules to reload:\nall-except-skipped\n") + else: + stream.write("Modules to reload:\n%s\n" % ' '.join(to_reload)) + stream.write("\nModules to skip:\n%s\n" % ' '.join(to_skip)) + elif modname.startswith('-'): + modname = modname[1:] + self._reloader.mark_module_skipped(modname) + else: + for _module in ([_.strip() for _ in modname.split(',')]): + top_module, top_name = self._reloader.aimport_module(_module) + + # Inject module to user namespace + self.shell.push({top_name: top_module}) + + def pre_run_cell(self): + if self._reloader.enabled: + try: + self._reloader.check() + except: + pass + + def post_execute_hook(self): + """Cache the modification times of any modules imported in this execution + """ + newly_loaded_modules = set(sys.modules) - self.loaded_modules + for modname in newly_loaded_modules: + _, pymtime = self._reloader.filename_and_mtime(sys.modules[modname]) + if pymtime is not None: + self._reloader.modules_mtimes[modname] = pymtime + + self.loaded_modules.update(newly_loaded_modules) + + +def load_ipython_extension(ip): + """Load the extension in IPython.""" + auto_reload = AutoreloadMagics(ip) + ip.register_magics(auto_reload) + ip.events.register('pre_run_cell', auto_reload.pre_run_cell) + ip.events.register('post_execute', auto_reload.post_execute_hook) diff --git a/contrib/python/ipython/py3/IPython/extensions/cythonmagic.py b/contrib/python/ipython/py3/IPython/extensions/cythonmagic.py index 65c3e395486..3c88e7c2a1c 100644 --- a/contrib/python/ipython/py3/IPython/extensions/cythonmagic.py +++ b/contrib/python/ipython/py3/IPython/extensions/cythonmagic.py @@ -1,21 +1,21 @@ -# -*- coding: utf-8 -*- -""" -**DEPRECATED** - -The cython magic has been integrated into Cython itself, -which is now released in version 0.21. - -cf github `Cython` organisation, `Cython` repo, under the -file `Cython/Build/IpythonMagic.py` -""" -#----------------------------------------------------------------------------- -# Copyright (C) 2010-2011, IPython Development Team. -#----------------------------------------------------------------------------- - -import warnings - -## still load the magic in IPython 3.x, remove completely in future versions. -def load_ipython_extension(ip): - """Load the extension in IPython.""" - - warnings.warn("""The Cython magic has been moved to the Cython package""") +# -*- coding: utf-8 -*- +""" +**DEPRECATED** + +The cython magic has been integrated into Cython itself, +which is now released in version 0.21. 
+ +cf github `Cython` organisation, `Cython` repo, under the +file `Cython/Build/IpythonMagic.py` +""" +#----------------------------------------------------------------------------- +# Copyright (C) 2010-2011, IPython Development Team. +#----------------------------------------------------------------------------- + +import warnings + +## still load the magic in IPython 3.x, remove completely in future versions. +def load_ipython_extension(ip): + """Load the extension in IPython.""" + + warnings.warn("""The Cython magic has been moved to the Cython package""") diff --git a/contrib/python/ipython/py3/IPython/extensions/rmagic.py b/contrib/python/ipython/py3/IPython/extensions/rmagic.py index ecc25bc4f84..ec5763972e4 100644 --- a/contrib/python/ipython/py3/IPython/extensions/rmagic.py +++ b/contrib/python/ipython/py3/IPython/extensions/rmagic.py @@ -1,12 +1,12 @@ -# -*- coding: utf-8 -*- - -#----------------------------------------------------------------------------- -# Copyright (C) 2012 The IPython Development Team -#----------------------------------------------------------------------------- - -import warnings - -def load_ipython_extension(ip): - """Load the extension in IPython.""" - warnings.warn("The rmagic extension in IPython has moved to " - "`rpy2.ipython`, please see `rpy2` documentation.") +# -*- coding: utf-8 -*- + +#----------------------------------------------------------------------------- +# Copyright (C) 2012 The IPython Development Team +#----------------------------------------------------------------------------- + +import warnings + +def load_ipython_extension(ip): + """Load the extension in IPython.""" + warnings.warn("The rmagic extension in IPython has moved to " + "`rpy2.ipython`, please see `rpy2` documentation.") diff --git a/contrib/python/ipython/py3/IPython/extensions/storemagic.py b/contrib/python/ipython/py3/IPython/extensions/storemagic.py index 6854044e3f2..51b79ad314e 100644 --- a/contrib/python/ipython/py3/IPython/extensions/storemagic.py +++ b/contrib/python/ipython/py3/IPython/extensions/storemagic.py @@ -1,233 +1,233 @@ -# -*- coding: utf-8 -*- -""" -%store magic for lightweight persistence. - -Stores variables, aliases and macros in IPython's database. - -To automatically restore stored variables at startup, add this to your -:file:`ipython_config.py` file:: - - c.StoreMagics.autorestore = True -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. 
- -import inspect, os, sys, textwrap - -from IPython.core.error import UsageError -from IPython.core.magic import Magics, magics_class, line_magic -from traitlets import Bool - - -def restore_aliases(ip, alias=None): - staliases = ip.db.get('stored_aliases', {}) - if alias is None: - for k,v in staliases.items(): - #print "restore alias",k,v # dbg - #self.alias_table[k] = v - ip.alias_manager.define_alias(k,v) - else: - ip.alias_manager.define_alias(alias, staliases[alias]) - - -def refresh_variables(ip): - db = ip.db - for key in db.keys('autorestore/*'): - # strip autorestore - justkey = os.path.basename(key) - try: - obj = db[key] - except KeyError: - print("Unable to restore variable '%s', ignoring (use %%store -d to forget!)" % justkey) - print("The error was:", sys.exc_info()[0]) - else: - #print "restored",justkey,"=",obj #dbg - ip.user_ns[justkey] = obj - - -def restore_dhist(ip): - ip.user_ns['_dh'] = ip.db.get('dhist',[]) - - -def restore_data(ip): - refresh_variables(ip) - restore_aliases(ip) - restore_dhist(ip) - - -@magics_class -class StoreMagics(Magics): - """Lightweight persistence for python variables. - - Provides the %store magic.""" - - autorestore = Bool(False, help= - """If True, any %store-d variables will be automatically restored - when IPython starts. - """ - ).tag(config=True) - - def __init__(self, shell): - super(StoreMagics, self).__init__(shell=shell) - self.shell.configurables.append(self) - if self.autorestore: - restore_data(self.shell) - - @line_magic - def store(self, parameter_s=''): - """Lightweight persistence for python variables. - - Example:: - - In [1]: l = ['hello',10,'world'] - In [2]: %store l - In [3]: exit - - (IPython session is closed and started again...) - - ville@badger:~$ ipython - In [1]: l - NameError: name 'l' is not defined - In [2]: %store -r - In [3]: l - Out[3]: ['hello', 10, 'world'] - - Usage: - - * ``%store`` - Show list of all variables and their current - values - * ``%store spam bar`` - Store the *current* value of the variables spam - and bar to disk - * ``%store -d spam`` - Remove the variable and its value from storage - * ``%store -z`` - Remove all variables from storage - * ``%store -r`` - Refresh all variables, aliases and directory history - from store (overwrite current vals) - * ``%store -r spam bar`` - Refresh specified variables and aliases from store - (delete current val) - * ``%store foo >a.txt`` - Store value of foo to new file a.txt - * ``%store foo >>a.txt`` - Append value of foo to file a.txt - - It should be noted that if you change the value of a variable, you - need to %store it again if you want to persist the new value. - - Note also that the variables will need to be pickleable; most basic - python types can be safely %store'd. - - Also aliases can be %store'd across sessions. - To remove an alias from the storage, use the %unalias magic. 
- """ - - opts,argsl = self.parse_options(parameter_s,'drz',mode='string') - args = argsl.split() - ip = self.shell - db = ip.db - # delete - if 'd' in opts: - try: - todel = args[0] - except IndexError: - raise UsageError('You must provide the variable to forget') - else: - try: - del db['autorestore/' + todel] - except: - raise UsageError("Can't delete variable '%s'" % todel) - # reset - elif 'z' in opts: - for k in db.keys('autorestore/*'): - del db[k] - - elif 'r' in opts: - if args: - for arg in args: - try: - obj = db['autorestore/' + arg] - except KeyError: - try: - restore_aliases(ip, alias=arg) - except KeyError: - print("no stored variable or alias %s" % arg) - else: - ip.user_ns[arg] = obj - else: - restore_data(ip) - - # run without arguments -> list variables & values - elif not args: - vars = db.keys('autorestore/*') - vars.sort() - if vars: - size = max(map(len, vars)) - else: - size = 0 - - print('Stored variables and their in-db values:') - fmt = '%-'+str(size)+'s -> %s' - get = db.get - for var in vars: - justkey = os.path.basename(var) - # print 30 first characters from every var - print(fmt % (justkey, repr(get(var, '<unavailable>'))[:50])) - - # default action - store the variable - else: - # %store foo >file.txt or >>file.txt - if len(args) > 1 and args[1].startswith('>'): - fnam = os.path.expanduser(args[1].lstrip('>').lstrip()) - if args[1].startswith('>>'): - fil = open(fnam, 'a') - else: - fil = open(fnam, 'w') - with fil: - obj = ip.ev(args[0]) - print("Writing '%s' (%s) to file '%s'." % (args[0], - obj.__class__.__name__, fnam)) - - if not isinstance (obj, str): - from pprint import pprint - pprint(obj, fil) - else: - fil.write(obj) - if not obj.endswith('\n'): - fil.write('\n') - - return - - # %store foo - for arg in args: - try: - obj = ip.user_ns[arg] - except KeyError: - # it might be an alias - name = arg - try: - cmd = ip.alias_manager.retrieve_alias(name) - except ValueError: - raise UsageError("Unknown variable '%s'" % name) - - staliases = db.get('stored_aliases',{}) - staliases[name] = cmd - db['stored_aliases'] = staliases - print("Alias stored: %s (%s)" % (name, cmd)) - return - - else: - modname = getattr(inspect.getmodule(obj), '__name__', '') - if modname == '__main__': - print(textwrap.dedent("""\ - Warning:%s is %s - Proper storage of interactively declared classes (or instances - of those classes) is not possible! Only instances - of classes in real modules on file system can be %%store'd. - """ % (arg, obj) )) - return - #pickled = pickle.dumps(obj) - db[ 'autorestore/' + arg ] = obj - print("Stored '%s' (%s)" % (arg, obj.__class__.__name__)) - - -def load_ipython_extension(ip): - """Load the extension in IPython.""" - ip.register_magics(StoreMagics) - +# -*- coding: utf-8 -*- +""" +%store magic for lightweight persistence. + +Stores variables, aliases and macros in IPython's database. + +To automatically restore stored variables at startup, add this to your +:file:`ipython_config.py` file:: + + c.StoreMagics.autorestore = True +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
+ +import inspect, os, sys, textwrap + +from IPython.core.error import UsageError +from IPython.core.magic import Magics, magics_class, line_magic +from traitlets import Bool + + +def restore_aliases(ip, alias=None): + staliases = ip.db.get('stored_aliases', {}) + if alias is None: + for k,v in staliases.items(): + #print "restore alias",k,v # dbg + #self.alias_table[k] = v + ip.alias_manager.define_alias(k,v) + else: + ip.alias_manager.define_alias(alias, staliases[alias]) + + +def refresh_variables(ip): + db = ip.db + for key in db.keys('autorestore/*'): + # strip autorestore + justkey = os.path.basename(key) + try: + obj = db[key] + except KeyError: + print("Unable to restore variable '%s', ignoring (use %%store -d to forget!)" % justkey) + print("The error was:", sys.exc_info()[0]) + else: + #print "restored",justkey,"=",obj #dbg + ip.user_ns[justkey] = obj + + +def restore_dhist(ip): + ip.user_ns['_dh'] = ip.db.get('dhist',[]) + + +def restore_data(ip): + refresh_variables(ip) + restore_aliases(ip) + restore_dhist(ip) + + +@magics_class +class StoreMagics(Magics): + """Lightweight persistence for python variables. + + Provides the %store magic.""" + + autorestore = Bool(False, help= + """If True, any %store-d variables will be automatically restored + when IPython starts. + """ + ).tag(config=True) + + def __init__(self, shell): + super(StoreMagics, self).__init__(shell=shell) + self.shell.configurables.append(self) + if self.autorestore: + restore_data(self.shell) + + @line_magic + def store(self, parameter_s=''): + """Lightweight persistence for python variables. + + Example:: + + In [1]: l = ['hello',10,'world'] + In [2]: %store l + In [3]: exit + + (IPython session is closed and started again...) + + ville@badger:~$ ipython + In [1]: l + NameError: name 'l' is not defined + In [2]: %store -r + In [3]: l + Out[3]: ['hello', 10, 'world'] + + Usage: + + * ``%store`` - Show list of all variables and their current + values + * ``%store spam bar`` - Store the *current* value of the variables spam + and bar to disk + * ``%store -d spam`` - Remove the variable and its value from storage + * ``%store -z`` - Remove all variables from storage + * ``%store -r`` - Refresh all variables, aliases and directory history + from store (overwrite current vals) + * ``%store -r spam bar`` - Refresh specified variables and aliases from store + (delete current val) + * ``%store foo >a.txt`` - Store value of foo to new file a.txt + * ``%store foo >>a.txt`` - Append value of foo to file a.txt + + It should be noted that if you change the value of a variable, you + need to %store it again if you want to persist the new value. + + Note also that the variables will need to be pickleable; most basic + python types can be safely %store'd. + + Also aliases can be %store'd across sessions. + To remove an alias from the storage, use the %unalias magic. 
+ """ + + opts,argsl = self.parse_options(parameter_s,'drz',mode='string') + args = argsl.split() + ip = self.shell + db = ip.db + # delete + if 'd' in opts: + try: + todel = args[0] + except IndexError: + raise UsageError('You must provide the variable to forget') + else: + try: + del db['autorestore/' + todel] + except: + raise UsageError("Can't delete variable '%s'" % todel) + # reset + elif 'z' in opts: + for k in db.keys('autorestore/*'): + del db[k] + + elif 'r' in opts: + if args: + for arg in args: + try: + obj = db['autorestore/' + arg] + except KeyError: + try: + restore_aliases(ip, alias=arg) + except KeyError: + print("no stored variable or alias %s" % arg) + else: + ip.user_ns[arg] = obj + else: + restore_data(ip) + + # run without arguments -> list variables & values + elif not args: + vars = db.keys('autorestore/*') + vars.sort() + if vars: + size = max(map(len, vars)) + else: + size = 0 + + print('Stored variables and their in-db values:') + fmt = '%-'+str(size)+'s -> %s' + get = db.get + for var in vars: + justkey = os.path.basename(var) + # print 30 first characters from every var + print(fmt % (justkey, repr(get(var, '<unavailable>'))[:50])) + + # default action - store the variable + else: + # %store foo >file.txt or >>file.txt + if len(args) > 1 and args[1].startswith('>'): + fnam = os.path.expanduser(args[1].lstrip('>').lstrip()) + if args[1].startswith('>>'): + fil = open(fnam, 'a') + else: + fil = open(fnam, 'w') + with fil: + obj = ip.ev(args[0]) + print("Writing '%s' (%s) to file '%s'." % (args[0], + obj.__class__.__name__, fnam)) + + if not isinstance (obj, str): + from pprint import pprint + pprint(obj, fil) + else: + fil.write(obj) + if not obj.endswith('\n'): + fil.write('\n') + + return + + # %store foo + for arg in args: + try: + obj = ip.user_ns[arg] + except KeyError: + # it might be an alias + name = arg + try: + cmd = ip.alias_manager.retrieve_alias(name) + except ValueError: + raise UsageError("Unknown variable '%s'" % name) + + staliases = db.get('stored_aliases',{}) + staliases[name] = cmd + db['stored_aliases'] = staliases + print("Alias stored: %s (%s)" % (name, cmd)) + return + + else: + modname = getattr(inspect.getmodule(obj), '__name__', '') + if modname == '__main__': + print(textwrap.dedent("""\ + Warning:%s is %s + Proper storage of interactively declared classes (or instances + of those classes) is not possible! Only instances + of classes in real modules on file system can be %%store'd. + """ % (arg, obj) )) + return + #pickled = pickle.dumps(obj) + db[ 'autorestore/' + arg ] = obj + print("Stored '%s' (%s)" % (arg, obj.__class__.__name__)) + + +def load_ipython_extension(ip): + """Load the extension in IPython.""" + ip.register_magics(StoreMagics) + diff --git a/contrib/python/ipython/py3/IPython/extensions/sympyprinting.py b/contrib/python/ipython/py3/IPython/extensions/sympyprinting.py index 8d41983c432..e6a83cd34b6 100644 --- a/contrib/python/ipython/py3/IPython/extensions/sympyprinting.py +++ b/contrib/python/ipython/py3/IPython/extensions/sympyprinting.py @@ -1,32 +1,32 @@ -""" -**DEPRECATED** - -A print function that pretty prints sympy Basic objects. - -:moduleauthor: Brian Granger - -Usage -===== - -Once the extension is loaded, Sympy Basic objects are automatically -pretty-printed. - -As of SymPy 0.7.2, maintenance of this extension has moved to SymPy under -sympy.interactive.ipythonprinting, any modifications to account for changes to -SymPy should be submitted to SymPy rather than changed here. 
This module is -maintained here for backwards compatibility with old SymPy versions. - -""" -#----------------------------------------------------------------------------- -# Copyright (C) 2008 The IPython Development Team -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import warnings - -def load_ipython_extension(ip): - warnings.warn("The sympyprinting extension has moved to `sympy`, " - "use `from sympy import init_printing; init_printing()`") +""" +**DEPRECATED** + +A print function that pretty prints sympy Basic objects. + +:moduleauthor: Brian Granger + +Usage +===== + +Once the extension is loaded, Sympy Basic objects are automatically +pretty-printed. + +As of SymPy 0.7.2, maintenance of this extension has moved to SymPy under +sympy.interactive.ipythonprinting, any modifications to account for changes to +SymPy should be submitted to SymPy rather than changed here. This module is +maintained here for backwards compatibility with old SymPy versions. + +""" +#----------------------------------------------------------------------------- +# Copyright (C) 2008 The IPython Development Team +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import warnings + +def load_ipython_extension(ip): + warnings.warn("The sympyprinting extension has moved to `sympy`, " + "use `from sympy import init_printing; init_printing()`") diff --git a/contrib/python/ipython/py3/IPython/external/__init__.py b/contrib/python/ipython/py3/IPython/external/__init__.py index 92563bb4711..1c8c546f118 100644 --- a/contrib/python/ipython/py3/IPython/external/__init__.py +++ b/contrib/python/ipython/py3/IPython/external/__init__.py @@ -1,5 +1,5 @@ -""" -This package contains all third-party modules bundled with IPython. -""" - -__all__ = [] +""" +This package contains all third-party modules bundled with IPython. +""" + +__all__ = [] diff --git a/contrib/python/ipython/py3/IPython/external/decorators/__init__.py b/contrib/python/ipython/py3/IPython/external/decorators/__init__.py index 8a5ff866b9c..1db80edd357 100644 --- a/contrib/python/ipython/py3/IPython/external/decorators/__init__.py +++ b/contrib/python/ipython/py3/IPython/external/decorators/__init__.py @@ -1,8 +1,8 @@ -try: - from numpy.testing import KnownFailure, knownfailureif -except ImportError: - from ._decorators import knownfailureif - try: - from ._numpy_testing_noseclasses import KnownFailure - except ImportError: - pass +try: + from numpy.testing import KnownFailure, knownfailureif +except ImportError: + from ._decorators import knownfailureif + try: + from ._numpy_testing_noseclasses import KnownFailure + except ImportError: + pass diff --git a/contrib/python/ipython/py3/IPython/external/decorators/_decorators.py b/contrib/python/ipython/py3/IPython/external/decorators/_decorators.py index b216e9f2d6f..18f847adadd 100644 --- a/contrib/python/ipython/py3/IPython/external/decorators/_decorators.py +++ b/contrib/python/ipython/py3/IPython/external/decorators/_decorators.py @@ -1,143 +1,143 @@ -""" -Decorators for labeling and modifying behavior of test objects. 
- -Decorators that merely return a modified version of the original -function object are straightforward. Decorators that return a new -function object need to use -:: - - nose.tools.make_decorator(original_function)(decorator) - -in returning the decorator, in order to preserve meta-data such as -function name, setup and teardown functions and so on - see -``nose.tools`` for more information. - -""" - -# IPython changes: make this work if numpy not available -# Original code: -try: - from ._numpy_testing_noseclasses import KnownFailureTest -except: - pass - -# End IPython changes - - -def skipif(skip_condition, msg=None): - """ - Make function raise SkipTest exception if a given condition is true. - - If the condition is a callable, it is used at runtime to dynamically - make the decision. This is useful for tests that may require costly - imports, to delay the cost until the test suite is actually executed. - - Parameters - ---------- - skip_condition : bool or callable - Flag to determine whether to skip the decorated test. - msg : str, optional - Message to give on raising a SkipTest exception. Default is None. - - Returns - ------- - decorator : function - Decorator which, when applied to a function, causes SkipTest - to be raised when `skip_condition` is True, and the function - to be called normally otherwise. - - Notes - ----- - The decorator itself is decorated with the ``nose.tools.make_decorator`` - function in order to transmit function name, and various other metadata. - - """ - - def skip_decorator(f): - # Local import to avoid a hard nose dependency and only incur the - # import time overhead at actual test-time. - import nose - - # Allow for both boolean or callable skip conditions. - if callable(skip_condition): - skip_val = lambda : skip_condition() - else: - skip_val = lambda : skip_condition - - def get_msg(func,msg=None): - """Skip message with information about function being skipped.""" - if msg is None: - out = 'Test skipped due to test condition' - else: - out = '\n'+msg - - return "Skipping test: %s%s" % (func.__name__,out) - - # We need to define *two* skippers because Python doesn't allow both - # return with value and yield inside the same function. - def skipper_func(*args, **kwargs): - """Skipper for normal test functions.""" - if skip_val(): - raise nose.SkipTest(get_msg(f,msg)) - else: - return f(*args, **kwargs) - - def skipper_gen(*args, **kwargs): - """Skipper for test generators.""" - if skip_val(): - raise nose.SkipTest(get_msg(f,msg)) - else: - for x in f(*args, **kwargs): - yield x - - # Choose the right skipper to use when building the actual decorator. - if nose.util.isgenerator(f): - skipper = skipper_gen - else: - skipper = skipper_func - - return nose.tools.make_decorator(f)(skipper) - - return skip_decorator - -def knownfailureif(fail_condition, msg=None): - """ - Make function raise KnownFailureTest exception if given condition is true. - - Parameters - ---------- - fail_condition : bool - Flag to determine whether to mark the decorated test as a known - failure (if True) or not (if False). - msg : str, optional - Message to give on raising a KnownFailureTest exception. - Default is None. - - Returns - ------- - decorator : function - Decorator, which, when applied to a function, causes KnownFailureTest to - be raised when `fail_condition` is True and the test fails. - - Notes - ----- - The decorator itself is decorated with the ``nose.tools.make_decorator`` - function in order to transmit function name, and various other metadata. 
- - """ - if msg is None: - msg = 'Test skipped due to known failure' - - def knownfail_decorator(f): - # Local import to avoid a hard nose dependency and only incur the - # import time overhead at actual test-time. - import nose - - def knownfailer(*args, **kwargs): - if fail_condition: - raise KnownFailureTest(msg) - else: - return f(*args, **kwargs) - return nose.tools.make_decorator(f)(knownfailer) - - return knownfail_decorator +""" +Decorators for labeling and modifying behavior of test objects. + +Decorators that merely return a modified version of the original +function object are straightforward. Decorators that return a new +function object need to use +:: + + nose.tools.make_decorator(original_function)(decorator) + +in returning the decorator, in order to preserve meta-data such as +function name, setup and teardown functions and so on - see +``nose.tools`` for more information. + +""" + +# IPython changes: make this work if numpy not available +# Original code: +try: + from ._numpy_testing_noseclasses import KnownFailureTest +except: + pass + +# End IPython changes + + +def skipif(skip_condition, msg=None): + """ + Make function raise SkipTest exception if a given condition is true. + + If the condition is a callable, it is used at runtime to dynamically + make the decision. This is useful for tests that may require costly + imports, to delay the cost until the test suite is actually executed. + + Parameters + ---------- + skip_condition : bool or callable + Flag to determine whether to skip the decorated test. + msg : str, optional + Message to give on raising a SkipTest exception. Default is None. + + Returns + ------- + decorator : function + Decorator which, when applied to a function, causes SkipTest + to be raised when `skip_condition` is True, and the function + to be called normally otherwise. + + Notes + ----- + The decorator itself is decorated with the ``nose.tools.make_decorator`` + function in order to transmit function name, and various other metadata. + + """ + + def skip_decorator(f): + # Local import to avoid a hard nose dependency and only incur the + # import time overhead at actual test-time. + import nose + + # Allow for both boolean or callable skip conditions. + if callable(skip_condition): + skip_val = lambda : skip_condition() + else: + skip_val = lambda : skip_condition + + def get_msg(func,msg=None): + """Skip message with information about function being skipped.""" + if msg is None: + out = 'Test skipped due to test condition' + else: + out = '\n'+msg + + return "Skipping test: %s%s" % (func.__name__,out) + + # We need to define *two* skippers because Python doesn't allow both + # return with value and yield inside the same function. + def skipper_func(*args, **kwargs): + """Skipper for normal test functions.""" + if skip_val(): + raise nose.SkipTest(get_msg(f,msg)) + else: + return f(*args, **kwargs) + + def skipper_gen(*args, **kwargs): + """Skipper for test generators.""" + if skip_val(): + raise nose.SkipTest(get_msg(f,msg)) + else: + for x in f(*args, **kwargs): + yield x + + # Choose the right skipper to use when building the actual decorator. + if nose.util.isgenerator(f): + skipper = skipper_gen + else: + skipper = skipper_func + + return nose.tools.make_decorator(f)(skipper) + + return skip_decorator + +def knownfailureif(fail_condition, msg=None): + """ + Make function raise KnownFailureTest exception if given condition is true. 
+ + Parameters + ---------- + fail_condition : bool + Flag to determine whether to mark the decorated test as a known + failure (if True) or not (if False). + msg : str, optional + Message to give on raising a KnownFailureTest exception. + Default is None. + + Returns + ------- + decorator : function + Decorator, which, when applied to a function, causes KnownFailureTest to + be raised when `fail_condition` is True and the test fails. + + Notes + ----- + The decorator itself is decorated with the ``nose.tools.make_decorator`` + function in order to transmit function name, and various other metadata. + + """ + if msg is None: + msg = 'Test skipped due to known failure' + + def knownfail_decorator(f): + # Local import to avoid a hard nose dependency and only incur the + # import time overhead at actual test-time. + import nose + + def knownfailer(*args, **kwargs): + if fail_condition: + raise KnownFailureTest(msg) + else: + return f(*args, **kwargs) + return nose.tools.make_decorator(f)(knownfailer) + + return knownfail_decorator diff --git a/contrib/python/ipython/py3/IPython/external/decorators/_numpy_testing_noseclasses.py b/contrib/python/ipython/py3/IPython/external/decorators/_numpy_testing_noseclasses.py index 2b3d2841a75..ca6ccd87bbc 100644 --- a/contrib/python/ipython/py3/IPython/external/decorators/_numpy_testing_noseclasses.py +++ b/contrib/python/ipython/py3/IPython/external/decorators/_numpy_testing_noseclasses.py @@ -1,41 +1,41 @@ -# IPython: modified copy of numpy.testing.noseclasses, so -# IPython.external._decorators works without numpy being installed. - -# These classes implement a "known failure" error class. - -import os - -from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin - -class KnownFailureTest(Exception): - '''Raise this exception to mark a test as a known failing test.''' - pass - - -class KnownFailure(ErrorClassPlugin): - '''Plugin that installs a KNOWNFAIL error class for the - KnownFailureClass exception. When KnownFailureTest is raised, - the exception will be logged in the knownfail attribute of the - result, 'K' or 'KNOWNFAIL' (verbose) will be output, and the - exception will not be counted as an error or failure.''' - enabled = True - knownfail = ErrorClass(KnownFailureTest, - label='KNOWNFAIL', - isfailure=False) - - def options(self, parser, env=os.environ): - env_opt = 'NOSE_WITHOUT_KNOWNFAIL' - parser.add_option('--no-knownfail', action='store_true', - dest='noKnownFail', default=env.get(env_opt, False), - help='Disable special handling of KnownFailureTest ' - 'exceptions') - - def configure(self, options, conf): - if not self.can_configure: - return - self.conf = conf - disable = getattr(options, 'noKnownFail', False) - if disable: - self.enabled = False - - +# IPython: modified copy of numpy.testing.noseclasses, so +# IPython.external._decorators works without numpy being installed. + +# These classes implement a "known failure" error class. + +import os + +from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin + +class KnownFailureTest(Exception): + '''Raise this exception to mark a test as a known failing test.''' + pass + + +class KnownFailure(ErrorClassPlugin): + '''Plugin that installs a KNOWNFAIL error class for the + KnownFailureClass exception. 
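For orientation, a minimal sketch of how the bundled knownfailureif shim was meant to decorate a nose-style test; the condition, message, and test body below are purely illustrative:

    from IPython.external.decorators import knownfailureif

    # Mark a test as an expected failure while some condition holds.
    @knownfailureif(True, "known to fail until the upstream fix lands")
    def test_feature():
        assert 1 + 1 == 3   # deliberately failing body, for illustration only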
When KnownFailureTest is raised, + the exception will be logged in the knownfail attribute of the + result, 'K' or 'KNOWNFAIL' (verbose) will be output, and the + exception will not be counted as an error or failure.''' + enabled = True + knownfail = ErrorClass(KnownFailureTest, + label='KNOWNFAIL', + isfailure=False) + + def options(self, parser, env=os.environ): + env_opt = 'NOSE_WITHOUT_KNOWNFAIL' + parser.add_option('--no-knownfail', action='store_true', + dest='noKnownFail', default=env.get(env_opt, False), + help='Disable special handling of KnownFailureTest ' + 'exceptions') + + def configure(self, options, conf): + if not self.can_configure: + return + self.conf = conf + disable = getattr(options, 'noKnownFail', False) + if disable: + self.enabled = False + + diff --git a/contrib/python/ipython/py3/IPython/external/mathjax.py b/contrib/python/ipython/py3/IPython/external/mathjax.py index c614e465796..1b9b80905ba 100644 --- a/contrib/python/ipython/py3/IPython/external/mathjax.py +++ b/contrib/python/ipython/py3/IPython/external/mathjax.py @@ -1,13 +1,13 @@ -#!/usr/bin/python -""" -`IPython.external.mathjax` is deprecated with IPython 4.0+ - -mathjax is now install by default with the notebook package - -""" - -import sys - -if __name__ == '__main__' : - sys.exit("IPython.external.mathjax is deprecated, Mathjax is now installed by default with the notebook package") - +#!/usr/bin/python +""" +`IPython.external.mathjax` is deprecated with IPython 4.0+ + +mathjax is now install by default with the notebook package + +""" + +import sys + +if __name__ == '__main__' : + sys.exit("IPython.external.mathjax is deprecated, Mathjax is now installed by default with the notebook package") + diff --git a/contrib/python/ipython/py3/IPython/external/qt_for_kernel.py b/contrib/python/ipython/py3/IPython/external/qt_for_kernel.py index 6efb5f5d4d4..d2e7bd99f01 100644 --- a/contrib/python/ipython/py3/IPython/external/qt_for_kernel.py +++ b/contrib/python/ipython/py3/IPython/external/qt_for_kernel.py @@ -1,129 +1,129 @@ -""" Import Qt in a manner suitable for an IPython kernel. - -This is the import used for the `gui=qt` or `matplotlib=qt` initialization. - -Import Priority: - -if Qt has been imported anywhere else: - use that - -if matplotlib has been imported and doesn't support v2 (<= 1.0.1): - use PyQt4 @v1 - -Next, ask QT_API env variable - -if QT_API not set: - ask matplotlib what it's using. If Qt4Agg or Qt5Agg, then use the - version matplotlib is configured with - - else: (matplotlib said nothing) - # this is the default path - nobody told us anything - try in this order: - PyQt default version, PySide, PyQt5 -else: - use what QT_API says - -""" -# NOTE: This is no longer an external, third-party module, and should be -# considered part of IPython. For compatibility however, it is being kept in -# IPython/external. 
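The priority spelled out in this docstring reduces to: an already-imported binding wins, then matplotlib's configured backend, then the QT_API environment variable, then a built-in fallback order. A short sketch of steering the choice explicitly (the binding name is only an example):

    import os

    # Must be set before qt_for_kernel is imported for the first time.
    os.environ["QT_API"] = "pyqt5"

    from IPython.external.qt_for_kernel import QtCore, QtGui, QT_API
    print(QT_API)   # shows which binding was actually committed to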
- -import os -import sys - -from IPython.utils.version import check_version -from IPython.external.qt_loaders import ( - load_qt, - loaded_api, - enum_factory, - # QT6 - QT_API_PYQT6, - QT_API_PYSIDE6, - # QT5 - QT_API_PYQT5, - QT_API_PYSIDE2, - # QT4 - QT_API_PYQTv1, - QT_API_PYQT, - QT_API_PYSIDE, - # default - QT_API_PYQT_DEFAULT, -) - -_qt_apis = ( - # QT6 - QT_API_PYQT6, - QT_API_PYSIDE6, - # QT5 - QT_API_PYQT5, - QT_API_PYSIDE2, - # QT4 - QT_API_PYQTv1, - QT_API_PYQT, - QT_API_PYSIDE, - # default - QT_API_PYQT_DEFAULT, -) - - -def matplotlib_options(mpl): - """Constraints placed on an imported matplotlib.""" - if mpl is None: - return - backend = mpl.rcParams.get('backend', None) - if backend == 'Qt4Agg': - mpqt = mpl.rcParams.get('backend.qt4', None) - if mpqt is None: - return None - if mpqt.lower() == 'pyside': - return [QT_API_PYSIDE] - elif mpqt.lower() == 'pyqt4': - return [QT_API_PYQT_DEFAULT] - elif mpqt.lower() == 'pyqt4v2': - return [QT_API_PYQT] - raise ImportError("unhandled value for backend.qt4 from matplotlib: %r" % - mpqt) - elif backend == 'Qt5Agg': - mpqt = mpl.rcParams.get('backend.qt5', None) - if mpqt is None: - return None - if mpqt.lower() == 'pyqt5': - return [QT_API_PYQT5] - raise ImportError("unhandled value for backend.qt5 from matplotlib: %r" % - mpqt) - -def get_options(): - """Return a list of acceptable QT APIs, in decreasing order of preference.""" - #already imported Qt somewhere. Use that - loaded = loaded_api() - if loaded is not None: - return [loaded] - - mpl = sys.modules.get('matplotlib', None) - - if mpl is not None and not check_version(mpl.__version__, '1.0.2'): - #1.0.1 only supports PyQt4 v1 - return [QT_API_PYQT_DEFAULT] - - qt_api = os.environ.get('QT_API', None) - if qt_api is None: - #no ETS variable. Ask mpl, then use default fallback path - return matplotlib_options(mpl) or [ - QT_API_PYQT_DEFAULT, - QT_API_PYQT6, - QT_API_PYSIDE6, - QT_API_PYQT5, - QT_API_PYSIDE2, - QT_API_PYQT, - QT_API_PYSIDE, - ] - elif qt_api not in _qt_apis: - raise RuntimeError("Invalid Qt API %r, valid values are: %r" % - (qt_api, ', '.join(_qt_apis))) - else: - return [qt_api] - - -api_opts = get_options() -QtCore, QtGui, QtSvg, QT_API = load_qt(api_opts) -enum_helper = enum_factory(QT_API, QtCore) +""" Import Qt in a manner suitable for an IPython kernel. + +This is the import used for the `gui=qt` or `matplotlib=qt` initialization. + +Import Priority: + +if Qt has been imported anywhere else: + use that + +if matplotlib has been imported and doesn't support v2 (<= 1.0.1): + use PyQt4 @v1 + +Next, ask QT_API env variable + +if QT_API not set: + ask matplotlib what it's using. If Qt4Agg or Qt5Agg, then use the + version matplotlib is configured with + + else: (matplotlib said nothing) + # this is the default path - nobody told us anything + try in this order: + PyQt default version, PySide, PyQt5 +else: + use what QT_API says + +""" +# NOTE: This is no longer an external, third-party module, and should be +# considered part of IPython. For compatibility however, it is being kept in +# IPython/external. 
+ +import os +import sys + +from IPython.utils.version import check_version +from IPython.external.qt_loaders import ( + load_qt, + loaded_api, + enum_factory, + # QT6 + QT_API_PYQT6, + QT_API_PYSIDE6, + # QT5 + QT_API_PYQT5, + QT_API_PYSIDE2, + # QT4 + QT_API_PYQTv1, + QT_API_PYQT, + QT_API_PYSIDE, + # default + QT_API_PYQT_DEFAULT, +) + +_qt_apis = ( + # QT6 + QT_API_PYQT6, + QT_API_PYSIDE6, + # QT5 + QT_API_PYQT5, + QT_API_PYSIDE2, + # QT4 + QT_API_PYQTv1, + QT_API_PYQT, + QT_API_PYSIDE, + # default + QT_API_PYQT_DEFAULT, +) + + +def matplotlib_options(mpl): + """Constraints placed on an imported matplotlib.""" + if mpl is None: + return + backend = mpl.rcParams.get('backend', None) + if backend == 'Qt4Agg': + mpqt = mpl.rcParams.get('backend.qt4', None) + if mpqt is None: + return None + if mpqt.lower() == 'pyside': + return [QT_API_PYSIDE] + elif mpqt.lower() == 'pyqt4': + return [QT_API_PYQT_DEFAULT] + elif mpqt.lower() == 'pyqt4v2': + return [QT_API_PYQT] + raise ImportError("unhandled value for backend.qt4 from matplotlib: %r" % + mpqt) + elif backend == 'Qt5Agg': + mpqt = mpl.rcParams.get('backend.qt5', None) + if mpqt is None: + return None + if mpqt.lower() == 'pyqt5': + return [QT_API_PYQT5] + raise ImportError("unhandled value for backend.qt5 from matplotlib: %r" % + mpqt) + +def get_options(): + """Return a list of acceptable QT APIs, in decreasing order of preference.""" + #already imported Qt somewhere. Use that + loaded = loaded_api() + if loaded is not None: + return [loaded] + + mpl = sys.modules.get('matplotlib', None) + + if mpl is not None and not check_version(mpl.__version__, '1.0.2'): + #1.0.1 only supports PyQt4 v1 + return [QT_API_PYQT_DEFAULT] + + qt_api = os.environ.get('QT_API', None) + if qt_api is None: + #no ETS variable. Ask mpl, then use default fallback path + return matplotlib_options(mpl) or [ + QT_API_PYQT_DEFAULT, + QT_API_PYQT6, + QT_API_PYSIDE6, + QT_API_PYQT5, + QT_API_PYSIDE2, + QT_API_PYQT, + QT_API_PYSIDE, + ] + elif qt_api not in _qt_apis: + raise RuntimeError("Invalid Qt API %r, valid values are: %r" % + (qt_api, ', '.join(_qt_apis))) + else: + return [qt_api] + + +api_opts = get_options() +QtCore, QtGui, QtSvg, QT_API = load_qt(api_opts) +enum_helper = enum_factory(QT_API, QtCore) diff --git a/contrib/python/ipython/py3/IPython/external/qt_loaders.py b/contrib/python/ipython/py3/IPython/external/qt_loaders.py index ee42ca6ffb0..79805358e72 100644 --- a/contrib/python/ipython/py3/IPython/external/qt_loaders.py +++ b/contrib/python/ipython/py3/IPython/external/qt_loaders.py @@ -1,401 +1,401 @@ -""" -This module contains factory functions that attempt -to return Qt submodules from the various python Qt bindings. - -It also protects against double-importing Qt with different -bindings, which is unstable and likely to crash - -This is used primarily by qt and qt_for_kernel, and shouldn't -be accessed directly from the outside -""" -import sys -import types -from functools import partial, lru_cache -import operator - -from IPython.utils.version import check_version - -# ### Available APIs. -# Qt6 -QT_API_PYQT6 = "pyqt6" -QT_API_PYSIDE6 = "pyside6" - -# Qt5 -QT_API_PYQT5 = 'pyqt5' -QT_API_PYSIDE2 = 'pyside2' - -# Qt4 -QT_API_PYQT = "pyqt" # Force version 2 -QT_API_PYQTv1 = "pyqtv1" # Force version 2 -QT_API_PYSIDE = "pyside" - -QT_API_PYQT_DEFAULT = "pyqtdefault" # use system default for version 1 vs. 
2 - -api_to_module = { - # Qt6 - QT_API_PYQT6: "PyQt6", - QT_API_PYSIDE6: "PySide6", - # Qt5 - QT_API_PYQT5: "PyQt5", - QT_API_PYSIDE2: "PySide2", - # Qt4 - QT_API_PYSIDE: "PySide", - QT_API_PYQT: "PyQt4", - QT_API_PYQTv1: "PyQt4", - # default - QT_API_PYQT_DEFAULT: "PyQt6", -} - - -class ImportDenier(object): - """Import Hook that will guard against bad Qt imports - once IPython commits to a specific binding - """ - - def __init__(self): - self.__forbidden = set() - - def forbid(self, module_name): - sys.modules.pop(module_name, None) - self.__forbidden.add(module_name) - - def find_module(self, fullname, path=None): - if path: - return - if fullname in self.__forbidden: - return self - - def load_module(self, fullname): - raise ImportError(""" - Importing %s disabled by IPython, which has - already imported an Incompatible QT Binding: %s - """ % (fullname, loaded_api())) - - -ID = ImportDenier() -sys.meta_path.insert(0, ID) - - -def commit_api(api): - """Commit to a particular API, and trigger ImportErrors on subsequent - dangerous imports""" - modules = set(api_to_module.values()) - - modules.remove(api_to_module[api]) - for mod in modules: - ID.forbid(mod) - - -def loaded_api(): - """Return which API is loaded, if any - - If this returns anything besides None, - importing any other Qt binding is unsafe. - - Returns - ------- - None, 'pyside6', 'pyqt6', 'pyside2', 'pyside', 'pyqt', 'pyqt5', 'pyqtv1' - """ - if sys.modules.get("PyQt6.QtCore"): - return QT_API_PYQT6 - elif sys.modules.get("PySide6.QtCore"): - return QT_API_PYSIDE6 - elif sys.modules.get("PyQt5.QtCore"): - return QT_API_PYQT5 - elif sys.modules.get("PySide2.QtCore"): - return QT_API_PYSIDE2 - elif sys.modules.get("PyQt4.QtCore"): - if qtapi_version() == 2: - return QT_API_PYQT - else: - return QT_API_PYQTv1 - elif sys.modules.get("PySide.QtCore"): - return QT_API_PYSIDE - - return None - - -def has_binding(api): - """Safely check for PyQt4/5, PySide or PySide2, without importing submodules - - Parameters - ---------- - api : str [ 'pyqtv1' | 'pyqt' | 'pyqt5' | 'pyside' | 'pyside2' | 'pyqtdefault'] - Which module to check for - - Returns - ------- - True if the relevant module appears to be importable - """ - module_name = api_to_module[api] - from importlib.util import find_spec - - required = ['QtCore', 'QtGui', 'QtSvg'] - if api in (QT_API_PYQT5, QT_API_PYSIDE2, QT_API_PYQT6, QT_API_PYSIDE6): - # QT5 requires QtWidgets too - required.append('QtWidgets') - - for submod in required: - try: - spec = find_spec('%s.%s' % (module_name, submod)) - except ImportError: - # Package (e.g. PyQt5) not found - return False - else: - if spec is None: - # Submodule (e.g. 
PyQt5.QtCore) not found - return False - - if api == QT_API_PYSIDE: - # We can also safely check PySide version - import PySide - return check_version(PySide.__version__, '1.0.3') - - return True - - -def qtapi_version(): - """Return which QString API has been set, if any - - Returns - ------- - The QString API version (1 or 2), or None if not set - """ - try: - import sip - except ImportError: - # as of PyQt5 5.11, sip is no longer available as a top-level - # module and needs to be imported from the PyQt5 namespace - try: - from PyQt5 import sip - except ImportError: - return - try: - return sip.getapi('QString') - except ValueError: - return - - -def can_import(api): - """Safely query whether an API is importable, without importing it""" - if not has_binding(api): - return False - - current = loaded_api() - if api == QT_API_PYQT_DEFAULT: - return current in [QT_API_PYQT6, None] - else: - return current in [api, None] - - -def import_pyqt4(version=2): - """ - Import PyQt4 - - Parameters - ---------- - version : 1, 2, or None - Which QString/QVariant API to use. Set to None to use the system - default - - ImportErrors rasied within this function are non-recoverable - """ - # The new-style string API (version=2) automatically - # converts QStrings to Unicode Python strings. Also, automatically unpacks - # QVariants to their underlying objects. - import sip - - if version is not None: - sip.setapi('QString', version) - sip.setapi('QVariant', version) - - from PyQt4 import QtGui, QtCore, QtSvg - - if not check_version(QtCore.PYQT_VERSION_STR, '4.7'): - raise ImportError("IPython requires PyQt4 >= 4.7, found %s" % - QtCore.PYQT_VERSION_STR) - - # Alias PyQt-specific functions for PySide compatibility. - QtCore.Signal = QtCore.pyqtSignal - QtCore.Slot = QtCore.pyqtSlot - - # query for the API version (in case version == None) - version = sip.getapi('QString') - api = QT_API_PYQTv1 if version == 1 else QT_API_PYQT - return QtCore, QtGui, QtSvg, api - - -def import_pyqt5(): - """ - Import PyQt5 - - ImportErrors rasied within this function are non-recoverable - """ - - from PyQt5 import QtCore, QtSvg, QtWidgets, QtGui - - # Alias PyQt-specific functions for PySide compatibility. - QtCore.Signal = QtCore.pyqtSignal - QtCore.Slot = QtCore.pyqtSlot - - # Join QtGui and QtWidgets for Qt4 compatibility. - QtGuiCompat = types.ModuleType('QtGuiCompat') - QtGuiCompat.__dict__.update(QtGui.__dict__) - QtGuiCompat.__dict__.update(QtWidgets.__dict__) - - api = QT_API_PYQT5 - return QtCore, QtGuiCompat, QtSvg, api - - -def import_pyqt6(): - """ - Import PyQt6 - - ImportErrors rasied within this function are non-recoverable - """ - - from PyQt6 import QtCore, QtSvg, QtWidgets, QtGui - - # Alias PyQt-specific functions for PySide compatibility. - QtCore.Signal = QtCore.pyqtSignal - QtCore.Slot = QtCore.pyqtSlot - - # Join QtGui and QtWidgets for Qt4 compatibility. - QtGuiCompat = types.ModuleType("QtGuiCompat") - QtGuiCompat.__dict__.update(QtGui.__dict__) - QtGuiCompat.__dict__.update(QtWidgets.__dict__) - - api = QT_API_PYQT6 - return QtCore, QtGuiCompat, QtSvg, api - - -def import_pyside(): - """ - Import PySide - - ImportErrors raised within this function are non-recoverable - """ - from PySide import QtGui, QtCore, QtSvg - return QtCore, QtGui, QtSvg, QT_API_PYSIDE - -def import_pyside2(): - """ - Import PySide2 - - ImportErrors raised within this function are non-recoverable - """ - from PySide2 import QtGui, QtCore, QtSvg, QtWidgets, QtPrintSupport - - # Join QtGui and QtWidgets for Qt4 compatibility. 
- QtGuiCompat = types.ModuleType('QtGuiCompat') - QtGuiCompat.__dict__.update(QtGui.__dict__) - QtGuiCompat.__dict__.update(QtWidgets.__dict__) - QtGuiCompat.__dict__.update(QtPrintSupport.__dict__) - - return QtCore, QtGuiCompat, QtSvg, QT_API_PYSIDE2 - - -def import_pyside6(): - """ - Import PySide6 - - ImportErrors raised within this function are non-recoverable - """ - from PySide6 import QtGui, QtCore, QtSvg, QtWidgets, QtPrintSupport - - # Join QtGui and QtWidgets for Qt4 compatibility. - QtGuiCompat = types.ModuleType("QtGuiCompat") - QtGuiCompat.__dict__.update(QtGui.__dict__) - QtGuiCompat.__dict__.update(QtWidgets.__dict__) - QtGuiCompat.__dict__.update(QtPrintSupport.__dict__) - - return QtCore, QtGuiCompat, QtSvg, QT_API_PYSIDE6 - - -def load_qt(api_options): - """ - Attempt to import Qt, given a preference list - of permissible bindings - - It is safe to call this function multiple times. - - Parameters - ---------- - api_options: List of strings - The order of APIs to try. Valid items are 'pyside', 'pyside2', - 'pyqt', 'pyqt5', 'pyqtv1' and 'pyqtdefault' - - Returns - ------- - - A tuple of QtCore, QtGui, QtSvg, QT_API - The first three are the Qt modules. The last is the - string indicating which module was loaded. - - Raises - ------ - ImportError, if it isn't possible to import any requested - bindings (either because they aren't installed, or because - an incompatible library has already been installed) - """ - loaders = { - # Qt6 - QT_API_PYQT6: import_pyqt6, - QT_API_PYSIDE6: import_pyside6, - # Qt5 - QT_API_PYQT5: import_pyqt5, - QT_API_PYSIDE2: import_pyside2, - # Qt4 - QT_API_PYSIDE: import_pyside, - QT_API_PYQT: import_pyqt4, - QT_API_PYQTv1: partial(import_pyqt4, version=1), - # default - QT_API_PYQT_DEFAULT: import_pyqt6, - } - - for api in api_options: - - if api not in loaders: - raise RuntimeError( - "Invalid Qt API %r, valid values are: %s" % - (api, ", ".join(["%r" % k for k in loaders.keys()]))) - - if not can_import(api): - continue - - #cannot safely recover from an ImportError during this - result = loaders[api]() - api = result[-1] # changed if api = QT_API_PYQT_DEFAULT - commit_api(api) - return result - else: - raise ImportError(""" - Could not load requested Qt binding. Please ensure that - PyQt4 >= 4.7, PyQt5, PySide >= 1.0.3 or PySide2 is available, - and only one is imported per session. - - Currently-imported Qt library: %r - PyQt4 available (requires QtCore, QtGui, QtSvg): %s - PyQt5 available (requires QtCore, QtGui, QtSvg, QtWidgets): %s - PySide >= 1.0.3 installed: %s - PySide2 installed: %s - Tried to load: %r - """ % (loaded_api(), - has_binding(QT_API_PYQT), - has_binding(QT_API_PYQT5), - has_binding(QT_API_PYSIDE), - has_binding(QT_API_PYSIDE2), - api_options)) - - -def enum_factory(QT_API, QtCore): - """Construct an enum helper to account for PyQt5 <-> PyQt6 changes.""" - - @lru_cache(None) - def _enum(name): - # foo.bar.Enum.Entry (PyQt6) <=> foo.bar.Entry (non-PyQt6). - return operator.attrgetter( - name if QT_API == QT_API_PYQT6 else name.rpartition(".")[0] - )(sys.modules[QtCore.__package__]) - - return _enum +""" +This module contains factory functions that attempt +to return Qt submodules from the various python Qt bindings. 
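load_qt is the single entry point the shims above rely on: given a preference list it imports the first usable binding, forbids the competing ones for the rest of the session, and returns the Qt modules plus the winning API name. A rough sketch, with an illustrative preference list:

    from IPython.external.qt_loaders import load_qt

    # Try PyQt5 first, then PySide2; raises ImportError if neither is usable.
    QtCore, QtGui, QtSvg, QT_API = load_qt(["pyqt5", "pyside2"])
    print("Loaded Qt binding:", QT_API)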
+ +It also protects against double-importing Qt with different +bindings, which is unstable and likely to crash + +This is used primarily by qt and qt_for_kernel, and shouldn't +be accessed directly from the outside +""" +import sys +import types +from functools import partial, lru_cache +import operator + +from IPython.utils.version import check_version + +# ### Available APIs. +# Qt6 +QT_API_PYQT6 = "pyqt6" +QT_API_PYSIDE6 = "pyside6" + +# Qt5 +QT_API_PYQT5 = 'pyqt5' +QT_API_PYSIDE2 = 'pyside2' + +# Qt4 +QT_API_PYQT = "pyqt" # Force version 2 +QT_API_PYQTv1 = "pyqtv1" # Force version 2 +QT_API_PYSIDE = "pyside" + +QT_API_PYQT_DEFAULT = "pyqtdefault" # use system default for version 1 vs. 2 + +api_to_module = { + # Qt6 + QT_API_PYQT6: "PyQt6", + QT_API_PYSIDE6: "PySide6", + # Qt5 + QT_API_PYQT5: "PyQt5", + QT_API_PYSIDE2: "PySide2", + # Qt4 + QT_API_PYSIDE: "PySide", + QT_API_PYQT: "PyQt4", + QT_API_PYQTv1: "PyQt4", + # default + QT_API_PYQT_DEFAULT: "PyQt6", +} + + +class ImportDenier(object): + """Import Hook that will guard against bad Qt imports + once IPython commits to a specific binding + """ + + def __init__(self): + self.__forbidden = set() + + def forbid(self, module_name): + sys.modules.pop(module_name, None) + self.__forbidden.add(module_name) + + def find_module(self, fullname, path=None): + if path: + return + if fullname in self.__forbidden: + return self + + def load_module(self, fullname): + raise ImportError(""" + Importing %s disabled by IPython, which has + already imported an Incompatible QT Binding: %s + """ % (fullname, loaded_api())) + + +ID = ImportDenier() +sys.meta_path.insert(0, ID) + + +def commit_api(api): + """Commit to a particular API, and trigger ImportErrors on subsequent + dangerous imports""" + modules = set(api_to_module.values()) + + modules.remove(api_to_module[api]) + for mod in modules: + ID.forbid(mod) + + +def loaded_api(): + """Return which API is loaded, if any + + If this returns anything besides None, + importing any other Qt binding is unsafe. + + Returns + ------- + None, 'pyside6', 'pyqt6', 'pyside2', 'pyside', 'pyqt', 'pyqt5', 'pyqtv1' + """ + if sys.modules.get("PyQt6.QtCore"): + return QT_API_PYQT6 + elif sys.modules.get("PySide6.QtCore"): + return QT_API_PYSIDE6 + elif sys.modules.get("PyQt5.QtCore"): + return QT_API_PYQT5 + elif sys.modules.get("PySide2.QtCore"): + return QT_API_PYSIDE2 + elif sys.modules.get("PyQt4.QtCore"): + if qtapi_version() == 2: + return QT_API_PYQT + else: + return QT_API_PYQTv1 + elif sys.modules.get("PySide.QtCore"): + return QT_API_PYSIDE + + return None + + +def has_binding(api): + """Safely check for PyQt4/5, PySide or PySide2, without importing submodules + + Parameters + ---------- + api : str [ 'pyqtv1' | 'pyqt' | 'pyqt5' | 'pyside' | 'pyside2' | 'pyqtdefault'] + Which module to check for + + Returns + ------- + True if the relevant module appears to be importable + """ + module_name = api_to_module[api] + from importlib.util import find_spec + + required = ['QtCore', 'QtGui', 'QtSvg'] + if api in (QT_API_PYQT5, QT_API_PYSIDE2, QT_API_PYQT6, QT_API_PYSIDE6): + # QT5 requires QtWidgets too + required.append('QtWidgets') + + for submod in required: + try: + spec = find_spec('%s.%s' % (module_name, submod)) + except ImportError: + # Package (e.g. PyQt5) not found + return False + else: + if spec is None: + # Submodule (e.g. 
PyQt5.QtCore) not found + return False + + if api == QT_API_PYSIDE: + # We can also safely check PySide version + import PySide + return check_version(PySide.__version__, '1.0.3') + + return True + + +def qtapi_version(): + """Return which QString API has been set, if any + + Returns + ------- + The QString API version (1 or 2), or None if not set + """ + try: + import sip + except ImportError: + # as of PyQt5 5.11, sip is no longer available as a top-level + # module and needs to be imported from the PyQt5 namespace + try: + from PyQt5 import sip + except ImportError: + return + try: + return sip.getapi('QString') + except ValueError: + return + + +def can_import(api): + """Safely query whether an API is importable, without importing it""" + if not has_binding(api): + return False + + current = loaded_api() + if api == QT_API_PYQT_DEFAULT: + return current in [QT_API_PYQT6, None] + else: + return current in [api, None] + + +def import_pyqt4(version=2): + """ + Import PyQt4 + + Parameters + ---------- + version : 1, 2, or None + Which QString/QVariant API to use. Set to None to use the system + default + + ImportErrors rasied within this function are non-recoverable + """ + # The new-style string API (version=2) automatically + # converts QStrings to Unicode Python strings. Also, automatically unpacks + # QVariants to their underlying objects. + import sip + + if version is not None: + sip.setapi('QString', version) + sip.setapi('QVariant', version) + + from PyQt4 import QtGui, QtCore, QtSvg + + if not check_version(QtCore.PYQT_VERSION_STR, '4.7'): + raise ImportError("IPython requires PyQt4 >= 4.7, found %s" % + QtCore.PYQT_VERSION_STR) + + # Alias PyQt-specific functions for PySide compatibility. + QtCore.Signal = QtCore.pyqtSignal + QtCore.Slot = QtCore.pyqtSlot + + # query for the API version (in case version == None) + version = sip.getapi('QString') + api = QT_API_PYQTv1 if version == 1 else QT_API_PYQT + return QtCore, QtGui, QtSvg, api + + +def import_pyqt5(): + """ + Import PyQt5 + + ImportErrors rasied within this function are non-recoverable + """ + + from PyQt5 import QtCore, QtSvg, QtWidgets, QtGui + + # Alias PyQt-specific functions for PySide compatibility. + QtCore.Signal = QtCore.pyqtSignal + QtCore.Slot = QtCore.pyqtSlot + + # Join QtGui and QtWidgets for Qt4 compatibility. + QtGuiCompat = types.ModuleType('QtGuiCompat') + QtGuiCompat.__dict__.update(QtGui.__dict__) + QtGuiCompat.__dict__.update(QtWidgets.__dict__) + + api = QT_API_PYQT5 + return QtCore, QtGuiCompat, QtSvg, api + + +def import_pyqt6(): + """ + Import PyQt6 + + ImportErrors rasied within this function are non-recoverable + """ + + from PyQt6 import QtCore, QtSvg, QtWidgets, QtGui + + # Alias PyQt-specific functions for PySide compatibility. + QtCore.Signal = QtCore.pyqtSignal + QtCore.Slot = QtCore.pyqtSlot + + # Join QtGui and QtWidgets for Qt4 compatibility. + QtGuiCompat = types.ModuleType("QtGuiCompat") + QtGuiCompat.__dict__.update(QtGui.__dict__) + QtGuiCompat.__dict__.update(QtWidgets.__dict__) + + api = QT_API_PYQT6 + return QtCore, QtGuiCompat, QtSvg, api + + +def import_pyside(): + """ + Import PySide + + ImportErrors raised within this function are non-recoverable + """ + from PySide import QtGui, QtCore, QtSvg + return QtCore, QtGui, QtSvg, QT_API_PYSIDE + +def import_pyside2(): + """ + Import PySide2 + + ImportErrors raised within this function are non-recoverable + """ + from PySide2 import QtGui, QtCore, QtSvg, QtWidgets, QtPrintSupport + + # Join QtGui and QtWidgets for Qt4 compatibility. 
+ QtGuiCompat = types.ModuleType('QtGuiCompat') + QtGuiCompat.__dict__.update(QtGui.__dict__) + QtGuiCompat.__dict__.update(QtWidgets.__dict__) + QtGuiCompat.__dict__.update(QtPrintSupport.__dict__) + + return QtCore, QtGuiCompat, QtSvg, QT_API_PYSIDE2 + + +def import_pyside6(): + """ + Import PySide6 + + ImportErrors raised within this function are non-recoverable + """ + from PySide6 import QtGui, QtCore, QtSvg, QtWidgets, QtPrintSupport + + # Join QtGui and QtWidgets for Qt4 compatibility. + QtGuiCompat = types.ModuleType("QtGuiCompat") + QtGuiCompat.__dict__.update(QtGui.__dict__) + QtGuiCompat.__dict__.update(QtWidgets.__dict__) + QtGuiCompat.__dict__.update(QtPrintSupport.__dict__) + + return QtCore, QtGuiCompat, QtSvg, QT_API_PYSIDE6 + + +def load_qt(api_options): + """ + Attempt to import Qt, given a preference list + of permissible bindings + + It is safe to call this function multiple times. + + Parameters + ---------- + api_options: List of strings + The order of APIs to try. Valid items are 'pyside', 'pyside2', + 'pyqt', 'pyqt5', 'pyqtv1' and 'pyqtdefault' + + Returns + ------- + + A tuple of QtCore, QtGui, QtSvg, QT_API + The first three are the Qt modules. The last is the + string indicating which module was loaded. + + Raises + ------ + ImportError, if it isn't possible to import any requested + bindings (either because they aren't installed, or because + an incompatible library has already been installed) + """ + loaders = { + # Qt6 + QT_API_PYQT6: import_pyqt6, + QT_API_PYSIDE6: import_pyside6, + # Qt5 + QT_API_PYQT5: import_pyqt5, + QT_API_PYSIDE2: import_pyside2, + # Qt4 + QT_API_PYSIDE: import_pyside, + QT_API_PYQT: import_pyqt4, + QT_API_PYQTv1: partial(import_pyqt4, version=1), + # default + QT_API_PYQT_DEFAULT: import_pyqt6, + } + + for api in api_options: + + if api not in loaders: + raise RuntimeError( + "Invalid Qt API %r, valid values are: %s" % + (api, ", ".join(["%r" % k for k in loaders.keys()]))) + + if not can_import(api): + continue + + #cannot safely recover from an ImportError during this + result = loaders[api]() + api = result[-1] # changed if api = QT_API_PYQT_DEFAULT + commit_api(api) + return result + else: + raise ImportError(""" + Could not load requested Qt binding. Please ensure that + PyQt4 >= 4.7, PyQt5, PySide >= 1.0.3 or PySide2 is available, + and only one is imported per session. + + Currently-imported Qt library: %r + PyQt4 available (requires QtCore, QtGui, QtSvg): %s + PyQt5 available (requires QtCore, QtGui, QtSvg, QtWidgets): %s + PySide >= 1.0.3 installed: %s + PySide2 installed: %s + Tried to load: %r + """ % (loaded_api(), + has_binding(QT_API_PYQT), + has_binding(QT_API_PYQT5), + has_binding(QT_API_PYSIDE), + has_binding(QT_API_PYSIDE2), + api_options)) + + +def enum_factory(QT_API, QtCore): + """Construct an enum helper to account for PyQt5 <-> PyQt6 changes.""" + + @lru_cache(None) + def _enum(name): + # foo.bar.Enum.Entry (PyQt6) <=> foo.bar.Entry (non-PyQt6). + return operator.attrgetter( + name if QT_API == QT_API_PYQT6 else name.rpartition(".")[0] + )(sys.modules[QtCore.__package__]) + + return _enum diff --git a/contrib/python/ipython/py3/IPython/frontend.py b/contrib/python/ipython/py3/IPython/frontend.py index f231569e5e6..9cc3eaff2f0 100644 --- a/contrib/python/ipython/py3/IPython/frontend.py +++ b/contrib/python/ipython/py3/IPython/frontend.py @@ -1,29 +1,29 @@ -""" -Shim to maintain backwards compatibility with old frontend imports. 
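The qt_loaders module shown above relies on a small but important trick: once one Qt binding has been imported, every other binding is blocked by pushing a custom finder onto sys.meta_path. The ImportDenier uses the legacy find_module/load_module protocol; a minimal sketch of the same idea with the modern MetaPathFinder.find_spec API might look like this (the _DenyFinder name and the blocked module names are illustrative, not part of IPython):

    import sys
    from importlib.abc import MetaPathFinder

    class _DenyFinder(MetaPathFinder):
        # Illustrative only: refuse top-level imports of the listed modules.
        def __init__(self, forbidden):
            self.forbidden = set(forbidden)

        def find_spec(self, fullname, path=None, target=None):
            if path is None and fullname in self.forbidden:
                raise ImportError(fullname + " is disabled for this session")
            return None  # defer to the remaining finders on sys.meta_path

    sys.meta_path.insert(0, _DenyFinder({"PySide2", "PyQt5"}))

Callers normally go through load_qt() rather than touching this machinery directly, e.g. QtCore, QtGui, QtSvg, QT_API = load_qt(["pyqt5", "pyside2"]), which tries each requested binding in order and raises ImportError if none can be loaded safely.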
- -We have moved all contents of the old `frontend` subpackage into top-level -subpackages (`html`, `qt` and `terminal`), and flattened the notebook into -just `IPython.html`, formerly `IPython.frontend.html.notebook`. - -This will let code that was making `from IPython.frontend...` calls continue -working, though a warning will be printed. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -from warnings import warn - -from IPython.utils.shimmodule import ShimModule, ShimWarning - -warn("The top-level `frontend` package has been deprecated since IPython 1.0. " - "All its subpackages have been moved to the top `IPython` level.", ShimWarning) - -# Unconditionally insert the shim into sys.modules so that further import calls -# trigger the custom attribute access above - -sys.modules['IPython.frontend.html.notebook'] = ShimModule( - src='IPython.frontend.html.notebook', mirror='IPython.html') -sys.modules['IPython.frontend'] = ShimModule( - src='IPython.frontend', mirror='IPython') +""" +Shim to maintain backwards compatibility with old frontend imports. + +We have moved all contents of the old `frontend` subpackage into top-level +subpackages (`html`, `qt` and `terminal`), and flattened the notebook into +just `IPython.html`, formerly `IPython.frontend.html.notebook`. + +This will let code that was making `from IPython.frontend...` calls continue +working, though a warning will be printed. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import sys +from warnings import warn + +from IPython.utils.shimmodule import ShimModule, ShimWarning + +warn("The top-level `frontend` package has been deprecated since IPython 1.0. " + "All its subpackages have been moved to the top `IPython` level.", ShimWarning) + +# Unconditionally insert the shim into sys.modules so that further import calls +# trigger the custom attribute access above + +sys.modules['IPython.frontend.html.notebook'] = ShimModule( + src='IPython.frontend.html.notebook', mirror='IPython.html') +sys.modules['IPython.frontend'] = ShimModule( + src='IPython.frontend', mirror='IPython') diff --git a/contrib/python/ipython/py3/IPython/html.py b/contrib/python/ipython/py3/IPython/html.py index 9f1d0759971..050be5c5991 100644 --- a/contrib/python/ipython/py3/IPython/html.py +++ b/contrib/python/ipython/py3/IPython/html.py @@ -1,28 +1,28 @@ -""" -Shim to maintain backwards compatibility with old IPython.html imports. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -from warnings import warn - -from IPython.utils.shimmodule import ShimModule, ShimWarning - -warn("The `IPython.html` package has been deprecated since IPython 4.0. " - "You should import from `notebook` instead. " - "`IPython.html.widgets` has moved to `ipywidgets`.", ShimWarning) - -_widgets = sys.modules['IPython.html.widgets'] = ShimModule( - src='IPython.html.widgets', mirror='ipywidgets') - -_html = ShimModule( - src='IPython.html', mirror='notebook') - -# hook up widgets -_html.widgets = _widgets -sys.modules['IPython.html'] = _html - -if __name__ == '__main__': - from notebook import notebookapp as app - app.launch_new_instance() +""" +Shim to maintain backwards compatibility with old IPython.html imports. +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
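All of these shim files lean on the same mechanism: Python consults sys.modules before running any finder, so pre-seeding it makes a deprecated import path resolve to the new package. A much smaller, eager sketch of the idea (the oldlib/newlib names are placeholders, not real packages):

    import importlib
    import sys

    # After this line, `import oldlib` returns the already-loaded newlib module,
    # because the import system checks sys.modules before searching for a module.
    sys.modules["oldlib"] = importlib.import_module("newlib")

IPython's ShimModule goes further: it defers importing the mirror package until an attribute is first accessed, and the shim files above pair it with a ShimWarning so users know to update their imports.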
+ +import sys +from warnings import warn + +from IPython.utils.shimmodule import ShimModule, ShimWarning + +warn("The `IPython.html` package has been deprecated since IPython 4.0. " + "You should import from `notebook` instead. " + "`IPython.html.widgets` has moved to `ipywidgets`.", ShimWarning) + +_widgets = sys.modules['IPython.html.widgets'] = ShimModule( + src='IPython.html.widgets', mirror='ipywidgets') + +_html = ShimModule( + src='IPython.html', mirror='notebook') + +# hook up widgets +_html.widgets = _widgets +sys.modules['IPython.html'] = _html + +if __name__ == '__main__': + from notebook import notebookapp as app + app.launch_new_instance() diff --git a/contrib/python/ipython/py3/IPython/kernel/__init__.py b/contrib/python/ipython/py3/IPython/kernel/__init__.py index f40f6aa928a..70a05ed4aa5 100644 --- a/contrib/python/ipython/py3/IPython/kernel/__init__.py +++ b/contrib/python/ipython/py3/IPython/kernel/__init__.py @@ -1,35 +1,35 @@ -""" -Shim to maintain backwards compatibility with old IPython.kernel imports. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -from warnings import warn - -from IPython.utils.shimmodule import ShimModule, ShimWarning - -warn("The `IPython.kernel` package has been deprecated since IPython 4.0." - "You should import from ipykernel or jupyter_client instead.", ShimWarning) - - -# zmq subdir is gone -sys.modules['IPython.kernel.zmq.session'] = ShimModule( - src='IPython.kernel.zmq.session', mirror='jupyter_client.session') -sys.modules['IPython.kernel.zmq'] = ShimModule( - src='IPython.kernel.zmq', mirror='ipykernel') - -for pkg in ('comm', 'inprocess'): - src = 'IPython.kernel.%s' % pkg - sys.modules[src] = ShimModule(src=src, mirror='ipykernel.%s' % pkg) - -for pkg in ('ioloop', 'blocking'): - src = 'IPython.kernel.%s' % pkg - sys.modules[src] = ShimModule(src=src, mirror='jupyter_client.%s' % pkg) - -# required for `from IPython.kernel import PKG` -from ipykernel import comm, inprocess -from jupyter_client import ioloop, blocking -# public API -from ipykernel.connect import * -from jupyter_client import * +""" +Shim to maintain backwards compatibility with old IPython.kernel imports. +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import sys +from warnings import warn + +from IPython.utils.shimmodule import ShimModule, ShimWarning + +warn("The `IPython.kernel` package has been deprecated since IPython 4.0." 
+ "You should import from ipykernel or jupyter_client instead.", ShimWarning) + + +# zmq subdir is gone +sys.modules['IPython.kernel.zmq.session'] = ShimModule( + src='IPython.kernel.zmq.session', mirror='jupyter_client.session') +sys.modules['IPython.kernel.zmq'] = ShimModule( + src='IPython.kernel.zmq', mirror='ipykernel') + +for pkg in ('comm', 'inprocess'): + src = 'IPython.kernel.%s' % pkg + sys.modules[src] = ShimModule(src=src, mirror='ipykernel.%s' % pkg) + +for pkg in ('ioloop', 'blocking'): + src = 'IPython.kernel.%s' % pkg + sys.modules[src] = ShimModule(src=src, mirror='jupyter_client.%s' % pkg) + +# required for `from IPython.kernel import PKG` +from ipykernel import comm, inprocess +from jupyter_client import ioloop, blocking +# public API +from ipykernel.connect import * +from jupyter_client import * diff --git a/contrib/python/ipython/py3/IPython/kernel/__main__.py b/contrib/python/ipython/py3/IPython/kernel/__main__.py index adafe73d029..d1f0cf53340 100644 --- a/contrib/python/ipython/py3/IPython/kernel/__main__.py +++ b/contrib/python/ipython/py3/IPython/kernel/__main__.py @@ -1,3 +1,3 @@ -if __name__ == '__main__': - from ipykernel import kernelapp as app - app.launch_new_instance() +if __name__ == '__main__': + from ipykernel import kernelapp as app + app.launch_new_instance() diff --git a/contrib/python/ipython/py3/IPython/kernel/adapter.py b/contrib/python/ipython/py3/IPython/kernel/adapter.py index 8a52dbbc2ba..3b8c046b2d1 100644 --- a/contrib/python/ipython/py3/IPython/kernel/adapter.py +++ b/contrib/python/ipython/py3/IPython/kernel/adapter.py @@ -1 +1 @@ -from jupyter_client.adapter import * +from jupyter_client.adapter import * diff --git a/contrib/python/ipython/py3/IPython/kernel/channels.py b/contrib/python/ipython/py3/IPython/kernel/channels.py index f204db9ca30..8c7fe2a0630 100644 --- a/contrib/python/ipython/py3/IPython/kernel/channels.py +++ b/contrib/python/ipython/py3/IPython/kernel/channels.py @@ -1 +1 @@ -from jupyter_client.channels import * +from jupyter_client.channels import * diff --git a/contrib/python/ipython/py3/IPython/kernel/channelsabc.py b/contrib/python/ipython/py3/IPython/kernel/channelsabc.py index c0a44511b14..88944012d44 100644 --- a/contrib/python/ipython/py3/IPython/kernel/channelsabc.py +++ b/contrib/python/ipython/py3/IPython/kernel/channelsabc.py @@ -1 +1 @@ -from jupyter_client.channelsabc import * +from jupyter_client.channelsabc import * diff --git a/contrib/python/ipython/py3/IPython/kernel/client.py b/contrib/python/ipython/py3/IPython/kernel/client.py index d9768e5bc4c..a98690b74cc 100644 --- a/contrib/python/ipython/py3/IPython/kernel/client.py +++ b/contrib/python/ipython/py3/IPython/kernel/client.py @@ -1 +1 @@ -from jupyter_client.client import * +from jupyter_client.client import * diff --git a/contrib/python/ipython/py3/IPython/kernel/clientabc.py b/contrib/python/ipython/py3/IPython/kernel/clientabc.py index e82cb19e2a5..e0cf06c9420 100644 --- a/contrib/python/ipython/py3/IPython/kernel/clientabc.py +++ b/contrib/python/ipython/py3/IPython/kernel/clientabc.py @@ -1 +1 @@ -from jupyter_client.clientabc import * +from jupyter_client.clientabc import * diff --git a/contrib/python/ipython/py3/IPython/kernel/connect.py b/contrib/python/ipython/py3/IPython/kernel/connect.py index be992cc709c..5b6d40a5d34 100644 --- a/contrib/python/ipython/py3/IPython/kernel/connect.py +++ b/contrib/python/ipython/py3/IPython/kernel/connect.py @@ -1,2 +1,2 @@ -from ipykernel.connect import * -from jupyter_client.connect import * +from 
ipykernel.connect import * +from jupyter_client.connect import * diff --git a/contrib/python/ipython/py3/IPython/kernel/kernelspec.py b/contrib/python/ipython/py3/IPython/kernel/kernelspec.py index 8a643379200..123419b2f54 100644 --- a/contrib/python/ipython/py3/IPython/kernel/kernelspec.py +++ b/contrib/python/ipython/py3/IPython/kernel/kernelspec.py @@ -1 +1 @@ -from jupyter_client.kernelspec import * +from jupyter_client.kernelspec import * diff --git a/contrib/python/ipython/py3/IPython/kernel/kernelspecapp.py b/contrib/python/ipython/py3/IPython/kernel/kernelspecapp.py index 16f9f9eba96..28cd33abd35 100644 --- a/contrib/python/ipython/py3/IPython/kernel/kernelspecapp.py +++ b/contrib/python/ipython/py3/IPython/kernel/kernelspecapp.py @@ -1 +1 @@ -from jupyter_client.kernelspecapp import * +from jupyter_client.kernelspecapp import * diff --git a/contrib/python/ipython/py3/IPython/kernel/launcher.py b/contrib/python/ipython/py3/IPython/kernel/launcher.py index 0500ab198b9..1953bc4809e 100644 --- a/contrib/python/ipython/py3/IPython/kernel/launcher.py +++ b/contrib/python/ipython/py3/IPython/kernel/launcher.py @@ -1 +1 @@ -from jupyter_client.launcher import * +from jupyter_client.launcher import * diff --git a/contrib/python/ipython/py3/IPython/kernel/manager.py b/contrib/python/ipython/py3/IPython/kernel/manager.py index 9d9d84806ff..c88097cff64 100644 --- a/contrib/python/ipython/py3/IPython/kernel/manager.py +++ b/contrib/python/ipython/py3/IPython/kernel/manager.py @@ -1 +1 @@ -from jupyter_client.manager import * +from jupyter_client.manager import * diff --git a/contrib/python/ipython/py3/IPython/kernel/managerabc.py b/contrib/python/ipython/py3/IPython/kernel/managerabc.py index f748bdf2ce6..6b40827ff88 100644 --- a/contrib/python/ipython/py3/IPython/kernel/managerabc.py +++ b/contrib/python/ipython/py3/IPython/kernel/managerabc.py @@ -1 +1 @@ -from jupyter_client.managerabc import * +from jupyter_client.managerabc import * diff --git a/contrib/python/ipython/py3/IPython/kernel/multikernelmanager.py b/contrib/python/ipython/py3/IPython/kernel/multikernelmanager.py index 71fd8dbb34f..ce576e27eaf 100644 --- a/contrib/python/ipython/py3/IPython/kernel/multikernelmanager.py +++ b/contrib/python/ipython/py3/IPython/kernel/multikernelmanager.py @@ -1 +1 @@ -from jupyter_client.multikernelmanager import * +from jupyter_client.multikernelmanager import * diff --git a/contrib/python/ipython/py3/IPython/kernel/restarter.py b/contrib/python/ipython/py3/IPython/kernel/restarter.py index 2b1de99c8e2..dc24117c3ad 100644 --- a/contrib/python/ipython/py3/IPython/kernel/restarter.py +++ b/contrib/python/ipython/py3/IPython/kernel/restarter.py @@ -1 +1 @@ -from jupyter_client.restarter import * +from jupyter_client.restarter import * diff --git a/contrib/python/ipython/py3/IPython/kernel/threaded.py b/contrib/python/ipython/py3/IPython/kernel/threaded.py index 97997eeb705..4a1072f7fe3 100644 --- a/contrib/python/ipython/py3/IPython/kernel/threaded.py +++ b/contrib/python/ipython/py3/IPython/kernel/threaded.py @@ -1 +1 @@ -from jupyter_client.threaded import * +from jupyter_client.threaded import * diff --git a/contrib/python/ipython/py3/IPython/lib/__init__.py b/contrib/python/ipython/py3/IPython/lib/__init__.py index 21e34d84cad..8eb89012df1 100644 --- a/contrib/python/ipython/py3/IPython/lib/__init__.py +++ b/contrib/python/ipython/py3/IPython/lib/__init__.py @@ -1,21 +1,21 @@ -# encoding: utf-8 -""" -Extra capabilities for IPython -""" - 
-#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -from IPython.lib.security import passwd - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- +# encoding: utf-8 +""" +Extra capabilities for IPython +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +from IPython.lib.security import passwd + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- diff --git a/contrib/python/ipython/py3/IPython/lib/backgroundjobs.py b/contrib/python/ipython/py3/IPython/lib/backgroundjobs.py index 5c57b034286..31997e13f28 100644 --- a/contrib/python/ipython/py3/IPython/lib/backgroundjobs.py +++ b/contrib/python/ipython/py3/IPython/lib/backgroundjobs.py @@ -1,491 +1,491 @@ -# -*- coding: utf-8 -*- -"""Manage background (threaded) jobs conveniently from an interactive shell. - -This module provides a BackgroundJobManager class. This is the main class -meant for public usage, it implements an object which can create and manage -new background jobs. - -It also provides the actual job classes managed by these BackgroundJobManager -objects, see their docstrings below. - - -This system was inspired by discussions with B. Granger and the -BackgroundCommand class described in the book Python Scripting for -Computational Science, by H. P. Langtangen: - -http://folk.uio.no/hpl/scripting - -(although ultimately no code from this text was used, as IPython's system is a -separate implementation). - -An example notebook is provided in our documentation illustrating interactive -use of the system. -""" - -#***************************************************************************** -# Copyright (C) 2005-2006 Fernando Perez <fperez@colorado.edu> -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#***************************************************************************** - -# Code begins -import sys -import threading - -from IPython import get_ipython -from IPython.core.ultratb import AutoFormattedTB -from logging import error, debug - - -class BackgroundJobManager(object): - """Class to manage a pool of backgrounded threaded jobs. - - Below, we assume that 'jobs' is a BackgroundJobManager instance. - - Usage summary (see the method docstrings for details): - - jobs.new(...) 
-> start a new job - - jobs() or jobs.status() -> print status summary of all jobs - - jobs[N] -> returns job number N. - - foo = jobs[N].result -> assign to variable foo the result of job N - - jobs[N].traceback() -> print the traceback of dead job N - - jobs.remove(N) -> remove (finished) job N - - jobs.flush() -> remove all finished jobs - - As a convenience feature, BackgroundJobManager instances provide the - utility result and traceback methods which retrieve the corresponding - information from the jobs list: - - jobs.result(N) <--> jobs[N].result - jobs.traceback(N) <--> jobs[N].traceback() - - While this appears minor, it allows you to use tab completion - interactively on the job manager instance. - """ - - def __init__(self): - # Lists for job management, accessed via a property to ensure they're - # up to date.x - self._running = [] - self._completed = [] - self._dead = [] - # A dict of all jobs, so users can easily access any of them - self.all = {} - # For reporting - self._comp_report = [] - self._dead_report = [] - # Store status codes locally for fast lookups - self._s_created = BackgroundJobBase.stat_created_c - self._s_running = BackgroundJobBase.stat_running_c - self._s_completed = BackgroundJobBase.stat_completed_c - self._s_dead = BackgroundJobBase.stat_dead_c - self._current_job_id = 0 - - @property - def running(self): - self._update_status() - return self._running - - @property - def dead(self): - self._update_status() - return self._dead - - @property - def completed(self): - self._update_status() - return self._completed - - def new(self, func_or_exp, *args, **kwargs): - """Add a new background job and start it in a separate thread. - - There are two types of jobs which can be created: - - 1. Jobs based on expressions which can be passed to an eval() call. - The expression must be given as a string. For example: - - job_manager.new('myfunc(x,y,z=1)'[,glob[,loc]]) - - The given expression is passed to eval(), along with the optional - global/local dicts provided. If no dicts are given, they are - extracted automatically from the caller's frame. - - A Python statement is NOT a valid eval() expression. Basically, you - can only use as an eval() argument something which can go on the right - of an '=' sign and be assigned to a variable. - - For example,"print 'hello'" is not valid, but '2+3' is. - - 2. Jobs given a function object, optionally passing additional - positional arguments: - - job_manager.new(myfunc, x, y) - - The function is called with the given arguments. - - If you need to pass keyword arguments to your function, you must - supply them as a dict named kw: - - job_manager.new(myfunc, x, y, kw=dict(z=1)) - - The reason for this assymmetry is that the new() method needs to - maintain access to its own keywords, and this prevents name collisions - between arguments to new() and arguments to your own functions. - - In both cases, the result is stored in the job.result field of the - background job object. - - You can set `daemon` attribute of the thread by giving the keyword - argument `daemon`. - - Notes and caveats: - - 1. All threads running share the same standard output. Thus, if your - background jobs generate output, it will come out on top of whatever - you are currently writing. For this reason, background jobs are best - used with silent functions which simply return their output. - - 2. Threads also all work within the same global namespace, and this - system does not lock interactive variables. 
So if you send job to the - background which operates on a mutable object for a long time, and - start modifying that same mutable object interactively (or in another - backgrounded job), all sorts of bizarre behaviour will occur. - - 3. If a background job is spending a lot of time inside a C extension - module which does not release the Python Global Interpreter Lock - (GIL), this will block the IPython prompt. This is simply because the - Python interpreter can only switch between threads at Python - bytecodes. While the execution is inside C code, the interpreter must - simply wait unless the extension module releases the GIL. - - 4. There is no way, due to limitations in the Python threads library, - to kill a thread once it has started.""" - - if callable(func_or_exp): - kw = kwargs.get('kw',{}) - job = BackgroundJobFunc(func_or_exp,*args,**kw) - elif isinstance(func_or_exp, str): - if not args: - frame = sys._getframe(1) - glob, loc = frame.f_globals, frame.f_locals - elif len(args)==1: - glob = loc = args[0] - elif len(args)==2: - glob,loc = args - else: - raise ValueError( - 'Expression jobs take at most 2 args (globals,locals)') - job = BackgroundJobExpr(func_or_exp, glob, loc) - else: - raise TypeError('invalid args for new job') - - if kwargs.get('daemon', False): - job.daemon = True - job.num = self._current_job_id - self._current_job_id += 1 - self.running.append(job) - self.all[job.num] = job - debug('Starting job # %s in a separate thread.' % job.num) - job.start() - return job - - def __getitem__(self, job_key): - num = job_key if isinstance(job_key, int) else job_key.num - return self.all[num] - - def __call__(self): - """An alias to self.status(), - - This allows you to simply call a job manager instance much like the - Unix `jobs` shell command.""" - - return self.status() - - def _update_status(self): - """Update the status of the job lists. - - This method moves finished jobs to one of two lists: - - self.completed: jobs which completed successfully - - self.dead: jobs which finished but died. - - It also copies those jobs to corresponding _report lists. These lists - are used to report jobs completed/dead since the last update, and are - then cleared by the reporting function after each call.""" - - # Status codes - srun, scomp, sdead = self._s_running, self._s_completed, self._s_dead - # State lists, use the actual lists b/c the public names are properties - # that call this very function on access - running, completed, dead = self._running, self._completed, self._dead - - # Now, update all state lists - for num, job in enumerate(running): - stat = job.stat_code - if stat == srun: - continue - elif stat == scomp: - completed.append(job) - self._comp_report.append(job) - running[num] = False - elif stat == sdead: - dead.append(job) - self._dead_report.append(job) - running[num] = False - # Remove dead/completed jobs from running list - running[:] = filter(None, running) - - def _group_report(self,group,name): - """Report summary for a given job group. - - Return True if the group had any elements.""" - - if group: - print('%s jobs:' % name) - for job in group: - print('%s : %s' % (job.num,job)) - print() - return True - - def _group_flush(self,group,name): - """Flush a given job group - - Return True if the group had any elements.""" - - njobs = len(group) - if njobs: - plural = {1:''}.setdefault(njobs,'s') - print('Flushing %s %s job%s.' % (njobs,name,plural)) - group[:] = [] - return True - - def _status_new(self): - """Print the status of newly finished jobs. 
- - Return True if any new jobs are reported. - - This call resets its own state every time, so it only reports jobs - which have finished since the last time it was called.""" - - self._update_status() - new_comp = self._group_report(self._comp_report, 'Completed') - new_dead = self._group_report(self._dead_report, - 'Dead, call jobs.traceback() for details') - self._comp_report[:] = [] - self._dead_report[:] = [] - return new_comp or new_dead - - def status(self,verbose=0): - """Print a status of all jobs currently being managed.""" - - self._update_status() - self._group_report(self.running,'Running') - self._group_report(self.completed,'Completed') - self._group_report(self.dead,'Dead') - # Also flush the report queues - self._comp_report[:] = [] - self._dead_report[:] = [] - - def remove(self,num): - """Remove a finished (completed or dead) job.""" - - try: - job = self.all[num] - except KeyError: - error('Job #%s not found' % num) - else: - stat_code = job.stat_code - if stat_code == self._s_running: - error('Job #%s is still running, it can not be removed.' % num) - return - elif stat_code == self._s_completed: - self.completed.remove(job) - elif stat_code == self._s_dead: - self.dead.remove(job) - - def flush(self): - """Flush all finished jobs (completed and dead) from lists. - - Running jobs are never flushed. - - It first calls _status_new(), to update info. If any jobs have - completed since the last _status_new() call, the flush operation - aborts.""" - - # Remove the finished jobs from the master dict - alljobs = self.all - for job in self.completed+self.dead: - del(alljobs[job.num]) - - # Now flush these lists completely - fl_comp = self._group_flush(self.completed, 'Completed') - fl_dead = self._group_flush(self.dead, 'Dead') - if not (fl_comp or fl_dead): - print('No jobs to flush.') - - def result(self,num): - """result(N) -> return the result of job N.""" - try: - return self.all[num].result - except KeyError: - error('Job #%s not found' % num) - - def _traceback(self, job): - num = job if isinstance(job, int) else job.num - try: - self.all[num].traceback() - except KeyError: - error('Job #%s not found' % num) - - def traceback(self, job=None): - if job is None: - self._update_status() - for deadjob in self.dead: - print("Traceback for: %r" % deadjob) - self._traceback(deadjob) - print() - else: - self._traceback(job) - - -class BackgroundJobBase(threading.Thread): - """Base class to build BackgroundJob classes. - - The derived classes must implement: - - - Their own __init__, since the one here raises NotImplementedError. The - derived constructor must call self._init() at the end, to provide common - initialization. - - - A strform attribute used in calls to __str__. - - - A call() method, which will make the actual execution call and must - return a value to be held in the 'result' field of the job object. - """ - - # Class constants for status, in string and as numerical codes (when - # updating jobs lists, we don't want to do string comparisons). This will - # be done at every user prompt, so it has to be as fast as possible - stat_created = 'Created'; stat_created_c = 0 - stat_running = 'Running'; stat_running_c = 1 - stat_completed = 'Completed'; stat_completed_c = 2 - stat_dead = 'Dead (Exception), call jobs.traceback() for details' - stat_dead_c = -1 - - def __init__(self): - """Must be implemented in subclasses. - - Subclasses must call :meth:`_init` for standard initialisation. 
- """ - raise NotImplementedError("This class can not be instantiated directly.") - - def _init(self): - """Common initialization for all BackgroundJob objects""" - - for attr in ['call','strform']: - assert hasattr(self,attr), "Missing attribute <%s>" % attr - - # The num tag can be set by an external job manager - self.num = None - - self.status = BackgroundJobBase.stat_created - self.stat_code = BackgroundJobBase.stat_created_c - self.finished = False - self.result = '<BackgroundJob has not completed>' - - # reuse the ipython traceback handler if we can get to it, otherwise - # make a new one - try: - make_tb = get_ipython().InteractiveTB.text - except: - make_tb = AutoFormattedTB(mode = 'Context', - color_scheme='NoColor', - tb_offset = 1).text - # Note that the actual API for text() requires the three args to be - # passed in, so we wrap it in a simple lambda. - self._make_tb = lambda : make_tb(None, None, None) - - # Hold a formatted traceback if one is generated. - self._tb = None - - threading.Thread.__init__(self) - - def __str__(self): - return self.strform - - def __repr__(self): - return '<BackgroundJob #%d: %s>' % (self.num, self.strform) - - def traceback(self): - print(self._tb) - - def run(self): - try: - self.status = BackgroundJobBase.stat_running - self.stat_code = BackgroundJobBase.stat_running_c - self.result = self.call() - except: - self.status = BackgroundJobBase.stat_dead - self.stat_code = BackgroundJobBase.stat_dead_c - self.finished = None - self.result = ('<BackgroundJob died, call jobs.traceback() for details>') - self._tb = self._make_tb() - else: - self.status = BackgroundJobBase.stat_completed - self.stat_code = BackgroundJobBase.stat_completed_c - self.finished = True - - -class BackgroundJobExpr(BackgroundJobBase): - """Evaluate an expression as a background job (uses a separate thread).""" - - def __init__(self, expression, glob=None, loc=None): - """Create a new job from a string which can be fed to eval(). - - global/locals dicts can be provided, which will be passed to the eval - call.""" - - # fail immediately if the given expression can't be compiled - self.code = compile(expression,'<BackgroundJob compilation>','eval') - - glob = {} if glob is None else glob - loc = {} if loc is None else loc - self.expression = self.strform = expression - self.glob = glob - self.loc = loc - self._init() - - def call(self): - return eval(self.code,self.glob,self.loc) - - -class BackgroundJobFunc(BackgroundJobBase): - """Run a function call as a background job (uses a separate thread).""" - - def __init__(self, func, *args, **kwargs): - """Create a new job from a callable object. - - Any positional arguments and keyword args given to this constructor - after the initial callable are passed directly to it.""" - - if not callable(func): - raise TypeError( - 'first argument to BackgroundJobFunc must be callable') - - self.func = func - self.args = args - self.kwargs = kwargs - # The string form will only include the function passed, because - # generating string representations of the arguments is a potentially - # _very_ expensive operation (e.g. with large arrays). - self.strform = str(func) - self._init() - - def call(self): - return self.func(*self.args, **self.kwargs) +# -*- coding: utf-8 -*- +"""Manage background (threaded) jobs conveniently from an interactive shell. + +This module provides a BackgroundJobManager class. This is the main class +meant for public usage, it implements an object which can create and manage +new background jobs. 
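Stripped of the manager bookkeeping, the core of these job classes is a thread that remembers either its return value or a formatted traceback. A self-contained sketch of that pattern (TinyJob is an illustrative name, not part of IPython):

    import threading
    import traceback

    class TinyJob(threading.Thread):
        # Run func(*args, **kwargs) in a thread; keep the result or the traceback.
        def __init__(self, func, *args, **kwargs):
            super().__init__()
            self.func, self.args, self.kwargs = func, args, kwargs
            self.result = None
            self.tb = None

        def run(self):
            try:
                self.result = self.func(*self.args, **self.kwargs)
            except Exception:
                self.tb = traceback.format_exc()

    job = TinyJob(sum, range(100))
    job.start()
    job.join()
    print(job.result)  # 4950

BackgroundJobBase adds the status codes, the string form used in status reports, and reuse of IPython's traceback formatter on top of this skeleton. In interactive use the manager drives it for you, as the docstrings above describe: jobs = BackgroundJobManager(); jobs.new('2**10') or jobs.new(time.sleep, 60); then jobs.status(), jobs[0].result, and jobs.traceback(0).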
+ +It also provides the actual job classes managed by these BackgroundJobManager +objects, see their docstrings below. + + +This system was inspired by discussions with B. Granger and the +BackgroundCommand class described in the book Python Scripting for +Computational Science, by H. P. Langtangen: + +http://folk.uio.no/hpl/scripting + +(although ultimately no code from this text was used, as IPython's system is a +separate implementation). + +An example notebook is provided in our documentation illustrating interactive +use of the system. +""" + +#***************************************************************************** +# Copyright (C) 2005-2006 Fernando Perez <fperez@colorado.edu> +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#***************************************************************************** + +# Code begins +import sys +import threading + +from IPython import get_ipython +from IPython.core.ultratb import AutoFormattedTB +from logging import error, debug + + +class BackgroundJobManager(object): + """Class to manage a pool of backgrounded threaded jobs. + + Below, we assume that 'jobs' is a BackgroundJobManager instance. + + Usage summary (see the method docstrings for details): + + jobs.new(...) -> start a new job + + jobs() or jobs.status() -> print status summary of all jobs + + jobs[N] -> returns job number N. + + foo = jobs[N].result -> assign to variable foo the result of job N + + jobs[N].traceback() -> print the traceback of dead job N + + jobs.remove(N) -> remove (finished) job N + + jobs.flush() -> remove all finished jobs + + As a convenience feature, BackgroundJobManager instances provide the + utility result and traceback methods which retrieve the corresponding + information from the jobs list: + + jobs.result(N) <--> jobs[N].result + jobs.traceback(N) <--> jobs[N].traceback() + + While this appears minor, it allows you to use tab completion + interactively on the job manager instance. + """ + + def __init__(self): + # Lists for job management, accessed via a property to ensure they're + # up to date.x + self._running = [] + self._completed = [] + self._dead = [] + # A dict of all jobs, so users can easily access any of them + self.all = {} + # For reporting + self._comp_report = [] + self._dead_report = [] + # Store status codes locally for fast lookups + self._s_created = BackgroundJobBase.stat_created_c + self._s_running = BackgroundJobBase.stat_running_c + self._s_completed = BackgroundJobBase.stat_completed_c + self._s_dead = BackgroundJobBase.stat_dead_c + self._current_job_id = 0 + + @property + def running(self): + self._update_status() + return self._running + + @property + def dead(self): + self._update_status() + return self._dead + + @property + def completed(self): + self._update_status() + return self._completed + + def new(self, func_or_exp, *args, **kwargs): + """Add a new background job and start it in a separate thread. + + There are two types of jobs which can be created: + + 1. Jobs based on expressions which can be passed to an eval() call. + The expression must be given as a string. For example: + + job_manager.new('myfunc(x,y,z=1)'[,glob[,loc]]) + + The given expression is passed to eval(), along with the optional + global/local dicts provided. If no dicts are given, they are + extracted automatically from the caller's frame. + + A Python statement is NOT a valid eval() expression. 
Basically, you + can only use as an eval() argument something which can go on the right + of an '=' sign and be assigned to a variable. + + For example,"print 'hello'" is not valid, but '2+3' is. + + 2. Jobs given a function object, optionally passing additional + positional arguments: + + job_manager.new(myfunc, x, y) + + The function is called with the given arguments. + + If you need to pass keyword arguments to your function, you must + supply them as a dict named kw: + + job_manager.new(myfunc, x, y, kw=dict(z=1)) + + The reason for this assymmetry is that the new() method needs to + maintain access to its own keywords, and this prevents name collisions + between arguments to new() and arguments to your own functions. + + In both cases, the result is stored in the job.result field of the + background job object. + + You can set `daemon` attribute of the thread by giving the keyword + argument `daemon`. + + Notes and caveats: + + 1. All threads running share the same standard output. Thus, if your + background jobs generate output, it will come out on top of whatever + you are currently writing. For this reason, background jobs are best + used with silent functions which simply return their output. + + 2. Threads also all work within the same global namespace, and this + system does not lock interactive variables. So if you send job to the + background which operates on a mutable object for a long time, and + start modifying that same mutable object interactively (or in another + backgrounded job), all sorts of bizarre behaviour will occur. + + 3. If a background job is spending a lot of time inside a C extension + module which does not release the Python Global Interpreter Lock + (GIL), this will block the IPython prompt. This is simply because the + Python interpreter can only switch between threads at Python + bytecodes. While the execution is inside C code, the interpreter must + simply wait unless the extension module releases the GIL. + + 4. There is no way, due to limitations in the Python threads library, + to kill a thread once it has started.""" + + if callable(func_or_exp): + kw = kwargs.get('kw',{}) + job = BackgroundJobFunc(func_or_exp,*args,**kw) + elif isinstance(func_or_exp, str): + if not args: + frame = sys._getframe(1) + glob, loc = frame.f_globals, frame.f_locals + elif len(args)==1: + glob = loc = args[0] + elif len(args)==2: + glob,loc = args + else: + raise ValueError( + 'Expression jobs take at most 2 args (globals,locals)') + job = BackgroundJobExpr(func_or_exp, glob, loc) + else: + raise TypeError('invalid args for new job') + + if kwargs.get('daemon', False): + job.daemon = True + job.num = self._current_job_id + self._current_job_id += 1 + self.running.append(job) + self.all[job.num] = job + debug('Starting job # %s in a separate thread.' % job.num) + job.start() + return job + + def __getitem__(self, job_key): + num = job_key if isinstance(job_key, int) else job_key.num + return self.all[num] + + def __call__(self): + """An alias to self.status(), + + This allows you to simply call a job manager instance much like the + Unix `jobs` shell command.""" + + return self.status() + + def _update_status(self): + """Update the status of the job lists. + + This method moves finished jobs to one of two lists: + - self.completed: jobs which completed successfully + - self.dead: jobs which finished but died. + + It also copies those jobs to corresponding _report lists. 
These lists + are used to report jobs completed/dead since the last update, and are + then cleared by the reporting function after each call.""" + + # Status codes + srun, scomp, sdead = self._s_running, self._s_completed, self._s_dead + # State lists, use the actual lists b/c the public names are properties + # that call this very function on access + running, completed, dead = self._running, self._completed, self._dead + + # Now, update all state lists + for num, job in enumerate(running): + stat = job.stat_code + if stat == srun: + continue + elif stat == scomp: + completed.append(job) + self._comp_report.append(job) + running[num] = False + elif stat == sdead: + dead.append(job) + self._dead_report.append(job) + running[num] = False + # Remove dead/completed jobs from running list + running[:] = filter(None, running) + + def _group_report(self,group,name): + """Report summary for a given job group. + + Return True if the group had any elements.""" + + if group: + print('%s jobs:' % name) + for job in group: + print('%s : %s' % (job.num,job)) + print() + return True + + def _group_flush(self,group,name): + """Flush a given job group + + Return True if the group had any elements.""" + + njobs = len(group) + if njobs: + plural = {1:''}.setdefault(njobs,'s') + print('Flushing %s %s job%s.' % (njobs,name,plural)) + group[:] = [] + return True + + def _status_new(self): + """Print the status of newly finished jobs. + + Return True if any new jobs are reported. + + This call resets its own state every time, so it only reports jobs + which have finished since the last time it was called.""" + + self._update_status() + new_comp = self._group_report(self._comp_report, 'Completed') + new_dead = self._group_report(self._dead_report, + 'Dead, call jobs.traceback() for details') + self._comp_report[:] = [] + self._dead_report[:] = [] + return new_comp or new_dead + + def status(self,verbose=0): + """Print a status of all jobs currently being managed.""" + + self._update_status() + self._group_report(self.running,'Running') + self._group_report(self.completed,'Completed') + self._group_report(self.dead,'Dead') + # Also flush the report queues + self._comp_report[:] = [] + self._dead_report[:] = [] + + def remove(self,num): + """Remove a finished (completed or dead) job.""" + + try: + job = self.all[num] + except KeyError: + error('Job #%s not found' % num) + else: + stat_code = job.stat_code + if stat_code == self._s_running: + error('Job #%s is still running, it can not be removed.' % num) + return + elif stat_code == self._s_completed: + self.completed.remove(job) + elif stat_code == self._s_dead: + self.dead.remove(job) + + def flush(self): + """Flush all finished jobs (completed and dead) from lists. + + Running jobs are never flushed. + + It first calls _status_new(), to update info. 
If any jobs have + completed since the last _status_new() call, the flush operation + aborts.""" + + # Remove the finished jobs from the master dict + alljobs = self.all + for job in self.completed+self.dead: + del(alljobs[job.num]) + + # Now flush these lists completely + fl_comp = self._group_flush(self.completed, 'Completed') + fl_dead = self._group_flush(self.dead, 'Dead') + if not (fl_comp or fl_dead): + print('No jobs to flush.') + + def result(self,num): + """result(N) -> return the result of job N.""" + try: + return self.all[num].result + except KeyError: + error('Job #%s not found' % num) + + def _traceback(self, job): + num = job if isinstance(job, int) else job.num + try: + self.all[num].traceback() + except KeyError: + error('Job #%s not found' % num) + + def traceback(self, job=None): + if job is None: + self._update_status() + for deadjob in self.dead: + print("Traceback for: %r" % deadjob) + self._traceback(deadjob) + print() + else: + self._traceback(job) + + +class BackgroundJobBase(threading.Thread): + """Base class to build BackgroundJob classes. + + The derived classes must implement: + + - Their own __init__, since the one here raises NotImplementedError. The + derived constructor must call self._init() at the end, to provide common + initialization. + + - A strform attribute used in calls to __str__. + + - A call() method, which will make the actual execution call and must + return a value to be held in the 'result' field of the job object. + """ + + # Class constants for status, in string and as numerical codes (when + # updating jobs lists, we don't want to do string comparisons). This will + # be done at every user prompt, so it has to be as fast as possible + stat_created = 'Created'; stat_created_c = 0 + stat_running = 'Running'; stat_running_c = 1 + stat_completed = 'Completed'; stat_completed_c = 2 + stat_dead = 'Dead (Exception), call jobs.traceback() for details' + stat_dead_c = -1 + + def __init__(self): + """Must be implemented in subclasses. + + Subclasses must call :meth:`_init` for standard initialisation. + """ + raise NotImplementedError("This class can not be instantiated directly.") + + def _init(self): + """Common initialization for all BackgroundJob objects""" + + for attr in ['call','strform']: + assert hasattr(self,attr), "Missing attribute <%s>" % attr + + # The num tag can be set by an external job manager + self.num = None + + self.status = BackgroundJobBase.stat_created + self.stat_code = BackgroundJobBase.stat_created_c + self.finished = False + self.result = '<BackgroundJob has not completed>' + + # reuse the ipython traceback handler if we can get to it, otherwise + # make a new one + try: + make_tb = get_ipython().InteractiveTB.text + except: + make_tb = AutoFormattedTB(mode = 'Context', + color_scheme='NoColor', + tb_offset = 1).text + # Note that the actual API for text() requires the three args to be + # passed in, so we wrap it in a simple lambda. + self._make_tb = lambda : make_tb(None, None, None) + + # Hold a formatted traceback if one is generated. 
+ self._tb = None + + threading.Thread.__init__(self) + + def __str__(self): + return self.strform + + def __repr__(self): + return '<BackgroundJob #%d: %s>' % (self.num, self.strform) + + def traceback(self): + print(self._tb) + + def run(self): + try: + self.status = BackgroundJobBase.stat_running + self.stat_code = BackgroundJobBase.stat_running_c + self.result = self.call() + except: + self.status = BackgroundJobBase.stat_dead + self.stat_code = BackgroundJobBase.stat_dead_c + self.finished = None + self.result = ('<BackgroundJob died, call jobs.traceback() for details>') + self._tb = self._make_tb() + else: + self.status = BackgroundJobBase.stat_completed + self.stat_code = BackgroundJobBase.stat_completed_c + self.finished = True + + +class BackgroundJobExpr(BackgroundJobBase): + """Evaluate an expression as a background job (uses a separate thread).""" + + def __init__(self, expression, glob=None, loc=None): + """Create a new job from a string which can be fed to eval(). + + global/locals dicts can be provided, which will be passed to the eval + call.""" + + # fail immediately if the given expression can't be compiled + self.code = compile(expression,'<BackgroundJob compilation>','eval') + + glob = {} if glob is None else glob + loc = {} if loc is None else loc + self.expression = self.strform = expression + self.glob = glob + self.loc = loc + self._init() + + def call(self): + return eval(self.code,self.glob,self.loc) + + +class BackgroundJobFunc(BackgroundJobBase): + """Run a function call as a background job (uses a separate thread).""" + + def __init__(self, func, *args, **kwargs): + """Create a new job from a callable object. + + Any positional arguments and keyword args given to this constructor + after the initial callable are passed directly to it.""" + + if not callable(func): + raise TypeError( + 'first argument to BackgroundJobFunc must be callable') + + self.func = func + self.args = args + self.kwargs = kwargs + # The string form will only include the function passed, because + # generating string representations of the arguments is a potentially + # _very_ expensive operation (e.g. with large arrays). + self.strform = str(func) + self._init() + + def call(self): + return self.func(*self.args, **self.kwargs) diff --git a/contrib/python/ipython/py3/IPython/lib/clipboard.py b/contrib/python/ipython/py3/IPython/lib/clipboard.py index 8868204ec8b..316a8ab1f8a 100644 --- a/contrib/python/ipython/py3/IPython/lib/clipboard.py +++ b/contrib/python/ipython/py3/IPython/lib/clipboard.py @@ -1,69 +1,69 @@ -""" Utilities for accessing the platform's clipboard. -""" - -import subprocess - -from IPython.core.error import TryNext -import IPython.utils.py3compat as py3compat - -class ClipboardEmpty(ValueError): - pass - -def win32_clipboard_get(): - """ Get the current clipboard's text on Windows. - - Requires Mark Hammond's pywin32 extensions. 
- """ - try: - import win32clipboard - except ImportError: - raise TryNext("Getting text from the clipboard requires the pywin32 " - "extensions: http://sourceforge.net/projects/pywin32/") - win32clipboard.OpenClipboard() - try: - text = win32clipboard.GetClipboardData(win32clipboard.CF_UNICODETEXT) - except (TypeError, win32clipboard.error): - try: - text = win32clipboard.GetClipboardData(win32clipboard.CF_TEXT) - text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING) - except (TypeError, win32clipboard.error): - raise ClipboardEmpty - finally: - win32clipboard.CloseClipboard() - return text - -def osx_clipboard_get() -> str: - """ Get the clipboard's text on OS X. - """ - p = subprocess.Popen(['pbpaste', '-Prefer', 'ascii'], - stdout=subprocess.PIPE) - bytes_, stderr = p.communicate() - # Text comes in with old Mac \r line endings. Change them to \n. - bytes_ = bytes_.replace(b'\r', b'\n') - text = py3compat.decode(bytes_) - return text - -def tkinter_clipboard_get(): - """ Get the clipboard's text using Tkinter. - - This is the default on systems that are not Windows or OS X. It may - interfere with other UI toolkits and should be replaced with an - implementation that uses that toolkit. - """ - try: - from tkinter import Tk, TclError - except ImportError: - raise TryNext("Getting text from the clipboard on this platform requires tkinter.") - - root = Tk() - root.withdraw() - try: - text = root.clipboard_get() - except TclError: - raise ClipboardEmpty - finally: - root.destroy() - text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING) - return text - - +""" Utilities for accessing the platform's clipboard. +""" + +import subprocess + +from IPython.core.error import TryNext +import IPython.utils.py3compat as py3compat + +class ClipboardEmpty(ValueError): + pass + +def win32_clipboard_get(): + """ Get the current clipboard's text on Windows. + + Requires Mark Hammond's pywin32 extensions. + """ + try: + import win32clipboard + except ImportError: + raise TryNext("Getting text from the clipboard requires the pywin32 " + "extensions: http://sourceforge.net/projects/pywin32/") + win32clipboard.OpenClipboard() + try: + text = win32clipboard.GetClipboardData(win32clipboard.CF_UNICODETEXT) + except (TypeError, win32clipboard.error): + try: + text = win32clipboard.GetClipboardData(win32clipboard.CF_TEXT) + text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING) + except (TypeError, win32clipboard.error): + raise ClipboardEmpty + finally: + win32clipboard.CloseClipboard() + return text + +def osx_clipboard_get() -> str: + """ Get the clipboard's text on OS X. + """ + p = subprocess.Popen(['pbpaste', '-Prefer', 'ascii'], + stdout=subprocess.PIPE) + bytes_, stderr = p.communicate() + # Text comes in with old Mac \r line endings. Change them to \n. + bytes_ = bytes_.replace(b'\r', b'\n') + text = py3compat.decode(bytes_) + return text + +def tkinter_clipboard_get(): + """ Get the clipboard's text using Tkinter. + + This is the default on systems that are not Windows or OS X. It may + interfere with other UI toolkits and should be replaced with an + implementation that uses that toolkit. 
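The three getters above (pywin32 on Windows, pbpaste on OS X, Tkinter elsewhere) are each platform specific, so a caller has to pick one at runtime. A minimal dispatch sketch, assuming the three functions above are in scope (IPython's actual selection goes through its hooks mechanism, which is why these functions raise TryNext when a backend is unavailable):

    import sys

    def clipboard_get():
        # Pick the getter that matches the current platform.
        if sys.platform == "win32":
            return win32_clipboard_get()
        if sys.platform == "darwin":
            return osx_clipboard_get()
        return tkinter_clipboard_get()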
+ """ + try: + from tkinter import Tk, TclError + except ImportError: + raise TryNext("Getting text from the clipboard on this platform requires tkinter.") + + root = Tk() + root.withdraw() + try: + text = root.clipboard_get() + except TclError: + raise ClipboardEmpty + finally: + root.destroy() + text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING) + return text + + diff --git a/contrib/python/ipython/py3/IPython/lib/deepreload.py b/contrib/python/ipython/py3/IPython/lib/deepreload.py index 53cd64d65f6..bd8c01b2a75 100644 --- a/contrib/python/ipython/py3/IPython/lib/deepreload.py +++ b/contrib/python/ipython/py3/IPython/lib/deepreload.py @@ -1,341 +1,341 @@ -# -*- coding: utf-8 -*- -""" -Provides a reload() function that acts recursively. - -Python's normal :func:`python:reload` function only reloads the module that it's -passed. The :func:`reload` function in this module also reloads everything -imported from that module, which is useful when you're changing files deep -inside a package. - -To use this as your default reload function, type this:: - - import builtins - from IPython.lib import deepreload - builtins.reload = deepreload.reload - -A reference to the original :func:`python:reload` is stored in this module as -:data:`original_reload`, so you can restore it later. - -This code is almost entirely based on knee.py, which is a Python -re-implementation of hierarchical module import. -""" -#***************************************************************************** -# Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu> -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#***************************************************************************** - -import builtins as builtin_mod -from contextlib import contextmanager -import imp -import sys - -from types import ModuleType -from warnings import warn -import types - -original_import = builtin_mod.__import__ - -@contextmanager -def replace_import_hook(new_import): - saved_import = builtin_mod.__import__ - builtin_mod.__import__ = new_import - try: - yield - finally: - builtin_mod.__import__ = saved_import - -def get_parent(globals, level): - """ - parent, name = get_parent(globals, level) - - Return the package that an import is being performed in. If globals comes - from the module foo.bar.bat (not itself a package), this returns the - sys.modules entry for foo.bar. If globals is from a package's __init__.py, - the package's entry in sys.modules is returned. - - If globals doesn't come from a package or a module in a package, or a - corresponding entry is not found in sys.modules, None is returned. 
- """ - orig_level = level - - if not level or not isinstance(globals, dict): - return None, '' - - pkgname = globals.get('__package__', None) - - if pkgname is not None: - # __package__ is set, so use it - if not hasattr(pkgname, 'rindex'): - raise ValueError('__package__ set to non-string') - if len(pkgname) == 0: - if level > 0: - raise ValueError('Attempted relative import in non-package') - return None, '' - name = pkgname - else: - # __package__ not set, so figure it out and set it - if '__name__' not in globals: - return None, '' - modname = globals['__name__'] - - if '__path__' in globals: - # __path__ is set, so modname is already the package name - globals['__package__'] = name = modname - else: - # Normal module, so work out the package name if any - lastdot = modname.rfind('.') - if lastdot < 0 < level: - raise ValueError("Attempted relative import in non-package") - if lastdot < 0: - globals['__package__'] = None - return None, '' - globals['__package__'] = name = modname[:lastdot] - - dot = len(name) - for x in range(level, 1, -1): - try: - dot = name.rindex('.', 0, dot) - except ValueError: - raise ValueError("attempted relative import beyond top-level " - "package") - name = name[:dot] - - try: - parent = sys.modules[name] - except: - if orig_level < 1: - warn("Parent module '%.200s' not found while handling absolute " - "import" % name) - parent = None - else: - raise SystemError("Parent module '%.200s' not loaded, cannot " - "perform relative import" % name) - - # We expect, but can't guarantee, if parent != None, that: - # - parent.__name__ == name - # - parent.__dict__ is globals - # If this is violated... Who cares? - return parent, name - -def load_next(mod, altmod, name, buf): - """ - mod, name, buf = load_next(mod, altmod, name, buf) - - altmod is either None or same as mod - """ - - if len(name) == 0: - # completely empty module name should only happen in - # 'from . import' (or '__import__("")') - return mod, None, buf - - dot = name.find('.') - if dot == 0: - raise ValueError('Empty module name') - - if dot < 0: - subname = name - next = None - else: - subname = name[:dot] - next = name[dot+1:] - - if buf != '': - buf += '.' - buf += subname - - result = import_submodule(mod, subname, buf) - if result is None and mod != altmod: - result = import_submodule(altmod, subname, subname) - if result is not None: - buf = subname - - if result is None: - raise ImportError("No module named %.200s" % name) - - return result, next, buf - - -# Need to keep track of what we've already reloaded to prevent cyclic evil -found_now = {} - -def import_submodule(mod, subname, fullname): - """m = import_submodule(mod, subname, fullname)""" - # Require: - # if mod == None: subname == fullname - # else: mod.__name__ + "." + subname == fullname - - global found_now - if fullname in found_now and fullname in sys.modules: - m = sys.modules[fullname] - else: - print('Reloading', fullname) - found_now[fullname] = 1 - oldm = sys.modules.get(fullname, None) - - if mod is None: - path = None - elif hasattr(mod, '__path__'): - path = mod.__path__ - else: - return None - - try: - # This appears to be necessary on Python 3, because imp.find_module() - # tries to import standard libraries (like io) itself, and we don't - # want them to be processed by our deep_import_hook. 
- with replace_import_hook(original_import): - fp, filename, stuff = imp.find_module(subname, path) - except ImportError: - return None - - try: - m = imp.load_module(fullname, fp, filename, stuff) - except: - # load_module probably removed name from modules because of - # the error. Put back the original module object. - if oldm: - sys.modules[fullname] = oldm - raise - finally: - if fp: fp.close() - - add_submodule(mod, m, fullname, subname) - - return m - -def add_submodule(mod, submod, fullname, subname): - """mod.{subname} = submod""" - if mod is None: - return #Nothing to do here. - - if submod is None: - submod = sys.modules[fullname] - - setattr(mod, subname, submod) - - return - -def ensure_fromlist(mod, fromlist, buf, recursive): - """Handle 'from module import a, b, c' imports.""" - if not hasattr(mod, '__path__'): - return - for item in fromlist: - if not hasattr(item, 'rindex'): - raise TypeError("Item in ``from list'' not a string") - if item == '*': - if recursive: - continue # avoid endless recursion - try: - all = mod.__all__ - except AttributeError: - pass - else: - ret = ensure_fromlist(mod, all, buf, 1) - if not ret: - return 0 - elif not hasattr(mod, item): - import_submodule(mod, item, buf + '.' + item) - -def deep_import_hook(name, globals=None, locals=None, fromlist=None, level=-1): - """Replacement for __import__()""" - parent, buf = get_parent(globals, level) - - head, name, buf = load_next(parent, None if level < 0 else parent, name, buf) - - tail = head - while name: - tail, name, buf = load_next(tail, tail, name, buf) - - # If tail is None, both get_parent and load_next found - # an empty module name: someone called __import__("") or - # doctored faulty bytecode - if tail is None: - raise ValueError('Empty module name') - - if not fromlist: - return head - - ensure_fromlist(tail, fromlist, buf, 0) - return tail - -modules_reloading = {} - -def deep_reload_hook(m): - """Replacement for reload().""" - # Hardcode this one as it would raise a NotImplementedError from the - # bowels of Python and screw up the import machinery after. - # unlike other imports the `exclude` list already in place is not enough. - - if m is types: - return m - if not isinstance(m, ModuleType): - raise TypeError("reload() argument must be module") - - name = m.__name__ - - if name not in sys.modules: - raise ImportError("reload(): module %.200s not in sys.modules" % name) - - global modules_reloading - try: - return modules_reloading[name] - except: - modules_reloading[name] = m - - dot = name.rfind('.') - if dot < 0: - subname = name - path = None - else: - try: - parent = sys.modules[name[:dot]] - except KeyError: - modules_reloading.clear() - raise ImportError("reload(): parent %.200s not in sys.modules" % name[:dot]) - subname = name[dot+1:] - path = getattr(parent, "__path__", None) - - try: - # This appears to be necessary on Python 3, because imp.find_module() - # tries to import standard libraries (like io) itself, and we don't - # want them to be processed by our deep_import_hook. - with replace_import_hook(original_import): - fp, filename, stuff = imp.find_module(subname, path) - finally: - modules_reloading.clear() - - try: - newm = imp.load_module(name, fp, filename, stuff) - except: - # load_module probably removed name from modules because of - # the error. Put back the original module object. 
- sys.modules[name] = m - raise - finally: - if fp: fp.close() - - modules_reloading.clear() - return newm - -# Save the original hooks -original_reload = imp.reload - -# Replacement for reload() -def reload(module, exclude=('sys', 'os.path', 'builtins', '__main__', - 'numpy', 'numpy._globals')): - """Recursively reload all modules used in the given module. Optionally - takes a list of modules to exclude from reloading. The default exclude - list contains sys, __main__, and __builtin__, to prevent, e.g., resetting - display, exception, and io hooks. - """ - global found_now - for i in exclude: - found_now[i] = 1 - try: - with replace_import_hook(deep_import_hook): - return deep_reload_hook(module) - finally: - found_now = {} +# -*- coding: utf-8 -*- +""" +Provides a reload() function that acts recursively. + +Python's normal :func:`python:reload` function only reloads the module that it's +passed. The :func:`reload` function in this module also reloads everything +imported from that module, which is useful when you're changing files deep +inside a package. + +To use this as your default reload function, type this:: + + import builtins + from IPython.lib import deepreload + builtins.reload = deepreload.reload + +A reference to the original :func:`python:reload` is stored in this module as +:data:`original_reload`, so you can restore it later. + +This code is almost entirely based on knee.py, which is a Python +re-implementation of hierarchical module import. +""" +#***************************************************************************** +# Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu> +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#***************************************************************************** + +import builtins as builtin_mod +from contextlib import contextmanager +import imp +import sys + +from types import ModuleType +from warnings import warn +import types + +original_import = builtin_mod.__import__ + +@contextmanager +def replace_import_hook(new_import): + saved_import = builtin_mod.__import__ + builtin_mod.__import__ = new_import + try: + yield + finally: + builtin_mod.__import__ = saved_import + +def get_parent(globals, level): + """ + parent, name = get_parent(globals, level) + + Return the package that an import is being performed in. If globals comes + from the module foo.bar.bat (not itself a package), this returns the + sys.modules entry for foo.bar. If globals is from a package's __init__.py, + the package's entry in sys.modules is returned. + + If globals doesn't come from a package or a module in a package, or a + corresponding entry is not found in sys.modules, None is returned. 
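The get_parent() contract described above is easiest to see on a real nested module. An illustrative sketch only, since get_parent is an internal helper; it uses the stdlib json package purely as a convenient example.

    import sys
    import json.decoder

    from IPython.lib.deepreload import get_parent

    # level=0 models an absolute import: no parent package is consulted
    print(get_parent(vars(json.decoder), 0))          # -> (None, '')

    # level=1 models "from . import scanner" inside json.decoder
    parent, name = get_parent(vars(json.decoder), 1)
    print(name, parent is sys.modules["json"])        # -> json True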
+ """ + orig_level = level + + if not level or not isinstance(globals, dict): + return None, '' + + pkgname = globals.get('__package__', None) + + if pkgname is not None: + # __package__ is set, so use it + if not hasattr(pkgname, 'rindex'): + raise ValueError('__package__ set to non-string') + if len(pkgname) == 0: + if level > 0: + raise ValueError('Attempted relative import in non-package') + return None, '' + name = pkgname + else: + # __package__ not set, so figure it out and set it + if '__name__' not in globals: + return None, '' + modname = globals['__name__'] + + if '__path__' in globals: + # __path__ is set, so modname is already the package name + globals['__package__'] = name = modname + else: + # Normal module, so work out the package name if any + lastdot = modname.rfind('.') + if lastdot < 0 < level: + raise ValueError("Attempted relative import in non-package") + if lastdot < 0: + globals['__package__'] = None + return None, '' + globals['__package__'] = name = modname[:lastdot] + + dot = len(name) + for x in range(level, 1, -1): + try: + dot = name.rindex('.', 0, dot) + except ValueError: + raise ValueError("attempted relative import beyond top-level " + "package") + name = name[:dot] + + try: + parent = sys.modules[name] + except: + if orig_level < 1: + warn("Parent module '%.200s' not found while handling absolute " + "import" % name) + parent = None + else: + raise SystemError("Parent module '%.200s' not loaded, cannot " + "perform relative import" % name) + + # We expect, but can't guarantee, if parent != None, that: + # - parent.__name__ == name + # - parent.__dict__ is globals + # If this is violated... Who cares? + return parent, name + +def load_next(mod, altmod, name, buf): + """ + mod, name, buf = load_next(mod, altmod, name, buf) + + altmod is either None or same as mod + """ + + if len(name) == 0: + # completely empty module name should only happen in + # 'from . import' (or '__import__("")') + return mod, None, buf + + dot = name.find('.') + if dot == 0: + raise ValueError('Empty module name') + + if dot < 0: + subname = name + next = None + else: + subname = name[:dot] + next = name[dot+1:] + + if buf != '': + buf += '.' + buf += subname + + result = import_submodule(mod, subname, buf) + if result is None and mod != altmod: + result = import_submodule(altmod, subname, subname) + if result is not None: + buf = subname + + if result is None: + raise ImportError("No module named %.200s" % name) + + return result, next, buf + + +# Need to keep track of what we've already reloaded to prevent cyclic evil +found_now = {} + +def import_submodule(mod, subname, fullname): + """m = import_submodule(mod, subname, fullname)""" + # Require: + # if mod == None: subname == fullname + # else: mod.__name__ + "." + subname == fullname + + global found_now + if fullname in found_now and fullname in sys.modules: + m = sys.modules[fullname] + else: + print('Reloading', fullname) + found_now[fullname] = 1 + oldm = sys.modules.get(fullname, None) + + if mod is None: + path = None + elif hasattr(mod, '__path__'): + path = mod.__path__ + else: + return None + + try: + # This appears to be necessary on Python 3, because imp.find_module() + # tries to import standard libraries (like io) itself, and we don't + # want them to be processed by our deep_import_hook. 
+ with replace_import_hook(original_import): + fp, filename, stuff = imp.find_module(subname, path) + except ImportError: + return None + + try: + m = imp.load_module(fullname, fp, filename, stuff) + except: + # load_module probably removed name from modules because of + # the error. Put back the original module object. + if oldm: + sys.modules[fullname] = oldm + raise + finally: + if fp: fp.close() + + add_submodule(mod, m, fullname, subname) + + return m + +def add_submodule(mod, submod, fullname, subname): + """mod.{subname} = submod""" + if mod is None: + return #Nothing to do here. + + if submod is None: + submod = sys.modules[fullname] + + setattr(mod, subname, submod) + + return + +def ensure_fromlist(mod, fromlist, buf, recursive): + """Handle 'from module import a, b, c' imports.""" + if not hasattr(mod, '__path__'): + return + for item in fromlist: + if not hasattr(item, 'rindex'): + raise TypeError("Item in ``from list'' not a string") + if item == '*': + if recursive: + continue # avoid endless recursion + try: + all = mod.__all__ + except AttributeError: + pass + else: + ret = ensure_fromlist(mod, all, buf, 1) + if not ret: + return 0 + elif not hasattr(mod, item): + import_submodule(mod, item, buf + '.' + item) + +def deep_import_hook(name, globals=None, locals=None, fromlist=None, level=-1): + """Replacement for __import__()""" + parent, buf = get_parent(globals, level) + + head, name, buf = load_next(parent, None if level < 0 else parent, name, buf) + + tail = head + while name: + tail, name, buf = load_next(tail, tail, name, buf) + + # If tail is None, both get_parent and load_next found + # an empty module name: someone called __import__("") or + # doctored faulty bytecode + if tail is None: + raise ValueError('Empty module name') + + if not fromlist: + return head + + ensure_fromlist(tail, fromlist, buf, 0) + return tail + +modules_reloading = {} + +def deep_reload_hook(m): + """Replacement for reload().""" + # Hardcode this one as it would raise a NotImplementedError from the + # bowels of Python and screw up the import machinery after. + # unlike other imports the `exclude` list already in place is not enough. + + if m is types: + return m + if not isinstance(m, ModuleType): + raise TypeError("reload() argument must be module") + + name = m.__name__ + + if name not in sys.modules: + raise ImportError("reload(): module %.200s not in sys.modules" % name) + + global modules_reloading + try: + return modules_reloading[name] + except: + modules_reloading[name] = m + + dot = name.rfind('.') + if dot < 0: + subname = name + path = None + else: + try: + parent = sys.modules[name[:dot]] + except KeyError: + modules_reloading.clear() + raise ImportError("reload(): parent %.200s not in sys.modules" % name[:dot]) + subname = name[dot+1:] + path = getattr(parent, "__path__", None) + + try: + # This appears to be necessary on Python 3, because imp.find_module() + # tries to import standard libraries (like io) itself, and we don't + # want them to be processed by our deep_import_hook. + with replace_import_hook(original_import): + fp, filename, stuff = imp.find_module(subname, path) + finally: + modules_reloading.clear() + + try: + newm = imp.load_module(name, fp, filename, stuff) + except: + # load_module probably removed name from modules because of + # the error. Put back the original module object. 
+ sys.modules[name] = m + raise + finally: + if fp: fp.close() + + modules_reloading.clear() + return newm + +# Save the original hooks +original_reload = imp.reload + +# Replacement for reload() +def reload(module, exclude=('sys', 'os.path', 'builtins', '__main__', + 'numpy', 'numpy._globals')): + """Recursively reload all modules used in the given module. Optionally + takes a list of modules to exclude from reloading. The default exclude + list contains sys, __main__, and __builtin__, to prevent, e.g., resetting + display, exception, and io hooks. + """ + global found_now + for i in exclude: + found_now[i] = 1 + try: + with replace_import_hook(deep_import_hook): + return deep_reload_hook(module) + finally: + found_now = {} diff --git a/contrib/python/ipython/py3/IPython/lib/demo.py b/contrib/python/ipython/py3/IPython/lib/demo.py index ce53ae8a8a5..0b19c413c37 100644 --- a/contrib/python/ipython/py3/IPython/lib/demo.py +++ b/contrib/python/ipython/py3/IPython/lib/demo.py @@ -1,671 +1,671 @@ -"""Module for interactive demos using IPython. - -This module implements a few classes for running Python scripts interactively -in IPython for demonstrations. With very simple markup (a few tags in -comments), you can control points where the script stops executing and returns -control to IPython. - - -Provided classes ----------------- - -The classes are (see their docstrings for further details): - - - Demo: pure python demos - - - IPythonDemo: demos with input to be processed by IPython as if it had been - typed interactively (so magics work, as well as any other special syntax you - may have added via input prefilters). - - - LineDemo: single-line version of the Demo class. These demos are executed - one line at a time, and require no markup. - - - IPythonLineDemo: IPython version of the LineDemo class (the demo is - executed a line at a time, but processed via IPython). - - - ClearMixin: mixin to make Demo classes with less visual clutter. It - declares an empty marquee and a pre_cmd that clears the screen before each - block (see Subclassing below). - - - ClearDemo, ClearIPDemo: mixin-enabled versions of the Demo and IPythonDemo - classes. - -Inheritance diagram: - -.. inheritance-diagram:: IPython.lib.demo - :parts: 3 - -Subclassing ------------ - -The classes here all include a few methods meant to make customization by -subclassing more convenient. Their docstrings below have some more details: - - - highlight(): format every block and optionally highlight comments and - docstring content. - - - marquee(): generates a marquee to provide visible on-screen markers at each - block start and end. - - - pre_cmd(): run right before the execution of each block. - - - post_cmd(): run right after the execution of each block. If the block - raises an exception, this is NOT called. - - -Operation ---------- - -The file is run in its own empty namespace (though you can pass it a string of -arguments as if in a command line environment, and it will see those as -sys.argv). But at each stop, the global IPython namespace is updated with the -current internal demo namespace, so you can work interactively with the data -accumulated so far. - -By default, each block of code is printed (with syntax highlighting) before -executing it and you have to confirm execution. This is intended to show the -code to an audience first so you can discuss it, and only proceed with -execution once you agree. There are a few tags which allow you to modify this -behavior. 
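One last note on the deepreload file whose hunk ends above: reload() can also be called directly, optionally widening the default exclude list. A hedged sketch; mypkg is a hypothetical package importable from sys.path.

    from IPython.lib import deepreload
    import mypkg

    # ... change files inside mypkg/ ...
    mypkg = deepreload.reload(
        mypkg,
        exclude=('sys', 'os.path', 'builtins', '__main__',
                 'numpy', 'numpy._globals', 'logging'),
    )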
- -The supported tags are: - -# <demo> stop - - Defines block boundaries, the points where IPython stops execution of the - file and returns to the interactive prompt. - - You can optionally mark the stop tag with extra dashes before and after the - word 'stop', to help visually distinguish the blocks in a text editor: - - # <demo> --- stop --- - - -# <demo> silent - - Make a block execute silently (and hence automatically). Typically used in - cases where you have some boilerplate or initialization code which you need - executed but do not want to be seen in the demo. - -# <demo> auto - - Make a block execute automatically, but still being printed. Useful for - simple code which does not warrant discussion, since it avoids the extra - manual confirmation. - -# <demo> auto_all - - This tag can _only_ be in the first block, and if given it overrides the - individual auto tags to make the whole demo fully automatic (no block asks - for confirmation). It can also be given at creation time (or the attribute - set later) to override what's in the file. - -While _any_ python file can be run as a Demo instance, if there are no stop -tags the whole file will run in a single block (no different that calling -first %pycat and then %run). The minimal markup to make this useful is to -place a set of stop tags; the other tags are only there to let you fine-tune -the execution. - -This is probably best explained with the simple example file below. You can -copy this into a file named ex_demo.py, and try running it via:: - - from IPython.lib.demo import Demo - d = Demo('ex_demo.py') - d() - -Each time you call the demo object, it runs the next block. The demo object -has a few useful methods for navigation, like again(), edit(), jump(), seek() -and back(). It can be reset for a new run via reset() or reloaded from disk -(in case you've edited the source) via reload(). See their docstrings below. - -Note: To make this simpler to explore, a file called "demo-exercizer.py" has -been added to the "docs/examples/core" directory. Just cd to this directory in -an IPython session, and type:: - - %run demo-exercizer.py - -and then follow the directions. - -Example -------- - -The following is a very simple example of a valid demo file. - -:: - - #################### EXAMPLE DEMO <ex_demo.py> ############################### - '''A simple interactive demo to illustrate the use of IPython's Demo class.''' - - print 'Hello, welcome to an interactive IPython demo.' - - # The mark below defines a block boundary, which is a point where IPython will - # stop execution and return to the interactive prompt. The dashes are actually - # optional and used only as a visual aid to clearly separate blocks while - # editing the demo code. - # <demo> stop - - x = 1 - y = 2 - - # <demo> stop - - # the mark below makes this block as silent - # <demo> silent - - print 'This is a silent block, which gets executed but not printed.' - - # <demo> stop - # <demo> auto - print 'This is an automatic block.' - print 'It is executed without asking for confirmation, but printed.' - z = x+y - - print 'z=',x - - # <demo> stop - # This is just another normal block. - print 'z is now:', z - - print 'bye!' - ################### END EXAMPLE DEMO <ex_demo.py> ############################ -""" - - -#***************************************************************************** -# Copyright (C) 2005-2006 Fernando Perez. <Fernando.Perez@colorado.edu> -# -# Distributed under the terms of the BSD License. 
The full license is in -# the file COPYING, distributed as part of this software. -# -#***************************************************************************** - -import os -import re -import shlex -import sys -import pygments - -from IPython.utils.text import marquee -from IPython.utils import openpy -from IPython.utils import py3compat -__all__ = ['Demo','IPythonDemo','LineDemo','IPythonLineDemo','DemoError'] - -class DemoError(Exception): pass - -def re_mark(mark): - return re.compile(r'^\s*#\s+<demo>\s+%s\s*$' % mark,re.MULTILINE) - -class Demo(object): - - re_stop = re_mark(r'-*\s?stop\s?-*') - re_silent = re_mark('silent') - re_auto = re_mark('auto') - re_auto_all = re_mark('auto_all') - - def __init__(self,src,title='',arg_str='',auto_all=None, format_rst=False, - formatter='terminal', style='default'): - """Make a new demo object. To run the demo, simply call the object. - - See the module docstring for full details and an example (you can use - IPython.Demo? in IPython to see it). - - Inputs: - - - src is either a file, or file-like object, or a - string that can be resolved to a filename. - - Optional inputs: - - - title: a string to use as the demo name. Of most use when the demo - you are making comes from an object that has no filename, or if you - want an alternate denotation distinct from the filename. - - - arg_str(''): a string of arguments, internally converted to a list - just like sys.argv, so the demo script can see a similar - environment. - - - auto_all(None): global flag to run all blocks automatically without - confirmation. This attribute overrides the block-level tags and - applies to the whole demo. It is an attribute of the object, and - can be changed at runtime simply by reassigning it to a boolean - value. - - - format_rst(False): a bool to enable comments and doc strings - formatting with pygments rst lexer - - - formatter('terminal'): a string of pygments formatter name to be - used. Useful values for terminals: terminal, terminal256, - terminal16m - - - style('default'): a string of pygments style name to be used. - """ - if hasattr(src, "read"): - # It seems to be a file or a file-like object - self.fname = "from a file-like object" - if title == '': - self.title = "from a file-like object" - else: - self.title = title - else: - # Assume it's a string or something that can be converted to one - self.fname = src - if title == '': - (filepath, filename) = os.path.split(src) - self.title = filename - else: - self.title = title - self.sys_argv = [src] + shlex.split(arg_str) - self.auto_all = auto_all - self.src = src - - try: - ip = get_ipython() # this is in builtins whenever IPython is running - self.inside_ipython = True - except NameError: - self.inside_ipython = False - - if self.inside_ipython: - # get a few things from ipython. While it's a bit ugly design-wise, - # it ensures that things like color scheme and the like are always in - # sync with the ipython mode being used. This class is only meant to - # be used inside ipython anyways, so it's OK. 
- self.ip_ns = ip.user_ns - self.ip_colorize = ip.pycolorize - self.ip_showtb = ip.showtraceback - self.ip_run_cell = ip.run_cell - self.shell = ip - - self.formatter = pygments.formatters.get_formatter_by_name(formatter, - style=style) - self.python_lexer = pygments.lexers.get_lexer_by_name("py3") - self.format_rst = format_rst - if format_rst: - self.rst_lexer = pygments.lexers.get_lexer_by_name("rst") - - # load user data and initialize data structures - self.reload() - - def fload(self): - """Load file object.""" - # read data and parse into blocks - if hasattr(self, 'fobj') and self.fobj is not None: - self.fobj.close() - if hasattr(self.src, "read"): - # It seems to be a file or a file-like object - self.fobj = self.src - else: - # Assume it's a string or something that can be converted to one - self.fobj = openpy.open(self.fname) - - def reload(self): - """Reload source from disk and initialize state.""" - self.fload() - - self.src = "".join(openpy.strip_encoding_cookie(self.fobj)) - src_b = [b.strip() for b in self.re_stop.split(self.src) if b] - self._silent = [bool(self.re_silent.findall(b)) for b in src_b] - self._auto = [bool(self.re_auto.findall(b)) for b in src_b] - - # if auto_all is not given (def. None), we read it from the file - if self.auto_all is None: - self.auto_all = bool(self.re_auto_all.findall(src_b[0])) - else: - self.auto_all = bool(self.auto_all) - - # Clean the sources from all markup so it doesn't get displayed when - # running the demo - src_blocks = [] - auto_strip = lambda s: self.re_auto.sub('',s) - for i,b in enumerate(src_b): - if self._auto[i]: - src_blocks.append(auto_strip(b)) - else: - src_blocks.append(b) - # remove the auto_all marker - src_blocks[0] = self.re_auto_all.sub('',src_blocks[0]) - - self.nblocks = len(src_blocks) - self.src_blocks = src_blocks - - # also build syntax-highlighted source - self.src_blocks_colored = list(map(self.highlight,self.src_blocks)) - - # ensure clean namespace and seek offset - self.reset() - - def reset(self): - """Reset the namespace and seek pointer to restart the demo""" - self.user_ns = {} - self.finished = False - self.block_index = 0 - - def _validate_index(self,index): - if index<0 or index>=self.nblocks: - raise ValueError('invalid block index %s' % index) - - def _get_index(self,index): - """Get the current block index, validating and checking status. - - Returns None if the demo is finished""" - - if index is None: - if self.finished: - print('Demo finished. Use <demo_name>.reset() if you want to rerun it.') - return None - index = self.block_index - else: - self._validate_index(index) - return index - - def seek(self,index): - """Move the current seek pointer to the given block. - - You can use negative indices to seek from the end, with identical - semantics to those of Python lists.""" - if index<0: - index = self.nblocks + index - self._validate_index(index) - self.block_index = index - self.finished = False - - def back(self,num=1): - """Move the seek pointer back num blocks (default is 1).""" - self.seek(self.block_index-num) - - def jump(self,num=1): - """Jump a given number of blocks relative to the current one. - - The offset can be positive or negative, defaults to 1.""" - self.seek(self.block_index+num) - - def again(self): - """Move the seek pointer back one block and re-execute.""" - self.back(1) - self() - - def edit(self,index=None): - """Edit a block. - - If no number is given, use the last block executed. 
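The navigation methods above (seek, back, jump, again) plus edit() are what make a running demo steerable between blocks. A hedged sketch, reusing the ex_demo.py file from the module docstring.

    from IPython.lib.demo import Demo

    d = Demo('ex_demo.py')
    d()            # execute block 0
    d()            # execute block 1
    d.back()       # move the seek pointer back one block
    d.again()      # back(1) followed by running that block again
    d.seek(-1)     # negative indices count from the end, like Python lists
    d.jump(-2)     # move two blocks backwards relative to the current one
    d.reset()      # clear the namespace and start over from block 0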
- - This edits the in-memory copy of the demo, it does NOT modify the - original source file. If you want to do that, simply open the file in - an editor and use reload() when you make changes to the file. This - method is meant to let you change a block during a demonstration for - explanatory purposes, without damaging your original script.""" - - index = self._get_index(index) - if index is None: - return - # decrease the index by one (unless we're at the very beginning), so - # that the default demo.edit() call opens up the sblock we've last run - if index>0: - index -= 1 - - filename = self.shell.mktempfile(self.src_blocks[index]) - self.shell.hooks.editor(filename,1) - with open(filename, 'r') as f: - new_block = f.read() - # update the source and colored block - self.src_blocks[index] = new_block - self.src_blocks_colored[index] = self.highlight(new_block) - self.block_index = index - # call to run with the newly edited index - self() - - def show(self,index=None): - """Show a single block on screen""" - - index = self._get_index(index) - if index is None: - return - - print(self.marquee('<%s> block # %s (%s remaining)' % - (self.title,index,self.nblocks-index-1))) - print(self.src_blocks_colored[index]) - sys.stdout.flush() - - def show_all(self): - """Show entire demo on screen, block by block""" - - fname = self.title - title = self.title - nblocks = self.nblocks - silent = self._silent - marquee = self.marquee - for index,block in enumerate(self.src_blocks_colored): - if silent[index]: - print(marquee('<%s> SILENT block # %s (%s remaining)' % - (title,index,nblocks-index-1))) - else: - print(marquee('<%s> block # %s (%s remaining)' % - (title,index,nblocks-index-1))) - print(block, end=' ') - sys.stdout.flush() - - def run_cell(self,source): - """Execute a string with one or more lines of code""" - - exec(source, self.user_ns) - - def __call__(self,index=None): - """run a block of the demo. - - If index is given, it should be an integer >=1 and <= nblocks. This - means that the calling convention is one off from typical Python - lists. The reason for the inconsistency is that the demo always - prints 'Block n/N, and N is the total, so it would be very odd to use - zero-indexing here.""" - - index = self._get_index(index) - if index is None: - return - try: - marquee = self.marquee - next_block = self.src_blocks[index] - self.block_index += 1 - if self._silent[index]: - print(marquee('Executing silent block # %s (%s remaining)' % - (index,self.nblocks-index-1))) - else: - self.pre_cmd() - self.show(index) - if self.auto_all or self._auto[index]: - print(marquee('output:')) - else: - print(marquee('Press <q> to quit, <Enter> to execute...'), end=' ') - ans = py3compat.input().strip() - if ans: - print(marquee('Block NOT executed')) - return - try: - save_argv = sys.argv - sys.argv = self.sys_argv - self.run_cell(next_block) - self.post_cmd() - finally: - sys.argv = save_argv - - except: - if self.inside_ipython: - self.ip_showtb(filename=self.fname) - else: - if self.inside_ipython: - self.ip_ns.update(self.user_ns) - - if self.block_index == self.nblocks: - mq1 = self.marquee('END OF DEMO') - if mq1: - # avoid spurious print if empty marquees are used - print() - print(mq1) - print(self.marquee('Use <demo_name>.reset() if you want to rerun it.')) - self.finished = True - - # These methods are meant to be overridden by subclasses who may wish to - # customize the behavior of of their demos. 
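The comment above marks the hook methods (marquee, pre_cmd, post_cmd, highlight) defined just below as the intended override points. A minimal, hypothetical subclass overriding two of them:

    from IPython.lib.demo import Demo

    class QuietDemo(Demo):
        """Hypothetical variant: no banners, a blank line after each block."""

        def marquee(self, txt='', width=78, mark='*'):
            # suppress the decorated banner entirely
            return ''

        def post_cmd(self):
            # called after each block that executed without raising
            print()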
- def marquee(self,txt='',width=78,mark='*'): - """Return the input string centered in a 'marquee'.""" - return marquee(txt,width,mark) - - def pre_cmd(self): - """Method called before executing each block.""" - pass - - def post_cmd(self): - """Method called after executing each block.""" - pass - - def highlight(self, block): - """Method called on each block to highlight it content""" - tokens = pygments.lex(block, self.python_lexer) - if self.format_rst: - from pygments.token import Token - toks = [] - for token in tokens: - if token[0] == Token.String.Doc and len(token[1]) > 6: - toks += pygments.lex(token[1][:3], self.python_lexer) - # parse doc string content by rst lexer - toks += pygments.lex(token[1][3:-3], self.rst_lexer) - toks += pygments.lex(token[1][-3:], self.python_lexer) - elif token[0] == Token.Comment.Single: - toks.append((Token.Comment.Single, token[1][0])) - # parse comment content by rst lexer - # remove the extrat newline added by rst lexer - toks += list(pygments.lex(token[1][1:], self.rst_lexer))[:-1] - else: - toks.append(token) - tokens = toks - return pygments.format(tokens, self.formatter) - - -class IPythonDemo(Demo): - """Class for interactive demos with IPython's input processing applied. - - This subclasses Demo, but instead of executing each block by the Python - interpreter (via exec), it actually calls IPython on it, so that any input - filters which may be in place are applied to the input block. - - If you have an interactive environment which exposes special input - processing, you can use this class instead to write demo scripts which - operate exactly as if you had typed them interactively. The default Demo - class requires the input to be valid, pure Python code. - """ - - def run_cell(self,source): - """Execute a string with one or more lines of code""" - - self.shell.run_cell(source) - -class LineDemo(Demo): - """Demo where each line is executed as a separate block. - - The input script should be valid Python code. - - This class doesn't require any markup at all, and it's meant for simple - scripts (with no nesting or any kind of indentation) which consist of - multiple lines of input to be executed, one at a time, as if they had been - typed in the interactive prompt. - - Note: the input can not have *any* indentation, which means that only - single-lines of input are accepted, not even function definitions are - valid.""" - - def reload(self): - """Reload source from disk and initialize state.""" - # read data and parse into blocks - self.fload() - lines = self.fobj.readlines() - src_b = [l for l in lines if l.strip()] - nblocks = len(src_b) - self.src = ''.join(lines) - self._silent = [False]*nblocks - self._auto = [True]*nblocks - self.auto_all = True - self.nblocks = nblocks - self.src_blocks = src_b - - # also build syntax-highlighted source - self.src_blocks_colored = list(map(self.highlight,self.src_blocks)) - - # ensure clean namespace and seek offset - self.reset() - - -class IPythonLineDemo(IPythonDemo,LineDemo): - """Variant of the LineDemo class whose input is processed by IPython.""" - pass - - -class ClearMixin(object): - """Use this mixin to make Demo classes with less visual clutter. - - Demos using this mixin will clear the screen before every block and use - blank marquees. - - Note that in order for the methods defined here to actually override those - of the classes it's mixed with, it must go /first/ in the inheritance - tree. 
For example: - - class ClearIPDemo(ClearMixin,IPythonDemo): pass - - will provide an IPythonDemo class with the mixin's features. - """ - - def marquee(self,txt='',width=78,mark='*'): - """Blank marquee that returns '' no matter what the input.""" - return '' - - def pre_cmd(self): - """Method called before executing each block. - - This one simply clears the screen.""" - from IPython.utils.terminal import _term_clear - _term_clear() - -class ClearDemo(ClearMixin,Demo): - pass - - -class ClearIPDemo(ClearMixin,IPythonDemo): - pass - - -def slide(file_path, noclear=False, format_rst=True, formatter="terminal", - style="native", auto_all=False, delimiter='...'): - if noclear: - demo_class = Demo - else: - demo_class = ClearDemo - demo = demo_class(file_path, format_rst=format_rst, formatter=formatter, - style=style, auto_all=auto_all) - while not demo.finished: - demo() - try: - py3compat.input('\n' + delimiter) - except KeyboardInterrupt: - exit(1) - -if __name__ == '__main__': - import argparse - parser = argparse.ArgumentParser(description='Run python demos') - parser.add_argument('--noclear', '-C', action='store_true', - help='Do not clear terminal on each slide') - parser.add_argument('--rst', '-r', action='store_true', - help='Highlight comments and dostrings as rst') - parser.add_argument('--formatter', '-f', default='terminal', - help='pygments formatter name could be: terminal, ' - 'terminal256, terminal16m') - parser.add_argument('--style', '-s', default='default', - help='pygments style name') - parser.add_argument('--auto', '-a', action='store_true', - help='Run all blocks automatically without' - 'confirmation') - parser.add_argument('--delimiter', '-d', default='...', - help='slides delimiter added after each slide run') - parser.add_argument('file', nargs=1, - help='python demo file') - args = parser.parse_args() - slide(args.file[0], noclear=args.noclear, format_rst=args.rst, - formatter=args.formatter, style=args.style, auto_all=args.auto, - delimiter=args.delimiter) +"""Module for interactive demos using IPython. + +This module implements a few classes for running Python scripts interactively +in IPython for demonstrations. With very simple markup (a few tags in +comments), you can control points where the script stops executing and returns +control to IPython. + + +Provided classes +---------------- + +The classes are (see their docstrings for further details): + + - Demo: pure python demos + + - IPythonDemo: demos with input to be processed by IPython as if it had been + typed interactively (so magics work, as well as any other special syntax you + may have added via input prefilters). + + - LineDemo: single-line version of the Demo class. These demos are executed + one line at a time, and require no markup. + + - IPythonLineDemo: IPython version of the LineDemo class (the demo is + executed a line at a time, but processed via IPython). + + - ClearMixin: mixin to make Demo classes with less visual clutter. It + declares an empty marquee and a pre_cmd that clears the screen before each + block (see Subclassing below). + + - ClearDemo, ClearIPDemo: mixin-enabled versions of the Demo and IPythonDemo + classes. + +Inheritance diagram: + +.. inheritance-diagram:: IPython.lib.demo + :parts: 3 + +Subclassing +----------- + +The classes here all include a few methods meant to make customization by +subclassing more convenient. Their docstrings below have some more details: + + - highlight(): format every block and optionally highlight comments and + docstring content. 
+ + - marquee(): generates a marquee to provide visible on-screen markers at each + block start and end. + + - pre_cmd(): run right before the execution of each block. + + - post_cmd(): run right after the execution of each block. If the block + raises an exception, this is NOT called. + + +Operation +--------- + +The file is run in its own empty namespace (though you can pass it a string of +arguments as if in a command line environment, and it will see those as +sys.argv). But at each stop, the global IPython namespace is updated with the +current internal demo namespace, so you can work interactively with the data +accumulated so far. + +By default, each block of code is printed (with syntax highlighting) before +executing it and you have to confirm execution. This is intended to show the +code to an audience first so you can discuss it, and only proceed with +execution once you agree. There are a few tags which allow you to modify this +behavior. + +The supported tags are: + +# <demo> stop + + Defines block boundaries, the points where IPython stops execution of the + file and returns to the interactive prompt. + + You can optionally mark the stop tag with extra dashes before and after the + word 'stop', to help visually distinguish the blocks in a text editor: + + # <demo> --- stop --- + + +# <demo> silent + + Make a block execute silently (and hence automatically). Typically used in + cases where you have some boilerplate or initialization code which you need + executed but do not want to be seen in the demo. + +# <demo> auto + + Make a block execute automatically, but still being printed. Useful for + simple code which does not warrant discussion, since it avoids the extra + manual confirmation. + +# <demo> auto_all + + This tag can _only_ be in the first block, and if given it overrides the + individual auto tags to make the whole demo fully automatic (no block asks + for confirmation). It can also be given at creation time (or the attribute + set later) to override what's in the file. + +While _any_ python file can be run as a Demo instance, if there are no stop +tags the whole file will run in a single block (no different that calling +first %pycat and then %run). The minimal markup to make this useful is to +place a set of stop tags; the other tags are only there to let you fine-tune +the execution. + +This is probably best explained with the simple example file below. You can +copy this into a file named ex_demo.py, and try running it via:: + + from IPython.lib.demo import Demo + d = Demo('ex_demo.py') + d() + +Each time you call the demo object, it runs the next block. The demo object +has a few useful methods for navigation, like again(), edit(), jump(), seek() +and back(). It can be reset for a new run via reset() or reloaded from disk +(in case you've edited the source) via reload(). See their docstrings below. + +Note: To make this simpler to explore, a file called "demo-exercizer.py" has +been added to the "docs/examples/core" directory. Just cd to this directory in +an IPython session, and type:: + + %run demo-exercizer.py + +and then follow the directions. + +Example +------- + +The following is a very simple example of a valid demo file. + +:: + + #################### EXAMPLE DEMO <ex_demo.py> ############################### + '''A simple interactive demo to illustrate the use of IPython's Demo class.''' + + print 'Hello, welcome to an interactive IPython demo.' 
+ + # The mark below defines a block boundary, which is a point where IPython will + # stop execution and return to the interactive prompt. The dashes are actually + # optional and used only as a visual aid to clearly separate blocks while + # editing the demo code. + # <demo> stop + + x = 1 + y = 2 + + # <demo> stop + + # the mark below makes this block as silent + # <demo> silent + + print 'This is a silent block, which gets executed but not printed.' + + # <demo> stop + # <demo> auto + print 'This is an automatic block.' + print 'It is executed without asking for confirmation, but printed.' + z = x+y + + print 'z=',x + + # <demo> stop + # This is just another normal block. + print 'z is now:', z + + print 'bye!' + ################### END EXAMPLE DEMO <ex_demo.py> ############################ +""" + + +#***************************************************************************** +# Copyright (C) 2005-2006 Fernando Perez. <Fernando.Perez@colorado.edu> +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +# +#***************************************************************************** + +import os +import re +import shlex +import sys +import pygments + +from IPython.utils.text import marquee +from IPython.utils import openpy +from IPython.utils import py3compat +__all__ = ['Demo','IPythonDemo','LineDemo','IPythonLineDemo','DemoError'] + +class DemoError(Exception): pass + +def re_mark(mark): + return re.compile(r'^\s*#\s+<demo>\s+%s\s*$' % mark,re.MULTILINE) + +class Demo(object): + + re_stop = re_mark(r'-*\s?stop\s?-*') + re_silent = re_mark('silent') + re_auto = re_mark('auto') + re_auto_all = re_mark('auto_all') + + def __init__(self,src,title='',arg_str='',auto_all=None, format_rst=False, + formatter='terminal', style='default'): + """Make a new demo object. To run the demo, simply call the object. + + See the module docstring for full details and an example (you can use + IPython.Demo? in IPython to see it). + + Inputs: + + - src is either a file, or file-like object, or a + string that can be resolved to a filename. + + Optional inputs: + + - title: a string to use as the demo name. Of most use when the demo + you are making comes from an object that has no filename, or if you + want an alternate denotation distinct from the filename. + + - arg_str(''): a string of arguments, internally converted to a list + just like sys.argv, so the demo script can see a similar + environment. + + - auto_all(None): global flag to run all blocks automatically without + confirmation. This attribute overrides the block-level tags and + applies to the whole demo. It is an attribute of the object, and + can be changed at runtime simply by reassigning it to a boolean + value. + + - format_rst(False): a bool to enable comments and doc strings + formatting with pygments rst lexer + + - formatter('terminal'): a string of pygments formatter name to be + used. Useful values for terminals: terminal, terminal256, + terminal16m + + - style('default'): a string of pygments style name to be used. 
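Putting the constructor parameters listed above together; a hedged sketch in which ex_demo.py is the example file from the module docstring and the formatter/style names are ordinary pygments choices.

    from IPython.lib.demo import Demo

    d = Demo('ex_demo.py',
             title='intro demo',
             arg_str='--fast data.csv',   # shell-split and appended to the script's sys.argv
             auto_all=False,              # keep the per-block confirmation prompt
             format_rst=True,             # run comments/docstrings through the rst lexer
             formatter='terminal256',
             style='monokai')
    d()            # execute the next block
    d.show_all()   # print every block, highlighted, without executing anything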
+ """ + if hasattr(src, "read"): + # It seems to be a file or a file-like object + self.fname = "from a file-like object" + if title == '': + self.title = "from a file-like object" + else: + self.title = title + else: + # Assume it's a string or something that can be converted to one + self.fname = src + if title == '': + (filepath, filename) = os.path.split(src) + self.title = filename + else: + self.title = title + self.sys_argv = [src] + shlex.split(arg_str) + self.auto_all = auto_all + self.src = src + + try: + ip = get_ipython() # this is in builtins whenever IPython is running + self.inside_ipython = True + except NameError: + self.inside_ipython = False + + if self.inside_ipython: + # get a few things from ipython. While it's a bit ugly design-wise, + # it ensures that things like color scheme and the like are always in + # sync with the ipython mode being used. This class is only meant to + # be used inside ipython anyways, so it's OK. + self.ip_ns = ip.user_ns + self.ip_colorize = ip.pycolorize + self.ip_showtb = ip.showtraceback + self.ip_run_cell = ip.run_cell + self.shell = ip + + self.formatter = pygments.formatters.get_formatter_by_name(formatter, + style=style) + self.python_lexer = pygments.lexers.get_lexer_by_name("py3") + self.format_rst = format_rst + if format_rst: + self.rst_lexer = pygments.lexers.get_lexer_by_name("rst") + + # load user data and initialize data structures + self.reload() + + def fload(self): + """Load file object.""" + # read data and parse into blocks + if hasattr(self, 'fobj') and self.fobj is not None: + self.fobj.close() + if hasattr(self.src, "read"): + # It seems to be a file or a file-like object + self.fobj = self.src + else: + # Assume it's a string or something that can be converted to one + self.fobj = openpy.open(self.fname) + + def reload(self): + """Reload source from disk and initialize state.""" + self.fload() + + self.src = "".join(openpy.strip_encoding_cookie(self.fobj)) + src_b = [b.strip() for b in self.re_stop.split(self.src) if b] + self._silent = [bool(self.re_silent.findall(b)) for b in src_b] + self._auto = [bool(self.re_auto.findall(b)) for b in src_b] + + # if auto_all is not given (def. None), we read it from the file + if self.auto_all is None: + self.auto_all = bool(self.re_auto_all.findall(src_b[0])) + else: + self.auto_all = bool(self.auto_all) + + # Clean the sources from all markup so it doesn't get displayed when + # running the demo + src_blocks = [] + auto_strip = lambda s: self.re_auto.sub('',s) + for i,b in enumerate(src_b): + if self._auto[i]: + src_blocks.append(auto_strip(b)) + else: + src_blocks.append(b) + # remove the auto_all marker + src_blocks[0] = self.re_auto_all.sub('',src_blocks[0]) + + self.nblocks = len(src_blocks) + self.src_blocks = src_blocks + + # also build syntax-highlighted source + self.src_blocks_colored = list(map(self.highlight,self.src_blocks)) + + # ensure clean namespace and seek offset + self.reset() + + def reset(self): + """Reset the namespace and seek pointer to restart the demo""" + self.user_ns = {} + self.finished = False + self.block_index = 0 + + def _validate_index(self,index): + if index<0 or index>=self.nblocks: + raise ValueError('invalid block index %s' % index) + + def _get_index(self,index): + """Get the current block index, validating and checking status. + + Returns None if the demo is finished""" + + if index is None: + if self.finished: + print('Demo finished. 
Use <demo_name>.reset() if you want to rerun it.') + return None + index = self.block_index + else: + self._validate_index(index) + return index + + def seek(self,index): + """Move the current seek pointer to the given block. + + You can use negative indices to seek from the end, with identical + semantics to those of Python lists.""" + if index<0: + index = self.nblocks + index + self._validate_index(index) + self.block_index = index + self.finished = False + + def back(self,num=1): + """Move the seek pointer back num blocks (default is 1).""" + self.seek(self.block_index-num) + + def jump(self,num=1): + """Jump a given number of blocks relative to the current one. + + The offset can be positive or negative, defaults to 1.""" + self.seek(self.block_index+num) + + def again(self): + """Move the seek pointer back one block and re-execute.""" + self.back(1) + self() + + def edit(self,index=None): + """Edit a block. + + If no number is given, use the last block executed. + + This edits the in-memory copy of the demo, it does NOT modify the + original source file. If you want to do that, simply open the file in + an editor and use reload() when you make changes to the file. This + method is meant to let you change a block during a demonstration for + explanatory purposes, without damaging your original script.""" + + index = self._get_index(index) + if index is None: + return + # decrease the index by one (unless we're at the very beginning), so + # that the default demo.edit() call opens up the sblock we've last run + if index>0: + index -= 1 + + filename = self.shell.mktempfile(self.src_blocks[index]) + self.shell.hooks.editor(filename,1) + with open(filename, 'r') as f: + new_block = f.read() + # update the source and colored block + self.src_blocks[index] = new_block + self.src_blocks_colored[index] = self.highlight(new_block) + self.block_index = index + # call to run with the newly edited index + self() + + def show(self,index=None): + """Show a single block on screen""" + + index = self._get_index(index) + if index is None: + return + + print(self.marquee('<%s> block # %s (%s remaining)' % + (self.title,index,self.nblocks-index-1))) + print(self.src_blocks_colored[index]) + sys.stdout.flush() + + def show_all(self): + """Show entire demo on screen, block by block""" + + fname = self.title + title = self.title + nblocks = self.nblocks + silent = self._silent + marquee = self.marquee + for index,block in enumerate(self.src_blocks_colored): + if silent[index]: + print(marquee('<%s> SILENT block # %s (%s remaining)' % + (title,index,nblocks-index-1))) + else: + print(marquee('<%s> block # %s (%s remaining)' % + (title,index,nblocks-index-1))) + print(block, end=' ') + sys.stdout.flush() + + def run_cell(self,source): + """Execute a string with one or more lines of code""" + + exec(source, self.user_ns) + + def __call__(self,index=None): + """run a block of the demo. + + If index is given, it should be an integer >=1 and <= nblocks. This + means that the calling convention is one off from typical Python + lists. 
The reason for the inconsistency is that the demo always + prints 'Block n/N, and N is the total, so it would be very odd to use + zero-indexing here.""" + + index = self._get_index(index) + if index is None: + return + try: + marquee = self.marquee + next_block = self.src_blocks[index] + self.block_index += 1 + if self._silent[index]: + print(marquee('Executing silent block # %s (%s remaining)' % + (index,self.nblocks-index-1))) + else: + self.pre_cmd() + self.show(index) + if self.auto_all or self._auto[index]: + print(marquee('output:')) + else: + print(marquee('Press <q> to quit, <Enter> to execute...'), end=' ') + ans = py3compat.input().strip() + if ans: + print(marquee('Block NOT executed')) + return + try: + save_argv = sys.argv + sys.argv = self.sys_argv + self.run_cell(next_block) + self.post_cmd() + finally: + sys.argv = save_argv + + except: + if self.inside_ipython: + self.ip_showtb(filename=self.fname) + else: + if self.inside_ipython: + self.ip_ns.update(self.user_ns) + + if self.block_index == self.nblocks: + mq1 = self.marquee('END OF DEMO') + if mq1: + # avoid spurious print if empty marquees are used + print() + print(mq1) + print(self.marquee('Use <demo_name>.reset() if you want to rerun it.')) + self.finished = True + + # These methods are meant to be overridden by subclasses who may wish to + # customize the behavior of of their demos. + def marquee(self,txt='',width=78,mark='*'): + """Return the input string centered in a 'marquee'.""" + return marquee(txt,width,mark) + + def pre_cmd(self): + """Method called before executing each block.""" + pass + + def post_cmd(self): + """Method called after executing each block.""" + pass + + def highlight(self, block): + """Method called on each block to highlight it content""" + tokens = pygments.lex(block, self.python_lexer) + if self.format_rst: + from pygments.token import Token + toks = [] + for token in tokens: + if token[0] == Token.String.Doc and len(token[1]) > 6: + toks += pygments.lex(token[1][:3], self.python_lexer) + # parse doc string content by rst lexer + toks += pygments.lex(token[1][3:-3], self.rst_lexer) + toks += pygments.lex(token[1][-3:], self.python_lexer) + elif token[0] == Token.Comment.Single: + toks.append((Token.Comment.Single, token[1][0])) + # parse comment content by rst lexer + # remove the extrat newline added by rst lexer + toks += list(pygments.lex(token[1][1:], self.rst_lexer))[:-1] + else: + toks.append(token) + tokens = toks + return pygments.format(tokens, self.formatter) + + +class IPythonDemo(Demo): + """Class for interactive demos with IPython's input processing applied. + + This subclasses Demo, but instead of executing each block by the Python + interpreter (via exec), it actually calls IPython on it, so that any input + filters which may be in place are applied to the input block. + + If you have an interactive environment which exposes special input + processing, you can use this class instead to write demo scripts which + operate exactly as if you had typed them interactively. The default Demo + class requires the input to be valid, pure Python code. + """ + + def run_cell(self,source): + """Execute a string with one or more lines of code""" + + self.shell.run_cell(source) + +class LineDemo(Demo): + """Demo where each line is executed as a separate block. + + The input script should be valid Python code. 
+ + This class doesn't require any markup at all, and it's meant for simple + scripts (with no nesting or any kind of indentation) which consist of + multiple lines of input to be executed, one at a time, as if they had been + typed in the interactive prompt. + + Note: the input can not have *any* indentation, which means that only + single-lines of input are accepted, not even function definitions are + valid.""" + + def reload(self): + """Reload source from disk and initialize state.""" + # read data and parse into blocks + self.fload() + lines = self.fobj.readlines() + src_b = [l for l in lines if l.strip()] + nblocks = len(src_b) + self.src = ''.join(lines) + self._silent = [False]*nblocks + self._auto = [True]*nblocks + self.auto_all = True + self.nblocks = nblocks + self.src_blocks = src_b + + # also build syntax-highlighted source + self.src_blocks_colored = list(map(self.highlight,self.src_blocks)) + + # ensure clean namespace and seek offset + self.reset() + + +class IPythonLineDemo(IPythonDemo,LineDemo): + """Variant of the LineDemo class whose input is processed by IPython.""" + pass + + +class ClearMixin(object): + """Use this mixin to make Demo classes with less visual clutter. + + Demos using this mixin will clear the screen before every block and use + blank marquees. + + Note that in order for the methods defined here to actually override those + of the classes it's mixed with, it must go /first/ in the inheritance + tree. For example: + + class ClearIPDemo(ClearMixin,IPythonDemo): pass + + will provide an IPythonDemo class with the mixin's features. + """ + + def marquee(self,txt='',width=78,mark='*'): + """Blank marquee that returns '' no matter what the input.""" + return '' + + def pre_cmd(self): + """Method called before executing each block. 
+ + This one simply clears the screen.""" + from IPython.utils.terminal import _term_clear + _term_clear() + +class ClearDemo(ClearMixin,Demo): + pass + + +class ClearIPDemo(ClearMixin,IPythonDemo): + pass + + +def slide(file_path, noclear=False, format_rst=True, formatter="terminal", + style="native", auto_all=False, delimiter='...'): + if noclear: + demo_class = Demo + else: + demo_class = ClearDemo + demo = demo_class(file_path, format_rst=format_rst, formatter=formatter, + style=style, auto_all=auto_all) + while not demo.finished: + demo() + try: + py3compat.input('\n' + delimiter) + except KeyboardInterrupt: + exit(1) + +if __name__ == '__main__': + import argparse + parser = argparse.ArgumentParser(description='Run python demos') + parser.add_argument('--noclear', '-C', action='store_true', + help='Do not clear terminal on each slide') + parser.add_argument('--rst', '-r', action='store_true', + help='Highlight comments and dostrings as rst') + parser.add_argument('--formatter', '-f', default='terminal', + help='pygments formatter name could be: terminal, ' + 'terminal256, terminal16m') + parser.add_argument('--style', '-s', default='default', + help='pygments style name') + parser.add_argument('--auto', '-a', action='store_true', + help='Run all blocks automatically without' + 'confirmation') + parser.add_argument('--delimiter', '-d', default='...', + help='slides delimiter added after each slide run') + parser.add_argument('file', nargs=1, + help='python demo file') + args = parser.parse_args() + slide(args.file[0], noclear=args.noclear, format_rst=args.rst, + formatter=args.formatter, style=args.style, auto_all=args.auto, + delimiter=args.delimiter) diff --git a/contrib/python/ipython/py3/IPython/lib/display.py b/contrib/python/ipython/py3/IPython/lib/display.py index bd4ea2a89f5..7b94acf6395 100644 --- a/contrib/python/ipython/py3/IPython/lib/display.py +++ b/contrib/python/ipython/py3/IPython/lib/display.py @@ -1,667 +1,667 @@ -"""Various display related classes. - -Authors : MinRK, gregcaporaso, dannystaple -""" -from html import escape as html_escape -from os.path import exists, isfile, splitext, abspath, join, isdir -from os import walk, sep, fsdecode - -from IPython.core.display import DisplayObject, TextDisplayObject - -from typing import Tuple, Iterable - -__all__ = ['Audio', 'IFrame', 'YouTubeVideo', 'VimeoVideo', 'ScribdDocument', - 'FileLink', 'FileLinks', 'Code'] - - -class Audio(DisplayObject): - """Create an audio object. - - When this object is returned by an input cell or passed to the - display function, it will result in Audio controls being displayed - in the frontend (only works in the notebook). - - Parameters - ---------- - data : numpy array, list, unicode, str or bytes - Can be one of - - * Numpy 1d array containing the desired waveform (mono) - * Numpy 2d array containing waveforms for each channel. - Shape=(NCHAN, NSAMPLES). For the standard channel order, see - http://msdn.microsoft.com/en-us/library/windows/hardware/dn653308(v=vs.85).aspx - * List of float or integer representing the waveform (mono) - * String containing the filename - * Bytestring containing raw PCM data or - * URL pointing to a file on the web. - - If the array option is used, the waveform will be normalized. - - If a filename or url is used, the format support will be browser - dependent. - url : unicode - A URL to download the data from. - filename : unicode - Path to a local file to load the data from. 
- embed : boolean - Should the audio data be embedded using a data URI (True) or should - the original source be referenced. Set this to True if you want the - audio to playable later with no internet connection in the notebook. - - Default is `True`, unless the keyword argument `url` is set, then - default value is `False`. - rate : integer - The sampling rate of the raw data. - Only required when data parameter is being used as an array - autoplay : bool - Set to True if the audio should immediately start playing. - Default is `False`. - normalize : bool - Whether audio should be normalized (rescaled) to the maximum possible - range. Default is `True`. When set to `False`, `data` must be between - -1 and 1 (inclusive), otherwise an error is raised. - Applies only when `data` is a list or array of samples; other types of - audio are never normalized. - - Examples - -------- - :: - - # Generate a sound - import numpy as np - framerate = 44100 - t = np.linspace(0,5,framerate*5) - data = np.sin(2*np.pi*220*t) + np.sin(2*np.pi*224*t) - Audio(data,rate=framerate) - - # Can also do stereo or more channels - dataleft = np.sin(2*np.pi*220*t) - dataright = np.sin(2*np.pi*224*t) - Audio([dataleft, dataright],rate=framerate) - - Audio("http://www.nch.com.au/acm/8k16bitpcm.wav") # From URL - Audio(url="http://www.w3schools.com/html/horse.ogg") - - Audio('/path/to/sound.wav') # From file - Audio(filename='/path/to/sound.ogg') - - Audio(b'RAW_WAV_DATA..) # From bytes - Audio(data=b'RAW_WAV_DATA..) - - See Also - -------- - - See also the ``Audio`` widgets form the ``ipywidget`` package for more flexibility and options. - - """ - _read_flags = 'rb' - - def __init__(self, data=None, filename=None, url=None, embed=None, rate=None, autoplay=False, normalize=True, *, - element_id=None): - if filename is None and url is None and data is None: - raise ValueError("No audio data found. Expecting filename, url, or data.") - if embed is False and url is None: - raise ValueError("No url found. 
Expecting url when embed=False") - - if url is not None and embed is not True: - self.embed = False - else: - self.embed = True - self.autoplay = autoplay - self.element_id = element_id - super(Audio, self).__init__(data=data, url=url, filename=filename) - - if self.data is not None and not isinstance(self.data, bytes): - if rate is None: - raise ValueError("rate must be specified when data is a numpy array or list of audio samples.") - self.data = Audio._make_wav(data, rate, normalize) - - def reload(self): - """Reload the raw data from file or URL.""" - import mimetypes - if self.embed: - super(Audio, self).reload() - - if self.filename is not None: - self.mimetype = mimetypes.guess_type(self.filename)[0] - elif self.url is not None: - self.mimetype = mimetypes.guess_type(self.url)[0] - else: - self.mimetype = "audio/wav" - - @staticmethod - def _make_wav(data, rate, normalize): - """ Transform a numpy array to a PCM bytestring """ - from io import BytesIO - import wave - - try: - scaled, nchan = Audio._validate_and_normalize_with_numpy(data, normalize) - except ImportError: - scaled, nchan = Audio._validate_and_normalize_without_numpy(data, normalize) - - fp = BytesIO() - waveobj = wave.open(fp,mode='wb') - waveobj.setnchannels(nchan) - waveobj.setframerate(rate) - waveobj.setsampwidth(2) - waveobj.setcomptype('NONE','NONE') - waveobj.writeframes(scaled) - val = fp.getvalue() - waveobj.close() - - return val - - @staticmethod - def _validate_and_normalize_with_numpy(data, normalize) -> Tuple[bytes, int]: - import numpy as np - - data = np.array(data, dtype=float) - if len(data.shape) == 1: - nchan = 1 - elif len(data.shape) == 2: - # In wave files,channels are interleaved. E.g., - # "L1R1L2R2..." for stereo. See - # http://msdn.microsoft.com/en-us/library/windows/hardware/dn653308(v=vs.85).aspx - # for channel ordering - nchan = data.shape[0] - data = data.T.ravel() - else: - raise ValueError('Array audio input must be a 1D or 2D array') - - max_abs_value = np.max(np.abs(data)) - normalization_factor = Audio._get_normalization_factor(max_abs_value, normalize) - scaled = data / normalization_factor * 32767 - return scaled.astype("<h").tobytes(), nchan - - @staticmethod - def _validate_and_normalize_without_numpy(data, normalize): - import array - import sys - - data = array.array('f', data) - - try: - max_abs_value = float(max([abs(x) for x in data])) - except TypeError: - raise TypeError('Only lists of mono audio are ' - 'supported if numpy is not installed') - - normalization_factor = Audio._get_normalization_factor(max_abs_value, normalize) - scaled = array.array('h', [int(x / normalization_factor * 32767) for x in data]) - if sys.byteorder == 'big': - scaled.byteswap() - nchan = 1 - return scaled.tobytes(), nchan - - @staticmethod - def _get_normalization_factor(max_abs_value, normalize): - if not normalize and max_abs_value > 1: - raise ValueError('Audio data must be between -1 and 1 when normalize=False.') - return max_abs_value if normalize else 1 - - def _data_and_metadata(self): - """shortcut for returning metadata with url information, if defined""" - md = {} - if self.url: - md['url'] = self.url - if md: - return self.data, md - else: - return self.data - - def _repr_html_(self): - src = """ - <audio {element_id} controls="controls" {autoplay}> - <source src="{src}" type="{type}" /> - Your browser does not support the audio element. 
- </audio> - """ - return src.format(src=self.src_attr(), type=self.mimetype, autoplay=self.autoplay_attr(), - element_id=self.element_id_attr()) - - def src_attr(self): - import base64 - if self.embed and (self.data is not None): - data = base64=base64.b64encode(self.data).decode('ascii') - return """data:{type};base64,{base64}""".format(type=self.mimetype, - base64=data) - elif self.url is not None: - return self.url - else: - return "" - - def autoplay_attr(self): - if(self.autoplay): - return 'autoplay="autoplay"' - else: - return '' - - def element_id_attr(self): - if (self.element_id): - return 'id="{element_id}"'.format(element_id=self.element_id) - else: - return '' - -class IFrame(object): - """ - Generic class to embed an iframe in an IPython notebook - """ - - iframe = """ - <iframe - width="{width}" - height="{height}" - src="{src}{params}" - frameborder="0" - allowfullscreen - {extras} - ></iframe> - """ - - def __init__(self, src, width, height, extras: Iterable[str] = None, **kwargs): - if extras is None: - extras = [] - - self.src = src - self.width = width - self.height = height - self.extras = extras - self.params = kwargs - - def _repr_html_(self): - """return the embed iframe""" - if self.params: - try: - from urllib.parse import urlencode # Py 3 - except ImportError: - from urllib import urlencode - params = "?" + urlencode(self.params) - else: - params = "" - return self.iframe.format( - src=self.src, - width=self.width, - height=self.height, - params=params, - extras=" ".join(self.extras), - ) - - -class YouTubeVideo(IFrame): - """Class for embedding a YouTube Video in an IPython session, based on its video id. - - e.g. to embed the video from https://www.youtube.com/watch?v=foo , you would - do:: - - vid = YouTubeVideo("foo") - display(vid) - - To start from 30 seconds:: - - vid = YouTubeVideo("abc", start=30) - display(vid) - - To calculate seconds from time as hours, minutes, seconds use - :class:`datetime.timedelta`:: - - start=int(timedelta(hours=1, minutes=46, seconds=40).total_seconds()) - - Other parameters can be provided as documented at - https://developers.google.com/youtube/player_parameters#Parameters - - When converting the notebook using nbconvert, a jpeg representation of the video - will be inserted in the document. - """ - - def __init__(self, id, width=400, height=300, allow_autoplay=False, **kwargs): - self.id=id - src = "https://www.youtube.com/embed/{0}".format(id) - if allow_autoplay: - extras = list(kwargs.get("extras", [])) + ['allow="autoplay"'] - kwargs.update(autoplay=1, extras=extras) - super(YouTubeVideo, self).__init__(src, width, height, **kwargs) - - def _repr_jpeg_(self): - # Deferred import - from urllib.request import urlopen - - try: - return urlopen("https://img.youtube.com/vi/{id}/hqdefault.jpg".format(id=self.id)).read() - except IOError: - return None - -class VimeoVideo(IFrame): - """ - Class for embedding a Vimeo video in an IPython session, based on its video id. 
- """ - - def __init__(self, id, width=400, height=300, **kwargs): - src="https://player.vimeo.com/video/{0}".format(id) - super(VimeoVideo, self).__init__(src, width, height, **kwargs) - -class ScribdDocument(IFrame): - """ - Class for embedding a Scribd document in an IPython session - - Use the start_page params to specify a starting point in the document - Use the view_mode params to specify display type one off scroll | slideshow | book - - e.g to Display Wes' foundational paper about PANDAS in book mode from page 3 - - ScribdDocument(71048089, width=800, height=400, start_page=3, view_mode="book") - """ - - def __init__(self, id, width=400, height=300, **kwargs): - src="https://www.scribd.com/embeds/{0}/content".format(id) - super(ScribdDocument, self).__init__(src, width, height, **kwargs) - -class FileLink(object): - """Class for embedding a local file link in an IPython session, based on path - - e.g. to embed a link that was generated in the IPython notebook as my/data.txt - - you would do:: - - local_file = FileLink("my/data.txt") - display(local_file) - - or in the HTML notebook, just:: - - FileLink("my/data.txt") - """ - - html_link_str = "<a href='%s' target='_blank'>%s</a>" - - def __init__(self, - path, - url_prefix='', - result_html_prefix='', - result_html_suffix='<br>'): - """ - Parameters - ---------- - path : str - path to the file or directory that should be formatted - url_prefix : str - prefix to be prepended to all files to form a working link [default: - ''] - result_html_prefix : str - text to append to beginning to link [default: ''] - result_html_suffix : str - text to append at the end of link [default: '<br>'] - """ - if isdir(path): - raise ValueError("Cannot display a directory using FileLink. " - "Use FileLinks to display '%s'." % path) - self.path = fsdecode(path) - self.url_prefix = url_prefix - self.result_html_prefix = result_html_prefix - self.result_html_suffix = result_html_suffix - - def _format_path(self): - fp = ''.join([self.url_prefix, html_escape(self.path)]) - return ''.join([self.result_html_prefix, - self.html_link_str % \ - (fp, html_escape(self.path, quote=False)), - self.result_html_suffix]) - - def _repr_html_(self): - """return html link to file - """ - if not exists(self.path): - return ("Path (<tt>%s</tt>) doesn't exist. " - "It may still be in the process of " - "being generated, or you may have the " - "incorrect path." % self.path) - - return self._format_path() - - def __repr__(self): - """return absolute path to file - """ - return abspath(self.path) - -class FileLinks(FileLink): - """Class for embedding local file links in an IPython session, based on path - - e.g. to embed links to files that were generated in the IPython notebook - under ``my/data``, you would do:: - - local_files = FileLinks("my/data") - display(local_files) - - or in the HTML notebook, just:: - - FileLinks("my/data") - """ - def __init__(self, - path, - url_prefix='', - included_suffixes=None, - result_html_prefix='', - result_html_suffix='<br>', - notebook_display_formatter=None, - terminal_display_formatter=None, - recursive=True): - """ - See :class:`FileLink` for the ``path``, ``url_prefix``, - ``result_html_prefix`` and ``result_html_suffix`` parameters. - - included_suffixes : list - Filename suffixes to include when formatting output [default: include - all files] - - notebook_display_formatter : function - Used to format links for display in the notebook. See discussion of - formatter functions below. 
- - terminal_display_formatter : function - Used to format links for display in the terminal. See discussion of - formatter functions below. - - Formatter functions must be of the form:: - - f(dirname, fnames, included_suffixes) - - dirname : str - The name of a directory - fnames : list - The files in that directory - included_suffixes : list - The file suffixes that should be included in the output (passing None - meansto include all suffixes in the output in the built-in formatters) - recursive : boolean - Whether to recurse into subdirectories. Default is True. - - The function should return a list of lines that will be printed in the - notebook (if passing notebook_display_formatter) or the terminal (if - passing terminal_display_formatter). This function is iterated over for - each directory in self.path. Default formatters are in place, can be - passed here to support alternative formatting. - - """ - if isfile(path): - raise ValueError("Cannot display a file using FileLinks. " - "Use FileLink to display '%s'." % path) - self.included_suffixes = included_suffixes - # remove trailing slashes for more consistent output formatting - path = path.rstrip('/') - - self.path = path - self.url_prefix = url_prefix - self.result_html_prefix = result_html_prefix - self.result_html_suffix = result_html_suffix - - self.notebook_display_formatter = \ - notebook_display_formatter or self._get_notebook_display_formatter() - self.terminal_display_formatter = \ - terminal_display_formatter or self._get_terminal_display_formatter() - - self.recursive = recursive - - def _get_display_formatter(self, - dirname_output_format, - fname_output_format, - fp_format, - fp_cleaner=None): - """ generate built-in formatter function - - this is used to define both the notebook and terminal built-in - formatters as they only differ by some wrapper text for each entry - - dirname_output_format: string to use for formatting directory - names, dirname will be substituted for a single "%s" which - must appear in this string - fname_output_format: string to use for formatting file names, - if a single "%s" appears in the string, fname will be substituted - if two "%s" appear in the string, the path to fname will be - substituted for the first and fname will be substituted for the - second - fp_format: string to use for formatting filepaths, must contain - exactly two "%s" and the dirname will be substituted for the first - and fname will be substituted for the second - """ - def f(dirname, fnames, included_suffixes=None): - result = [] - # begin by figuring out which filenames, if any, - # are going to be displayed - display_fnames = [] - for fname in fnames: - if (isfile(join(dirname,fname)) and - (included_suffixes is None or - splitext(fname)[1] in included_suffixes)): - display_fnames.append(fname) - - if len(display_fnames) == 0: - # if there are no filenames to display, don't print anything - # (not even the directory name) - pass - else: - # otherwise print the formatted directory name followed by - # the formatted filenames - dirname_output_line = dirname_output_format % dirname - result.append(dirname_output_line) - for fname in display_fnames: - fp = fp_format % (dirname,fname) - if fp_cleaner is not None: - fp = fp_cleaner(fp) - try: - # output can include both a filepath and a filename... - fname_output_line = fname_output_format % (fp, fname) - except TypeError: - # ... 
or just a single filepath - fname_output_line = fname_output_format % fname - result.append(fname_output_line) - return result - return f - - def _get_notebook_display_formatter(self, - spacer=" "): - """ generate function to use for notebook formatting - """ - dirname_output_format = \ - self.result_html_prefix + "%s/" + self.result_html_suffix - fname_output_format = \ - self.result_html_prefix + spacer + self.html_link_str + self.result_html_suffix - fp_format = self.url_prefix + '%s/%s' - if sep == "\\": - # Working on a platform where the path separator is "\", so - # must convert these to "/" for generating a URI - def fp_cleaner(fp): - # Replace all occurrences of backslash ("\") with a forward - # slash ("/") - this is necessary on windows when a path is - # provided as input, but we must link to a URI - return fp.replace('\\','/') - else: - fp_cleaner = None - - return self._get_display_formatter(dirname_output_format, - fname_output_format, - fp_format, - fp_cleaner) - - def _get_terminal_display_formatter(self, - spacer=" "): - """ generate function to use for terminal formatting - """ - dirname_output_format = "%s/" - fname_output_format = spacer + "%s" - fp_format = '%s/%s' - - return self._get_display_formatter(dirname_output_format, - fname_output_format, - fp_format) - - def _format_path(self): - result_lines = [] - if self.recursive: - walked_dir = list(walk(self.path)) - else: - walked_dir = [next(walk(self.path))] - walked_dir.sort() - for dirname, subdirs, fnames in walked_dir: - result_lines += self.notebook_display_formatter(dirname, fnames, self.included_suffixes) - return '\n'.join(result_lines) - - def __repr__(self): - """return newline-separated absolute paths - """ - result_lines = [] - if self.recursive: - walked_dir = list(walk(self.path)) - else: - walked_dir = [next(walk(self.path))] - walked_dir.sort() - for dirname, subdirs, fnames in walked_dir: - result_lines += self.terminal_display_formatter(dirname, fnames, self.included_suffixes) - return '\n'.join(result_lines) - - -class Code(TextDisplayObject): - """Display syntax-highlighted source code. - - This uses Pygments to highlight the code for HTML and Latex output. - - Parameters - ---------- - data : str - The code as a string - url : str - A URL to fetch the code from - filename : str - A local filename to load the code from - language : str - The short name of a Pygments lexer to use for highlighting. - If not specified, it will guess the lexer based on the filename - or the code. 
Available lexers: http://pygments.org/docs/lexers/ - """ - def __init__(self, data=None, url=None, filename=None, language=None): - self.language = language - super().__init__(data=data, url=url, filename=filename) - - def _get_lexer(self): - if self.language: - from pygments.lexers import get_lexer_by_name - return get_lexer_by_name(self.language) - elif self.filename: - from pygments.lexers import get_lexer_for_filename - return get_lexer_for_filename(self.filename) - else: - from pygments.lexers import guess_lexer - return guess_lexer(self.data) - - def __repr__(self): - return self.data - - def _repr_html_(self): - from pygments import highlight - from pygments.formatters import HtmlFormatter - fmt = HtmlFormatter() - style = '<style>{}</style>'.format(fmt.get_style_defs('.output_html')) - return style + highlight(self.data, self._get_lexer(), fmt) - - def _repr_latex_(self): - from pygments import highlight - from pygments.formatters import LatexFormatter - return highlight(self.data, self._get_lexer(), LatexFormatter()) +"""Various display related classes. + +Authors : MinRK, gregcaporaso, dannystaple +""" +from html import escape as html_escape +from os.path import exists, isfile, splitext, abspath, join, isdir +from os import walk, sep, fsdecode + +from IPython.core.display import DisplayObject, TextDisplayObject + +from typing import Tuple, Iterable + +__all__ = ['Audio', 'IFrame', 'YouTubeVideo', 'VimeoVideo', 'ScribdDocument', + 'FileLink', 'FileLinks', 'Code'] + + +class Audio(DisplayObject): + """Create an audio object. + + When this object is returned by an input cell or passed to the + display function, it will result in Audio controls being displayed + in the frontend (only works in the notebook). + + Parameters + ---------- + data : numpy array, list, unicode, str or bytes + Can be one of + + * Numpy 1d array containing the desired waveform (mono) + * Numpy 2d array containing waveforms for each channel. + Shape=(NCHAN, NSAMPLES). For the standard channel order, see + http://msdn.microsoft.com/en-us/library/windows/hardware/dn653308(v=vs.85).aspx + * List of float or integer representing the waveform (mono) + * String containing the filename + * Bytestring containing raw PCM data or + * URL pointing to a file on the web. + + If the array option is used, the waveform will be normalized. + + If a filename or url is used, the format support will be browser + dependent. + url : unicode + A URL to download the data from. + filename : unicode + Path to a local file to load the data from. + embed : boolean + Should the audio data be embedded using a data URI (True) or should + the original source be referenced. Set this to True if you want the + audio to playable later with no internet connection in the notebook. + + Default is `True`, unless the keyword argument `url` is set, then + default value is `False`. + rate : integer + The sampling rate of the raw data. + Only required when data parameter is being used as an array + autoplay : bool + Set to True if the audio should immediately start playing. + Default is `False`. + normalize : bool + Whether audio should be normalized (rescaled) to the maximum possible + range. Default is `True`. When set to `False`, `data` must be between + -1 and 1 (inclusive), otherwise an error is raised. + Applies only when `data` is a list or array of samples; other types of + audio are never normalized. 
+ + Examples + -------- + :: + + # Generate a sound + import numpy as np + framerate = 44100 + t = np.linspace(0,5,framerate*5) + data = np.sin(2*np.pi*220*t) + np.sin(2*np.pi*224*t) + Audio(data,rate=framerate) + + # Can also do stereo or more channels + dataleft = np.sin(2*np.pi*220*t) + dataright = np.sin(2*np.pi*224*t) + Audio([dataleft, dataright],rate=framerate) + + Audio("http://www.nch.com.au/acm/8k16bitpcm.wav") # From URL + Audio(url="http://www.w3schools.com/html/horse.ogg") + + Audio('/path/to/sound.wav') # From file + Audio(filename='/path/to/sound.ogg') + + Audio(b'RAW_WAV_DATA..) # From bytes + Audio(data=b'RAW_WAV_DATA..) + + See Also + -------- + + See also the ``Audio`` widgets form the ``ipywidget`` package for more flexibility and options. + + """ + _read_flags = 'rb' + + def __init__(self, data=None, filename=None, url=None, embed=None, rate=None, autoplay=False, normalize=True, *, + element_id=None): + if filename is None and url is None and data is None: + raise ValueError("No audio data found. Expecting filename, url, or data.") + if embed is False and url is None: + raise ValueError("No url found. Expecting url when embed=False") + + if url is not None and embed is not True: + self.embed = False + else: + self.embed = True + self.autoplay = autoplay + self.element_id = element_id + super(Audio, self).__init__(data=data, url=url, filename=filename) + + if self.data is not None and not isinstance(self.data, bytes): + if rate is None: + raise ValueError("rate must be specified when data is a numpy array or list of audio samples.") + self.data = Audio._make_wav(data, rate, normalize) + + def reload(self): + """Reload the raw data from file or URL.""" + import mimetypes + if self.embed: + super(Audio, self).reload() + + if self.filename is not None: + self.mimetype = mimetypes.guess_type(self.filename)[0] + elif self.url is not None: + self.mimetype = mimetypes.guess_type(self.url)[0] + else: + self.mimetype = "audio/wav" + + @staticmethod + def _make_wav(data, rate, normalize): + """ Transform a numpy array to a PCM bytestring """ + from io import BytesIO + import wave + + try: + scaled, nchan = Audio._validate_and_normalize_with_numpy(data, normalize) + except ImportError: + scaled, nchan = Audio._validate_and_normalize_without_numpy(data, normalize) + + fp = BytesIO() + waveobj = wave.open(fp,mode='wb') + waveobj.setnchannels(nchan) + waveobj.setframerate(rate) + waveobj.setsampwidth(2) + waveobj.setcomptype('NONE','NONE') + waveobj.writeframes(scaled) + val = fp.getvalue() + waveobj.close() + + return val + + @staticmethod + def _validate_and_normalize_with_numpy(data, normalize) -> Tuple[bytes, int]: + import numpy as np + + data = np.array(data, dtype=float) + if len(data.shape) == 1: + nchan = 1 + elif len(data.shape) == 2: + # In wave files,channels are interleaved. E.g., + # "L1R1L2R2..." for stereo. 
See + # http://msdn.microsoft.com/en-us/library/windows/hardware/dn653308(v=vs.85).aspx + # for channel ordering + nchan = data.shape[0] + data = data.T.ravel() + else: + raise ValueError('Array audio input must be a 1D or 2D array') + + max_abs_value = np.max(np.abs(data)) + normalization_factor = Audio._get_normalization_factor(max_abs_value, normalize) + scaled = data / normalization_factor * 32767 + return scaled.astype("<h").tobytes(), nchan + + @staticmethod + def _validate_and_normalize_without_numpy(data, normalize): + import array + import sys + + data = array.array('f', data) + + try: + max_abs_value = float(max([abs(x) for x in data])) + except TypeError: + raise TypeError('Only lists of mono audio are ' + 'supported if numpy is not installed') + + normalization_factor = Audio._get_normalization_factor(max_abs_value, normalize) + scaled = array.array('h', [int(x / normalization_factor * 32767) for x in data]) + if sys.byteorder == 'big': + scaled.byteswap() + nchan = 1 + return scaled.tobytes(), nchan + + @staticmethod + def _get_normalization_factor(max_abs_value, normalize): + if not normalize and max_abs_value > 1: + raise ValueError('Audio data must be between -1 and 1 when normalize=False.') + return max_abs_value if normalize else 1 + + def _data_and_metadata(self): + """shortcut for returning metadata with url information, if defined""" + md = {} + if self.url: + md['url'] = self.url + if md: + return self.data, md + else: + return self.data + + def _repr_html_(self): + src = """ + <audio {element_id} controls="controls" {autoplay}> + <source src="{src}" type="{type}" /> + Your browser does not support the audio element. + </audio> + """ + return src.format(src=self.src_attr(), type=self.mimetype, autoplay=self.autoplay_attr(), + element_id=self.element_id_attr()) + + def src_attr(self): + import base64 + if self.embed and (self.data is not None): + data = base64=base64.b64encode(self.data).decode('ascii') + return """data:{type};base64,{base64}""".format(type=self.mimetype, + base64=data) + elif self.url is not None: + return self.url + else: + return "" + + def autoplay_attr(self): + if(self.autoplay): + return 'autoplay="autoplay"' + else: + return '' + + def element_id_attr(self): + if (self.element_id): + return 'id="{element_id}"'.format(element_id=self.element_id) + else: + return '' + +class IFrame(object): + """ + Generic class to embed an iframe in an IPython notebook + """ + + iframe = """ + <iframe + width="{width}" + height="{height}" + src="{src}{params}" + frameborder="0" + allowfullscreen + {extras} + ></iframe> + """ + + def __init__(self, src, width, height, extras: Iterable[str] = None, **kwargs): + if extras is None: + extras = [] + + self.src = src + self.width = width + self.height = height + self.extras = extras + self.params = kwargs + + def _repr_html_(self): + """return the embed iframe""" + if self.params: + try: + from urllib.parse import urlencode # Py 3 + except ImportError: + from urllib import urlencode + params = "?" + urlencode(self.params) + else: + params = "" + return self.iframe.format( + src=self.src, + width=self.width, + height=self.height, + params=params, + extras=" ".join(self.extras), + ) + + +class YouTubeVideo(IFrame): + """Class for embedding a YouTube Video in an IPython session, based on its video id. + + e.g. 
to embed the video from https://www.youtube.com/watch?v=foo , you would + do:: + + vid = YouTubeVideo("foo") + display(vid) + + To start from 30 seconds:: + + vid = YouTubeVideo("abc", start=30) + display(vid) + + To calculate seconds from time as hours, minutes, seconds use + :class:`datetime.timedelta`:: + + start=int(timedelta(hours=1, minutes=46, seconds=40).total_seconds()) + + Other parameters can be provided as documented at + https://developers.google.com/youtube/player_parameters#Parameters + + When converting the notebook using nbconvert, a jpeg representation of the video + will be inserted in the document. + """ + + def __init__(self, id, width=400, height=300, allow_autoplay=False, **kwargs): + self.id=id + src = "https://www.youtube.com/embed/{0}".format(id) + if allow_autoplay: + extras = list(kwargs.get("extras", [])) + ['allow="autoplay"'] + kwargs.update(autoplay=1, extras=extras) + super(YouTubeVideo, self).__init__(src, width, height, **kwargs) + + def _repr_jpeg_(self): + # Deferred import + from urllib.request import urlopen + + try: + return urlopen("https://img.youtube.com/vi/{id}/hqdefault.jpg".format(id=self.id)).read() + except IOError: + return None + +class VimeoVideo(IFrame): + """ + Class for embedding a Vimeo video in an IPython session, based on its video id. + """ + + def __init__(self, id, width=400, height=300, **kwargs): + src="https://player.vimeo.com/video/{0}".format(id) + super(VimeoVideo, self).__init__(src, width, height, **kwargs) + +class ScribdDocument(IFrame): + """ + Class for embedding a Scribd document in an IPython session + + Use the start_page params to specify a starting point in the document + Use the view_mode params to specify display type one off scroll | slideshow | book + + e.g to Display Wes' foundational paper about PANDAS in book mode from page 3 + + ScribdDocument(71048089, width=800, height=400, start_page=3, view_mode="book") + """ + + def __init__(self, id, width=400, height=300, **kwargs): + src="https://www.scribd.com/embeds/{0}/content".format(id) + super(ScribdDocument, self).__init__(src, width, height, **kwargs) + +class FileLink(object): + """Class for embedding a local file link in an IPython session, based on path + + e.g. to embed a link that was generated in the IPython notebook as my/data.txt + + you would do:: + + local_file = FileLink("my/data.txt") + display(local_file) + + or in the HTML notebook, just:: + + FileLink("my/data.txt") + """ + + html_link_str = "<a href='%s' target='_blank'>%s</a>" + + def __init__(self, + path, + url_prefix='', + result_html_prefix='', + result_html_suffix='<br>'): + """ + Parameters + ---------- + path : str + path to the file or directory that should be formatted + url_prefix : str + prefix to be prepended to all files to form a working link [default: + ''] + result_html_prefix : str + text to append to beginning to link [default: ''] + result_html_suffix : str + text to append at the end of link [default: '<br>'] + """ + if isdir(path): + raise ValueError("Cannot display a directory using FileLink. " + "Use FileLinks to display '%s'." 
% path) + self.path = fsdecode(path) + self.url_prefix = url_prefix + self.result_html_prefix = result_html_prefix + self.result_html_suffix = result_html_suffix + + def _format_path(self): + fp = ''.join([self.url_prefix, html_escape(self.path)]) + return ''.join([self.result_html_prefix, + self.html_link_str % \ + (fp, html_escape(self.path, quote=False)), + self.result_html_suffix]) + + def _repr_html_(self): + """return html link to file + """ + if not exists(self.path): + return ("Path (<tt>%s</tt>) doesn't exist. " + "It may still be in the process of " + "being generated, or you may have the " + "incorrect path." % self.path) + + return self._format_path() + + def __repr__(self): + """return absolute path to file + """ + return abspath(self.path) + +class FileLinks(FileLink): + """Class for embedding local file links in an IPython session, based on path + + e.g. to embed links to files that were generated in the IPython notebook + under ``my/data``, you would do:: + + local_files = FileLinks("my/data") + display(local_files) + + or in the HTML notebook, just:: + + FileLinks("my/data") + """ + def __init__(self, + path, + url_prefix='', + included_suffixes=None, + result_html_prefix='', + result_html_suffix='<br>', + notebook_display_formatter=None, + terminal_display_formatter=None, + recursive=True): + """ + See :class:`FileLink` for the ``path``, ``url_prefix``, + ``result_html_prefix`` and ``result_html_suffix`` parameters. + + included_suffixes : list + Filename suffixes to include when formatting output [default: include + all files] + + notebook_display_formatter : function + Used to format links for display in the notebook. See discussion of + formatter functions below. + + terminal_display_formatter : function + Used to format links for display in the terminal. See discussion of + formatter functions below. + + Formatter functions must be of the form:: + + f(dirname, fnames, included_suffixes) + + dirname : str + The name of a directory + fnames : list + The files in that directory + included_suffixes : list + The file suffixes that should be included in the output (passing None + meansto include all suffixes in the output in the built-in formatters) + recursive : boolean + Whether to recurse into subdirectories. Default is True. + + The function should return a list of lines that will be printed in the + notebook (if passing notebook_display_formatter) or the terminal (if + passing terminal_display_formatter). This function is iterated over for + each directory in self.path. Default formatters are in place, can be + passed here to support alternative formatting. + + """ + if isfile(path): + raise ValueError("Cannot display a file using FileLinks. " + "Use FileLink to display '%s'." 
% path) + self.included_suffixes = included_suffixes + # remove trailing slashes for more consistent output formatting + path = path.rstrip('/') + + self.path = path + self.url_prefix = url_prefix + self.result_html_prefix = result_html_prefix + self.result_html_suffix = result_html_suffix + + self.notebook_display_formatter = \ + notebook_display_formatter or self._get_notebook_display_formatter() + self.terminal_display_formatter = \ + terminal_display_formatter or self._get_terminal_display_formatter() + + self.recursive = recursive + + def _get_display_formatter(self, + dirname_output_format, + fname_output_format, + fp_format, + fp_cleaner=None): + """ generate built-in formatter function + + this is used to define both the notebook and terminal built-in + formatters as they only differ by some wrapper text for each entry + + dirname_output_format: string to use for formatting directory + names, dirname will be substituted for a single "%s" which + must appear in this string + fname_output_format: string to use for formatting file names, + if a single "%s" appears in the string, fname will be substituted + if two "%s" appear in the string, the path to fname will be + substituted for the first and fname will be substituted for the + second + fp_format: string to use for formatting filepaths, must contain + exactly two "%s" and the dirname will be substituted for the first + and fname will be substituted for the second + """ + def f(dirname, fnames, included_suffixes=None): + result = [] + # begin by figuring out which filenames, if any, + # are going to be displayed + display_fnames = [] + for fname in fnames: + if (isfile(join(dirname,fname)) and + (included_suffixes is None or + splitext(fname)[1] in included_suffixes)): + display_fnames.append(fname) + + if len(display_fnames) == 0: + # if there are no filenames to display, don't print anything + # (not even the directory name) + pass + else: + # otherwise print the formatted directory name followed by + # the formatted filenames + dirname_output_line = dirname_output_format % dirname + result.append(dirname_output_line) + for fname in display_fnames: + fp = fp_format % (dirname,fname) + if fp_cleaner is not None: + fp = fp_cleaner(fp) + try: + # output can include both a filepath and a filename... + fname_output_line = fname_output_format % (fp, fname) + except TypeError: + # ... 
or just a single filepath + fname_output_line = fname_output_format % fname + result.append(fname_output_line) + return result + return f + + def _get_notebook_display_formatter(self, + spacer=" "): + """ generate function to use for notebook formatting + """ + dirname_output_format = \ + self.result_html_prefix + "%s/" + self.result_html_suffix + fname_output_format = \ + self.result_html_prefix + spacer + self.html_link_str + self.result_html_suffix + fp_format = self.url_prefix + '%s/%s' + if sep == "\\": + # Working on a platform where the path separator is "\", so + # must convert these to "/" for generating a URI + def fp_cleaner(fp): + # Replace all occurrences of backslash ("\") with a forward + # slash ("/") - this is necessary on windows when a path is + # provided as input, but we must link to a URI + return fp.replace('\\','/') + else: + fp_cleaner = None + + return self._get_display_formatter(dirname_output_format, + fname_output_format, + fp_format, + fp_cleaner) + + def _get_terminal_display_formatter(self, + spacer=" "): + """ generate function to use for terminal formatting + """ + dirname_output_format = "%s/" + fname_output_format = spacer + "%s" + fp_format = '%s/%s' + + return self._get_display_formatter(dirname_output_format, + fname_output_format, + fp_format) + + def _format_path(self): + result_lines = [] + if self.recursive: + walked_dir = list(walk(self.path)) + else: + walked_dir = [next(walk(self.path))] + walked_dir.sort() + for dirname, subdirs, fnames in walked_dir: + result_lines += self.notebook_display_formatter(dirname, fnames, self.included_suffixes) + return '\n'.join(result_lines) + + def __repr__(self): + """return newline-separated absolute paths + """ + result_lines = [] + if self.recursive: + walked_dir = list(walk(self.path)) + else: + walked_dir = [next(walk(self.path))] + walked_dir.sort() + for dirname, subdirs, fnames in walked_dir: + result_lines += self.terminal_display_formatter(dirname, fnames, self.included_suffixes) + return '\n'.join(result_lines) + + +class Code(TextDisplayObject): + """Display syntax-highlighted source code. + + This uses Pygments to highlight the code for HTML and Latex output. + + Parameters + ---------- + data : str + The code as a string + url : str + A URL to fetch the code from + filename : str + A local filename to load the code from + language : str + The short name of a Pygments lexer to use for highlighting. + If not specified, it will guess the lexer based on the filename + or the code. 
Available lexers: http://pygments.org/docs/lexers/ + """ + def __init__(self, data=None, url=None, filename=None, language=None): + self.language = language + super().__init__(data=data, url=url, filename=filename) + + def _get_lexer(self): + if self.language: + from pygments.lexers import get_lexer_by_name + return get_lexer_by_name(self.language) + elif self.filename: + from pygments.lexers import get_lexer_for_filename + return get_lexer_for_filename(self.filename) + else: + from pygments.lexers import guess_lexer + return guess_lexer(self.data) + + def __repr__(self): + return self.data + + def _repr_html_(self): + from pygments import highlight + from pygments.formatters import HtmlFormatter + fmt = HtmlFormatter() + style = '<style>{}</style>'.format(fmt.get_style_defs('.output_html')) + return style + highlight(self.data, self._get_lexer(), fmt) + + def _repr_latex_(self): + from pygments import highlight + from pygments.formatters import LatexFormatter + return highlight(self.data, self._get_lexer(), LatexFormatter()) diff --git a/contrib/python/ipython/py3/IPython/lib/editorhooks.py b/contrib/python/ipython/py3/IPython/lib/editorhooks.py index 8c076f894aa..d8bd6ac81bc 100644 --- a/contrib/python/ipython/py3/IPython/lib/editorhooks.py +++ b/contrib/python/ipython/py3/IPython/lib/editorhooks.py @@ -1,127 +1,127 @@ -""" 'editor' hooks for common editors that work well with ipython - -They should honor the line number argument, at least. - -Contributions are *very* welcome. -""" - -import os -import shlex -import subprocess -import sys - -from IPython import get_ipython -from IPython.core.error import TryNext -from IPython.utils import py3compat - - -def install_editor(template, wait=False): - """Installs the editor that is called by IPython for the %edit magic. - - This overrides the default editor, which is generally set by your EDITOR - environment variable or is notepad (windows) or vi (linux). By supplying a - template string `run_template`, you can control how the editor is invoked - by IPython -- (e.g. the format in which it accepts command line options) - - Parameters - ---------- - template : basestring - run_template acts as a template for how your editor is invoked by - the shell. It should contain '{filename}', which will be replaced on - invocation with the file name, and '{line}', $line by line number - (or 0) to invoke the file with. - wait : bool - If `wait` is true, wait until the user presses enter before returning, - to facilitate non-blocking editors that exit immediately after - the call. - """ - - # not all editors support $line, so we'll leave out this check - # for substitution in ['$file', '$line']: - # if not substitution in run_template: - # raise ValueError(('run_template should contain %s' - # ' for string substitution. You supplied "%s"' % (substitution, - # run_template))) - - def call_editor(self, filename, line=0): - if line is None: - line = 0 - cmd = template.format(filename=shlex.quote(filename), line=line) - print(">", cmd) - # shlex.quote doesn't work right on Windows, but it does after splitting - if sys.platform.startswith('win'): - cmd = shlex.split(cmd) - proc = subprocess.Popen(cmd, shell=True) - if proc.wait() != 0: - raise TryNext() - if wait: - py3compat.input("Press Enter when done editing:") - - get_ipython().set_hook('editor', call_editor) - get_ipython().editor = template - - -# in these, exe is always the path/name of the executable. 
Useful -# if you don't have the editor directory in your path -def komodo(exe=u'komodo'): - """ Activestate Komodo [Edit] """ - install_editor(exe + u' -l {line} {filename}', wait=True) - - -def scite(exe=u"scite"): - """ SciTE or Sc1 """ - install_editor(exe + u' {filename} -goto:{line}') - - -def notepadplusplus(exe=u'notepad++'): - """ Notepad++ http://notepad-plus.sourceforge.net """ - install_editor(exe + u' -n{line} {filename}') - - -def jed(exe=u'jed'): - """ JED, the lightweight emacsish editor """ - install_editor(exe + u' +{line} {filename}') - - -def idle(exe=u'idle'): - """ Idle, the editor bundled with python - - Parameters - ---------- - exe : str, None - If none, should be pretty smart about finding the executable. - """ - if exe is None: - import idlelib - p = os.path.dirname(idlelib.__filename__) - # i'm not sure if this actually works. Is this idle.py script - # guaranteed to be executable? - exe = os.path.join(p, 'idle.py') - install_editor(exe + u' {filename}') - - -def mate(exe=u'mate'): - """ TextMate, the missing editor""" - # wait=True is not required since we're using the -w flag to mate - install_editor(exe + u' -w -l {line} {filename}') - - -# ########################################## -# these are untested, report any problems -# ########################################## - - -def emacs(exe=u'emacs'): - install_editor(exe + u' +{line} {filename}') - - -def gnuclient(exe=u'gnuclient'): - install_editor(exe + u' -nw +{line} {filename}') - - -def crimson_editor(exe=u'cedt.exe'): - install_editor(exe + u' /L:{line} {filename}') - - -def kate(exe=u'kate'): - install_editor(exe + u' -u -l {line} {filename}') +""" 'editor' hooks for common editors that work well with ipython + +They should honor the line number argument, at least. + +Contributions are *very* welcome. +""" + +import os +import shlex +import subprocess +import sys + +from IPython import get_ipython +from IPython.core.error import TryNext +from IPython.utils import py3compat + + +def install_editor(template, wait=False): + """Installs the editor that is called by IPython for the %edit magic. + + This overrides the default editor, which is generally set by your EDITOR + environment variable or is notepad (windows) or vi (linux). By supplying a + template string `run_template`, you can control how the editor is invoked + by IPython -- (e.g. the format in which it accepts command line options) + + Parameters + ---------- + template : basestring + run_template acts as a template for how your editor is invoked by + the shell. It should contain '{filename}', which will be replaced on + invocation with the file name, and '{line}', $line by line number + (or 0) to invoke the file with. + wait : bool + If `wait` is true, wait until the user presses enter before returning, + to facilitate non-blocking editors that exit immediately after + the call. + """ + + # not all editors support $line, so we'll leave out this check + # for substitution in ['$file', '$line']: + # if not substitution in run_template: + # raise ValueError(('run_template should contain %s' + # ' for string substitution. 
You supplied "%s"' % (substitution, + # run_template))) + + def call_editor(self, filename, line=0): + if line is None: + line = 0 + cmd = template.format(filename=shlex.quote(filename), line=line) + print(">", cmd) + # shlex.quote doesn't work right on Windows, but it does after splitting + if sys.platform.startswith('win'): + cmd = shlex.split(cmd) + proc = subprocess.Popen(cmd, shell=True) + if proc.wait() != 0: + raise TryNext() + if wait: + py3compat.input("Press Enter when done editing:") + + get_ipython().set_hook('editor', call_editor) + get_ipython().editor = template + + +# in these, exe is always the path/name of the executable. Useful +# if you don't have the editor directory in your path +def komodo(exe=u'komodo'): + """ Activestate Komodo [Edit] """ + install_editor(exe + u' -l {line} {filename}', wait=True) + + +def scite(exe=u"scite"): + """ SciTE or Sc1 """ + install_editor(exe + u' {filename} -goto:{line}') + + +def notepadplusplus(exe=u'notepad++'): + """ Notepad++ http://notepad-plus.sourceforge.net """ + install_editor(exe + u' -n{line} {filename}') + + +def jed(exe=u'jed'): + """ JED, the lightweight emacsish editor """ + install_editor(exe + u' +{line} {filename}') + + +def idle(exe=u'idle'): + """ Idle, the editor bundled with python + + Parameters + ---------- + exe : str, None + If none, should be pretty smart about finding the executable. + """ + if exe is None: + import idlelib + p = os.path.dirname(idlelib.__filename__) + # i'm not sure if this actually works. Is this idle.py script + # guaranteed to be executable? + exe = os.path.join(p, 'idle.py') + install_editor(exe + u' {filename}') + + +def mate(exe=u'mate'): + """ TextMate, the missing editor""" + # wait=True is not required since we're using the -w flag to mate + install_editor(exe + u' -w -l {line} {filename}') + + +# ########################################## +# these are untested, report any problems +# ########################################## + + +def emacs(exe=u'emacs'): + install_editor(exe + u' +{line} {filename}') + + +def gnuclient(exe=u'gnuclient'): + install_editor(exe + u' -nw +{line} {filename}') + + +def crimson_editor(exe=u'cedt.exe'): + install_editor(exe + u' /L:{line} {filename}') + + +def kate(exe=u'kate'): + install_editor(exe + u' -u -l {line} {filename}') diff --git a/contrib/python/ipython/py3/IPython/lib/guisupport.py b/contrib/python/ipython/py3/IPython/lib/guisupport.py index 0cf95ee6277..cfd325e9da8 100644 --- a/contrib/python/ipython/py3/IPython/lib/guisupport.py +++ b/contrib/python/ipython/py3/IPython/lib/guisupport.py @@ -1,155 +1,155 @@ -# coding: utf-8 -""" -Support for creating GUI apps and starting event loops. - -IPython's GUI integration allows interactive plotting and GUI usage in IPython -session. IPython has two different types of GUI integration: - -1. The terminal based IPython supports GUI event loops through Python's - PyOS_InputHook. PyOS_InputHook is a hook that Python calls periodically - whenever raw_input is waiting for a user to type code. We implement GUI - support in the terminal by setting PyOS_InputHook to a function that - iterates the event loop for a short while. It is important to note that - in this situation, the real GUI event loop is NOT run in the normal - manner, so you can't use the normal means to detect that it is running. -2. In the two process IPython kernel/frontend, the GUI event loop is run in - the kernel. 
In this case, the event loop is run in the normal manner by - calling the function or method of the GUI toolkit that starts the event - loop. - -In addition to starting the GUI event loops in one of these two ways, IPython -will *always* create an appropriate GUI application object when GUi -integration is enabled. - -If you want your GUI apps to run in IPython you need to do two things: - -1. Test to see if there is already an existing main application object. If - there is, you should use it. If there is not an existing application object - you should create one. -2. Test to see if the GUI event loop is running. If it is, you should not - start it. If the event loop is not running you may start it. - -This module contains functions for each toolkit that perform these things -in a consistent manner. Because of how PyOS_InputHook runs the event loop -you cannot detect if the event loop is running using the traditional calls -(such as ``wx.GetApp.IsMainLoopRunning()`` in wxPython). If PyOS_InputHook is -set These methods will return a false negative. That is, they will say the -event loop is not running, when is actually is. To work around this limitation -we proposed the following informal protocol: - -* Whenever someone starts the event loop, they *must* set the ``_in_event_loop`` - attribute of the main application object to ``True``. This should be done - regardless of how the event loop is actually run. -* Whenever someone stops the event loop, they *must* set the ``_in_event_loop`` - attribute of the main application object to ``False``. -* If you want to see if the event loop is running, you *must* use ``hasattr`` - to see if ``_in_event_loop`` attribute has been set. If it is set, you - *must* use its value. If it has not been set, you can query the toolkit - in the normal manner. -* If you want GUI support and no one else has created an application or - started the event loop you *must* do this. We don't want projects to - attempt to defer these things to someone else if they themselves need it. - -The functions below implement this logic for each GUI toolkit. If you need -to create custom application subclasses, you will likely have to modify this -code for your own purposes. This code can be copied into your own project -so you don't have to depend on IPython. - -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -from IPython.core.getipython import get_ipython - -#----------------------------------------------------------------------------- -# wx -#----------------------------------------------------------------------------- - -def get_app_wx(*args, **kwargs): - """Create a new wx app or return an exiting one.""" - import wx - app = wx.GetApp() - if app is None: - if 'redirect' not in kwargs: - kwargs['redirect'] = False - app = wx.PySimpleApp(*args, **kwargs) - return app - -def is_event_loop_running_wx(app=None): - """Is the wx event loop running.""" - # New way: check attribute on shell instance - ip = get_ipython() - if ip is not None: - if ip.active_eventloop and ip.active_eventloop == 'wx': - return True - # Fall through to checking the application, because Wx has a native way - # to check if the event loop is running, unlike Qt. 
- - # Old way: check Wx application - if app is None: - app = get_app_wx() - if hasattr(app, '_in_event_loop'): - return app._in_event_loop - else: - return app.IsMainLoopRunning() - -def start_event_loop_wx(app=None): - """Start the wx event loop in a consistent manner.""" - if app is None: - app = get_app_wx() - if not is_event_loop_running_wx(app): - app._in_event_loop = True - app.MainLoop() - app._in_event_loop = False - else: - app._in_event_loop = True - -#----------------------------------------------------------------------------- -# qt4 -#----------------------------------------------------------------------------- - -def get_app_qt4(*args, **kwargs): - """Create a new qt4 app or return an existing one.""" - from IPython.external.qt_for_kernel import QtGui - app = QtGui.QApplication.instance() - if app is None: - if not args: - args = ([''],) - app = QtGui.QApplication(*args, **kwargs) - return app - -def is_event_loop_running_qt4(app=None): - """Is the qt4 event loop running.""" - # New way: check attribute on shell instance - ip = get_ipython() - if ip is not None: - return ip.active_eventloop and ip.active_eventloop.startswith('qt') - - # Old way: check attribute on QApplication singleton - if app is None: - app = get_app_qt4(['']) - if hasattr(app, '_in_event_loop'): - return app._in_event_loop - else: - # Does qt4 provide a other way to detect this? - return False - -def start_event_loop_qt4(app=None): - """Start the qt4 event loop in a consistent manner.""" - if app is None: - app = get_app_qt4(['']) - if not is_event_loop_running_qt4(app): - app._in_event_loop = True - app.exec_() - app._in_event_loop = False - else: - app._in_event_loop = True - -#----------------------------------------------------------------------------- -# Tk -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# gtk -#----------------------------------------------------------------------------- +# coding: utf-8 +""" +Support for creating GUI apps and starting event loops. + +IPython's GUI integration allows interactive plotting and GUI usage in IPython +session. IPython has two different types of GUI integration: + +1. The terminal based IPython supports GUI event loops through Python's + PyOS_InputHook. PyOS_InputHook is a hook that Python calls periodically + whenever raw_input is waiting for a user to type code. We implement GUI + support in the terminal by setting PyOS_InputHook to a function that + iterates the event loop for a short while. It is important to note that + in this situation, the real GUI event loop is NOT run in the normal + manner, so you can't use the normal means to detect that it is running. +2. In the two process IPython kernel/frontend, the GUI event loop is run in + the kernel. In this case, the event loop is run in the normal manner by + calling the function or method of the GUI toolkit that starts the event + loop. + +In addition to starting the GUI event loops in one of these two ways, IPython +will *always* create an appropriate GUI application object when GUi +integration is enabled. + +If you want your GUI apps to run in IPython you need to do two things: + +1. Test to see if there is already an existing main application object. If + there is, you should use it. If there is not an existing application object + you should create one. +2. Test to see if the GUI event loop is running. If it is, you should not + start it. 
If the event loop is not running you may start it. + +This module contains functions for each toolkit that perform these things +in a consistent manner. Because of how PyOS_InputHook runs the event loop +you cannot detect if the event loop is running using the traditional calls +(such as ``wx.GetApp.IsMainLoopRunning()`` in wxPython). If PyOS_InputHook is +set These methods will return a false negative. That is, they will say the +event loop is not running, when is actually is. To work around this limitation +we proposed the following informal protocol: + +* Whenever someone starts the event loop, they *must* set the ``_in_event_loop`` + attribute of the main application object to ``True``. This should be done + regardless of how the event loop is actually run. +* Whenever someone stops the event loop, they *must* set the ``_in_event_loop`` + attribute of the main application object to ``False``. +* If you want to see if the event loop is running, you *must* use ``hasattr`` + to see if ``_in_event_loop`` attribute has been set. If it is set, you + *must* use its value. If it has not been set, you can query the toolkit + in the normal manner. +* If you want GUI support and no one else has created an application or + started the event loop you *must* do this. We don't want projects to + attempt to defer these things to someone else if they themselves need it. + +The functions below implement this logic for each GUI toolkit. If you need +to create custom application subclasses, you will likely have to modify this +code for your own purposes. This code can be copied into your own project +so you don't have to depend on IPython. + +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +from IPython.core.getipython import get_ipython + +#----------------------------------------------------------------------------- +# wx +#----------------------------------------------------------------------------- + +def get_app_wx(*args, **kwargs): + """Create a new wx app or return an exiting one.""" + import wx + app = wx.GetApp() + if app is None: + if 'redirect' not in kwargs: + kwargs['redirect'] = False + app = wx.PySimpleApp(*args, **kwargs) + return app + +def is_event_loop_running_wx(app=None): + """Is the wx event loop running.""" + # New way: check attribute on shell instance + ip = get_ipython() + if ip is not None: + if ip.active_eventloop and ip.active_eventloop == 'wx': + return True + # Fall through to checking the application, because Wx has a native way + # to check if the event loop is running, unlike Qt. 
+ + # Old way: check Wx application + if app is None: + app = get_app_wx() + if hasattr(app, '_in_event_loop'): + return app._in_event_loop + else: + return app.IsMainLoopRunning() + +def start_event_loop_wx(app=None): + """Start the wx event loop in a consistent manner.""" + if app is None: + app = get_app_wx() + if not is_event_loop_running_wx(app): + app._in_event_loop = True + app.MainLoop() + app._in_event_loop = False + else: + app._in_event_loop = True + +#----------------------------------------------------------------------------- +# qt4 +#----------------------------------------------------------------------------- + +def get_app_qt4(*args, **kwargs): + """Create a new qt4 app or return an existing one.""" + from IPython.external.qt_for_kernel import QtGui + app = QtGui.QApplication.instance() + if app is None: + if not args: + args = ([''],) + app = QtGui.QApplication(*args, **kwargs) + return app + +def is_event_loop_running_qt4(app=None): + """Is the qt4 event loop running.""" + # New way: check attribute on shell instance + ip = get_ipython() + if ip is not None: + return ip.active_eventloop and ip.active_eventloop.startswith('qt') + + # Old way: check attribute on QApplication singleton + if app is None: + app = get_app_qt4(['']) + if hasattr(app, '_in_event_loop'): + return app._in_event_loop + else: + # Does qt4 provide a other way to detect this? + return False + +def start_event_loop_qt4(app=None): + """Start the qt4 event loop in a consistent manner.""" + if app is None: + app = get_app_qt4(['']) + if not is_event_loop_running_qt4(app): + app._in_event_loop = True + app.exec_() + app._in_event_loop = False + else: + app._in_event_loop = True + +#----------------------------------------------------------------------------- +# Tk +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# gtk +#----------------------------------------------------------------------------- diff --git a/contrib/python/ipython/py3/IPython/lib/inputhook.py b/contrib/python/ipython/py3/IPython/lib/inputhook.py index eb36537ea25..e6e8f2dbbc7 100644 --- a/contrib/python/ipython/py3/IPython/lib/inputhook.py +++ b/contrib/python/ipython/py3/IPython/lib/inputhook.py @@ -1,666 +1,666 @@ -# coding: utf-8 -""" -Deprecated since IPython 5.0 - -Inputhook management for GUI event loop integration. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -try: - import ctypes -except ImportError: - ctypes = None -except SystemError: # IronPython issue, 2/8/2014 - ctypes = None -import os -import platform -import sys -from distutils.version import LooseVersion as V - -from warnings import warn - - -warn("`IPython.lib.inputhook` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - - -#----------------------------------------------------------------------------- -# Constants -#----------------------------------------------------------------------------- - -# Constants for identifying the GUI toolkits. -GUI_WX = 'wx' -GUI_QT = 'qt' -GUI_QT4 = 'qt4' -GUI_GTK = 'gtk' -GUI_TK = 'tk' -GUI_OSX = 'osx' -GUI_GLUT = 'glut' -GUI_PYGLET = 'pyglet' -GUI_GTK3 = 'gtk3' -GUI_NONE = 'none' # i.e. 
disable - -#----------------------------------------------------------------------------- -# Utilities -#----------------------------------------------------------------------------- - -def _stdin_ready_posix(): - """Return True if there's something to read on stdin (posix version).""" - infds, outfds, erfds = select.select([sys.stdin],[],[],0) - return bool(infds) - -def _stdin_ready_nt(): - """Return True if there's something to read on stdin (nt version).""" - return msvcrt.kbhit() - -def _stdin_ready_other(): - """Return True, assuming there's something to read on stdin.""" - return True - -def _use_appnope(): - """Should we use appnope for dealing with OS X app nap? - - Checks if we are on OS X 10.9 or greater. - """ - return sys.platform == 'darwin' and V(platform.mac_ver()[0]) >= V('10.9') - -def _ignore_CTRL_C_posix(): - """Ignore CTRL+C (SIGINT).""" - signal.signal(signal.SIGINT, signal.SIG_IGN) - -def _allow_CTRL_C_posix(): - """Take CTRL+C into account (SIGINT).""" - signal.signal(signal.SIGINT, signal.default_int_handler) - -def _ignore_CTRL_C_other(): - """Ignore CTRL+C (not implemented).""" - pass - -def _allow_CTRL_C_other(): - """Take CTRL+C into account (not implemented).""" - pass - -if os.name == 'posix': - import select - import signal - stdin_ready = _stdin_ready_posix - ignore_CTRL_C = _ignore_CTRL_C_posix - allow_CTRL_C = _allow_CTRL_C_posix -elif os.name == 'nt': - import msvcrt - stdin_ready = _stdin_ready_nt - ignore_CTRL_C = _ignore_CTRL_C_other - allow_CTRL_C = _allow_CTRL_C_other -else: - stdin_ready = _stdin_ready_other - ignore_CTRL_C = _ignore_CTRL_C_other - allow_CTRL_C = _allow_CTRL_C_other - - -#----------------------------------------------------------------------------- -# Main InputHookManager class -#----------------------------------------------------------------------------- - - -class InputHookManager(object): - """DEPRECATED since IPython 5.0 - - Manage PyOS_InputHook for different GUI toolkits. - - This class installs various hooks under ``PyOSInputHook`` to handle - GUI event loop integration. - """ - - def __init__(self): - if ctypes is None: - warn("IPython GUI event loop requires ctypes, %gui will not be available") - else: - self.PYFUNC = ctypes.PYFUNCTYPE(ctypes.c_int) - self.guihooks = {} - self.aliases = {} - self.apps = {} - self._reset() - - def _reset(self): - self._callback_pyfunctype = None - self._callback = None - self._installed = False - self._current_gui = None - - def get_pyos_inputhook(self): - """DEPRECATED since IPython 5.0 - - Return the current PyOS_InputHook as a ctypes.c_void_p.""" - warn("`get_pyos_inputhook` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - return ctypes.c_void_p.in_dll(ctypes.pythonapi,"PyOS_InputHook") - - def get_pyos_inputhook_as_func(self): - """DEPRECATED since IPython 5.0 - - Return the current PyOS_InputHook as a ctypes.PYFUNCYPE.""" - warn("`get_pyos_inputhook_as_func` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - return self.PYFUNC.in_dll(ctypes.pythonapi,"PyOS_InputHook") - - def set_inputhook(self, callback): - """DEPRECATED since IPython 5.0 - - Set PyOS_InputHook to callback and return the previous one.""" - # On platforms with 'readline' support, it's all too likely to - # have a KeyboardInterrupt signal delivered *even before* an - # initial ``try:`` clause in the callback can be executed, so - # we need to disable CTRL+C in this situation. 
- ignore_CTRL_C() - self._callback = callback - self._callback_pyfunctype = self.PYFUNC(callback) - pyos_inputhook_ptr = self.get_pyos_inputhook() - original = self.get_pyos_inputhook_as_func() - pyos_inputhook_ptr.value = \ - ctypes.cast(self._callback_pyfunctype, ctypes.c_void_p).value - self._installed = True - return original - - def clear_inputhook(self, app=None): - """DEPRECATED since IPython 5.0 - - Set PyOS_InputHook to NULL and return the previous one. - - Parameters - ---------- - app : optional, ignored - This parameter is allowed only so that clear_inputhook() can be - called with a similar interface as all the ``enable_*`` methods. But - the actual value of the parameter is ignored. This uniform interface - makes it easier to have user-level entry points in the main IPython - app like :meth:`enable_gui`.""" - warn("`clear_inputhook` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - pyos_inputhook_ptr = self.get_pyos_inputhook() - original = self.get_pyos_inputhook_as_func() - pyos_inputhook_ptr.value = ctypes.c_void_p(None).value - allow_CTRL_C() - self._reset() - return original - - def clear_app_refs(self, gui=None): - """DEPRECATED since IPython 5.0 - - Clear IPython's internal reference to an application instance. - - Whenever we create an app for a user on qt4 or wx, we hold a - reference to the app. This is needed because in some cases bad things - can happen if a user doesn't hold a reference themselves. This - method is provided to clear the references we are holding. - - Parameters - ---------- - gui : None or str - If None, clear all app references. If ('wx', 'qt4') clear - the app for that toolkit. References are not held for gtk or tk - as those toolkits don't have the notion of an app. - """ - warn("`clear_app_refs` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - if gui is None: - self.apps = {} - elif gui in self.apps: - del self.apps[gui] - - def register(self, toolkitname, *aliases): - """DEPRECATED since IPython 5.0 - - Register a class to provide the event loop for a given GUI. - - This is intended to be used as a class decorator. It should be passed - the names with which to register this GUI integration. The classes - themselves should subclass :class:`InputHookBase`. - - :: - - @inputhook_manager.register('qt') - class QtInputHook(InputHookBase): - def enable(self, app=None): - ... - """ - warn("`register` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - def decorator(cls): - if ctypes is not None: - inst = cls(self) - self.guihooks[toolkitname] = inst - for a in aliases: - self.aliases[a] = toolkitname - return cls - return decorator - - def current_gui(self): - """DEPRECATED since IPython 5.0 - - Return a string indicating the currently active GUI or None.""" - warn("`current_gui` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - return self._current_gui - - def enable_gui(self, gui=None, app=None): - """DEPRECATED since IPython 5.0 - - Switch amongst GUI input hooks by name. - - This is a higher level method than :meth:`set_inputhook` - it uses the - GUI name to look up a registered object which enables the input hook - for that GUI. 
- - Parameters - ---------- - gui : optional, string or None - If None (or 'none'), clears input hook, otherwise it must be one - of the recognized GUI names (see ``GUI_*`` constants in module). - - app : optional, existing application object. - For toolkits that have the concept of a global app, you can supply an - existing one. If not given, the toolkit will be probed for one, and if - none is found, a new one will be created. Note that GTK does not have - this concept, and passing an app if ``gui=="GTK"`` will raise an error. - - Returns - ------- - The output of the underlying gui switch routine, typically the actual - PyOS_InputHook wrapper object or the GUI toolkit app created, if there was - one. - """ - warn("`enable_gui` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - if gui in (None, GUI_NONE): - return self.disable_gui() - - if gui in self.aliases: - return self.enable_gui(self.aliases[gui], app) - - try: - gui_hook = self.guihooks[gui] - except KeyError: - e = "Invalid GUI request {!r}, valid ones are: {}" - raise ValueError(e.format(gui, ', '.join(self.guihooks))) - self._current_gui = gui - - app = gui_hook.enable(app) - if app is not None: - app._in_event_loop = True - self.apps[gui] = app - return app - - def disable_gui(self): - """DEPRECATED since IPython 5.0 - - Disable GUI event loop integration. - - If an application was registered, this sets its ``_in_event_loop`` - attribute to False. It then calls :meth:`clear_inputhook`. - """ - warn("`disable_gui` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - gui = self._current_gui - if gui in self.apps: - self.apps[gui]._in_event_loop = False - return self.clear_inputhook() - -class InputHookBase(object): - """DEPRECATED since IPython 5.0 - - Base class for input hooks for specific toolkits. - - Subclasses should define an :meth:`enable` method with one argument, ``app``, - which will either be an instance of the toolkit's application class, or None. - They may also define a :meth:`disable` method with no arguments. - """ - def __init__(self, manager): - self.manager = manager - - def disable(self): - pass - -inputhook_manager = InputHookManager() - -@inputhook_manager.register('osx') -class NullInputHook(InputHookBase): - """DEPRECATED since IPython 5.0 - - A null inputhook that doesn't need to do anything""" - def enable(self, app=None): - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - -@inputhook_manager.register('wx') -class WxInputHook(InputHookBase): - def enable(self, app=None): - """DEPRECATED since IPython 5.0 - - Enable event loop integration with wxPython. - - Parameters - ---------- - app : WX Application, optional. - Running application to use. If not given, we probe WX for an - existing application object, and create a new one if none is found. - - Notes - ----- - This methods sets the ``PyOS_InputHook`` for wxPython, which allows - the wxPython to integrate with terminal based applications like - IPython. - - If ``app`` is not given we probe for an existing one, and return it if - found. 
If no existing app is found, we create an :class:`wx.App` as - follows:: - - import wx - app = wx.App(redirect=False, clearSigInt=False) - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - import wx - - wx_version = V(wx.__version__).version - - if wx_version < [2, 8]: - raise ValueError("requires wxPython >= 2.8, but you have %s" % wx.__version__) - - from IPython.lib.inputhookwx import inputhook_wx - self.manager.set_inputhook(inputhook_wx) - if _use_appnope(): - from appnope import nope - nope() - - import wx - if app is None: - app = wx.GetApp() - if app is None: - app = wx.App(redirect=False, clearSigInt=False) - - return app - - def disable(self): - """DEPRECATED since IPython 5.0 - - Disable event loop integration with wxPython. - - This restores appnapp on OS X - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - if _use_appnope(): - from appnope import nap - nap() - -@inputhook_manager.register('qt', 'qt4') -class Qt4InputHook(InputHookBase): - def enable(self, app=None): - """DEPRECATED since IPython 5.0 - - Enable event loop integration with PyQt4. - - Parameters - ---------- - app : Qt Application, optional. - Running application to use. If not given, we probe Qt for an - existing application object, and create a new one if none is found. - - Notes - ----- - This methods sets the PyOS_InputHook for PyQt4, which allows - the PyQt4 to integrate with terminal based applications like - IPython. - - If ``app`` is not given we probe for an existing one, and return it if - found. If no existing app is found, we create an :class:`QApplication` - as follows:: - - from PyQt4 import QtCore - app = QtGui.QApplication(sys.argv) - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - from IPython.lib.inputhookqt4 import create_inputhook_qt4 - app, inputhook_qt4 = create_inputhook_qt4(self.manager, app) - self.manager.set_inputhook(inputhook_qt4) - if _use_appnope(): - from appnope import nope - nope() - - return app - - def disable_qt4(self): - """DEPRECATED since IPython 5.0 - - Disable event loop integration with PyQt4. - - This restores appnapp on OS X - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - if _use_appnope(): - from appnope import nap - nap() - - -@inputhook_manager.register('qt5') -class Qt5InputHook(Qt4InputHook): - def enable(self, app=None): - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - os.environ['QT_API'] = 'pyqt5' - return Qt4InputHook.enable(self, app) - - -@inputhook_manager.register('gtk') -class GtkInputHook(InputHookBase): - def enable(self, app=None): - """DEPRECATED since IPython 5.0 - - Enable event loop integration with PyGTK. - - Parameters - ---------- - app : ignored - Ignored, it's only a placeholder to keep the call signature of all - gui activation methods consistent, which simplifies the logic of - supporting magics. - - Notes - ----- - This methods sets the PyOS_InputHook for PyGTK, which allows - the PyGTK to integrate with terminal based applications like - IPython. 
- """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - import gtk - try: - gtk.set_interactive(True) - except AttributeError: - # For older versions of gtk, use our own ctypes version - from IPython.lib.inputhookgtk import inputhook_gtk - self.manager.set_inputhook(inputhook_gtk) - - -@inputhook_manager.register('tk') -class TkInputHook(InputHookBase): - def enable(self, app=None): - """DEPRECATED since IPython 5.0 - - Enable event loop integration with Tk. - - Parameters - ---------- - app : toplevel :class:`Tkinter.Tk` widget, optional. - Running toplevel widget to use. If not given, we probe Tk for an - existing one, and create a new one if none is found. - - Notes - ----- - If you have already created a :class:`Tkinter.Tk` object, the only - thing done by this method is to register with the - :class:`InputHookManager`, since creating that object automatically - sets ``PyOS_InputHook``. - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - if app is None: - try: - from tkinter import Tk # Py 3 - except ImportError: - from Tkinter import Tk # Py 2 - app = Tk() - app.withdraw() - self.manager.apps[GUI_TK] = app - return app - - -@inputhook_manager.register('glut') -class GlutInputHook(InputHookBase): - def enable(self, app=None): - """DEPRECATED since IPython 5.0 - - Enable event loop integration with GLUT. - - Parameters - ---------- - - app : ignored - Ignored, it's only a placeholder to keep the call signature of all - gui activation methods consistent, which simplifies the logic of - supporting magics. - - Notes - ----- - - This methods sets the PyOS_InputHook for GLUT, which allows the GLUT to - integrate with terminal based applications like IPython. Due to GLUT - limitations, it is currently not possible to start the event loop - without first creating a window. You should thus not create another - window but use instead the created one. See 'gui-glut.py' in the - docs/examples/lib directory. - - The default screen mode is set to: - glut.GLUT_DOUBLE | glut.GLUT_RGBA | glut.GLUT_DEPTH - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - - import OpenGL.GLUT as glut - from IPython.lib.inputhookglut import glut_display_mode, \ - glut_close, glut_display, \ - glut_idle, inputhook_glut - - if GUI_GLUT not in self.manager.apps: - glut.glutInit( sys.argv ) - glut.glutInitDisplayMode( glut_display_mode ) - # This is specific to freeglut - if bool(glut.glutSetOption): - glut.glutSetOption( glut.GLUT_ACTION_ON_WINDOW_CLOSE, - glut.GLUT_ACTION_GLUTMAINLOOP_RETURNS ) - glut.glutCreateWindow( sys.argv[0] ) - glut.glutReshapeWindow( 1, 1 ) - glut.glutHideWindow( ) - glut.glutWMCloseFunc( glut_close ) - glut.glutDisplayFunc( glut_display ) - glut.glutIdleFunc( glut_idle ) - else: - glut.glutWMCloseFunc( glut_close ) - glut.glutDisplayFunc( glut_display ) - glut.glutIdleFunc( glut_idle) - self.manager.set_inputhook( inputhook_glut ) - - - def disable(self): - """DEPRECATED since IPython 5.0 - - Disable event loop integration with glut. - - This sets PyOS_InputHook to NULL and set the display function to a - dummy one and set the timer to a dummy timer that will be triggered - very far in the future. 
- """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - import OpenGL.GLUT as glut - from glut_support import glutMainLoopEvent - - glut.glutHideWindow() # This is an event to be processed below - glutMainLoopEvent() - super(GlutInputHook, self).disable() - -@inputhook_manager.register('pyglet') -class PygletInputHook(InputHookBase): - def enable(self, app=None): - """DEPRECATED since IPython 5.0 - - Enable event loop integration with pyglet. - - Parameters - ---------- - app : ignored - Ignored, it's only a placeholder to keep the call signature of all - gui activation methods consistent, which simplifies the logic of - supporting magics. - - Notes - ----- - This methods sets the ``PyOS_InputHook`` for pyglet, which allows - pyglet to integrate with terminal based applications like - IPython. - - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - from IPython.lib.inputhookpyglet import inputhook_pyglet - self.manager.set_inputhook(inputhook_pyglet) - return app - - -@inputhook_manager.register('gtk3') -class Gtk3InputHook(InputHookBase): - def enable(self, app=None): - """DEPRECATED since IPython 5.0 - - Enable event loop integration with Gtk3 (gir bindings). - - Parameters - ---------- - app : ignored - Ignored, it's only a placeholder to keep the call signature of all - gui activation methods consistent, which simplifies the logic of - supporting magics. - - Notes - ----- - This methods sets the PyOS_InputHook for Gtk3, which allows - the Gtk3 to integrate with terminal based applications like - IPython. - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - from IPython.lib.inputhookgtk3 import inputhook_gtk3 - self.manager.set_inputhook(inputhook_gtk3) - - -clear_inputhook = inputhook_manager.clear_inputhook -set_inputhook = inputhook_manager.set_inputhook -current_gui = inputhook_manager.current_gui -clear_app_refs = inputhook_manager.clear_app_refs -enable_gui = inputhook_manager.enable_gui -disable_gui = inputhook_manager.disable_gui -register = inputhook_manager.register -guis = inputhook_manager.guihooks - - -def _deprecated_disable(): - warn("This function is deprecated since IPython 4.0 use disable_gui() instead", - DeprecationWarning, stacklevel=2) - inputhook_manager.disable_gui() - -disable_wx = disable_qt4 = disable_gtk = disable_gtk3 = disable_glut = \ - disable_pyglet = disable_osx = _deprecated_disable +# coding: utf-8 +""" +Deprecated since IPython 5.0 + +Inputhook management for GUI event loop integration. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +try: + import ctypes +except ImportError: + ctypes = None +except SystemError: # IronPython issue, 2/8/2014 + ctypes = None +import os +import platform +import sys +from distutils.version import LooseVersion as V + +from warnings import warn + + +warn("`IPython.lib.inputhook` is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + + +#----------------------------------------------------------------------------- +# Constants +#----------------------------------------------------------------------------- + +# Constants for identifying the GUI toolkits. 
+GUI_WX = 'wx' +GUI_QT = 'qt' +GUI_QT4 = 'qt4' +GUI_GTK = 'gtk' +GUI_TK = 'tk' +GUI_OSX = 'osx' +GUI_GLUT = 'glut' +GUI_PYGLET = 'pyglet' +GUI_GTK3 = 'gtk3' +GUI_NONE = 'none' # i.e. disable + +#----------------------------------------------------------------------------- +# Utilities +#----------------------------------------------------------------------------- + +def _stdin_ready_posix(): + """Return True if there's something to read on stdin (posix version).""" + infds, outfds, erfds = select.select([sys.stdin],[],[],0) + return bool(infds) + +def _stdin_ready_nt(): + """Return True if there's something to read on stdin (nt version).""" + return msvcrt.kbhit() + +def _stdin_ready_other(): + """Return True, assuming there's something to read on stdin.""" + return True + +def _use_appnope(): + """Should we use appnope for dealing with OS X app nap? + + Checks if we are on OS X 10.9 or greater. + """ + return sys.platform == 'darwin' and V(platform.mac_ver()[0]) >= V('10.9') + +def _ignore_CTRL_C_posix(): + """Ignore CTRL+C (SIGINT).""" + signal.signal(signal.SIGINT, signal.SIG_IGN) + +def _allow_CTRL_C_posix(): + """Take CTRL+C into account (SIGINT).""" + signal.signal(signal.SIGINT, signal.default_int_handler) + +def _ignore_CTRL_C_other(): + """Ignore CTRL+C (not implemented).""" + pass + +def _allow_CTRL_C_other(): + """Take CTRL+C into account (not implemented).""" + pass + +if os.name == 'posix': + import select + import signal + stdin_ready = _stdin_ready_posix + ignore_CTRL_C = _ignore_CTRL_C_posix + allow_CTRL_C = _allow_CTRL_C_posix +elif os.name == 'nt': + import msvcrt + stdin_ready = _stdin_ready_nt + ignore_CTRL_C = _ignore_CTRL_C_other + allow_CTRL_C = _allow_CTRL_C_other +else: + stdin_ready = _stdin_ready_other + ignore_CTRL_C = _ignore_CTRL_C_other + allow_CTRL_C = _allow_CTRL_C_other + + +#----------------------------------------------------------------------------- +# Main InputHookManager class +#----------------------------------------------------------------------------- + + +class InputHookManager(object): + """DEPRECATED since IPython 5.0 + + Manage PyOS_InputHook for different GUI toolkits. + + This class installs various hooks under ``PyOSInputHook`` to handle + GUI event loop integration. 
+ """ + + def __init__(self): + if ctypes is None: + warn("IPython GUI event loop requires ctypes, %gui will not be available") + else: + self.PYFUNC = ctypes.PYFUNCTYPE(ctypes.c_int) + self.guihooks = {} + self.aliases = {} + self.apps = {} + self._reset() + + def _reset(self): + self._callback_pyfunctype = None + self._callback = None + self._installed = False + self._current_gui = None + + def get_pyos_inputhook(self): + """DEPRECATED since IPython 5.0 + + Return the current PyOS_InputHook as a ctypes.c_void_p.""" + warn("`get_pyos_inputhook` is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + return ctypes.c_void_p.in_dll(ctypes.pythonapi,"PyOS_InputHook") + + def get_pyos_inputhook_as_func(self): + """DEPRECATED since IPython 5.0 + + Return the current PyOS_InputHook as a ctypes.PYFUNCYPE.""" + warn("`get_pyos_inputhook_as_func` is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + return self.PYFUNC.in_dll(ctypes.pythonapi,"PyOS_InputHook") + + def set_inputhook(self, callback): + """DEPRECATED since IPython 5.0 + + Set PyOS_InputHook to callback and return the previous one.""" + # On platforms with 'readline' support, it's all too likely to + # have a KeyboardInterrupt signal delivered *even before* an + # initial ``try:`` clause in the callback can be executed, so + # we need to disable CTRL+C in this situation. + ignore_CTRL_C() + self._callback = callback + self._callback_pyfunctype = self.PYFUNC(callback) + pyos_inputhook_ptr = self.get_pyos_inputhook() + original = self.get_pyos_inputhook_as_func() + pyos_inputhook_ptr.value = \ + ctypes.cast(self._callback_pyfunctype, ctypes.c_void_p).value + self._installed = True + return original + + def clear_inputhook(self, app=None): + """DEPRECATED since IPython 5.0 + + Set PyOS_InputHook to NULL and return the previous one. + + Parameters + ---------- + app : optional, ignored + This parameter is allowed only so that clear_inputhook() can be + called with a similar interface as all the ``enable_*`` methods. But + the actual value of the parameter is ignored. This uniform interface + makes it easier to have user-level entry points in the main IPython + app like :meth:`enable_gui`.""" + warn("`clear_inputhook` is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + pyos_inputhook_ptr = self.get_pyos_inputhook() + original = self.get_pyos_inputhook_as_func() + pyos_inputhook_ptr.value = ctypes.c_void_p(None).value + allow_CTRL_C() + self._reset() + return original + + def clear_app_refs(self, gui=None): + """DEPRECATED since IPython 5.0 + + Clear IPython's internal reference to an application instance. + + Whenever we create an app for a user on qt4 or wx, we hold a + reference to the app. This is needed because in some cases bad things + can happen if a user doesn't hold a reference themselves. This + method is provided to clear the references we are holding. + + Parameters + ---------- + gui : None or str + If None, clear all app references. If ('wx', 'qt4') clear + the app for that toolkit. References are not held for gtk or tk + as those toolkits don't have the notion of an app. 
+ """ + warn("`clear_app_refs` is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + if gui is None: + self.apps = {} + elif gui in self.apps: + del self.apps[gui] + + def register(self, toolkitname, *aliases): + """DEPRECATED since IPython 5.0 + + Register a class to provide the event loop for a given GUI. + + This is intended to be used as a class decorator. It should be passed + the names with which to register this GUI integration. The classes + themselves should subclass :class:`InputHookBase`. + + :: + + @inputhook_manager.register('qt') + class QtInputHook(InputHookBase): + def enable(self, app=None): + ... + """ + warn("`register` is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + def decorator(cls): + if ctypes is not None: + inst = cls(self) + self.guihooks[toolkitname] = inst + for a in aliases: + self.aliases[a] = toolkitname + return cls + return decorator + + def current_gui(self): + """DEPRECATED since IPython 5.0 + + Return a string indicating the currently active GUI or None.""" + warn("`current_gui` is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + return self._current_gui + + def enable_gui(self, gui=None, app=None): + """DEPRECATED since IPython 5.0 + + Switch amongst GUI input hooks by name. + + This is a higher level method than :meth:`set_inputhook` - it uses the + GUI name to look up a registered object which enables the input hook + for that GUI. + + Parameters + ---------- + gui : optional, string or None + If None (or 'none'), clears input hook, otherwise it must be one + of the recognized GUI names (see ``GUI_*`` constants in module). + + app : optional, existing application object. + For toolkits that have the concept of a global app, you can supply an + existing one. If not given, the toolkit will be probed for one, and if + none is found, a new one will be created. Note that GTK does not have + this concept, and passing an app if ``gui=="GTK"`` will raise an error. + + Returns + ------- + The output of the underlying gui switch routine, typically the actual + PyOS_InputHook wrapper object or the GUI toolkit app created, if there was + one. + """ + warn("`enable_gui` is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + if gui in (None, GUI_NONE): + return self.disable_gui() + + if gui in self.aliases: + return self.enable_gui(self.aliases[gui], app) + + try: + gui_hook = self.guihooks[gui] + except KeyError: + e = "Invalid GUI request {!r}, valid ones are: {}" + raise ValueError(e.format(gui, ', '.join(self.guihooks))) + self._current_gui = gui + + app = gui_hook.enable(app) + if app is not None: + app._in_event_loop = True + self.apps[gui] = app + return app + + def disable_gui(self): + """DEPRECATED since IPython 5.0 + + Disable GUI event loop integration. + + If an application was registered, this sets its ``_in_event_loop`` + attribute to False. It then calls :meth:`clear_inputhook`. + """ + warn("`disable_gui` is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + gui = self._current_gui + if gui in self.apps: + self.apps[gui]._in_event_loop = False + return self.clear_inputhook() + +class InputHookBase(object): + """DEPRECATED since IPython 5.0 + + Base class for input hooks for specific toolkits. 
+ + Subclasses should define an :meth:`enable` method with one argument, ``app``, + which will either be an instance of the toolkit's application class, or None. + They may also define a :meth:`disable` method with no arguments. + """ + def __init__(self, manager): + self.manager = manager + + def disable(self): + pass + +inputhook_manager = InputHookManager() + +@inputhook_manager.register('osx') +class NullInputHook(InputHookBase): + """DEPRECATED since IPython 5.0 + + A null inputhook that doesn't need to do anything""" + def enable(self, app=None): + warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + +@inputhook_manager.register('wx') +class WxInputHook(InputHookBase): + def enable(self, app=None): + """DEPRECATED since IPython 5.0 + + Enable event loop integration with wxPython. + + Parameters + ---------- + app : WX Application, optional. + Running application to use. If not given, we probe WX for an + existing application object, and create a new one if none is found. + + Notes + ----- + This methods sets the ``PyOS_InputHook`` for wxPython, which allows + the wxPython to integrate with terminal based applications like + IPython. + + If ``app`` is not given we probe for an existing one, and return it if + found. If no existing app is found, we create an :class:`wx.App` as + follows:: + + import wx + app = wx.App(redirect=False, clearSigInt=False) + """ + warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + import wx + + wx_version = V(wx.__version__).version + + if wx_version < [2, 8]: + raise ValueError("requires wxPython >= 2.8, but you have %s" % wx.__version__) + + from IPython.lib.inputhookwx import inputhook_wx + self.manager.set_inputhook(inputhook_wx) + if _use_appnope(): + from appnope import nope + nope() + + import wx + if app is None: + app = wx.GetApp() + if app is None: + app = wx.App(redirect=False, clearSigInt=False) + + return app + + def disable(self): + """DEPRECATED since IPython 5.0 + + Disable event loop integration with wxPython. + + This restores appnapp on OS X + """ + warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + if _use_appnope(): + from appnope import nap + nap() + +@inputhook_manager.register('qt', 'qt4') +class Qt4InputHook(InputHookBase): + def enable(self, app=None): + """DEPRECATED since IPython 5.0 + + Enable event loop integration with PyQt4. + + Parameters + ---------- + app : Qt Application, optional. + Running application to use. If not given, we probe Qt for an + existing application object, and create a new one if none is found. + + Notes + ----- + This methods sets the PyOS_InputHook for PyQt4, which allows + the PyQt4 to integrate with terminal based applications like + IPython. + + If ``app`` is not given we probe for an existing one, and return it if + found. 
If no existing app is found, we create an :class:`QApplication` + as follows:: + + from PyQt4 import QtCore + app = QtGui.QApplication(sys.argv) + """ + warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + from IPython.lib.inputhookqt4 import create_inputhook_qt4 + app, inputhook_qt4 = create_inputhook_qt4(self.manager, app) + self.manager.set_inputhook(inputhook_qt4) + if _use_appnope(): + from appnope import nope + nope() + + return app + + def disable_qt4(self): + """DEPRECATED since IPython 5.0 + + Disable event loop integration with PyQt4. + + This restores appnapp on OS X + """ + warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + if _use_appnope(): + from appnope import nap + nap() + + +@inputhook_manager.register('qt5') +class Qt5InputHook(Qt4InputHook): + def enable(self, app=None): + warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + os.environ['QT_API'] = 'pyqt5' + return Qt4InputHook.enable(self, app) + + +@inputhook_manager.register('gtk') +class GtkInputHook(InputHookBase): + def enable(self, app=None): + """DEPRECATED since IPython 5.0 + + Enable event loop integration with PyGTK. + + Parameters + ---------- + app : ignored + Ignored, it's only a placeholder to keep the call signature of all + gui activation methods consistent, which simplifies the logic of + supporting magics. + + Notes + ----- + This methods sets the PyOS_InputHook for PyGTK, which allows + the PyGTK to integrate with terminal based applications like + IPython. + """ + warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + import gtk + try: + gtk.set_interactive(True) + except AttributeError: + # For older versions of gtk, use our own ctypes version + from IPython.lib.inputhookgtk import inputhook_gtk + self.manager.set_inputhook(inputhook_gtk) + + +@inputhook_manager.register('tk') +class TkInputHook(InputHookBase): + def enable(self, app=None): + """DEPRECATED since IPython 5.0 + + Enable event loop integration with Tk. + + Parameters + ---------- + app : toplevel :class:`Tkinter.Tk` widget, optional. + Running toplevel widget to use. If not given, we probe Tk for an + existing one, and create a new one if none is found. + + Notes + ----- + If you have already created a :class:`Tkinter.Tk` object, the only + thing done by this method is to register with the + :class:`InputHookManager`, since creating that object automatically + sets ``PyOS_InputHook``. + """ + warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + if app is None: + try: + from tkinter import Tk # Py 3 + except ImportError: + from Tkinter import Tk # Py 2 + app = Tk() + app.withdraw() + self.manager.apps[GUI_TK] = app + return app + + +@inputhook_manager.register('glut') +class GlutInputHook(InputHookBase): + def enable(self, app=None): + """DEPRECATED since IPython 5.0 + + Enable event loop integration with GLUT. + + Parameters + ---------- + + app : ignored + Ignored, it's only a placeholder to keep the call signature of all + gui activation methods consistent, which simplifies the logic of + supporting magics. 
+ + Notes + ----- + + This methods sets the PyOS_InputHook for GLUT, which allows the GLUT to + integrate with terminal based applications like IPython. Due to GLUT + limitations, it is currently not possible to start the event loop + without first creating a window. You should thus not create another + window but use instead the created one. See 'gui-glut.py' in the + docs/examples/lib directory. + + The default screen mode is set to: + glut.GLUT_DOUBLE | glut.GLUT_RGBA | glut.GLUT_DEPTH + """ + warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + + import OpenGL.GLUT as glut + from IPython.lib.inputhookglut import glut_display_mode, \ + glut_close, glut_display, \ + glut_idle, inputhook_glut + + if GUI_GLUT not in self.manager.apps: + glut.glutInit( sys.argv ) + glut.glutInitDisplayMode( glut_display_mode ) + # This is specific to freeglut + if bool(glut.glutSetOption): + glut.glutSetOption( glut.GLUT_ACTION_ON_WINDOW_CLOSE, + glut.GLUT_ACTION_GLUTMAINLOOP_RETURNS ) + glut.glutCreateWindow( sys.argv[0] ) + glut.glutReshapeWindow( 1, 1 ) + glut.glutHideWindow( ) + glut.glutWMCloseFunc( glut_close ) + glut.glutDisplayFunc( glut_display ) + glut.glutIdleFunc( glut_idle ) + else: + glut.glutWMCloseFunc( glut_close ) + glut.glutDisplayFunc( glut_display ) + glut.glutIdleFunc( glut_idle) + self.manager.set_inputhook( inputhook_glut ) + + + def disable(self): + """DEPRECATED since IPython 5.0 + + Disable event loop integration with glut. + + This sets PyOS_InputHook to NULL and set the display function to a + dummy one and set the timer to a dummy timer that will be triggered + very far in the future. + """ + warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + import OpenGL.GLUT as glut + from glut_support import glutMainLoopEvent + + glut.glutHideWindow() # This is an event to be processed below + glutMainLoopEvent() + super(GlutInputHook, self).disable() + +@inputhook_manager.register('pyglet') +class PygletInputHook(InputHookBase): + def enable(self, app=None): + """DEPRECATED since IPython 5.0 + + Enable event loop integration with pyglet. + + Parameters + ---------- + app : ignored + Ignored, it's only a placeholder to keep the call signature of all + gui activation methods consistent, which simplifies the logic of + supporting magics. + + Notes + ----- + This methods sets the ``PyOS_InputHook`` for pyglet, which allows + pyglet to integrate with terminal based applications like + IPython. + + """ + warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + from IPython.lib.inputhookpyglet import inputhook_pyglet + self.manager.set_inputhook(inputhook_pyglet) + return app + + +@inputhook_manager.register('gtk3') +class Gtk3InputHook(InputHookBase): + def enable(self, app=None): + """DEPRECATED since IPython 5.0 + + Enable event loop integration with Gtk3 (gir bindings). + + Parameters + ---------- + app : ignored + Ignored, it's only a placeholder to keep the call signature of all + gui activation methods consistent, which simplifies the logic of + supporting magics. + + Notes + ----- + This methods sets the PyOS_InputHook for Gtk3, which allows + the Gtk3 to integrate with terminal based applications like + IPython. 
+ """ + warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", + DeprecationWarning, stacklevel=2) + from IPython.lib.inputhookgtk3 import inputhook_gtk3 + self.manager.set_inputhook(inputhook_gtk3) + + +clear_inputhook = inputhook_manager.clear_inputhook +set_inputhook = inputhook_manager.set_inputhook +current_gui = inputhook_manager.current_gui +clear_app_refs = inputhook_manager.clear_app_refs +enable_gui = inputhook_manager.enable_gui +disable_gui = inputhook_manager.disable_gui +register = inputhook_manager.register +guis = inputhook_manager.guihooks + + +def _deprecated_disable(): + warn("This function is deprecated since IPython 4.0 use disable_gui() instead", + DeprecationWarning, stacklevel=2) + inputhook_manager.disable_gui() + +disable_wx = disable_qt4 = disable_gtk = disable_gtk3 = disable_glut = \ + disable_pyglet = disable_osx = _deprecated_disable diff --git a/contrib/python/ipython/py3/IPython/lib/inputhookglut.py b/contrib/python/ipython/py3/IPython/lib/inputhookglut.py index 65b2a81b348..e6f7f125758 100644 --- a/contrib/python/ipython/py3/IPython/lib/inputhookglut.py +++ b/contrib/python/ipython/py3/IPython/lib/inputhookglut.py @@ -1,172 +1,172 @@ -# coding: utf-8 -""" -GLUT Inputhook support functions -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -# GLUT is quite an old library and it is difficult to ensure proper -# integration within IPython since original GLUT does not allow to handle -# events one by one. Instead, it requires for the mainloop to be entered -# and never returned (there is not even a function to exit he -# mainloop). Fortunately, there are alternatives such as freeglut -# (available for linux and windows) and the OSX implementation gives -# access to a glutCheckLoop() function that blocks itself until a new -# event is received. This means we have to setup the idle callback to -# ensure we got at least one event that will unblock the function. -# -# Furthermore, it is not possible to install these handlers without a window -# being first created. We choose to make this window invisible. This means that -# display mode options are set at this level and user won't be able to change -# them later without modifying the code. This should probably be made available -# via IPython options system. 
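# --- editorial sketch, not part of the original file -------------------------
# A minimal illustration of the pattern the comment block above describes,
# assuming freeglut is available: after a (hidden) window exists, pump pending
# GLUT events one iteration at a time until the user starts typing.  This is
# a POSIX-only sketch; the real inputhook_glut() defined later in this module
# adds SIGINT handling and adaptive sleeping on top of the same idea.
def _pump_glut_until_stdin_ready():
    import select
    import sys
    import OpenGL.GLUT as glut
    # select with a zero timeout tells us whether stdin has input waiting
    while not select.select([sys.stdin], [], [], 0)[0]:
        glut.glutMainLoopEvent()  # freeglut: process pending events, then return
# -----------------------------------------------------------------------------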
- -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- -import os -import sys -import time -import signal -import OpenGL.GLUT as glut -import OpenGL.platform as platform -from timeit import default_timer as clock - -#----------------------------------------------------------------------------- -# Constants -#----------------------------------------------------------------------------- - -# Frame per second : 60 -# Should probably be an IPython option -glut_fps = 60 - - -# Display mode : double buffeed + rgba + depth -# Should probably be an IPython option -glut_display_mode = (glut.GLUT_DOUBLE | - glut.GLUT_RGBA | - glut.GLUT_DEPTH) - -glutMainLoopEvent = None -if sys.platform == 'darwin': - try: - glutCheckLoop = platform.createBaseFunction( - 'glutCheckLoop', dll=platform.GLUT, resultType=None, - argTypes=[], - doc='glutCheckLoop( ) -> None', - argNames=(), - ) - except AttributeError: - raise RuntimeError( - '''Your glut implementation does not allow interactive sessions''' - '''Consider installing freeglut.''') - glutMainLoopEvent = glutCheckLoop -elif glut.HAVE_FREEGLUT: - glutMainLoopEvent = glut.glutMainLoopEvent -else: - raise RuntimeError( - '''Your glut implementation does not allow interactive sessions. ''' - '''Consider installing freeglut.''') - - -#----------------------------------------------------------------------------- -# Platform-dependent imports and functions -#----------------------------------------------------------------------------- - -if os.name == 'posix': - import select - - def stdin_ready(): - infds, outfds, erfds = select.select([sys.stdin],[],[],0) - if infds: - return True - else: - return False - -elif sys.platform == 'win32': - import msvcrt - - def stdin_ready(): - return msvcrt.kbhit() - -#----------------------------------------------------------------------------- -# Callback functions -#----------------------------------------------------------------------------- - -def glut_display(): - # Dummy display function - pass - -def glut_idle(): - # Dummy idle function - pass - -def glut_close(): - # Close function only hides the current window - glut.glutHideWindow() - glutMainLoopEvent() - -def glut_int_handler(signum, frame): - # Catch sigint and print the default message - signal.signal(signal.SIGINT, signal.default_int_handler) - print('\nKeyboardInterrupt') - # Need to reprint the prompt at this stage - - - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- -def inputhook_glut(): - """Run the pyglet event loop by processing pending events only. - - This keeps processing pending events until stdin is ready. After - processing all pending events, a call to time.sleep is inserted. This is - needed, otherwise, CPU usage is at 100%. This sleep time should be tuned - though for best performance. - """ - # We need to protect against a user pressing Control-C when IPython is - # idle and this is running. We trap KeyboardInterrupt and pass. - - signal.signal(signal.SIGINT, glut_int_handler) - - try: - t = clock() - - # Make sure the default window is set after a window has been closed - if glut.glutGetWindow() == 0: - glut.glutSetWindow( 1 ) - glutMainLoopEvent() - return 0 - - while not stdin_ready(): - glutMainLoopEvent() - # We need to sleep at this point to keep the idle CPU load - # low. 
However, if sleep to long, GUI response is poor. As - # a compromise, we watch how often GUI events are being processed - # and switch between a short and long sleep time. Here are some - # stats useful in helping to tune this. - # time CPU load - # 0.001 13% - # 0.005 3% - # 0.01 1.5% - # 0.05 0.5% - used_time = clock() - t - if used_time > 10.0: - # print 'Sleep for 1 s' # dbg - time.sleep(1.0) - elif used_time > 0.1: - # Few GUI events coming in, so we can sleep longer - # print 'Sleep for 0.05 s' # dbg - time.sleep(0.05) - else: - # Many GUI events coming in, so sleep only very little - time.sleep(0.001) - except KeyboardInterrupt: - pass - return 0 +# coding: utf-8 +""" +GLUT Inputhook support functions +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +# GLUT is quite an old library and it is difficult to ensure proper +# integration within IPython since original GLUT does not allow to handle +# events one by one. Instead, it requires for the mainloop to be entered +# and never returned (there is not even a function to exit he +# mainloop). Fortunately, there are alternatives such as freeglut +# (available for linux and windows) and the OSX implementation gives +# access to a glutCheckLoop() function that blocks itself until a new +# event is received. This means we have to setup the idle callback to +# ensure we got at least one event that will unblock the function. +# +# Furthermore, it is not possible to install these handlers without a window +# being first created. We choose to make this window invisible. This means that +# display mode options are set at this level and user won't be able to change +# them later without modifying the code. This should probably be made available +# via IPython options system. + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- +import os +import sys +import time +import signal +import OpenGL.GLUT as glut +import OpenGL.platform as platform +from timeit import default_timer as clock + +#----------------------------------------------------------------------------- +# Constants +#----------------------------------------------------------------------------- + +# Frame per second : 60 +# Should probably be an IPython option +glut_fps = 60 + + +# Display mode : double buffeed + rgba + depth +# Should probably be an IPython option +glut_display_mode = (glut.GLUT_DOUBLE | + glut.GLUT_RGBA | + glut.GLUT_DEPTH) + +glutMainLoopEvent = None +if sys.platform == 'darwin': + try: + glutCheckLoop = platform.createBaseFunction( + 'glutCheckLoop', dll=platform.GLUT, resultType=None, + argTypes=[], + doc='glutCheckLoop( ) -> None', + argNames=(), + ) + except AttributeError: + raise RuntimeError( + '''Your glut implementation does not allow interactive sessions''' + '''Consider installing freeglut.''') + glutMainLoopEvent = glutCheckLoop +elif glut.HAVE_FREEGLUT: + glutMainLoopEvent = glut.glutMainLoopEvent +else: + raise RuntimeError( + '''Your glut implementation does not allow interactive sessions. 
''' + '''Consider installing freeglut.''') + + +#----------------------------------------------------------------------------- +# Platform-dependent imports and functions +#----------------------------------------------------------------------------- + +if os.name == 'posix': + import select + + def stdin_ready(): + infds, outfds, erfds = select.select([sys.stdin],[],[],0) + if infds: + return True + else: + return False + +elif sys.platform == 'win32': + import msvcrt + + def stdin_ready(): + return msvcrt.kbhit() + +#----------------------------------------------------------------------------- +# Callback functions +#----------------------------------------------------------------------------- + +def glut_display(): + # Dummy display function + pass + +def glut_idle(): + # Dummy idle function + pass + +def glut_close(): + # Close function only hides the current window + glut.glutHideWindow() + glutMainLoopEvent() + +def glut_int_handler(signum, frame): + # Catch sigint and print the default message + signal.signal(signal.SIGINT, signal.default_int_handler) + print('\nKeyboardInterrupt') + # Need to reprint the prompt at this stage + + + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- +def inputhook_glut(): + """Run the pyglet event loop by processing pending events only. + + This keeps processing pending events until stdin is ready. After + processing all pending events, a call to time.sleep is inserted. This is + needed, otherwise, CPU usage is at 100%. This sleep time should be tuned + though for best performance. + """ + # We need to protect against a user pressing Control-C when IPython is + # idle and this is running. We trap KeyboardInterrupt and pass. + + signal.signal(signal.SIGINT, glut_int_handler) + + try: + t = clock() + + # Make sure the default window is set after a window has been closed + if glut.glutGetWindow() == 0: + glut.glutSetWindow( 1 ) + glutMainLoopEvent() + return 0 + + while not stdin_ready(): + glutMainLoopEvent() + # We need to sleep at this point to keep the idle CPU load + # low. However, if sleep to long, GUI response is poor. As + # a compromise, we watch how often GUI events are being processed + # and switch between a short and long sleep time. Here are some + # stats useful in helping to tune this. + # time CPU load + # 0.001 13% + # 0.005 3% + # 0.01 1.5% + # 0.05 0.5% + used_time = clock() - t + if used_time > 10.0: + # print 'Sleep for 1 s' # dbg + time.sleep(1.0) + elif used_time > 0.1: + # Few GUI events coming in, so we can sleep longer + # print 'Sleep for 0.05 s' # dbg + time.sleep(0.05) + else: + # Many GUI events coming in, so sleep only very little + time.sleep(0.001) + except KeyboardInterrupt: + pass + return 0 diff --git a/contrib/python/ipython/py3/IPython/lib/inputhookgtk.py b/contrib/python/ipython/py3/IPython/lib/inputhookgtk.py index 7e704370840..98569f54d75 100644 --- a/contrib/python/ipython/py3/IPython/lib/inputhookgtk.py +++ b/contrib/python/ipython/py3/IPython/lib/inputhookgtk.py @@ -1,35 +1,35 @@ -# encoding: utf-8 -""" -Enable pygtk to be used interactively by setting PyOS_InputHook. - -Authors: Brian Granger -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. 
-#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import sys -import gtk, gobject - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - - -def _main_quit(*args, **kwargs): - gtk.main_quit() - return False - -def inputhook_gtk(): - gobject.io_add_watch(sys.stdin, gobject.IO_IN, _main_quit) - gtk.main() - return 0 - +# encoding: utf-8 +""" +Enable pygtk to be used interactively by setting PyOS_InputHook. + +Authors: Brian Granger +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import sys +import gtk, gobject + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + + +def _main_quit(*args, **kwargs): + gtk.main_quit() + return False + +def inputhook_gtk(): + gobject.io_add_watch(sys.stdin, gobject.IO_IN, _main_quit) + gtk.main() + return 0 + diff --git a/contrib/python/ipython/py3/IPython/lib/inputhookgtk3.py b/contrib/python/ipython/py3/IPython/lib/inputhookgtk3.py index a123ed41120..b797e862558 100644 --- a/contrib/python/ipython/py3/IPython/lib/inputhookgtk3.py +++ b/contrib/python/ipython/py3/IPython/lib/inputhookgtk3.py @@ -1,34 +1,34 @@ -# encoding: utf-8 -""" -Enable Gtk3 to be used interactively by IPython. - -Authors: Thomi Richards -""" -#----------------------------------------------------------------------------- -# Copyright (c) 2012, the IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import sys -from gi.repository import Gtk, GLib - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - -def _main_quit(*args, **kwargs): - Gtk.main_quit() - return False - - -def inputhook_gtk3(): - GLib.io_add_watch(sys.stdin, GLib.PRIORITY_DEFAULT, GLib.IO_IN, _main_quit) - Gtk.main() - return 0 +# encoding: utf-8 +""" +Enable Gtk3 to be used interactively by IPython. + +Authors: Thomi Richards +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012, the IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. 
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import sys +from gi.repository import Gtk, GLib + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def _main_quit(*args, **kwargs): + Gtk.main_quit() + return False + + +def inputhook_gtk3(): + GLib.io_add_watch(sys.stdin, GLib.PRIORITY_DEFAULT, GLib.IO_IN, _main_quit) + Gtk.main() + return 0 diff --git a/contrib/python/ipython/py3/IPython/lib/inputhookgtk4.py b/contrib/python/ipython/py3/IPython/lib/inputhookgtk4.py index c19937c03ba..a872cee36a0 100644 --- a/contrib/python/ipython/py3/IPython/lib/inputhookgtk4.py +++ b/contrib/python/ipython/py3/IPython/lib/inputhookgtk4.py @@ -1,43 +1,43 @@ -""" -Enable Gtk4 to be used interactively by IPython. -""" -# ----------------------------------------------------------------------------- -# Copyright (c) 2021, the IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. -# ----------------------------------------------------------------------------- - -# ----------------------------------------------------------------------------- -# Imports -# ----------------------------------------------------------------------------- - -import sys - -from gi.repository import GLib - -# ----------------------------------------------------------------------------- -# Code -# ----------------------------------------------------------------------------- - - -class _InputHook: - def __init__(self, context): - self._quit = False - GLib.io_add_watch(sys.stdin, GLib.PRIORITY_DEFAULT, GLib.IO_IN, self.quit) - - def quit(self, *args, **kwargs): - self._quit = True - return False - - def run(self): - context = GLib.MainContext.default() - while not self._quit: - context.iteration(True) - - -def inputhook_gtk4(): - hook = _InputHook() - hook.run() - return 0 +""" +Enable Gtk4 to be used interactively by IPython. +""" +# ----------------------------------------------------------------------------- +# Copyright (c) 2021, the IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. 
+# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# Imports +# ----------------------------------------------------------------------------- + +import sys + +from gi.repository import GLib + +# ----------------------------------------------------------------------------- +# Code +# ----------------------------------------------------------------------------- + + +class _InputHook: + def __init__(self, context): + self._quit = False + GLib.io_add_watch(sys.stdin, GLib.PRIORITY_DEFAULT, GLib.IO_IN, self.quit) + + def quit(self, *args, **kwargs): + self._quit = True + return False + + def run(self): + context = GLib.MainContext.default() + while not self._quit: + context.iteration(True) + + +def inputhook_gtk4(): + hook = _InputHook() + hook.run() + return 0 diff --git a/contrib/python/ipython/py3/IPython/lib/inputhookpyglet.py b/contrib/python/ipython/py3/IPython/lib/inputhookpyglet.py index be7cc777789..fb91ffed177 100644 --- a/contrib/python/ipython/py3/IPython/lib/inputhookpyglet.py +++ b/contrib/python/ipython/py3/IPython/lib/inputhookpyglet.py @@ -1,111 +1,111 @@ -# encoding: utf-8 -""" -Enable pyglet to be used interactively by setting PyOS_InputHook. - -Authors -------- - -* Nicolas P. Rougier -* Fernando Perez -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import os -import sys -import time -from timeit import default_timer as clock -import pyglet - -#----------------------------------------------------------------------------- -# Platform-dependent imports and functions -#----------------------------------------------------------------------------- - -if os.name == 'posix': - import select - - def stdin_ready(): - infds, outfds, erfds = select.select([sys.stdin],[],[],0) - if infds: - return True - else: - return False - -elif sys.platform == 'win32': - import msvcrt - - def stdin_ready(): - return msvcrt.kbhit() - - -# On linux only, window.flip() has a bug that causes an AttributeError on -# window close. For details, see: -# http://groups.google.com/group/pyglet-users/browse_thread/thread/47c1aab9aa4a3d23/c22f9e819826799e?#c22f9e819826799e - -if sys.platform.startswith('linux'): - def flip(window): - try: - window.flip() - except AttributeError: - pass -else: - def flip(window): - window.flip() - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - -def inputhook_pyglet(): - """Run the pyglet event loop by processing pending events only. - - This keeps processing pending events until stdin is ready. After - processing all pending events, a call to time.sleep is inserted. This is - needed, otherwise, CPU usage is at 100%. This sleep time should be tuned - though for best performance. - """ - # We need to protect against a user pressing Control-C when IPython is - # idle and this is running. We trap KeyboardInterrupt and pass. 
- try: - t = clock() - while not stdin_ready(): - pyglet.clock.tick() - for window in pyglet.app.windows: - window.switch_to() - window.dispatch_events() - window.dispatch_event('on_draw') - flip(window) - - # We need to sleep at this point to keep the idle CPU load - # low. However, if sleep to long, GUI response is poor. As - # a compromise, we watch how often GUI events are being processed - # and switch between a short and long sleep time. Here are some - # stats useful in helping to tune this. - # time CPU load - # 0.001 13% - # 0.005 3% - # 0.01 1.5% - # 0.05 0.5% - used_time = clock() - t - if used_time > 10.0: - # print 'Sleep for 1 s' # dbg - time.sleep(1.0) - elif used_time > 0.1: - # Few GUI events coming in, so we can sleep longer - # print 'Sleep for 0.05 s' # dbg - time.sleep(0.05) - else: - # Many GUI events coming in, so sleep only very little - time.sleep(0.001) - except KeyboardInterrupt: - pass - return 0 +# encoding: utf-8 +""" +Enable pyglet to be used interactively by setting PyOS_InputHook. + +Authors +------- + +* Nicolas P. Rougier +* Fernando Perez +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import os +import sys +import time +from timeit import default_timer as clock +import pyglet + +#----------------------------------------------------------------------------- +# Platform-dependent imports and functions +#----------------------------------------------------------------------------- + +if os.name == 'posix': + import select + + def stdin_ready(): + infds, outfds, erfds = select.select([sys.stdin],[],[],0) + if infds: + return True + else: + return False + +elif sys.platform == 'win32': + import msvcrt + + def stdin_ready(): + return msvcrt.kbhit() + + +# On linux only, window.flip() has a bug that causes an AttributeError on +# window close. For details, see: +# http://groups.google.com/group/pyglet-users/browse_thread/thread/47c1aab9aa4a3d23/c22f9e819826799e?#c22f9e819826799e + +if sys.platform.startswith('linux'): + def flip(window): + try: + window.flip() + except AttributeError: + pass +else: + def flip(window): + window.flip() + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def inputhook_pyglet(): + """Run the pyglet event loop by processing pending events only. + + This keeps processing pending events until stdin is ready. After + processing all pending events, a call to time.sleep is inserted. This is + needed, otherwise, CPU usage is at 100%. This sleep time should be tuned + though for best performance. + """ + # We need to protect against a user pressing Control-C when IPython is + # idle and this is running. We trap KeyboardInterrupt and pass. + try: + t = clock() + while not stdin_ready(): + pyglet.clock.tick() + for window in pyglet.app.windows: + window.switch_to() + window.dispatch_events() + window.dispatch_event('on_draw') + flip(window) + + # We need to sleep at this point to keep the idle CPU load + # low. 
However, if sleep to long, GUI response is poor. As + # a compromise, we watch how often GUI events are being processed + # and switch between a short and long sleep time. Here are some + # stats useful in helping to tune this. + # time CPU load + # 0.001 13% + # 0.005 3% + # 0.01 1.5% + # 0.05 0.5% + used_time = clock() - t + if used_time > 10.0: + # print 'Sleep for 1 s' # dbg + time.sleep(1.0) + elif used_time > 0.1: + # Few GUI events coming in, so we can sleep longer + # print 'Sleep for 0.05 s' # dbg + time.sleep(0.05) + else: + # Many GUI events coming in, so sleep only very little + time.sleep(0.001) + except KeyboardInterrupt: + pass + return 0 diff --git a/contrib/python/ipython/py3/IPython/lib/inputhookqt4.py b/contrib/python/ipython/py3/IPython/lib/inputhookqt4.py index a0d0f9e348c..8a83902fc0e 100644 --- a/contrib/python/ipython/py3/IPython/lib/inputhookqt4.py +++ b/contrib/python/ipython/py3/IPython/lib/inputhookqt4.py @@ -1,180 +1,180 @@ -# -*- coding: utf-8 -*- -""" -Qt4's inputhook support function - -Author: Christian Boos -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import os -import signal -import threading - -from IPython.core.interactiveshell import InteractiveShell -from IPython.external.qt_for_kernel import QtCore, QtGui -from IPython.lib.inputhook import allow_CTRL_C, ignore_CTRL_C, stdin_ready - -#----------------------------------------------------------------------------- -# Module Globals -#----------------------------------------------------------------------------- - -got_kbdint = False -sigint_timer = None - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - -def create_inputhook_qt4(mgr, app=None): - """Create an input hook for running the Qt4 application event loop. - - Parameters - ---------- - mgr : an InputHookManager - - app : Qt Application, optional. - Running application to use. If not given, we probe Qt for an - existing application object, and create a new one if none is found. - - Returns - ------- - A pair consisting of a Qt Application (either the one given or the - one found or created) and a inputhook. - - Notes - ----- - We use a custom input hook instead of PyQt4's default one, as it - interacts better with the readline packages (issue #481). - - The inputhook function works in tandem with a 'pre_prompt_hook' - which automatically restores the hook as an inputhook in case the - latter has been temporarily disabled after having intercepted a - KeyboardInterrupt. - """ - - if app is None: - app = QtCore.QCoreApplication.instance() - if app is None: - app = QtGui.QApplication([" "]) - - # Re-use previously created inputhook if any - ip = InteractiveShell.instance() - if hasattr(ip, '_inputhook_qt4'): - return app, ip._inputhook_qt4 - - # Otherwise create the inputhook_qt4/preprompthook_qt4 pair of - # hooks (they both share the got_kbdint flag) - - def inputhook_qt4(): - """PyOS_InputHook python hook for Qt4. 
- - Process pending Qt events and if there's no pending keyboard - input, spend a short slice of time (50ms) running the Qt event - loop. - - As a Python ctypes callback can't raise an exception, we catch - the KeyboardInterrupt and temporarily deactivate the hook, - which will let a *second* CTRL+C be processed normally and go - back to a clean prompt line. - """ - try: - allow_CTRL_C() - app = QtCore.QCoreApplication.instance() - if not app: # shouldn't happen, but safer if it happens anyway... - return 0 - app.processEvents(QtCore.QEventLoop.AllEvents, 300) - if not stdin_ready(): - # Generally a program would run QCoreApplication::exec() - # from main() to enter and process the Qt event loop until - # quit() or exit() is called and the program terminates. - # - # For our input hook integration, we need to repeatedly - # enter and process the Qt event loop for only a short - # amount of time (say 50ms) to ensure that Python stays - # responsive to other user inputs. - # - # A naive approach would be to repeatedly call - # QCoreApplication::exec(), using a timer to quit after a - # short amount of time. Unfortunately, QCoreApplication - # emits an aboutToQuit signal before stopping, which has - # the undesirable effect of closing all modal windows. - # - # To work around this problem, we instead create a - # QEventLoop and call QEventLoop::exec(). Other than - # setting some state variables which do not seem to be - # used anywhere, the only thing QCoreApplication adds is - # the aboutToQuit signal which is precisely what we are - # trying to avoid. - timer = QtCore.QTimer() - event_loop = QtCore.QEventLoop() - timer.timeout.connect(event_loop.quit) - while not stdin_ready(): - timer.start(50) - event_loop.exec_() - timer.stop() - except KeyboardInterrupt: - global got_kbdint, sigint_timer - - ignore_CTRL_C() - got_kbdint = True - mgr.clear_inputhook() - - # This generates a second SIGINT so the user doesn't have to - # press CTRL+C twice to get a clean prompt. - # - # Since we can't catch the resulting KeyboardInterrupt here - # (because this is a ctypes callback), we use a timer to - # generate the SIGINT after we leave this callback. - # - # Unfortunately this doesn't work on Windows (SIGINT kills - # Python and CTRL_C_EVENT doesn't work). - if(os.name == 'posix'): - pid = os.getpid() - if(not sigint_timer): - sigint_timer = threading.Timer(.01, os.kill, - args=[pid, signal.SIGINT] ) - sigint_timer.start() - else: - print("\nKeyboardInterrupt - Ctrl-C again for new prompt") - - - except: # NO exceptions are allowed to escape from a ctypes callback - ignore_CTRL_C() - from traceback import print_exc - print_exc() - print("Got exception from inputhook_qt4, unregistering.") - mgr.clear_inputhook() - finally: - allow_CTRL_C() - return 0 - - def preprompthook_qt4(ishell): - """'pre_prompt_hook' used to restore the Qt4 input hook - - (in case the latter was temporarily deactivated after a - CTRL+C) - """ - global got_kbdint, sigint_timer - - if(sigint_timer): - sigint_timer.cancel() - sigint_timer = None - - if got_kbdint: - mgr.set_inputhook(inputhook_qt4) - got_kbdint = False - - ip._inputhook_qt4 = inputhook_qt4 - ip.set_hook('pre_prompt_hook', preprompthook_qt4) - - return app, inputhook_qt4 +# -*- coding: utf-8 -*- +""" +Qt4's inputhook support function + +Author: Christian Boos +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. 
The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import os +import signal +import threading + +from IPython.core.interactiveshell import InteractiveShell +from IPython.external.qt_for_kernel import QtCore, QtGui +from IPython.lib.inputhook import allow_CTRL_C, ignore_CTRL_C, stdin_ready + +#----------------------------------------------------------------------------- +# Module Globals +#----------------------------------------------------------------------------- + +got_kbdint = False +sigint_timer = None + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def create_inputhook_qt4(mgr, app=None): + """Create an input hook for running the Qt4 application event loop. + + Parameters + ---------- + mgr : an InputHookManager + + app : Qt Application, optional. + Running application to use. If not given, we probe Qt for an + existing application object, and create a new one if none is found. + + Returns + ------- + A pair consisting of a Qt Application (either the one given or the + one found or created) and a inputhook. + + Notes + ----- + We use a custom input hook instead of PyQt4's default one, as it + interacts better with the readline packages (issue #481). + + The inputhook function works in tandem with a 'pre_prompt_hook' + which automatically restores the hook as an inputhook in case the + latter has been temporarily disabled after having intercepted a + KeyboardInterrupt. + """ + + if app is None: + app = QtCore.QCoreApplication.instance() + if app is None: + app = QtGui.QApplication([" "]) + + # Re-use previously created inputhook if any + ip = InteractiveShell.instance() + if hasattr(ip, '_inputhook_qt4'): + return app, ip._inputhook_qt4 + + # Otherwise create the inputhook_qt4/preprompthook_qt4 pair of + # hooks (they both share the got_kbdint flag) + + def inputhook_qt4(): + """PyOS_InputHook python hook for Qt4. + + Process pending Qt events and if there's no pending keyboard + input, spend a short slice of time (50ms) running the Qt event + loop. + + As a Python ctypes callback can't raise an exception, we catch + the KeyboardInterrupt and temporarily deactivate the hook, + which will let a *second* CTRL+C be processed normally and go + back to a clean prompt line. + """ + try: + allow_CTRL_C() + app = QtCore.QCoreApplication.instance() + if not app: # shouldn't happen, but safer if it happens anyway... + return 0 + app.processEvents(QtCore.QEventLoop.AllEvents, 300) + if not stdin_ready(): + # Generally a program would run QCoreApplication::exec() + # from main() to enter and process the Qt event loop until + # quit() or exit() is called and the program terminates. + # + # For our input hook integration, we need to repeatedly + # enter and process the Qt event loop for only a short + # amount of time (say 50ms) to ensure that Python stays + # responsive to other user inputs. + # + # A naive approach would be to repeatedly call + # QCoreApplication::exec(), using a timer to quit after a + # short amount of time. Unfortunately, QCoreApplication + # emits an aboutToQuit signal before stopping, which has + # the undesirable effect of closing all modal windows. 
+ # + # To work around this problem, we instead create a + # QEventLoop and call QEventLoop::exec(). Other than + # setting some state variables which do not seem to be + # used anywhere, the only thing QCoreApplication adds is + # the aboutToQuit signal which is precisely what we are + # trying to avoid. + timer = QtCore.QTimer() + event_loop = QtCore.QEventLoop() + timer.timeout.connect(event_loop.quit) + while not stdin_ready(): + timer.start(50) + event_loop.exec_() + timer.stop() + except KeyboardInterrupt: + global got_kbdint, sigint_timer + + ignore_CTRL_C() + got_kbdint = True + mgr.clear_inputhook() + + # This generates a second SIGINT so the user doesn't have to + # press CTRL+C twice to get a clean prompt. + # + # Since we can't catch the resulting KeyboardInterrupt here + # (because this is a ctypes callback), we use a timer to + # generate the SIGINT after we leave this callback. + # + # Unfortunately this doesn't work on Windows (SIGINT kills + # Python and CTRL_C_EVENT doesn't work). + if(os.name == 'posix'): + pid = os.getpid() + if(not sigint_timer): + sigint_timer = threading.Timer(.01, os.kill, + args=[pid, signal.SIGINT] ) + sigint_timer.start() + else: + print("\nKeyboardInterrupt - Ctrl-C again for new prompt") + + + except: # NO exceptions are allowed to escape from a ctypes callback + ignore_CTRL_C() + from traceback import print_exc + print_exc() + print("Got exception from inputhook_qt4, unregistering.") + mgr.clear_inputhook() + finally: + allow_CTRL_C() + return 0 + + def preprompthook_qt4(ishell): + """'pre_prompt_hook' used to restore the Qt4 input hook + + (in case the latter was temporarily deactivated after a + CTRL+C) + """ + global got_kbdint, sigint_timer + + if(sigint_timer): + sigint_timer.cancel() + sigint_timer = None + + if got_kbdint: + mgr.set_inputhook(inputhook_qt4) + got_kbdint = False + + ip._inputhook_qt4 = inputhook_qt4 + ip.set_hook('pre_prompt_hook', preprompthook_qt4) + + return app, inputhook_qt4 diff --git a/contrib/python/ipython/py3/IPython/lib/inputhookwx.py b/contrib/python/ipython/py3/IPython/lib/inputhookwx.py index 6e181400e6f..60520a299c3 100644 --- a/contrib/python/ipython/py3/IPython/lib/inputhookwx.py +++ b/contrib/python/ipython/py3/IPython/lib/inputhookwx.py @@ -1,167 +1,167 @@ -# encoding: utf-8 - -""" -Enable wxPython to be used interactively by setting PyOS_InputHook. - -Authors: Robin Dunn, Brian Granger, Ondrej Certik -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import sys -import signal -import time -from timeit import default_timer as clock -import wx - -from IPython.lib.inputhook import stdin_ready - - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - -def inputhook_wx1(): - """Run the wx event loop by processing pending events only. - - This approach seems to work, but its performance is not great as it - relies on having PyOS_InputHook called regularly. 
- """ - try: - app = wx.GetApp() - if app is not None: - assert wx.Thread_IsMain() - - # Make a temporary event loop and process system events until - # there are no more waiting, then allow idle events (which - # will also deal with pending or posted wx events.) - evtloop = wx.EventLoop() - ea = wx.EventLoopActivator(evtloop) - while evtloop.Pending(): - evtloop.Dispatch() - app.ProcessIdle() - del ea - except KeyboardInterrupt: - pass - return 0 - -class EventLoopTimer(wx.Timer): - - def __init__(self, func): - self.func = func - wx.Timer.__init__(self) - - def Notify(self): - self.func() - -class EventLoopRunner(object): - - def Run(self, time): - self.evtloop = wx.EventLoop() - self.timer = EventLoopTimer(self.check_stdin) - self.timer.Start(time) - self.evtloop.Run() - - def check_stdin(self): - if stdin_ready(): - self.timer.Stop() - self.evtloop.Exit() - -def inputhook_wx2(): - """Run the wx event loop, polling for stdin. - - This version runs the wx eventloop for an undetermined amount of time, - during which it periodically checks to see if anything is ready on - stdin. If anything is ready on stdin, the event loop exits. - - The argument to elr.Run controls how often the event loop looks at stdin. - This determines the responsiveness at the keyboard. A setting of 1000 - enables a user to type at most 1 char per second. I have found that a - setting of 10 gives good keyboard response. We can shorten it further, - but eventually performance would suffer from calling select/kbhit too - often. - """ - try: - app = wx.GetApp() - if app is not None: - assert wx.Thread_IsMain() - elr = EventLoopRunner() - # As this time is made shorter, keyboard response improves, but idle - # CPU load goes up. 10 ms seems like a good compromise. - elr.Run(time=10) # CHANGE time here to control polling interval - except KeyboardInterrupt: - pass - return 0 - -def inputhook_wx3(): - """Run the wx event loop by processing pending events only. - - This is like inputhook_wx1, but it keeps processing pending events - until stdin is ready. After processing all pending events, a call to - time.sleep is inserted. This is needed, otherwise, CPU usage is at 100%. - This sleep time should be tuned though for best performance. - """ - # We need to protect against a user pressing Control-C when IPython is - # idle and this is running. We trap KeyboardInterrupt and pass. - try: - app = wx.GetApp() - if app is not None: - assert wx.Thread_IsMain() - - # The import of wx on Linux sets the handler for signal.SIGINT - # to 0. This is a bug in wx or gtk. We fix by just setting it - # back to the Python default. - if not callable(signal.getsignal(signal.SIGINT)): - signal.signal(signal.SIGINT, signal.default_int_handler) - - evtloop = wx.EventLoop() - ea = wx.EventLoopActivator(evtloop) - t = clock() - while not stdin_ready(): - while evtloop.Pending(): - t = clock() - evtloop.Dispatch() - app.ProcessIdle() - # We need to sleep at this point to keep the idle CPU load - # low. However, if sleep to long, GUI response is poor. As - # a compromise, we watch how often GUI events are being processed - # and switch between a short and long sleep time. Here are some - # stats useful in helping to tune this. 
- # time CPU load - # 0.001 13% - # 0.005 3% - # 0.01 1.5% - # 0.05 0.5% - used_time = clock() - t - if used_time > 10.0: - # print 'Sleep for 1 s' # dbg - time.sleep(1.0) - elif used_time > 0.1: - # Few GUI events coming in, so we can sleep longer - # print 'Sleep for 0.05 s' # dbg - time.sleep(0.05) - else: - # Many GUI events coming in, so sleep only very little - time.sleep(0.001) - del ea - except KeyboardInterrupt: - pass - return 0 - -if sys.platform == 'darwin': - # On OSX, evtloop.Pending() always returns True, regardless of there being - # any events pending. As such we can't use implementations 1 or 3 of the - # inputhook as those depend on a pending/dispatch loop. - inputhook_wx = inputhook_wx2 -else: - # This is our default implementation - inputhook_wx = inputhook_wx3 +# encoding: utf-8 + +""" +Enable wxPython to be used interactively by setting PyOS_InputHook. + +Authors: Robin Dunn, Brian Granger, Ondrej Certik +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import sys +import signal +import time +from timeit import default_timer as clock +import wx + +from IPython.lib.inputhook import stdin_ready + + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def inputhook_wx1(): + """Run the wx event loop by processing pending events only. + + This approach seems to work, but its performance is not great as it + relies on having PyOS_InputHook called regularly. + """ + try: + app = wx.GetApp() + if app is not None: + assert wx.Thread_IsMain() + + # Make a temporary event loop and process system events until + # there are no more waiting, then allow idle events (which + # will also deal with pending or posted wx events.) + evtloop = wx.EventLoop() + ea = wx.EventLoopActivator(evtloop) + while evtloop.Pending(): + evtloop.Dispatch() + app.ProcessIdle() + del ea + except KeyboardInterrupt: + pass + return 0 + +class EventLoopTimer(wx.Timer): + + def __init__(self, func): + self.func = func + wx.Timer.__init__(self) + + def Notify(self): + self.func() + +class EventLoopRunner(object): + + def Run(self, time): + self.evtloop = wx.EventLoop() + self.timer = EventLoopTimer(self.check_stdin) + self.timer.Start(time) + self.evtloop.Run() + + def check_stdin(self): + if stdin_ready(): + self.timer.Stop() + self.evtloop.Exit() + +def inputhook_wx2(): + """Run the wx event loop, polling for stdin. + + This version runs the wx eventloop for an undetermined amount of time, + during which it periodically checks to see if anything is ready on + stdin. If anything is ready on stdin, the event loop exits. + + The argument to elr.Run controls how often the event loop looks at stdin. + This determines the responsiveness at the keyboard. A setting of 1000 + enables a user to type at most 1 char per second. I have found that a + setting of 10 gives good keyboard response. We can shorten it further, + but eventually performance would suffer from calling select/kbhit too + often. 
+ """ + try: + app = wx.GetApp() + if app is not None: + assert wx.Thread_IsMain() + elr = EventLoopRunner() + # As this time is made shorter, keyboard response improves, but idle + # CPU load goes up. 10 ms seems like a good compromise. + elr.Run(time=10) # CHANGE time here to control polling interval + except KeyboardInterrupt: + pass + return 0 + +def inputhook_wx3(): + """Run the wx event loop by processing pending events only. + + This is like inputhook_wx1, but it keeps processing pending events + until stdin is ready. After processing all pending events, a call to + time.sleep is inserted. This is needed, otherwise, CPU usage is at 100%. + This sleep time should be tuned though for best performance. + """ + # We need to protect against a user pressing Control-C when IPython is + # idle and this is running. We trap KeyboardInterrupt and pass. + try: + app = wx.GetApp() + if app is not None: + assert wx.Thread_IsMain() + + # The import of wx on Linux sets the handler for signal.SIGINT + # to 0. This is a bug in wx or gtk. We fix by just setting it + # back to the Python default. + if not callable(signal.getsignal(signal.SIGINT)): + signal.signal(signal.SIGINT, signal.default_int_handler) + + evtloop = wx.EventLoop() + ea = wx.EventLoopActivator(evtloop) + t = clock() + while not stdin_ready(): + while evtloop.Pending(): + t = clock() + evtloop.Dispatch() + app.ProcessIdle() + # We need to sleep at this point to keep the idle CPU load + # low. However, if sleep to long, GUI response is poor. As + # a compromise, we watch how often GUI events are being processed + # and switch between a short and long sleep time. Here are some + # stats useful in helping to tune this. + # time CPU load + # 0.001 13% + # 0.005 3% + # 0.01 1.5% + # 0.05 0.5% + used_time = clock() - t + if used_time > 10.0: + # print 'Sleep for 1 s' # dbg + time.sleep(1.0) + elif used_time > 0.1: + # Few GUI events coming in, so we can sleep longer + # print 'Sleep for 0.05 s' # dbg + time.sleep(0.05) + else: + # Many GUI events coming in, so sleep only very little + time.sleep(0.001) + del ea + except KeyboardInterrupt: + pass + return 0 + +if sys.platform == 'darwin': + # On OSX, evtloop.Pending() always returns True, regardless of there being + # any events pending. As such we can't use implementations 1 or 3 of the + # inputhook as those depend on a pending/dispatch loop. 
+ inputhook_wx = inputhook_wx2 +else: + # This is our default implementation + inputhook_wx = inputhook_wx3 diff --git a/contrib/python/ipython/py3/IPython/lib/kernel.py b/contrib/python/ipython/py3/IPython/lib/kernel.py index 7de2ea4b122..af9827667fb 100644 --- a/contrib/python/ipython/py3/IPython/lib/kernel.py +++ b/contrib/python/ipython/py3/IPython/lib/kernel.py @@ -1,13 +1,13 @@ -"""[DEPRECATED] Utilities for connecting to kernels - -Moved to IPython.kernel.connect -""" - -import warnings -warnings.warn("IPython.lib.kernel moved to IPython.kernel.connect in IPython 1.0," - " and will be removed in IPython 6.0.", - DeprecationWarning -) - -from ipykernel.connect import * - +"""[DEPRECATED] Utilities for connecting to kernels + +Moved to IPython.kernel.connect +""" + +import warnings +warnings.warn("IPython.lib.kernel moved to IPython.kernel.connect in IPython 1.0," + " and will be removed in IPython 6.0.", + DeprecationWarning +) + +from ipykernel.connect import * + diff --git a/contrib/python/ipython/py3/IPython/lib/latextools.py b/contrib/python/ipython/py3/IPython/lib/latextools.py index 0e467a57e61..f976f2edb13 100644 --- a/contrib/python/ipython/py3/IPython/lib/latextools.py +++ b/contrib/python/ipython/py3/IPython/lib/latextools.py @@ -1,237 +1,237 @@ -# -*- coding: utf-8 -*- -"""Tools for handling LaTeX.""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -from io import BytesIO, open -import os -import tempfile -import shutil -import subprocess -from base64 import encodebytes -import textwrap - -from IPython.utils.process import find_cmd, FindCmdError -from traitlets.config import get_config -from traitlets.config.configurable import SingletonConfigurable -from traitlets import List, Bool, Unicode -from IPython.utils.py3compat import cast_unicode - - -class LaTeXTool(SingletonConfigurable): - """An object to store configuration of the LaTeX tool.""" - def _config_default(self): - return get_config() - - backends = List( - Unicode(), ["matplotlib", "dvipng"], - help="Preferred backend to draw LaTeX math equations. " - "Backends in the list are checked one by one and the first " - "usable one is used. Note that `matplotlib` backend " - "is usable only for inline style equations. To draw " - "display style equations, `dvipng` backend must be specified. ", - # It is a List instead of Enum, to make configuration more - # flexible. For example, to use matplotlib mainly but dvipng - # for display style, the default ["matplotlib", "dvipng"] can - # be used. To NOT use dvipng so that other repr such as - # unicode pretty printing is used, you can use ["matplotlib"]. - ).tag(config=True) - - use_breqn = Bool( - True, - help="Use breqn.sty to automatically break long equations. " - "This configuration takes effect only for dvipng backend.", - ).tag(config=True) - - packages = List( - ['amsmath', 'amsthm', 'amssymb', 'bm'], - help="A list of packages to use for dvipng backend. " - "'breqn' will be automatically appended when use_breqn=True.", - ).tag(config=True) - - preamble = Unicode( - help="Additional preamble to use when generating LaTeX source " - "for dvipng backend.", - ).tag(config=True) - - -def latex_to_png(s, encode=False, backend=None, wrap=False, color='Black', - scale=1.0): - """Render a LaTeX string to PNG. - - Parameters - ---------- - s : str - The raw string containing valid inline LaTeX. - encode : bool, optional - Should the PNG data base64 encoded to make it JSON'able. 
- backend : {matplotlib, dvipng} - Backend for producing PNG data. - wrap : bool - If true, Automatically wrap `s` as a LaTeX equation. - color : string - Foreground color name among dvipsnames, e.g. 'Maroon' or on hex RGB - format, e.g. '#AA20FA'. - scale : float - Scale factor for the resulting PNG. - - None is returned when the backend cannot be used. - - """ - s = cast_unicode(s) - allowed_backends = LaTeXTool.instance().backends - if backend is None: - backend = allowed_backends[0] - if backend not in allowed_backends: - return None - if backend == 'matplotlib': - f = latex_to_png_mpl - elif backend == 'dvipng': - f = latex_to_png_dvipng - if color.startswith('#'): - # Convert hex RGB color to LaTeX RGB color. - if len(color) == 7: - try: - color = "RGB {}".format(" ".join([str(int(x, 16)) for x in - textwrap.wrap(color[1:], 2)])) - except ValueError: - raise ValueError('Invalid color specification {}.'.format(color)) - else: - raise ValueError('Invalid color specification {}.'.format(color)) - else: - raise ValueError('No such backend {0}'.format(backend)) - bin_data = f(s, wrap, color, scale) - if encode and bin_data: - bin_data = encodebytes(bin_data) - return bin_data - - -def latex_to_png_mpl(s, wrap, color='Black', scale=1.0): - try: - from matplotlib import mathtext - from pyparsing import ParseFatalException - except ImportError: - return None - - # mpl mathtext doesn't support display math, force inline - s = s.replace('$$', '$') - if wrap: - s = u'${0}$'.format(s) - - try: - mt = mathtext.MathTextParser('bitmap') - f = BytesIO() - dpi = 120*scale - mt.to_png(f, s, fontsize=12, dpi=dpi, color=color) - return f.getvalue() - except (ValueError, RuntimeError, ParseFatalException): - return None - - -def latex_to_png_dvipng(s, wrap, color='Black', scale=1.0): - try: - find_cmd('latex') - find_cmd('dvipng') - except FindCmdError: - return None - try: - workdir = tempfile.mkdtemp() - tmpfile = os.path.join(workdir, "tmp.tex") - dvifile = os.path.join(workdir, "tmp.dvi") - outfile = os.path.join(workdir, "tmp.png") - - with open(tmpfile, "w", encoding='utf8') as f: - f.writelines(genelatex(s, wrap)) - - with open(os.devnull, 'wb') as devnull: - subprocess.check_call( - ["latex", "-halt-on-error", "-interaction", "batchmode", tmpfile], - cwd=workdir, stdout=devnull, stderr=devnull) - - resolution = round(150*scale) - subprocess.check_call( - [ - "dvipng", - "-T", - "tight", - "-D", - str(resolution), - "-z", - "9", - "-bg", - "Transparent", - "-o", - outfile, - dvifile, - "-fg", - color, - ], - cwd=workdir, - stdout=devnull, - stderr=devnull, - ) - - with open(outfile, "rb") as f: - return f.read() - except subprocess.CalledProcessError: - return None - finally: - shutil.rmtree(workdir) - - -def kpsewhich(filename): - """Invoke kpsewhich command with an argument `filename`.""" - try: - find_cmd("kpsewhich") - proc = subprocess.Popen( - ["kpsewhich", filename], - stdout=subprocess.PIPE, stderr=subprocess.PIPE) - (stdout, stderr) = proc.communicate() - return stdout.strip().decode('utf8', 'replace') - except FindCmdError: - pass - - -def genelatex(body, wrap): - """Generate LaTeX document for dvipng backend.""" - lt = LaTeXTool.instance() - breqn = wrap and lt.use_breqn and kpsewhich("breqn.sty") - yield r'\documentclass{article}' - packages = lt.packages - if breqn: - packages = packages + ['breqn'] - for pack in packages: - yield r'\usepackage{{{0}}}'.format(pack) - yield r'\pagestyle{empty}' - if lt.preamble: - yield lt.preamble - yield r'\begin{document}' - if breqn: - yield 
r'\begin{dmath*}' - yield body - yield r'\end{dmath*}' - elif wrap: - yield u'$${0}$$'.format(body) - else: - yield body - yield u'\\end{document}' - - -_data_uri_template_png = u"""<img src="data:image/png;base64,%s" alt=%s />""" - -def latex_to_html(s, alt='image'): - """Render LaTeX to HTML with embedded PNG data using data URIs. - - Parameters - ---------- - s : str - The raw string containing valid inline LateX. - alt : str - The alt text to use for the HTML. - """ - base64_data = latex_to_png(s, encode=True).decode('ascii') - if base64_data: - return _data_uri_template_png % (base64_data, alt) - - +# -*- coding: utf-8 -*- +"""Tools for handling LaTeX.""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +from io import BytesIO, open +import os +import tempfile +import shutil +import subprocess +from base64 import encodebytes +import textwrap + +from IPython.utils.process import find_cmd, FindCmdError +from traitlets.config import get_config +from traitlets.config.configurable import SingletonConfigurable +from traitlets import List, Bool, Unicode +from IPython.utils.py3compat import cast_unicode + + +class LaTeXTool(SingletonConfigurable): + """An object to store configuration of the LaTeX tool.""" + def _config_default(self): + return get_config() + + backends = List( + Unicode(), ["matplotlib", "dvipng"], + help="Preferred backend to draw LaTeX math equations. " + "Backends in the list are checked one by one and the first " + "usable one is used. Note that `matplotlib` backend " + "is usable only for inline style equations. To draw " + "display style equations, `dvipng` backend must be specified. ", + # It is a List instead of Enum, to make configuration more + # flexible. For example, to use matplotlib mainly but dvipng + # for display style, the default ["matplotlib", "dvipng"] can + # be used. To NOT use dvipng so that other repr such as + # unicode pretty printing is used, you can use ["matplotlib"]. + ).tag(config=True) + + use_breqn = Bool( + True, + help="Use breqn.sty to automatically break long equations. " + "This configuration takes effect only for dvipng backend.", + ).tag(config=True) + + packages = List( + ['amsmath', 'amsthm', 'amssymb', 'bm'], + help="A list of packages to use for dvipng backend. " + "'breqn' will be automatically appended when use_breqn=True.", + ).tag(config=True) + + preamble = Unicode( + help="Additional preamble to use when generating LaTeX source " + "for dvipng backend.", + ).tag(config=True) + + +def latex_to_png(s, encode=False, backend=None, wrap=False, color='Black', + scale=1.0): + """Render a LaTeX string to PNG. + + Parameters + ---------- + s : str + The raw string containing valid inline LaTeX. + encode : bool, optional + Should the PNG data base64 encoded to make it JSON'able. + backend : {matplotlib, dvipng} + Backend for producing PNG data. + wrap : bool + If true, Automatically wrap `s` as a LaTeX equation. + color : string + Foreground color name among dvipsnames, e.g. 'Maroon' or on hex RGB + format, e.g. '#AA20FA'. + scale : float + Scale factor for the resulting PNG. + + None is returned when the backend cannot be used. 
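A hedged usage sketch of latex_to_png as documented above, assuming a working matplotlib installation (the output file name is illustrative):

from IPython.lib.latextools import latex_to_png

png = latex_to_png(r"\frac{d}{dx} e^x = e^x", wrap=True, backend="matplotlib")
if png is not None:  # None means no usable backend was found
    with open("equation.png", "wb") as fh:
        fh.write(png)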
+ + """ + s = cast_unicode(s) + allowed_backends = LaTeXTool.instance().backends + if backend is None: + backend = allowed_backends[0] + if backend not in allowed_backends: + return None + if backend == 'matplotlib': + f = latex_to_png_mpl + elif backend == 'dvipng': + f = latex_to_png_dvipng + if color.startswith('#'): + # Convert hex RGB color to LaTeX RGB color. + if len(color) == 7: + try: + color = "RGB {}".format(" ".join([str(int(x, 16)) for x in + textwrap.wrap(color[1:], 2)])) + except ValueError: + raise ValueError('Invalid color specification {}.'.format(color)) + else: + raise ValueError('Invalid color specification {}.'.format(color)) + else: + raise ValueError('No such backend {0}'.format(backend)) + bin_data = f(s, wrap, color, scale) + if encode and bin_data: + bin_data = encodebytes(bin_data) + return bin_data + + +def latex_to_png_mpl(s, wrap, color='Black', scale=1.0): + try: + from matplotlib import mathtext + from pyparsing import ParseFatalException + except ImportError: + return None + + # mpl mathtext doesn't support display math, force inline + s = s.replace('$$', '$') + if wrap: + s = u'${0}$'.format(s) + + try: + mt = mathtext.MathTextParser('bitmap') + f = BytesIO() + dpi = 120*scale + mt.to_png(f, s, fontsize=12, dpi=dpi, color=color) + return f.getvalue() + except (ValueError, RuntimeError, ParseFatalException): + return None + + +def latex_to_png_dvipng(s, wrap, color='Black', scale=1.0): + try: + find_cmd('latex') + find_cmd('dvipng') + except FindCmdError: + return None + try: + workdir = tempfile.mkdtemp() + tmpfile = os.path.join(workdir, "tmp.tex") + dvifile = os.path.join(workdir, "tmp.dvi") + outfile = os.path.join(workdir, "tmp.png") + + with open(tmpfile, "w", encoding='utf8') as f: + f.writelines(genelatex(s, wrap)) + + with open(os.devnull, 'wb') as devnull: + subprocess.check_call( + ["latex", "-halt-on-error", "-interaction", "batchmode", tmpfile], + cwd=workdir, stdout=devnull, stderr=devnull) + + resolution = round(150*scale) + subprocess.check_call( + [ + "dvipng", + "-T", + "tight", + "-D", + str(resolution), + "-z", + "9", + "-bg", + "Transparent", + "-o", + outfile, + dvifile, + "-fg", + color, + ], + cwd=workdir, + stdout=devnull, + stderr=devnull, + ) + + with open(outfile, "rb") as f: + return f.read() + except subprocess.CalledProcessError: + return None + finally: + shutil.rmtree(workdir) + + +def kpsewhich(filename): + """Invoke kpsewhich command with an argument `filename`.""" + try: + find_cmd("kpsewhich") + proc = subprocess.Popen( + ["kpsewhich", filename], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + (stdout, stderr) = proc.communicate() + return stdout.strip().decode('utf8', 'replace') + except FindCmdError: + pass + + +def genelatex(body, wrap): + """Generate LaTeX document for dvipng backend.""" + lt = LaTeXTool.instance() + breqn = wrap and lt.use_breqn and kpsewhich("breqn.sty") + yield r'\documentclass{article}' + packages = lt.packages + if breqn: + packages = packages + ['breqn'] + for pack in packages: + yield r'\usepackage{{{0}}}'.format(pack) + yield r'\pagestyle{empty}' + if lt.preamble: + yield lt.preamble + yield r'\begin{document}' + if breqn: + yield r'\begin{dmath*}' + yield body + yield r'\end{dmath*}' + elif wrap: + yield u'$${0}$$'.format(body) + else: + yield body + yield u'\\end{document}' + + +_data_uri_template_png = u"""<img src="data:image/png;base64,%s" alt=%s />""" + +def latex_to_html(s, alt='image'): + """Render LaTeX to HTML with embedded PNG data using data URIs. 
+ + Parameters + ---------- + s : str + The raw string containing valid inline LateX. + alt : str + The alt text to use for the HTML. + """ + base64_data = latex_to_png(s, encode=True).decode('ascii') + if base64_data: + return _data_uri_template_png % (base64_data, alt) + + diff --git a/contrib/python/ipython/py3/IPython/lib/lexers.py b/contrib/python/ipython/py3/IPython/lib/lexers.py index a1dc79ad468..4494da56571 100644 --- a/contrib/python/ipython/py3/IPython/lib/lexers.py +++ b/contrib/python/ipython/py3/IPython/lib/lexers.py @@ -1,532 +1,532 @@ -# -*- coding: utf-8 -*- -""" -Defines a variety of Pygments lexers for highlighting IPython code. - -This includes: - - IPythonLexer, IPython3Lexer - Lexers for pure IPython (python + magic/shell commands) - - IPythonPartialTracebackLexer, IPythonTracebackLexer - Supports 2.x and 3.x via keyword `python3`. The partial traceback - lexer reads everything but the Python code appearing in a traceback. - The full lexer combines the partial lexer with an IPython lexer. - - IPythonConsoleLexer - A lexer for IPython console sessions, with support for tracebacks. - - IPyLexer - A friendly lexer which examines the first line of text and from it, - decides whether to use an IPython lexer or an IPython console lexer. - This is probably the only lexer that needs to be explicitly added - to Pygments. - -""" -#----------------------------------------------------------------------------- -# Copyright (c) 2013, the IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. -#----------------------------------------------------------------------------- - -# Standard library -import re - -# Third party -from pygments.lexers import ( - BashLexer, HtmlLexer, JavascriptLexer, RubyLexer, PerlLexer, PythonLexer, - Python3Lexer, TexLexer) -from pygments.lexer import ( - Lexer, DelegatingLexer, RegexLexer, do_insertions, bygroups, using, -) -from pygments.token import ( - Generic, Keyword, Literal, Name, Operator, Other, Text, Error, -) -from pygments.util import get_bool_opt - -# Local - -line_re = re.compile('.*?\n') - -__all__ = ['build_ipy_lexer', 'IPython3Lexer', 'IPythonLexer', - 'IPythonPartialTracebackLexer', 'IPythonTracebackLexer', - 'IPythonConsoleLexer', 'IPyLexer'] - - -def build_ipy_lexer(python3): - """Builds IPython lexers depending on the value of `python3`. - - The lexer inherits from an appropriate Python lexer and then adds - information about IPython specific keywords (i.e. magic commands, - shell commands, etc.) - - Parameters - ---------- - python3 : bool - If `True`, then build an IPython lexer from a Python 3 lexer. - - """ - # It would be nice to have a single IPython lexer class which takes - # a boolean `python3`. But since there are two Python lexer classes, - # we will also have two IPython lexer classes. 
- if python3: - PyLexer = Python3Lexer - name = 'IPython3' - aliases = ['ipython3'] - doc = """IPython3 Lexer""" - else: - PyLexer = PythonLexer - name = 'IPython' - aliases = ['ipython2', 'ipython'] - doc = """IPython Lexer""" - - ipython_tokens = [ - (r'(?s)(\s*)(%%capture)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), - (r'(?s)(\s*)(%%debug)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), - (r'(?is)(\s*)(%%html)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(HtmlLexer))), - (r'(?s)(\s*)(%%javascript)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))), - (r'(?s)(\s*)(%%js)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))), - (r'(?s)(\s*)(%%latex)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(TexLexer))), - (r'(?s)(\s*)(%%perl)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PerlLexer))), - (r'(?s)(\s*)(%%prun)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), - (r'(?s)(\s*)(%%pypy)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), - (r'(?s)(\s*)(%%python)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), - (r'(?s)(\s*)(%%python2)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PythonLexer))), - (r'(?s)(\s*)(%%python3)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))), - (r'(?s)(\s*)(%%ruby)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(RubyLexer))), - (r'(?s)(\s*)(%%time)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), - (r'(?s)(\s*)(%%timeit)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), - (r'(?s)(\s*)(%%writefile)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), - (r'(?s)(\s*)(%%file)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), - (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)), - (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))), - (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)), - (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)), - (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword, - using(BashLexer), Text)), - (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)), - (r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)), - (r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)), - (r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)), - (r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)), - ] - - tokens = PyLexer.tokens.copy() - tokens['root'] = ipython_tokens + tokens['root'] - - attrs = {'name': name, 'aliases': aliases, 'filenames': [], - '__doc__': doc, 'tokens': tokens} - - return type(name, (PyLexer,), attrs) - - -IPython3Lexer = build_ipy_lexer(python3=True) -IPythonLexer = build_ipy_lexer(python3=False) - - -class IPythonPartialTracebackLexer(RegexLexer): - """ - Partial lexer for IPython tracebacks. - - Handles all the non-python output. - - """ - name = 'IPython Partial Traceback' - - tokens = { - 'root': [ - # Tracebacks for syntax errors have a different style. - # For both types of tracebacks, we mark the first line with - # Generic.Traceback. For syntax errors, we mark the filename - # as we mark the filenames for non-syntax tracebacks. - # - # These two regexps define how IPythonConsoleLexer finds a - # traceback. 
- # - ## Non-syntax traceback - (r'^(\^C)?(-+\n)', bygroups(Error, Generic.Traceback)), - ## Syntax traceback - (r'^( File)(.*)(, line )(\d+\n)', - bygroups(Generic.Traceback, Name.Namespace, - Generic.Traceback, Literal.Number.Integer)), - - # (Exception Identifier)(Whitespace)(Traceback Message) - (r'(?u)(^[^\d\W]\w*)(\s*)(Traceback.*?\n)', - bygroups(Name.Exception, Generic.Whitespace, Text)), - # (Module/Filename)(Text)(Callee)(Function Signature) - # Better options for callee and function signature? - (r'(.*)( in )(.*)(\(.*\)\n)', - bygroups(Name.Namespace, Text, Name.Entity, Name.Tag)), - # Regular line: (Whitespace)(Line Number)(Python Code) - (r'(\s*?)(\d+)(.*?\n)', - bygroups(Generic.Whitespace, Literal.Number.Integer, Other)), - # Emphasized line: (Arrow)(Line Number)(Python Code) - # Using Exception token so arrow color matches the Exception. - (r'(-*>?\s?)(\d+)(.*?\n)', - bygroups(Name.Exception, Literal.Number.Integer, Other)), - # (Exception Identifier)(Message) - (r'(?u)(^[^\d\W]\w*)(:.*?\n)', - bygroups(Name.Exception, Text)), - # Tag everything else as Other, will be handled later. - (r'.*\n', Other), - ], - } - - -class IPythonTracebackLexer(DelegatingLexer): - """ - IPython traceback lexer. - - For doctests, the tracebacks can be snipped as much as desired with the - exception to the lines that designate a traceback. For non-syntax error - tracebacks, this is the line of hyphens. For syntax error tracebacks, - this is the line which lists the File and line number. - - """ - # The lexer inherits from DelegatingLexer. The "root" lexer is an - # appropriate IPython lexer, which depends on the value of the boolean - # `python3`. First, we parse with the partial IPython traceback lexer. - # Then, any code marked with the "Other" token is delegated to the root - # lexer. - # - name = 'IPython Traceback' - aliases = ['ipythontb'] - - def __init__(self, **options): - self.python3 = get_bool_opt(options, 'python3', False) - if self.python3: - self.aliases = ['ipython3tb'] - else: - self.aliases = ['ipython2tb', 'ipythontb'] - - if self.python3: - IPyLexer = IPython3Lexer - else: - IPyLexer = IPythonLexer - - DelegatingLexer.__init__(self, IPyLexer, - IPythonPartialTracebackLexer, **options) - -class IPythonConsoleLexer(Lexer): - """ - An IPython console lexer for IPython code-blocks and doctests, such as: - - .. code-block:: rst - - .. code-block:: ipythonconsole - - In [1]: a = 'foo' - - In [2]: a - Out[2]: 'foo' - - In [3]: print a - foo - - In [4]: 1 / 0 - - - Support is also provided for IPython exceptions: - - .. code-block:: rst - - .. code-block:: ipythonconsole - - In [1]: raise Exception - - --------------------------------------------------------------------------- - Exception Traceback (most recent call last) - <ipython-input-1-fca2ab0ca76b> in <module> - ----> 1 raise Exception - - Exception: - - """ - name = 'IPython console session' - aliases = ['ipythonconsole'] - mimetypes = ['text/x-ipython-console'] - - # The regexps used to determine what is input and what is output. - # The default prompts for IPython are: - # - # in = 'In [#]: ' - # continuation = ' .D.: ' - # template = 'Out[#]: ' - # - # Where '#' is the 'prompt number' or 'execution count' and 'D' - # D is a number of dots matching the width of the execution count - # - in1_regex = r'In \[[0-9]+\]: ' - in2_regex = r' \.\.+\.: ' - out_regex = r'Out\[[0-9]+\]: ' - - #: The regex to determine when a traceback starts. 
- ipytb_start = re.compile(r'^(\^C)?(-+\n)|^( File)(.*)(, line )(\d+\n)') - - def __init__(self, **options): - """Initialize the IPython console lexer. - - Parameters - ---------- - python3 : bool - If `True`, then the console inputs are parsed using a Python 3 - lexer. Otherwise, they are parsed using a Python 2 lexer. - in1_regex : RegexObject - The compiled regular expression used to detect the start - of inputs. Although the IPython configuration setting may have a - trailing whitespace, do not include it in the regex. If `None`, - then the default input prompt is assumed. - in2_regex : RegexObject - The compiled regular expression used to detect the continuation - of inputs. Although the IPython configuration setting may have a - trailing whitespace, do not include it in the regex. If `None`, - then the default input prompt is assumed. - out_regex : RegexObject - The compiled regular expression used to detect outputs. If `None`, - then the default output prompt is assumed. - - """ - self.python3 = get_bool_opt(options, 'python3', False) - if self.python3: - self.aliases = ['ipython3console'] - else: - self.aliases = ['ipython2console', 'ipythonconsole'] - - in1_regex = options.get('in1_regex', self.in1_regex) - in2_regex = options.get('in2_regex', self.in2_regex) - out_regex = options.get('out_regex', self.out_regex) - - # So that we can work with input and output prompts which have been - # rstrip'd (possibly by editors) we also need rstrip'd variants. If - # we do not do this, then such prompts will be tagged as 'output'. - # The reason can't just use the rstrip'd variants instead is because - # we want any whitespace associated with the prompt to be inserted - # with the token. This allows formatted code to be modified so as hide - # the appearance of prompts, with the whitespace included. One example - # use of this is in copybutton.js from the standard lib Python docs. - in1_regex_rstrip = in1_regex.rstrip() + '\n' - in2_regex_rstrip = in2_regex.rstrip() + '\n' - out_regex_rstrip = out_regex.rstrip() + '\n' - - # Compile and save them all. - attrs = ['in1_regex', 'in2_regex', 'out_regex', - 'in1_regex_rstrip', 'in2_regex_rstrip', 'out_regex_rstrip'] - for attr in attrs: - self.__setattr__(attr, re.compile(locals()[attr])) - - Lexer.__init__(self, **options) - - if self.python3: - pylexer = IPython3Lexer - tblexer = IPythonTracebackLexer - else: - pylexer = IPythonLexer - tblexer = IPythonTracebackLexer - - self.pylexer = pylexer(**options) - self.tblexer = tblexer(**options) - - self.reset() - - def reset(self): - self.mode = 'output' - self.index = 0 - self.buffer = u'' - self.insertions = [] - - def buffered_tokens(self): - """ - Generator of unprocessed tokens after doing insertions and before - changing to a new state. - - """ - if self.mode == 'output': - tokens = [(0, Generic.Output, self.buffer)] - elif self.mode == 'input': - tokens = self.pylexer.get_tokens_unprocessed(self.buffer) - else: # traceback - tokens = self.tblexer.get_tokens_unprocessed(self.buffer) - - for i, t, v in do_insertions(self.insertions, tokens): - # All token indexes are relative to the buffer. - yield self.index + i, t, v - - # Clear it all - self.index += len(self.buffer) - self.buffer = u'' - self.insertions = [] - - def get_mci(self, line): - """ - Parses the line and returns a 3-tuple: (mode, code, insertion). - - `mode` is the next mode (or state) of the lexer, and is always equal - to 'input', 'output', or 'tb'. 
- - `code` is a portion of the line that should be added to the buffer - corresponding to the next mode and eventually lexed by another lexer. - For example, `code` could be Python code if `mode` were 'input'. - - `insertion` is a 3-tuple (index, token, text) representing an - unprocessed "token" that will be inserted into the stream of tokens - that are created from the buffer once we change modes. This is usually - the input or output prompt. - - In general, the next mode depends on current mode and on the contents - of `line`. - - """ - # To reduce the number of regex match checks, we have multiple - # 'if' blocks instead of 'if-elif' blocks. - - # Check for possible end of input - in2_match = self.in2_regex.match(line) - in2_match_rstrip = self.in2_regex_rstrip.match(line) - if (in2_match and in2_match.group().rstrip() == line.rstrip()) or \ - in2_match_rstrip: - end_input = True - else: - end_input = False - if end_input and self.mode != 'tb': - # Only look for an end of input when not in tb mode. - # An ellipsis could appear within the traceback. - mode = 'output' - code = u'' - insertion = (0, Generic.Prompt, line) - return mode, code, insertion - - # Check for output prompt - out_match = self.out_regex.match(line) - out_match_rstrip = self.out_regex_rstrip.match(line) - if out_match or out_match_rstrip: - mode = 'output' - if out_match: - idx = out_match.end() - else: - idx = out_match_rstrip.end() - code = line[idx:] - # Use the 'heading' token for output. We cannot use Generic.Error - # since it would conflict with exceptions. - insertion = (0, Generic.Heading, line[:idx]) - return mode, code, insertion - - - # Check for input or continuation prompt (non stripped version) - in1_match = self.in1_regex.match(line) - if in1_match or (in2_match and self.mode != 'tb'): - # New input or when not in tb, continued input. - # We do not check for continued input when in tb since it is - # allowable to replace a long stack with an ellipsis. - mode = 'input' - if in1_match: - idx = in1_match.end() - else: # in2_match - idx = in2_match.end() - code = line[idx:] - insertion = (0, Generic.Prompt, line[:idx]) - return mode, code, insertion - - # Check for input or continuation prompt (stripped version) - in1_match_rstrip = self.in1_regex_rstrip.match(line) - if in1_match_rstrip or (in2_match_rstrip and self.mode != 'tb'): - # New input or when not in tb, continued input. - # We do not check for continued input when in tb since it is - # allowable to replace a long stack with an ellipsis. - mode = 'input' - if in1_match_rstrip: - idx = in1_match_rstrip.end() - else: # in2_match - idx = in2_match_rstrip.end() - code = line[idx:] - insertion = (0, Generic.Prompt, line[:idx]) - return mode, code, insertion - - # Check for traceback - if self.ipytb_start.match(line): - mode = 'tb' - code = line - insertion = None - return mode, code, insertion - - # All other stuff... - if self.mode in ('input', 'output'): - # We assume all other text is output. Multiline input that - # does not use the continuation marker cannot be detected. - # For example, the 3 in the following is clearly output: - # - # In [1]: print 3 - # 3 - # - # But the following second line is part of the input: - # - # In [2]: while True: - # print True - # - # In both cases, the 2nd line will be 'output'. 
- # - mode = 'output' - else: - mode = 'tb' - - code = line - insertion = None - - return mode, code, insertion - - def get_tokens_unprocessed(self, text): - self.reset() - for match in line_re.finditer(text): - line = match.group() - mode, code, insertion = self.get_mci(line) - - if mode != self.mode: - # Yield buffered tokens before transitioning to new mode. - for token in self.buffered_tokens(): - yield token - self.mode = mode - - if insertion: - self.insertions.append((len(self.buffer), [insertion])) - self.buffer += code - - for token in self.buffered_tokens(): - yield token - -class IPyLexer(Lexer): - r""" - Primary lexer for all IPython-like code. - - This is a simple helper lexer. If the first line of the text begins with - "In \[[0-9]+\]:", then the entire text is parsed with an IPython console - lexer. If not, then the entire text is parsed with an IPython lexer. - - The goal is to reduce the number of lexers that are registered - with Pygments. - - """ - name = 'IPy session' - aliases = ['ipy'] - - def __init__(self, **options): - self.python3 = get_bool_opt(options, 'python3', False) - if self.python3: - self.aliases = ['ipy3'] - else: - self.aliases = ['ipy2', 'ipy'] - - Lexer.__init__(self, **options) - - self.IPythonLexer = IPythonLexer(**options) - self.IPythonConsoleLexer = IPythonConsoleLexer(**options) - - def get_tokens_unprocessed(self, text): - # Search for the input prompt anywhere...this allows code blocks to - # begin with comments as well. - if re.match(r'.*(In \[[0-9]+\]:)', text.strip(), re.DOTALL): - lex = self.IPythonConsoleLexer - else: - lex = self.IPythonLexer - for token in lex.get_tokens_unprocessed(text): - yield token - +# -*- coding: utf-8 -*- +""" +Defines a variety of Pygments lexers for highlighting IPython code. + +This includes: + + IPythonLexer, IPython3Lexer + Lexers for pure IPython (python + magic/shell commands) + + IPythonPartialTracebackLexer, IPythonTracebackLexer + Supports 2.x and 3.x via keyword `python3`. The partial traceback + lexer reads everything but the Python code appearing in a traceback. + The full lexer combines the partial lexer with an IPython lexer. + + IPythonConsoleLexer + A lexer for IPython console sessions, with support for tracebacks. + + IPyLexer + A friendly lexer which examines the first line of text and from it, + decides whether to use an IPython lexer or an IPython console lexer. + This is probably the only lexer that needs to be explicitly added + to Pygments. + +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2013, the IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. 
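The module docstring above lists the individual lexers; as a minimal, illustrative usage sketch (assuming Pygments and this IPython build are importable, and taking IPyLexer from the IPython.lib.lexers path shown in this diff)::

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from IPython.lib.lexers import IPyLexer

    session = (
        "In [1]: a = 'foo'\n"
        "\n"
        "In [2]: a\n"
        "Out[2]: 'foo'\n"
    )
    # Because the text contains an "In [N]:" prompt, IPyLexer hands it to the
    # console lexer; plain IPython code (including %magics and !shell escapes)
    # would be handled by the IPython code lexer instead.
    print(highlight(session, IPyLexer(python3=True), TerminalFormatter()))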
+#----------------------------------------------------------------------------- + +# Standard library +import re + +# Third party +from pygments.lexers import ( + BashLexer, HtmlLexer, JavascriptLexer, RubyLexer, PerlLexer, PythonLexer, + Python3Lexer, TexLexer) +from pygments.lexer import ( + Lexer, DelegatingLexer, RegexLexer, do_insertions, bygroups, using, +) +from pygments.token import ( + Generic, Keyword, Literal, Name, Operator, Other, Text, Error, +) +from pygments.util import get_bool_opt + +# Local + +line_re = re.compile('.*?\n') + +__all__ = ['build_ipy_lexer', 'IPython3Lexer', 'IPythonLexer', + 'IPythonPartialTracebackLexer', 'IPythonTracebackLexer', + 'IPythonConsoleLexer', 'IPyLexer'] + + +def build_ipy_lexer(python3): + """Builds IPython lexers depending on the value of `python3`. + + The lexer inherits from an appropriate Python lexer and then adds + information about IPython specific keywords (i.e. magic commands, + shell commands, etc.) + + Parameters + ---------- + python3 : bool + If `True`, then build an IPython lexer from a Python 3 lexer. + + """ + # It would be nice to have a single IPython lexer class which takes + # a boolean `python3`. But since there are two Python lexer classes, + # we will also have two IPython lexer classes. + if python3: + PyLexer = Python3Lexer + name = 'IPython3' + aliases = ['ipython3'] + doc = """IPython3 Lexer""" + else: + PyLexer = PythonLexer + name = 'IPython' + aliases = ['ipython2', 'ipython'] + doc = """IPython Lexer""" + + ipython_tokens = [ + (r'(?s)(\s*)(%%capture)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?s)(\s*)(%%debug)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?is)(\s*)(%%html)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(HtmlLexer))), + (r'(?s)(\s*)(%%javascript)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))), + (r'(?s)(\s*)(%%js)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))), + (r'(?s)(\s*)(%%latex)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(TexLexer))), + (r'(?s)(\s*)(%%perl)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PerlLexer))), + (r'(?s)(\s*)(%%prun)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?s)(\s*)(%%pypy)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?s)(\s*)(%%python)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?s)(\s*)(%%python2)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PythonLexer))), + (r'(?s)(\s*)(%%python3)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))), + (r'(?s)(\s*)(%%ruby)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(RubyLexer))), + (r'(?s)(\s*)(%%time)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?s)(\s*)(%%timeit)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?s)(\s*)(%%writefile)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?s)(\s*)(%%file)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)), + (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))), + (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)), + (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)), + (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword, + using(BashLexer), Text)), + (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)), + (r'^(!!)(.+)(\n)', bygroups(Operator, 
using(BashLexer), Text)), + (r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)), + (r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)), + (r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)), + ] + + tokens = PyLexer.tokens.copy() + tokens['root'] = ipython_tokens + tokens['root'] + + attrs = {'name': name, 'aliases': aliases, 'filenames': [], + '__doc__': doc, 'tokens': tokens} + + return type(name, (PyLexer,), attrs) + + +IPython3Lexer = build_ipy_lexer(python3=True) +IPythonLexer = build_ipy_lexer(python3=False) + + +class IPythonPartialTracebackLexer(RegexLexer): + """ + Partial lexer for IPython tracebacks. + + Handles all the non-python output. + + """ + name = 'IPython Partial Traceback' + + tokens = { + 'root': [ + # Tracebacks for syntax errors have a different style. + # For both types of tracebacks, we mark the first line with + # Generic.Traceback. For syntax errors, we mark the filename + # as we mark the filenames for non-syntax tracebacks. + # + # These two regexps define how IPythonConsoleLexer finds a + # traceback. + # + ## Non-syntax traceback + (r'^(\^C)?(-+\n)', bygroups(Error, Generic.Traceback)), + ## Syntax traceback + (r'^( File)(.*)(, line )(\d+\n)', + bygroups(Generic.Traceback, Name.Namespace, + Generic.Traceback, Literal.Number.Integer)), + + # (Exception Identifier)(Whitespace)(Traceback Message) + (r'(?u)(^[^\d\W]\w*)(\s*)(Traceback.*?\n)', + bygroups(Name.Exception, Generic.Whitespace, Text)), + # (Module/Filename)(Text)(Callee)(Function Signature) + # Better options for callee and function signature? + (r'(.*)( in )(.*)(\(.*\)\n)', + bygroups(Name.Namespace, Text, Name.Entity, Name.Tag)), + # Regular line: (Whitespace)(Line Number)(Python Code) + (r'(\s*?)(\d+)(.*?\n)', + bygroups(Generic.Whitespace, Literal.Number.Integer, Other)), + # Emphasized line: (Arrow)(Line Number)(Python Code) + # Using Exception token so arrow color matches the Exception. + (r'(-*>?\s?)(\d+)(.*?\n)', + bygroups(Name.Exception, Literal.Number.Integer, Other)), + # (Exception Identifier)(Message) + (r'(?u)(^[^\d\W]\w*)(:.*?\n)', + bygroups(Name.Exception, Text)), + # Tag everything else as Other, will be handled later. + (r'.*\n', Other), + ], + } + + +class IPythonTracebackLexer(DelegatingLexer): + """ + IPython traceback lexer. + + For doctests, the tracebacks can be snipped as much as desired with the + exception to the lines that designate a traceback. For non-syntax error + tracebacks, this is the line of hyphens. For syntax error tracebacks, + this is the line which lists the File and line number. + + """ + # The lexer inherits from DelegatingLexer. The "root" lexer is an + # appropriate IPython lexer, which depends on the value of the boolean + # `python3`. First, we parse with the partial IPython traceback lexer. + # Then, any code marked with the "Other" token is delegated to the root + # lexer. + # + name = 'IPython Traceback' + aliases = ['ipythontb'] + + def __init__(self, **options): + self.python3 = get_bool_opt(options, 'python3', False) + if self.python3: + self.aliases = ['ipython3tb'] + else: + self.aliases = ['ipython2tb', 'ipythontb'] + + if self.python3: + IPyLexer = IPython3Lexer + else: + IPyLexer = IPythonLexer + + DelegatingLexer.__init__(self, IPyLexer, + IPythonPartialTracebackLexer, **options) + +class IPythonConsoleLexer(Lexer): + """ + An IPython console lexer for IPython code-blocks and doctests, such as: + + .. code-block:: rst + + .. 
code-block:: ipythonconsole + + In [1]: a = 'foo' + + In [2]: a + Out[2]: 'foo' + + In [3]: print a + foo + + In [4]: 1 / 0 + + + Support is also provided for IPython exceptions: + + .. code-block:: rst + + .. code-block:: ipythonconsole + + In [1]: raise Exception + + --------------------------------------------------------------------------- + Exception Traceback (most recent call last) + <ipython-input-1-fca2ab0ca76b> in <module> + ----> 1 raise Exception + + Exception: + + """ + name = 'IPython console session' + aliases = ['ipythonconsole'] + mimetypes = ['text/x-ipython-console'] + + # The regexps used to determine what is input and what is output. + # The default prompts for IPython are: + # + # in = 'In [#]: ' + # continuation = ' .D.: ' + # template = 'Out[#]: ' + # + # Where '#' is the 'prompt number' or 'execution count' and 'D' + # D is a number of dots matching the width of the execution count + # + in1_regex = r'In \[[0-9]+\]: ' + in2_regex = r' \.\.+\.: ' + out_regex = r'Out\[[0-9]+\]: ' + + #: The regex to determine when a traceback starts. + ipytb_start = re.compile(r'^(\^C)?(-+\n)|^( File)(.*)(, line )(\d+\n)') + + def __init__(self, **options): + """Initialize the IPython console lexer. + + Parameters + ---------- + python3 : bool + If `True`, then the console inputs are parsed using a Python 3 + lexer. Otherwise, they are parsed using a Python 2 lexer. + in1_regex : RegexObject + The compiled regular expression used to detect the start + of inputs. Although the IPython configuration setting may have a + trailing whitespace, do not include it in the regex. If `None`, + then the default input prompt is assumed. + in2_regex : RegexObject + The compiled regular expression used to detect the continuation + of inputs. Although the IPython configuration setting may have a + trailing whitespace, do not include it in the regex. If `None`, + then the default input prompt is assumed. + out_regex : RegexObject + The compiled regular expression used to detect outputs. If `None`, + then the default output prompt is assumed. + + """ + self.python3 = get_bool_opt(options, 'python3', False) + if self.python3: + self.aliases = ['ipython3console'] + else: + self.aliases = ['ipython2console', 'ipythonconsole'] + + in1_regex = options.get('in1_regex', self.in1_regex) + in2_regex = options.get('in2_regex', self.in2_regex) + out_regex = options.get('out_regex', self.out_regex) + + # So that we can work with input and output prompts which have been + # rstrip'd (possibly by editors) we also need rstrip'd variants. If + # we do not do this, then such prompts will be tagged as 'output'. + # The reason can't just use the rstrip'd variants instead is because + # we want any whitespace associated with the prompt to be inserted + # with the token. This allows formatted code to be modified so as hide + # the appearance of prompts, with the whitespace included. One example + # use of this is in copybutton.js from the standard lib Python docs. + in1_regex_rstrip = in1_regex.rstrip() + '\n' + in2_regex_rstrip = in2_regex.rstrip() + '\n' + out_regex_rstrip = out_regex.rstrip() + '\n' + + # Compile and save them all. 
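A rough illustration of why the rstrip'd prompt variants built just above are needed, assuming the default Out prompt (a standalone snippet, not part of the module itself)::

    import re

    out_regex        = re.compile(r'Out\[[0-9]+\]: ')
    out_regex_rstrip = re.compile(r'Out\[[0-9]+\]: '.rstrip() + '\n')

    line = "Out[2]:\n"  # trailing space removed, e.g. by an editor
    print(bool(out_regex.match(line)))         # False
    print(bool(out_regex_rstrip.match(line)))  # True

Without the rstrip'd variant such a prompt would fall through and be tagged as output.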
+ attrs = ['in1_regex', 'in2_regex', 'out_regex', + 'in1_regex_rstrip', 'in2_regex_rstrip', 'out_regex_rstrip'] + for attr in attrs: + self.__setattr__(attr, re.compile(locals()[attr])) + + Lexer.__init__(self, **options) + + if self.python3: + pylexer = IPython3Lexer + tblexer = IPythonTracebackLexer + else: + pylexer = IPythonLexer + tblexer = IPythonTracebackLexer + + self.pylexer = pylexer(**options) + self.tblexer = tblexer(**options) + + self.reset() + + def reset(self): + self.mode = 'output' + self.index = 0 + self.buffer = u'' + self.insertions = [] + + def buffered_tokens(self): + """ + Generator of unprocessed tokens after doing insertions and before + changing to a new state. + + """ + if self.mode == 'output': + tokens = [(0, Generic.Output, self.buffer)] + elif self.mode == 'input': + tokens = self.pylexer.get_tokens_unprocessed(self.buffer) + else: # traceback + tokens = self.tblexer.get_tokens_unprocessed(self.buffer) + + for i, t, v in do_insertions(self.insertions, tokens): + # All token indexes are relative to the buffer. + yield self.index + i, t, v + + # Clear it all + self.index += len(self.buffer) + self.buffer = u'' + self.insertions = [] + + def get_mci(self, line): + """ + Parses the line and returns a 3-tuple: (mode, code, insertion). + + `mode` is the next mode (or state) of the lexer, and is always equal + to 'input', 'output', or 'tb'. + + `code` is a portion of the line that should be added to the buffer + corresponding to the next mode and eventually lexed by another lexer. + For example, `code` could be Python code if `mode` were 'input'. + + `insertion` is a 3-tuple (index, token, text) representing an + unprocessed "token" that will be inserted into the stream of tokens + that are created from the buffer once we change modes. This is usually + the input or output prompt. + + In general, the next mode depends on current mode and on the contents + of `line`. + + """ + # To reduce the number of regex match checks, we have multiple + # 'if' blocks instead of 'if-elif' blocks. + + # Check for possible end of input + in2_match = self.in2_regex.match(line) + in2_match_rstrip = self.in2_regex_rstrip.match(line) + if (in2_match and in2_match.group().rstrip() == line.rstrip()) or \ + in2_match_rstrip: + end_input = True + else: + end_input = False + if end_input and self.mode != 'tb': + # Only look for an end of input when not in tb mode. + # An ellipsis could appear within the traceback. + mode = 'output' + code = u'' + insertion = (0, Generic.Prompt, line) + return mode, code, insertion + + # Check for output prompt + out_match = self.out_regex.match(line) + out_match_rstrip = self.out_regex_rstrip.match(line) + if out_match or out_match_rstrip: + mode = 'output' + if out_match: + idx = out_match.end() + else: + idx = out_match_rstrip.end() + code = line[idx:] + # Use the 'heading' token for output. We cannot use Generic.Error + # since it would conflict with exceptions. + insertion = (0, Generic.Heading, line[:idx]) + return mode, code, insertion + + + # Check for input or continuation prompt (non stripped version) + in1_match = self.in1_regex.match(line) + if in1_match or (in2_match and self.mode != 'tb'): + # New input or when not in tb, continued input. + # We do not check for continued input when in tb since it is + # allowable to replace a long stack with an ellipsis. 
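As a quick sketch of what the prompt/mode handling around here yields for a tiny session (the token names follow the insertions described above; the exact tokenization of the code portion comes from the underlying Python lexer)::

    from IPython.lib.lexers import IPythonConsoleLexer

    lexer = IPythonConsoleLexer(python3=True)
    for token_type, value in lexer.get_tokens("In [1]: 1 + 1\nOut[1]: 2\n"):
        print(token_type, repr(value))
    # Roughly: Generic.Prompt for 'In [1]: ', Python tokens for '1 + 1',
    # Generic.Heading for 'Out[1]: ', Generic.Output for the '2' line.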
+ mode = 'input' + if in1_match: + idx = in1_match.end() + else: # in2_match + idx = in2_match.end() + code = line[idx:] + insertion = (0, Generic.Prompt, line[:idx]) + return mode, code, insertion + + # Check for input or continuation prompt (stripped version) + in1_match_rstrip = self.in1_regex_rstrip.match(line) + if in1_match_rstrip or (in2_match_rstrip and self.mode != 'tb'): + # New input or when not in tb, continued input. + # We do not check for continued input when in tb since it is + # allowable to replace a long stack with an ellipsis. + mode = 'input' + if in1_match_rstrip: + idx = in1_match_rstrip.end() + else: # in2_match + idx = in2_match_rstrip.end() + code = line[idx:] + insertion = (0, Generic.Prompt, line[:idx]) + return mode, code, insertion + + # Check for traceback + if self.ipytb_start.match(line): + mode = 'tb' + code = line + insertion = None + return mode, code, insertion + + # All other stuff... + if self.mode in ('input', 'output'): + # We assume all other text is output. Multiline input that + # does not use the continuation marker cannot be detected. + # For example, the 3 in the following is clearly output: + # + # In [1]: print 3 + # 3 + # + # But the following second line is part of the input: + # + # In [2]: while True: + # print True + # + # In both cases, the 2nd line will be 'output'. + # + mode = 'output' + else: + mode = 'tb' + + code = line + insertion = None + + return mode, code, insertion + + def get_tokens_unprocessed(self, text): + self.reset() + for match in line_re.finditer(text): + line = match.group() + mode, code, insertion = self.get_mci(line) + + if mode != self.mode: + # Yield buffered tokens before transitioning to new mode. + for token in self.buffered_tokens(): + yield token + self.mode = mode + + if insertion: + self.insertions.append((len(self.buffer), [insertion])) + self.buffer += code + + for token in self.buffered_tokens(): + yield token + +class IPyLexer(Lexer): + r""" + Primary lexer for all IPython-like code. + + This is a simple helper lexer. If the first line of the text begins with + "In \[[0-9]+\]:", then the entire text is parsed with an IPython console + lexer. If not, then the entire text is parsed with an IPython lexer. + + The goal is to reduce the number of lexers that are registered + with Pygments. + + """ + name = 'IPy session' + aliases = ['ipy'] + + def __init__(self, **options): + self.python3 = get_bool_opt(options, 'python3', False) + if self.python3: + self.aliases = ['ipy3'] + else: + self.aliases = ['ipy2', 'ipy'] + + Lexer.__init__(self, **options) + + self.IPythonLexer = IPythonLexer(**options) + self.IPythonConsoleLexer = IPythonConsoleLexer(**options) + + def get_tokens_unprocessed(self, text): + # Search for the input prompt anywhere...this allows code blocks to + # begin with comments as well. + if re.match(r'.*(In \[[0-9]+\]:)', text.strip(), re.DOTALL): + lex = self.IPythonConsoleLexer + else: + lex = self.IPythonLexer + for token in lex.get_tokens_unprocessed(text): + yield token + diff --git a/contrib/python/ipython/py3/IPython/lib/pretty.py b/contrib/python/ipython/py3/IPython/lib/pretty.py index c996619df50..1cb46b1413d 100644 --- a/contrib/python/ipython/py3/IPython/lib/pretty.py +++ b/contrib/python/ipython/py3/IPython/lib/pretty.py @@ -1,873 +1,873 @@ -# -*- coding: utf-8 -*- -""" -Python advanced pretty printer. This pretty printer is intended to -replace the old `pprint` python module which does not allow developers -to provide their own pretty print callbacks. 
- -This module is based on ruby's `prettyprint.rb` library by `Tanaka Akira`. - - -Example Usage -------------- - -To directly print the representation of an object use `pprint`:: - - from pretty import pprint - pprint(complex_object) - -To get a string of the output use `pretty`:: - - from pretty import pretty - string = pretty(complex_object) - - -Extending ---------- - -The pretty library allows developers to add pretty printing rules for their -own objects. This process is straightforward. All you have to do is to -add a `_repr_pretty_` method to your object and call the methods on the -pretty printer passed:: - - class MyObject(object): - - def _repr_pretty_(self, p, cycle): - ... - -Here is an example implementation of a `_repr_pretty_` method for a list -subclass:: - - class MyList(list): - - def _repr_pretty_(self, p, cycle): - if cycle: - p.text('MyList(...)') - else: - with p.group(8, 'MyList([', '])'): - for idx, item in enumerate(self): - if idx: - p.text(',') - p.breakable() - p.pretty(item) - -The `cycle` parameter is `True` if pretty detected a cycle. You *have* to -react to that or the result is an infinite loop. `p.text()` just adds -non breaking text to the output, `p.breakable()` either adds a whitespace -or breaks here. If you pass it an argument it's used instead of the -default space. `p.pretty` prettyprints another object using the pretty print -method. - -The first parameter to the `group` function specifies the extra indentation -of the next line. In this example the next item will either be on the same -line (if the items are short enough) or aligned with the right edge of the -opening bracket of `MyList`. - -If you just want to indent something you can use the group function -without open / close parameters. You can also use this code:: - - with p.indent(2): - ... - -Inheritance diagram: - -.. inheritance-diagram:: IPython.lib.pretty - :parts: 3 - -:copyright: 2007 by Armin Ronacher. - Portions (c) 2009 by Robert Kern. -:license: BSD License. -""" - -from contextlib import contextmanager -import datetime -import os -import re -import sys -import types -from collections import deque -from inspect import signature -from io import StringIO -from warnings import warn - -from IPython.utils.decorators import undoc -from IPython.utils.py3compat import PYPY - -__all__ = ['pretty', 'pprint', 'PrettyPrinter', 'RepresentationPrinter', - 'for_type', 'for_type_by_name'] - - -MAX_SEQ_LENGTH = 1000 -_re_pattern_type = type(re.compile('')) - -def _safe_getattr(obj, attr, default=None): - """Safe version of getattr. - - Same as getattr, but will return ``default`` on any Exception, - rather than raising. - """ - try: - return getattr(obj, attr, default) - except Exception: - return default - -@undoc -class CUnicodeIO(StringIO): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - warn(("CUnicodeIO is deprecated since IPython 6.0. " - "Please use io.StringIO instead."), - DeprecationWarning, stacklevel=2) - -def _sorted_for_pprint(items): - """ - Sort the given items for pretty printing. Since some predictable - sorting is better than no sorting at all, we sort on the string - representation if normal sorting fails. - """ - items = list(items) - try: - return sorted(items) - except Exception: - try: - return sorted(items, key=str) - except Exception: - return items - -def pretty(obj, verbose=False, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH): - """ - Pretty print the object's representation. 
- """ - stream = StringIO() - printer = RepresentationPrinter(stream, verbose, max_width, newline, max_seq_length=max_seq_length) - printer.pretty(obj) - printer.flush() - return stream.getvalue() - - -def pprint(obj, verbose=False, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH): - """ - Like `pretty` but print to stdout. - """ - printer = RepresentationPrinter(sys.stdout, verbose, max_width, newline, max_seq_length=max_seq_length) - printer.pretty(obj) - printer.flush() - sys.stdout.write(newline) - sys.stdout.flush() - -class _PrettyPrinterBase(object): - - @contextmanager - def indent(self, indent): - """with statement support for indenting/dedenting.""" - self.indentation += indent - try: - yield - finally: - self.indentation -= indent - - @contextmanager - def group(self, indent=0, open='', close=''): - """like begin_group / end_group but for the with statement.""" - self.begin_group(indent, open) - try: - yield - finally: - self.end_group(indent, close) - -class PrettyPrinter(_PrettyPrinterBase): - """ - Baseclass for the `RepresentationPrinter` prettyprinter that is used to - generate pretty reprs of objects. Contrary to the `RepresentationPrinter` - this printer knows nothing about the default pprinters or the `_repr_pretty_` - callback method. - """ - - def __init__(self, output, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH): - self.output = output - self.max_width = max_width - self.newline = newline - self.max_seq_length = max_seq_length - self.output_width = 0 - self.buffer_width = 0 - self.buffer = deque() - - root_group = Group(0) - self.group_stack = [root_group] - self.group_queue = GroupQueue(root_group) - self.indentation = 0 - - def _break_one_group(self, group): - while group.breakables: - x = self.buffer.popleft() - self.output_width = x.output(self.output, self.output_width) - self.buffer_width -= x.width - while self.buffer and isinstance(self.buffer[0], Text): - x = self.buffer.popleft() - self.output_width = x.output(self.output, self.output_width) - self.buffer_width -= x.width - - def _break_outer_groups(self): - while self.max_width < self.output_width + self.buffer_width: - group = self.group_queue.deq() - if not group: - return - self._break_one_group(group) - - def text(self, obj): - """Add literal text to the output.""" - width = len(obj) - if self.buffer: - text = self.buffer[-1] - if not isinstance(text, Text): - text = Text() - self.buffer.append(text) - text.add(obj, width) - self.buffer_width += width - self._break_outer_groups() - else: - self.output.write(obj) - self.output_width += width - - def breakable(self, sep=' '): - """ - Add a breakable separator to the output. This does not mean that it - will automatically break here. If no breaking on this position takes - place the `sep` is inserted which default to one space. - """ - width = len(sep) - group = self.group_stack[-1] - if group.want_break: - self.flush() - self.output.write(self.newline) - self.output.write(' ' * self.indentation) - self.output_width = self.indentation - self.buffer_width = 0 - else: - self.buffer.append(Breakable(sep, width, self)) - self.buffer_width += width - self._break_outer_groups() - - def break_(self): - """ - Explicitly insert a newline into the output, maintaining correct indentation. 
- """ - group = self.group_queue.deq() - if group: - self._break_one_group(group) - self.flush() - self.output.write(self.newline) - self.output.write(' ' * self.indentation) - self.output_width = self.indentation - self.buffer_width = 0 - - - def begin_group(self, indent=0, open=''): - """ - Begin a group. - The first parameter specifies the indentation for the next line (usually - the width of the opening text), the second the opening text. All - parameters are optional. - """ - if open: - self.text(open) - group = Group(self.group_stack[-1].depth + 1) - self.group_stack.append(group) - self.group_queue.enq(group) - self.indentation += indent - - def _enumerate(self, seq): - """like enumerate, but with an upper limit on the number of items""" - for idx, x in enumerate(seq): - if self.max_seq_length and idx >= self.max_seq_length: - self.text(',') - self.breakable() - self.text('...') - return - yield idx, x - - def end_group(self, dedent=0, close=''): - """End a group. See `begin_group` for more details.""" - self.indentation -= dedent - group = self.group_stack.pop() - if not group.breakables: - self.group_queue.remove(group) - if close: - self.text(close) - - def flush(self): - """Flush data that is left in the buffer.""" - for data in self.buffer: - self.output_width += data.output(self.output, self.output_width) - self.buffer.clear() - self.buffer_width = 0 - - -def _get_mro(obj_class): - """ Get a reasonable method resolution order of a class and its superclasses - for both old-style and new-style classes. - """ - if not hasattr(obj_class, '__mro__'): - # Old-style class. Mix in object to make a fake new-style class. - try: - obj_class = type(obj_class.__name__, (obj_class, object), {}) - except TypeError: - # Old-style extension type that does not descend from object. - # FIXME: try to construct a more thorough MRO. - mro = [obj_class] - else: - mro = obj_class.__mro__[1:-1] - else: - mro = obj_class.__mro__ - return mro - - -class RepresentationPrinter(PrettyPrinter): - """ - Special pretty printer that has a `pretty` method that calls the pretty - printer for a python object. - - This class stores processing data on `self` so you must *never* use - this class in a threaded environment. Always lock it or reinstanciate - it. - - Instances also have a verbose flag callbacks can access to control their - output. For example the default instance repr prints all attributes and - methods that are not prefixed by an underscore if the printer is in - verbose mode. - """ - - def __init__(self, output, verbose=False, max_width=79, newline='\n', - singleton_pprinters=None, type_pprinters=None, deferred_pprinters=None, - max_seq_length=MAX_SEQ_LENGTH): - - PrettyPrinter.__init__(self, output, max_width, newline, max_seq_length=max_seq_length) - self.verbose = verbose - self.stack = [] - if singleton_pprinters is None: - singleton_pprinters = _singleton_pprinters.copy() - self.singleton_pprinters = singleton_pprinters - if type_pprinters is None: - type_pprinters = _type_pprinters.copy() - self.type_pprinters = type_pprinters - if deferred_pprinters is None: - deferred_pprinters = _deferred_type_pprinters.copy() - self.deferred_pprinters = deferred_pprinters - - def pretty(self, obj): - """Pretty print the given object.""" - obj_id = id(obj) - cycle = obj_id in self.stack - self.stack.append(obj_id) - self.begin_group() - try: - obj_class = _safe_getattr(obj, '__class__', None) or type(obj) - # First try to find registered singleton printers for the type. 
- try: - printer = self.singleton_pprinters[obj_id] - except (TypeError, KeyError): - pass - else: - return printer(obj, self, cycle) - # Next walk the mro and check for either: - # 1) a registered printer - # 2) a _repr_pretty_ method - for cls in _get_mro(obj_class): - if cls in self.type_pprinters: - # printer registered in self.type_pprinters - return self.type_pprinters[cls](obj, self, cycle) - else: - # deferred printer - printer = self._in_deferred_types(cls) - if printer is not None: - return printer(obj, self, cycle) - else: - # Finally look for special method names. - # Some objects automatically create any requested - # attribute. Try to ignore most of them by checking for - # callability. - if '_repr_pretty_' in cls.__dict__: - meth = cls._repr_pretty_ - if callable(meth): - return meth(obj, self, cycle) - if cls is not object \ - and callable(cls.__dict__.get('__repr__')): - return _repr_pprint(obj, self, cycle) - - return _default_pprint(obj, self, cycle) - finally: - self.end_group() - self.stack.pop() - - def _in_deferred_types(self, cls): - """ - Check if the given class is specified in the deferred type registry. - - Returns the printer from the registry if it exists, and None if the - class is not in the registry. Successful matches will be moved to the - regular type registry for future use. - """ - mod = _safe_getattr(cls, '__module__', None) - name = _safe_getattr(cls, '__name__', None) - key = (mod, name) - printer = None - if key in self.deferred_pprinters: - # Move the printer over to the regular registry. - printer = self.deferred_pprinters.pop(key) - self.type_pprinters[cls] = printer - return printer - - -class Printable(object): - - def output(self, stream, output_width): - return output_width - - -class Text(Printable): - - def __init__(self): - self.objs = [] - self.width = 0 - - def output(self, stream, output_width): - for obj in self.objs: - stream.write(obj) - return output_width + self.width - - def add(self, obj, width): - self.objs.append(obj) - self.width += width - - -class Breakable(Printable): - - def __init__(self, seq, width, pretty): - self.obj = seq - self.width = width - self.pretty = pretty - self.indentation = pretty.indentation - self.group = pretty.group_stack[-1] - self.group.breakables.append(self) - - def output(self, stream, output_width): - self.group.breakables.popleft() - if self.group.want_break: - stream.write(self.pretty.newline) - stream.write(' ' * self.indentation) - return self.indentation - if not self.group.breakables: - self.pretty.group_queue.remove(self.group) - stream.write(self.obj) - return output_width + self.width - - -class Group(Printable): - - def __init__(self, depth): - self.depth = depth - self.breakables = deque() - self.want_break = False - - -class GroupQueue(object): - - def __init__(self, *groups): - self.queue = [] - for group in groups: - self.enq(group) - - def enq(self, group): - depth = group.depth - while depth > len(self.queue) - 1: - self.queue.append([]) - self.queue[depth].append(group) - - def deq(self): - for stack in self.queue: - for idx, group in enumerate(reversed(stack)): - if group.breakables: - del stack[idx] - group.want_break = True - return group - for group in stack: - group.want_break = True - del stack[:] - - def remove(self, group): - try: - self.queue[group.depth].remove(group) - except ValueError: - pass - - -def _default_pprint(obj, p, cycle): - """ - The default print function. Used if an object does not provide one and - it's none of the builtin objects. 
- """ - klass = _safe_getattr(obj, '__class__', None) or type(obj) - if _safe_getattr(klass, '__repr__', None) is not object.__repr__: - # A user-provided repr. Find newlines and replace them with p.break_() - _repr_pprint(obj, p, cycle) - return - p.begin_group(1, '<') - p.pretty(klass) - p.text(' at 0x%x' % id(obj)) - if cycle: - p.text(' ...') - elif p.verbose: - first = True - for key in dir(obj): - if not key.startswith('_'): - try: - value = getattr(obj, key) - except AttributeError: - continue - if isinstance(value, types.MethodType): - continue - if not first: - p.text(',') - p.breakable() - p.text(key) - p.text('=') - step = len(key) + 1 - p.indentation += step - p.pretty(value) - p.indentation -= step - first = False - p.end_group(1, '>') - - -def _seq_pprinter_factory(start, end): - """ - Factory that returns a pprint function useful for sequences. Used by - the default pprint for tuples, dicts, and lists. - """ - def inner(obj, p, cycle): - if cycle: - return p.text(start + '...' + end) - step = len(start) - p.begin_group(step, start) - for idx, x in p._enumerate(obj): - if idx: - p.text(',') - p.breakable() - p.pretty(x) - if len(obj) == 1 and type(obj) is tuple: - # Special case for 1-item tuples. - p.text(',') - p.end_group(step, end) - return inner - - -def _set_pprinter_factory(start, end): - """ - Factory that returns a pprint function useful for sets and frozensets. - """ - def inner(obj, p, cycle): - if cycle: - return p.text(start + '...' + end) - if len(obj) == 0: - # Special case. - p.text(type(obj).__name__ + '()') - else: - step = len(start) - p.begin_group(step, start) - # Like dictionary keys, we will try to sort the items if there aren't too many - if not (p.max_seq_length and len(obj) >= p.max_seq_length): - items = _sorted_for_pprint(obj) - else: - items = obj - for idx, x in p._enumerate(items): - if idx: - p.text(',') - p.breakable() - p.pretty(x) - p.end_group(step, end) - return inner - - -def _dict_pprinter_factory(start, end): - """ - Factory that returns a pprint function used by the default pprint of - dicts and dict proxies. - """ - def inner(obj, p, cycle): - if cycle: - return p.text('{...}') - step = len(start) - p.begin_group(step, start) - keys = obj.keys() - for idx, key in p._enumerate(keys): - if idx: - p.text(',') - p.breakable() - p.pretty(key) - p.text(': ') - p.pretty(obj[key]) - p.end_group(step, end) - return inner - - -def _super_pprint(obj, p, cycle): - """The pprint for the super type.""" - p.begin_group(8, '<super: ') - p.pretty(obj.__thisclass__) - p.text(',') - p.breakable() - if PYPY: # In PyPy, super() objects don't have __self__ attributes - dself = obj.__repr__.__self__ - p.pretty(None if dself is obj else dself) - else: - p.pretty(obj.__self__) - p.end_group(8, '>') - - -def _re_pattern_pprint(obj, p, cycle): - """The pprint function for regular expression patterns.""" - p.text('re.compile(') - pattern = repr(obj.pattern) - if pattern[:1] in 'uU': - pattern = pattern[1:] - prefix = 'ur' - else: - prefix = 'r' - pattern = prefix + pattern.replace('\\\\', '\\') - p.text(pattern) - if obj.flags: - p.text(',') - p.breakable() - done_one = False - for flag in ('TEMPLATE', 'IGNORECASE', 'LOCALE', 'MULTILINE', 'DOTALL', - 'UNICODE', 'VERBOSE', 'DEBUG'): - if obj.flags & getattr(re, flag): - if done_one: - p.text('|') - p.text('re.' 
+ flag) - done_one = True - p.text(')') - - -def _types_simplenamespace_pprint(obj, p, cycle): - """The pprint function for types.SimpleNamespace.""" - name = 'namespace' - with p.group(len(name) + 1, name + '(', ')'): - if cycle: - p.text('...') - else: - for idx, (attr, value) in enumerate(obj.__dict__.items()): - if idx: - p.text(',') - p.breakable() - attr_kwarg = '{}='.format(attr) - with p.group(len(attr_kwarg), attr_kwarg): - p.pretty(value) - - -def _type_pprint(obj, p, cycle): - """The pprint for classes and types.""" - # Heap allocated types might not have the module attribute, - # and others may set it to None. - - # Checks for a __repr__ override in the metaclass. Can't compare the - # type(obj).__repr__ directly because in PyPy the representation function - # inherited from type isn't the same type.__repr__ - if [m for m in _get_mro(type(obj)) if "__repr__" in vars(m)][:1] != [type]: - _repr_pprint(obj, p, cycle) - return - - mod = _safe_getattr(obj, '__module__', None) - try: - name = obj.__qualname__ - if not isinstance(name, str): - # This can happen if the type implements __qualname__ as a property - # or other descriptor in Python 2. - raise Exception("Try __name__") - except Exception: - name = obj.__name__ - if not isinstance(name, str): - name = '<unknown type>' - - if mod in (None, '__builtin__', 'builtins', 'exceptions'): - p.text(name) - else: - p.text(mod + '.' + name) - - -def _repr_pprint(obj, p, cycle): - """A pprint that just redirects to the normal repr function.""" - # Find newlines and replace them with p.break_() - output = repr(obj) - lines = output.splitlines() - with p.group(): - for idx, output_line in enumerate(lines): - if idx: - p.break_() - p.text(output_line) - - -def _function_pprint(obj, p, cycle): - """Base pprint for all functions and builtin functions.""" - name = _safe_getattr(obj, '__qualname__', obj.__name__) - mod = obj.__module__ - if mod and mod not in ('__builtin__', 'builtins', 'exceptions'): - name = mod + '.' + name - try: - func_def = name + str(signature(obj)) - except ValueError: - func_def = name - p.text('<function %s>' % func_def) - - -def _exception_pprint(obj, p, cycle): - """Base pprint for all exceptions.""" - name = getattr(obj.__class__, '__qualname__', obj.__class__.__name__) - if obj.__class__.__module__ not in ('exceptions', 'builtins'): - name = '%s.%s' % (obj.__class__.__module__, name) - step = len(name) + 1 - p.begin_group(step, name + '(') - for idx, arg in enumerate(getattr(obj, 'args', ())): - if idx: - p.text(',') - p.breakable() - p.pretty(arg) - p.end_group(step, ')') - - -#: the exception base -try: - _exception_base = BaseException -except NameError: - _exception_base = Exception - - -#: printers for builtin types -_type_pprinters = { - int: _repr_pprint, - float: _repr_pprint, - str: _repr_pprint, - tuple: _seq_pprinter_factory('(', ')'), - list: _seq_pprinter_factory('[', ']'), - dict: _dict_pprinter_factory('{', '}'), - set: _set_pprinter_factory('{', '}'), - frozenset: _set_pprinter_factory('frozenset({', '})'), - super: _super_pprint, - _re_pattern_type: _re_pattern_pprint, - type: _type_pprint, - types.FunctionType: _function_pprint, - types.BuiltinFunctionType: _function_pprint, - types.MethodType: _repr_pprint, - types.SimpleNamespace: _types_simplenamespace_pprint, - datetime.datetime: _repr_pprint, - datetime.timedelta: _repr_pprint, - _exception_base: _exception_pprint -} - -# render os.environ like a dict -_env_type = type(os.environ) -# future-proof in case os.environ becomes a plain dict? 
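Beyond the built-in registry above, printers can also be registered at runtime with the for_type helper defined a little further down; a minimal sketch (Point and _point_pprint are made-up names used only for illustration)::

    from IPython.lib.pretty import pretty, for_type

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

    def _point_pprint(obj, p, cycle):
        # Printer callbacks receive the object, the printer and a cycle flag.
        p.text('Point(%r, %r)' % (obj.x, obj.y))

    for_type(Point, _point_pprint)
    print(pretty(Point(1, 2)))   # -> Point(1, 2)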
-if _env_type is not dict: - _type_pprinters[_env_type] = _dict_pprinter_factory('environ{', '}') - -try: - # In PyPy, types.DictProxyType is dict, setting the dictproxy printer - # using dict.setdefault avoids overwriting the dict printer - _type_pprinters.setdefault(types.DictProxyType, - _dict_pprinter_factory('dict_proxy({', '})')) - _type_pprinters[types.ClassType] = _type_pprint - _type_pprinters[types.SliceType] = _repr_pprint -except AttributeError: # Python 3 - _type_pprinters[types.MappingProxyType] = \ - _dict_pprinter_factory('mappingproxy({', '})') - _type_pprinters[slice] = _repr_pprint - -_type_pprinters[range] = _repr_pprint -_type_pprinters[bytes] = _repr_pprint - -#: printers for types specified by name -_deferred_type_pprinters = { -} - -def for_type(typ, func): - """ - Add a pretty printer for a given type. - """ - oldfunc = _type_pprinters.get(typ, None) - if func is not None: - # To support easy restoration of old pprinters, we need to ignore Nones. - _type_pprinters[typ] = func - return oldfunc - -def for_type_by_name(type_module, type_name, func): - """ - Add a pretty printer for a type specified by the module and name of a type - rather than the type object itself. - """ - key = (type_module, type_name) - oldfunc = _deferred_type_pprinters.get(key, None) - if func is not None: - # To support easy restoration of old pprinters, we need to ignore Nones. - _deferred_type_pprinters[key] = func - return oldfunc - - -#: printers for the default singletons -_singleton_pprinters = dict.fromkeys(map(id, [None, True, False, Ellipsis, - NotImplemented]), _repr_pprint) - - -def _defaultdict_pprint(obj, p, cycle): - name = obj.__class__.__name__ - with p.group(len(name) + 1, name + '(', ')'): - if cycle: - p.text('...') - else: - p.pretty(obj.default_factory) - p.text(',') - p.breakable() - p.pretty(dict(obj)) - -def _ordereddict_pprint(obj, p, cycle): - name = obj.__class__.__name__ - with p.group(len(name) + 1, name + '(', ')'): - if cycle: - p.text('...') - elif len(obj): - p.pretty(list(obj.items())) - -def _deque_pprint(obj, p, cycle): - name = obj.__class__.__name__ - with p.group(len(name) + 1, name + '(', ')'): - if cycle: - p.text('...') - else: - p.pretty(list(obj)) - - -def _counter_pprint(obj, p, cycle): - name = obj.__class__.__name__ - with p.group(len(name) + 1, name + '(', ')'): - if cycle: - p.text('...') - elif len(obj): - p.pretty(dict(obj)) - -for_type_by_name('collections', 'defaultdict', _defaultdict_pprint) -for_type_by_name('collections', 'OrderedDict', _ordereddict_pprint) -for_type_by_name('collections', 'deque', _deque_pprint) -for_type_by_name('collections', 'Counter', _counter_pprint) - -if __name__ == '__main__': - from random import randrange - class Foo(object): - def __init__(self): - self.foo = 1 - self.bar = re.compile(r'\s+') - self.blub = dict.fromkeys(range(30), randrange(1, 40)) - self.hehe = 23424.234234 - self.list = ["blub", "blah", self] - - def get_foo(self): - print("foo") - - pprint(Foo(), verbose=True) +# -*- coding: utf-8 -*- +""" +Python advanced pretty printer. This pretty printer is intended to +replace the old `pprint` python module which does not allow developers +to provide their own pretty print callbacks. + +This module is based on ruby's `prettyprint.rb` library by `Tanaka Akira`. 
+ + +Example Usage +------------- + +To directly print the representation of an object use `pprint`:: + + from pretty import pprint + pprint(complex_object) + +To get a string of the output use `pretty`:: + + from pretty import pretty + string = pretty(complex_object) + + +Extending +--------- + +The pretty library allows developers to add pretty printing rules for their +own objects. This process is straightforward. All you have to do is to +add a `_repr_pretty_` method to your object and call the methods on the +pretty printer passed:: + + class MyObject(object): + + def _repr_pretty_(self, p, cycle): + ... + +Here is an example implementation of a `_repr_pretty_` method for a list +subclass:: + + class MyList(list): + + def _repr_pretty_(self, p, cycle): + if cycle: + p.text('MyList(...)') + else: + with p.group(8, 'MyList([', '])'): + for idx, item in enumerate(self): + if idx: + p.text(',') + p.breakable() + p.pretty(item) + +The `cycle` parameter is `True` if pretty detected a cycle. You *have* to +react to that or the result is an infinite loop. `p.text()` just adds +non breaking text to the output, `p.breakable()` either adds a whitespace +or breaks here. If you pass it an argument it's used instead of the +default space. `p.pretty` prettyprints another object using the pretty print +method. + +The first parameter to the `group` function specifies the extra indentation +of the next line. In this example the next item will either be on the same +line (if the items are short enough) or aligned with the right edge of the +opening bracket of `MyList`. + +If you just want to indent something you can use the group function +without open / close parameters. You can also use this code:: + + with p.indent(2): + ... + +Inheritance diagram: + +.. inheritance-diagram:: IPython.lib.pretty + :parts: 3 + +:copyright: 2007 by Armin Ronacher. + Portions (c) 2009 by Robert Kern. +:license: BSD License. +""" + +from contextlib import contextmanager +import datetime +import os +import re +import sys +import types +from collections import deque +from inspect import signature +from io import StringIO +from warnings import warn + +from IPython.utils.decorators import undoc +from IPython.utils.py3compat import PYPY + +__all__ = ['pretty', 'pprint', 'PrettyPrinter', 'RepresentationPrinter', + 'for_type', 'for_type_by_name'] + + +MAX_SEQ_LENGTH = 1000 +_re_pattern_type = type(re.compile('')) + +def _safe_getattr(obj, attr, default=None): + """Safe version of getattr. + + Same as getattr, but will return ``default`` on any Exception, + rather than raising. + """ + try: + return getattr(obj, attr, default) + except Exception: + return default + +@undoc +class CUnicodeIO(StringIO): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + warn(("CUnicodeIO is deprecated since IPython 6.0. " + "Please use io.StringIO instead."), + DeprecationWarning, stacklevel=2) + +def _sorted_for_pprint(items): + """ + Sort the given items for pretty printing. Since some predictable + sorting is better than no sorting at all, we sort on the string + representation if normal sorting fails. + """ + items = list(items) + try: + return sorted(items) + except Exception: + try: + return sorted(items, key=str) + except Exception: + return items + +def pretty(obj, verbose=False, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH): + """ + Pretty print the object's representation. 
+ """ + stream = StringIO() + printer = RepresentationPrinter(stream, verbose, max_width, newline, max_seq_length=max_seq_length) + printer.pretty(obj) + printer.flush() + return stream.getvalue() + + +def pprint(obj, verbose=False, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH): + """ + Like `pretty` but print to stdout. + """ + printer = RepresentationPrinter(sys.stdout, verbose, max_width, newline, max_seq_length=max_seq_length) + printer.pretty(obj) + printer.flush() + sys.stdout.write(newline) + sys.stdout.flush() + +class _PrettyPrinterBase(object): + + @contextmanager + def indent(self, indent): + """with statement support for indenting/dedenting.""" + self.indentation += indent + try: + yield + finally: + self.indentation -= indent + + @contextmanager + def group(self, indent=0, open='', close=''): + """like begin_group / end_group but for the with statement.""" + self.begin_group(indent, open) + try: + yield + finally: + self.end_group(indent, close) + +class PrettyPrinter(_PrettyPrinterBase): + """ + Baseclass for the `RepresentationPrinter` prettyprinter that is used to + generate pretty reprs of objects. Contrary to the `RepresentationPrinter` + this printer knows nothing about the default pprinters or the `_repr_pretty_` + callback method. + """ + + def __init__(self, output, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH): + self.output = output + self.max_width = max_width + self.newline = newline + self.max_seq_length = max_seq_length + self.output_width = 0 + self.buffer_width = 0 + self.buffer = deque() + + root_group = Group(0) + self.group_stack = [root_group] + self.group_queue = GroupQueue(root_group) + self.indentation = 0 + + def _break_one_group(self, group): + while group.breakables: + x = self.buffer.popleft() + self.output_width = x.output(self.output, self.output_width) + self.buffer_width -= x.width + while self.buffer and isinstance(self.buffer[0], Text): + x = self.buffer.popleft() + self.output_width = x.output(self.output, self.output_width) + self.buffer_width -= x.width + + def _break_outer_groups(self): + while self.max_width < self.output_width + self.buffer_width: + group = self.group_queue.deq() + if not group: + return + self._break_one_group(group) + + def text(self, obj): + """Add literal text to the output.""" + width = len(obj) + if self.buffer: + text = self.buffer[-1] + if not isinstance(text, Text): + text = Text() + self.buffer.append(text) + text.add(obj, width) + self.buffer_width += width + self._break_outer_groups() + else: + self.output.write(obj) + self.output_width += width + + def breakable(self, sep=' '): + """ + Add a breakable separator to the output. This does not mean that it + will automatically break here. If no breaking on this position takes + place the `sep` is inserted which default to one space. + """ + width = len(sep) + group = self.group_stack[-1] + if group.want_break: + self.flush() + self.output.write(self.newline) + self.output.write(' ' * self.indentation) + self.output_width = self.indentation + self.buffer_width = 0 + else: + self.buffer.append(Breakable(sep, width, self)) + self.buffer_width += width + self._break_outer_groups() + + def break_(self): + """ + Explicitly insert a newline into the output, maintaining correct indentation. 
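To see the breakable/group machinery described above in action: separators render as the given sep (a space by default) while the line fits, and turn into a newline plus indentation once max_width is exceeded (a standalone sketch, not from the module itself)::

    from IPython.lib.pretty import pretty

    data = [list(range(10)), list(range(10, 20))]
    print(pretty(data, max_width=30))   # nested lists get wrapped and indented
    print(pretty(data, max_width=200))  # fits on one line, separators stay as spaces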
+ """ + group = self.group_queue.deq() + if group: + self._break_one_group(group) + self.flush() + self.output.write(self.newline) + self.output.write(' ' * self.indentation) + self.output_width = self.indentation + self.buffer_width = 0 + + + def begin_group(self, indent=0, open=''): + """ + Begin a group. + The first parameter specifies the indentation for the next line (usually + the width of the opening text), the second the opening text. All + parameters are optional. + """ + if open: + self.text(open) + group = Group(self.group_stack[-1].depth + 1) + self.group_stack.append(group) + self.group_queue.enq(group) + self.indentation += indent + + def _enumerate(self, seq): + """like enumerate, but with an upper limit on the number of items""" + for idx, x in enumerate(seq): + if self.max_seq_length and idx >= self.max_seq_length: + self.text(',') + self.breakable() + self.text('...') + return + yield idx, x + + def end_group(self, dedent=0, close=''): + """End a group. See `begin_group` for more details.""" + self.indentation -= dedent + group = self.group_stack.pop() + if not group.breakables: + self.group_queue.remove(group) + if close: + self.text(close) + + def flush(self): + """Flush data that is left in the buffer.""" + for data in self.buffer: + self.output_width += data.output(self.output, self.output_width) + self.buffer.clear() + self.buffer_width = 0 + + +def _get_mro(obj_class): + """ Get a reasonable method resolution order of a class and its superclasses + for both old-style and new-style classes. + """ + if not hasattr(obj_class, '__mro__'): + # Old-style class. Mix in object to make a fake new-style class. + try: + obj_class = type(obj_class.__name__, (obj_class, object), {}) + except TypeError: + # Old-style extension type that does not descend from object. + # FIXME: try to construct a more thorough MRO. + mro = [obj_class] + else: + mro = obj_class.__mro__[1:-1] + else: + mro = obj_class.__mro__ + return mro + + +class RepresentationPrinter(PrettyPrinter): + """ + Special pretty printer that has a `pretty` method that calls the pretty + printer for a python object. + + This class stores processing data on `self` so you must *never* use + this class in a threaded environment. Always lock it or reinstanciate + it. + + Instances also have a verbose flag callbacks can access to control their + output. For example the default instance repr prints all attributes and + methods that are not prefixed by an underscore if the printer is in + verbose mode. + """ + + def __init__(self, output, verbose=False, max_width=79, newline='\n', + singleton_pprinters=None, type_pprinters=None, deferred_pprinters=None, + max_seq_length=MAX_SEQ_LENGTH): + + PrettyPrinter.__init__(self, output, max_width, newline, max_seq_length=max_seq_length) + self.verbose = verbose + self.stack = [] + if singleton_pprinters is None: + singleton_pprinters = _singleton_pprinters.copy() + self.singleton_pprinters = singleton_pprinters + if type_pprinters is None: + type_pprinters = _type_pprinters.copy() + self.type_pprinters = type_pprinters + if deferred_pprinters is None: + deferred_pprinters = _deferred_type_pprinters.copy() + self.deferred_pprinters = deferred_pprinters + + def pretty(self, obj): + """Pretty print the given object.""" + obj_id = id(obj) + cycle = obj_id in self.stack + self.stack.append(obj_id) + self.begin_group() + try: + obj_class = _safe_getattr(obj, '__class__', None) or type(obj) + # First try to find registered singleton printers for the type. 
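The lookup order sketched in the comments here (registered printers first, then a _repr_pretty_ method, then a plain __repr__) can be seen with a tiny class that defines both hooks (Box is hypothetical, used only for illustration)::

    from IPython.lib.pretty import pretty

    class Box:
        def __init__(self, value):
            self.value = value
        def __repr__(self):
            return 'Box(...)'               # consulted later in the lookup
        def _repr_pretty_(self, p, cycle):  # consulted first, so it wins
            p.text('Box(')
            p.pretty(self.value)
            p.text(')')

    print(pretty(Box([1, 2])))              # -> Box([1, 2])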
+ try: + printer = self.singleton_pprinters[obj_id] + except (TypeError, KeyError): + pass + else: + return printer(obj, self, cycle) + # Next walk the mro and check for either: + # 1) a registered printer + # 2) a _repr_pretty_ method + for cls in _get_mro(obj_class): + if cls in self.type_pprinters: + # printer registered in self.type_pprinters + return self.type_pprinters[cls](obj, self, cycle) + else: + # deferred printer + printer = self._in_deferred_types(cls) + if printer is not None: + return printer(obj, self, cycle) + else: + # Finally look for special method names. + # Some objects automatically create any requested + # attribute. Try to ignore most of them by checking for + # callability. + if '_repr_pretty_' in cls.__dict__: + meth = cls._repr_pretty_ + if callable(meth): + return meth(obj, self, cycle) + if cls is not object \ + and callable(cls.__dict__.get('__repr__')): + return _repr_pprint(obj, self, cycle) + + return _default_pprint(obj, self, cycle) + finally: + self.end_group() + self.stack.pop() + + def _in_deferred_types(self, cls): + """ + Check if the given class is specified in the deferred type registry. + + Returns the printer from the registry if it exists, and None if the + class is not in the registry. Successful matches will be moved to the + regular type registry for future use. + """ + mod = _safe_getattr(cls, '__module__', None) + name = _safe_getattr(cls, '__name__', None) + key = (mod, name) + printer = None + if key in self.deferred_pprinters: + # Move the printer over to the regular registry. + printer = self.deferred_pprinters.pop(key) + self.type_pprinters[cls] = printer + return printer + + +class Printable(object): + + def output(self, stream, output_width): + return output_width + + +class Text(Printable): + + def __init__(self): + self.objs = [] + self.width = 0 + + def output(self, stream, output_width): + for obj in self.objs: + stream.write(obj) + return output_width + self.width + + def add(self, obj, width): + self.objs.append(obj) + self.width += width + + +class Breakable(Printable): + + def __init__(self, seq, width, pretty): + self.obj = seq + self.width = width + self.pretty = pretty + self.indentation = pretty.indentation + self.group = pretty.group_stack[-1] + self.group.breakables.append(self) + + def output(self, stream, output_width): + self.group.breakables.popleft() + if self.group.want_break: + stream.write(self.pretty.newline) + stream.write(' ' * self.indentation) + return self.indentation + if not self.group.breakables: + self.pretty.group_queue.remove(self.group) + stream.write(self.obj) + return output_width + self.width + + +class Group(Printable): + + def __init__(self, depth): + self.depth = depth + self.breakables = deque() + self.want_break = False + + +class GroupQueue(object): + + def __init__(self, *groups): + self.queue = [] + for group in groups: + self.enq(group) + + def enq(self, group): + depth = group.depth + while depth > len(self.queue) - 1: + self.queue.append([]) + self.queue[depth].append(group) + + def deq(self): + for stack in self.queue: + for idx, group in enumerate(reversed(stack)): + if group.breakables: + del stack[idx] + group.want_break = True + return group + for group in stack: + group.want_break = True + del stack[:] + + def remove(self, group): + try: + self.queue[group.depth].remove(group) + except ValueError: + pass + + +def _default_pprint(obj, p, cycle): + """ + The default print function. Used if an object does not provide one and + it's none of the builtin objects. 
+ """ + klass = _safe_getattr(obj, '__class__', None) or type(obj) + if _safe_getattr(klass, '__repr__', None) is not object.__repr__: + # A user-provided repr. Find newlines and replace them with p.break_() + _repr_pprint(obj, p, cycle) + return + p.begin_group(1, '<') + p.pretty(klass) + p.text(' at 0x%x' % id(obj)) + if cycle: + p.text(' ...') + elif p.verbose: + first = True + for key in dir(obj): + if not key.startswith('_'): + try: + value = getattr(obj, key) + except AttributeError: + continue + if isinstance(value, types.MethodType): + continue + if not first: + p.text(',') + p.breakable() + p.text(key) + p.text('=') + step = len(key) + 1 + p.indentation += step + p.pretty(value) + p.indentation -= step + first = False + p.end_group(1, '>') + + +def _seq_pprinter_factory(start, end): + """ + Factory that returns a pprint function useful for sequences. Used by + the default pprint for tuples, dicts, and lists. + """ + def inner(obj, p, cycle): + if cycle: + return p.text(start + '...' + end) + step = len(start) + p.begin_group(step, start) + for idx, x in p._enumerate(obj): + if idx: + p.text(',') + p.breakable() + p.pretty(x) + if len(obj) == 1 and type(obj) is tuple: + # Special case for 1-item tuples. + p.text(',') + p.end_group(step, end) + return inner + + +def _set_pprinter_factory(start, end): + """ + Factory that returns a pprint function useful for sets and frozensets. + """ + def inner(obj, p, cycle): + if cycle: + return p.text(start + '...' + end) + if len(obj) == 0: + # Special case. + p.text(type(obj).__name__ + '()') + else: + step = len(start) + p.begin_group(step, start) + # Like dictionary keys, we will try to sort the items if there aren't too many + if not (p.max_seq_length and len(obj) >= p.max_seq_length): + items = _sorted_for_pprint(obj) + else: + items = obj + for idx, x in p._enumerate(items): + if idx: + p.text(',') + p.breakable() + p.pretty(x) + p.end_group(step, end) + return inner + + +def _dict_pprinter_factory(start, end): + """ + Factory that returns a pprint function used by the default pprint of + dicts and dict proxies. + """ + def inner(obj, p, cycle): + if cycle: + return p.text('{...}') + step = len(start) + p.begin_group(step, start) + keys = obj.keys() + for idx, key in p._enumerate(keys): + if idx: + p.text(',') + p.breakable() + p.pretty(key) + p.text(': ') + p.pretty(obj[key]) + p.end_group(step, end) + return inner + + +def _super_pprint(obj, p, cycle): + """The pprint for the super type.""" + p.begin_group(8, '<super: ') + p.pretty(obj.__thisclass__) + p.text(',') + p.breakable() + if PYPY: # In PyPy, super() objects don't have __self__ attributes + dself = obj.__repr__.__self__ + p.pretty(None if dself is obj else dself) + else: + p.pretty(obj.__self__) + p.end_group(8, '>') + + +def _re_pattern_pprint(obj, p, cycle): + """The pprint function for regular expression patterns.""" + p.text('re.compile(') + pattern = repr(obj.pattern) + if pattern[:1] in 'uU': + pattern = pattern[1:] + prefix = 'ur' + else: + prefix = 'r' + pattern = prefix + pattern.replace('\\\\', '\\') + p.text(pattern) + if obj.flags: + p.text(',') + p.breakable() + done_one = False + for flag in ('TEMPLATE', 'IGNORECASE', 'LOCALE', 'MULTILINE', 'DOTALL', + 'UNICODE', 'VERBOSE', 'DEBUG'): + if obj.flags & getattr(re, flag): + if done_one: + p.text('|') + p.text('re.' 
+ flag) + done_one = True + p.text(')') + + +def _types_simplenamespace_pprint(obj, p, cycle): + """The pprint function for types.SimpleNamespace.""" + name = 'namespace' + with p.group(len(name) + 1, name + '(', ')'): + if cycle: + p.text('...') + else: + for idx, (attr, value) in enumerate(obj.__dict__.items()): + if idx: + p.text(',') + p.breakable() + attr_kwarg = '{}='.format(attr) + with p.group(len(attr_kwarg), attr_kwarg): + p.pretty(value) + + +def _type_pprint(obj, p, cycle): + """The pprint for classes and types.""" + # Heap allocated types might not have the module attribute, + # and others may set it to None. + + # Checks for a __repr__ override in the metaclass. Can't compare the + # type(obj).__repr__ directly because in PyPy the representation function + # inherited from type isn't the same type.__repr__ + if [m for m in _get_mro(type(obj)) if "__repr__" in vars(m)][:1] != [type]: + _repr_pprint(obj, p, cycle) + return + + mod = _safe_getattr(obj, '__module__', None) + try: + name = obj.__qualname__ + if not isinstance(name, str): + # This can happen if the type implements __qualname__ as a property + # or other descriptor in Python 2. + raise Exception("Try __name__") + except Exception: + name = obj.__name__ + if not isinstance(name, str): + name = '<unknown type>' + + if mod in (None, '__builtin__', 'builtins', 'exceptions'): + p.text(name) + else: + p.text(mod + '.' + name) + + +def _repr_pprint(obj, p, cycle): + """A pprint that just redirects to the normal repr function.""" + # Find newlines and replace them with p.break_() + output = repr(obj) + lines = output.splitlines() + with p.group(): + for idx, output_line in enumerate(lines): + if idx: + p.break_() + p.text(output_line) + + +def _function_pprint(obj, p, cycle): + """Base pprint for all functions and builtin functions.""" + name = _safe_getattr(obj, '__qualname__', obj.__name__) + mod = obj.__module__ + if mod and mod not in ('__builtin__', 'builtins', 'exceptions'): + name = mod + '.' + name + try: + func_def = name + str(signature(obj)) + except ValueError: + func_def = name + p.text('<function %s>' % func_def) + + +def _exception_pprint(obj, p, cycle): + """Base pprint for all exceptions.""" + name = getattr(obj.__class__, '__qualname__', obj.__class__.__name__) + if obj.__class__.__module__ not in ('exceptions', 'builtins'): + name = '%s.%s' % (obj.__class__.__module__, name) + step = len(name) + 1 + p.begin_group(step, name + '(') + for idx, arg in enumerate(getattr(obj, 'args', ())): + if idx: + p.text(',') + p.breakable() + p.pretty(arg) + p.end_group(step, ')') + + +#: the exception base +try: + _exception_base = BaseException +except NameError: + _exception_base = Exception + + +#: printers for builtin types +_type_pprinters = { + int: _repr_pprint, + float: _repr_pprint, + str: _repr_pprint, + tuple: _seq_pprinter_factory('(', ')'), + list: _seq_pprinter_factory('[', ']'), + dict: _dict_pprinter_factory('{', '}'), + set: _set_pprinter_factory('{', '}'), + frozenset: _set_pprinter_factory('frozenset({', '})'), + super: _super_pprint, + _re_pattern_type: _re_pattern_pprint, + type: _type_pprint, + types.FunctionType: _function_pprint, + types.BuiltinFunctionType: _function_pprint, + types.MethodType: _repr_pprint, + types.SimpleNamespace: _types_simplenamespace_pprint, + datetime.datetime: _repr_pprint, + datetime.timedelta: _repr_pprint, + _exception_base: _exception_pprint +} + +# render os.environ like a dict +_env_type = type(os.environ) +# future-proof in case os.environ becomes a plain dict? 
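+# (The check below keeps the plain dict printer if that ever happens.)
+# Beyond this default table, printers can be registered at runtime with
+# for_type() / for_type_by_name() defined below, e.g.
+# for_type(decimal.Decimal, _repr_pprint) would make Decimal instances use
+# their plain repr (a hypothetical example, not part of the defaults).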
+if _env_type is not dict: + _type_pprinters[_env_type] = _dict_pprinter_factory('environ{', '}') + +try: + # In PyPy, types.DictProxyType is dict, setting the dictproxy printer + # using dict.setdefault avoids overwriting the dict printer + _type_pprinters.setdefault(types.DictProxyType, + _dict_pprinter_factory('dict_proxy({', '})')) + _type_pprinters[types.ClassType] = _type_pprint + _type_pprinters[types.SliceType] = _repr_pprint +except AttributeError: # Python 3 + _type_pprinters[types.MappingProxyType] = \ + _dict_pprinter_factory('mappingproxy({', '})') + _type_pprinters[slice] = _repr_pprint + +_type_pprinters[range] = _repr_pprint +_type_pprinters[bytes] = _repr_pprint + +#: printers for types specified by name +_deferred_type_pprinters = { +} + +def for_type(typ, func): + """ + Add a pretty printer for a given type. + """ + oldfunc = _type_pprinters.get(typ, None) + if func is not None: + # To support easy restoration of old pprinters, we need to ignore Nones. + _type_pprinters[typ] = func + return oldfunc + +def for_type_by_name(type_module, type_name, func): + """ + Add a pretty printer for a type specified by the module and name of a type + rather than the type object itself. + """ + key = (type_module, type_name) + oldfunc = _deferred_type_pprinters.get(key, None) + if func is not None: + # To support easy restoration of old pprinters, we need to ignore Nones. + _deferred_type_pprinters[key] = func + return oldfunc + + +#: printers for the default singletons +_singleton_pprinters = dict.fromkeys(map(id, [None, True, False, Ellipsis, + NotImplemented]), _repr_pprint) + + +def _defaultdict_pprint(obj, p, cycle): + name = obj.__class__.__name__ + with p.group(len(name) + 1, name + '(', ')'): + if cycle: + p.text('...') + else: + p.pretty(obj.default_factory) + p.text(',') + p.breakable() + p.pretty(dict(obj)) + +def _ordereddict_pprint(obj, p, cycle): + name = obj.__class__.__name__ + with p.group(len(name) + 1, name + '(', ')'): + if cycle: + p.text('...') + elif len(obj): + p.pretty(list(obj.items())) + +def _deque_pprint(obj, p, cycle): + name = obj.__class__.__name__ + with p.group(len(name) + 1, name + '(', ')'): + if cycle: + p.text('...') + else: + p.pretty(list(obj)) + + +def _counter_pprint(obj, p, cycle): + name = obj.__class__.__name__ + with p.group(len(name) + 1, name + '(', ')'): + if cycle: + p.text('...') + elif len(obj): + p.pretty(dict(obj)) + +for_type_by_name('collections', 'defaultdict', _defaultdict_pprint) +for_type_by_name('collections', 'OrderedDict', _ordereddict_pprint) +for_type_by_name('collections', 'deque', _deque_pprint) +for_type_by_name('collections', 'Counter', _counter_pprint) + +if __name__ == '__main__': + from random import randrange + class Foo(object): + def __init__(self): + self.foo = 1 + self.bar = re.compile(r'\s+') + self.blub = dict.fromkeys(range(30), randrange(1, 40)) + self.hehe = 23424.234234 + self.list = ["blub", "blah", self] + + def get_foo(self): + print("foo") + + pprint(Foo(), verbose=True) diff --git a/contrib/python/ipython/py3/IPython/lib/security.py b/contrib/python/ipython/py3/IPython/lib/security.py index a2db1e170a8..91a2344eab8 100644 --- a/contrib/python/ipython/py3/IPython/lib/security.py +++ b/contrib/python/ipython/py3/IPython/lib/security.py @@ -1,114 +1,114 @@ -""" -Password generation for the IPython notebook. 
-""" -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- -# Stdlib -import getpass -import hashlib -import random - -# Our own -from IPython.core.error import UsageError -from IPython.utils.py3compat import encode - -#----------------------------------------------------------------------------- -# Globals -#----------------------------------------------------------------------------- - -# Length of the salt in nr of hex chars, which implies salt_len * 4 -# bits of randomness. -salt_len = 12 - -#----------------------------------------------------------------------------- -# Functions -#----------------------------------------------------------------------------- - -def passwd(passphrase=None, algorithm='sha1'): - """Generate hashed password and salt for use in notebook configuration. - - In the notebook configuration, set `c.NotebookApp.password` to - the generated string. - - Parameters - ---------- - passphrase : str - Password to hash. If unspecified, the user is asked to input - and verify a password. - algorithm : str - Hashing algorithm to use (e.g, 'sha1' or any argument supported - by :func:`hashlib.new`). - - Returns - ------- - hashed_passphrase : str - Hashed password, in the format 'hash_algorithm:salt:passphrase_hash'. - - Examples - -------- - >>> passwd('mypassword') - 'sha1:7cf3:b7d6da294ea9592a9480c8f52e63cd42cfb9dd12' - - """ - if passphrase is None: - for i in range(3): - p0 = getpass.getpass('Enter password: ') - p1 = getpass.getpass('Verify password: ') - if p0 == p1: - passphrase = p0 - break - else: - print('Passwords do not match.') - else: - raise UsageError('No matching passwords found. Giving up.') - - h = hashlib.new(algorithm) - salt = ('%0' + str(salt_len) + 'x') % random.getrandbits(4 * salt_len) - h.update(encode(passphrase, 'utf-8') + encode(salt, 'ascii')) - - return ':'.join((algorithm, salt, h.hexdigest())) - - -def passwd_check(hashed_passphrase, passphrase): - """Verify that a given passphrase matches its hashed version. - - Parameters - ---------- - hashed_passphrase : str - Hashed password, in the format returned by `passwd`. - passphrase : str - Passphrase to validate. - - Returns - ------- - valid : bool - True if the passphrase matches the hash. - - Examples - -------- - >>> from IPython.lib.security import passwd_check - >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a', - ... 'mypassword') - True - - >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a', - ... 'anotherpassword') - False - """ - try: - algorithm, salt, pw_digest = hashed_passphrase.split(':', 2) - except (ValueError, TypeError): - return False - - try: - h = hashlib.new(algorithm) - except ValueError: - return False - - if len(pw_digest) == 0: - return False - - h.update(encode(passphrase, 'utf-8') + encode(salt, 'ascii')) - - return h.hexdigest() == pw_digest +""" +Password generation for the IPython notebook. 
+""" +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- +# Stdlib +import getpass +import hashlib +import random + +# Our own +from IPython.core.error import UsageError +from IPython.utils.py3compat import encode + +#----------------------------------------------------------------------------- +# Globals +#----------------------------------------------------------------------------- + +# Length of the salt in nr of hex chars, which implies salt_len * 4 +# bits of randomness. +salt_len = 12 + +#----------------------------------------------------------------------------- +# Functions +#----------------------------------------------------------------------------- + +def passwd(passphrase=None, algorithm='sha1'): + """Generate hashed password and salt for use in notebook configuration. + + In the notebook configuration, set `c.NotebookApp.password` to + the generated string. + + Parameters + ---------- + passphrase : str + Password to hash. If unspecified, the user is asked to input + and verify a password. + algorithm : str + Hashing algorithm to use (e.g, 'sha1' or any argument supported + by :func:`hashlib.new`). + + Returns + ------- + hashed_passphrase : str + Hashed password, in the format 'hash_algorithm:salt:passphrase_hash'. + + Examples + -------- + >>> passwd('mypassword') + 'sha1:7cf3:b7d6da294ea9592a9480c8f52e63cd42cfb9dd12' + + """ + if passphrase is None: + for i in range(3): + p0 = getpass.getpass('Enter password: ') + p1 = getpass.getpass('Verify password: ') + if p0 == p1: + passphrase = p0 + break + else: + print('Passwords do not match.') + else: + raise UsageError('No matching passwords found. Giving up.') + + h = hashlib.new(algorithm) + salt = ('%0' + str(salt_len) + 'x') % random.getrandbits(4 * salt_len) + h.update(encode(passphrase, 'utf-8') + encode(salt, 'ascii')) + + return ':'.join((algorithm, salt, h.hexdigest())) + + +def passwd_check(hashed_passphrase, passphrase): + """Verify that a given passphrase matches its hashed version. + + Parameters + ---------- + hashed_passphrase : str + Hashed password, in the format returned by `passwd`. + passphrase : str + Passphrase to validate. + + Returns + ------- + valid : bool + True if the passphrase matches the hash. + + Examples + -------- + >>> from IPython.lib.security import passwd_check + >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a', + ... 'mypassword') + True + + >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a', + ... 'anotherpassword') + False + """ + try: + algorithm, salt, pw_digest = hashed_passphrase.split(':', 2) + except (ValueError, TypeError): + return False + + try: + h = hashlib.new(algorithm) + except ValueError: + return False + + if len(pw_digest) == 0: + return False + + h.update(encode(passphrase, 'utf-8') + encode(salt, 'ascii')) + + return h.hexdigest() == pw_digest diff --git a/contrib/python/ipython/py3/IPython/nbconvert.py b/contrib/python/ipython/py3/IPython/nbconvert.py index c73b9727925..2de4ee50bc7 100644 --- a/contrib/python/ipython/py3/IPython/nbconvert.py +++ b/contrib/python/ipython/py3/IPython/nbconvert.py @@ -1,19 +1,19 @@ -""" -Shim to maintain backwards compatibility with old IPython.nbconvert imports. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. 
- -import sys -from warnings import warn - -from IPython.utils.shimmodule import ShimModule, ShimWarning - -warn("The `IPython.nbconvert` package has been deprecated since IPython 4.0. " - "You should import from nbconvert instead.", ShimWarning) - -# Unconditionally insert the shim into sys.modules so that further import calls -# trigger the custom attribute access above - -sys.modules['IPython.nbconvert'] = ShimModule( - src='IPython.nbconvert', mirror='nbconvert') +""" +Shim to maintain backwards compatibility with old IPython.nbconvert imports. +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import sys +from warnings import warn + +from IPython.utils.shimmodule import ShimModule, ShimWarning + +warn("The `IPython.nbconvert` package has been deprecated since IPython 4.0. " + "You should import from nbconvert instead.", ShimWarning) + +# Unconditionally insert the shim into sys.modules so that further import calls +# trigger the custom attribute access above + +sys.modules['IPython.nbconvert'] = ShimModule( + src='IPython.nbconvert', mirror='nbconvert') diff --git a/contrib/python/ipython/py3/IPython/nbformat.py b/contrib/python/ipython/py3/IPython/nbformat.py index 3be80bf70d9..310277de009 100644 --- a/contrib/python/ipython/py3/IPython/nbformat.py +++ b/contrib/python/ipython/py3/IPython/nbformat.py @@ -1,19 +1,19 @@ -""" -Shim to maintain backwards compatibility with old IPython.nbformat imports. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -from warnings import warn - -from IPython.utils.shimmodule import ShimModule, ShimWarning - -warn("The `IPython.nbformat` package has been deprecated since IPython 4.0. " - "You should import from nbformat instead.", ShimWarning) - -# Unconditionally insert the shim into sys.modules so that further import calls -# trigger the custom attribute access above - -sys.modules['IPython.nbformat'] = ShimModule( - src='IPython.nbformat', mirror='nbformat') +""" +Shim to maintain backwards compatibility with old IPython.nbformat imports. +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import sys +from warnings import warn + +from IPython.utils.shimmodule import ShimModule, ShimWarning + +warn("The `IPython.nbformat` package has been deprecated since IPython 4.0. " + "You should import from nbformat instead.", ShimWarning) + +# Unconditionally insert the shim into sys.modules so that further import calls +# trigger the custom attribute access above + +sys.modules['IPython.nbformat'] = ShimModule( + src='IPython.nbformat', mirror='nbformat') diff --git a/contrib/python/ipython/py3/IPython/parallel.py b/contrib/python/ipython/py3/IPython/parallel.py index 8e795db0a30..0f100127839 100644 --- a/contrib/python/ipython/py3/IPython/parallel.py +++ b/contrib/python/ipython/py3/IPython/parallel.py @@ -1,20 +1,20 @@ -""" -Shim to maintain backwards compatibility with old IPython.parallel imports. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -from warnings import warn - -from IPython.utils.shimmodule import ShimModule, ShimWarning - -warn("The `IPython.parallel` package has been deprecated since IPython 4.0. 
" - "You should import from ipyparallel instead.", ShimWarning) - -# Unconditionally insert the shim into sys.modules so that further import calls -# trigger the custom attribute access above - -sys.modules['IPython.parallel'] = ShimModule( - src='IPython.parallel', mirror='ipyparallel') - +""" +Shim to maintain backwards compatibility with old IPython.parallel imports. +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import sys +from warnings import warn + +from IPython.utils.shimmodule import ShimModule, ShimWarning + +warn("The `IPython.parallel` package has been deprecated since IPython 4.0. " + "You should import from ipyparallel instead.", ShimWarning) + +# Unconditionally insert the shim into sys.modules so that further import calls +# trigger the custom attribute access above + +sys.modules['IPython.parallel'] = ShimModule( + src='IPython.parallel', mirror='ipyparallel') + diff --git a/contrib/python/ipython/py3/IPython/paths.py b/contrib/python/ipython/py3/IPython/paths.py index b0000dfe898..e19269058af 100644 --- a/contrib/python/ipython/py3/IPython/paths.py +++ b/contrib/python/ipython/py3/IPython/paths.py @@ -1,121 +1,121 @@ -"""Find files and directories which IPython uses. -""" -import os.path -import shutil -import tempfile -from warnings import warn - -import IPython -from IPython.utils.importstring import import_item -from IPython.utils.path import ( - get_home_dir, get_xdg_dir, get_xdg_cache_dir, compress_user, _writable_dir, - ensure_dir_exists, fs_encoding) -from IPython.utils import py3compat - -def get_ipython_dir() -> str: - """Get the IPython directory for this platform and user. - - This uses the logic in `get_home_dir` to find the home directory - and then adds .ipython to the end of the path. - """ - - env = os.environ - pjoin = os.path.join - - - ipdir_def = '.ipython' - - home_dir = get_home_dir() - xdg_dir = get_xdg_dir() - - if 'IPYTHON_DIR' in env: - warn('The environment variable IPYTHON_DIR is deprecated since IPython 3.0. ' - 'Please use IPYTHONDIR instead.', DeprecationWarning) - ipdir = env.get('IPYTHONDIR', env.get('IPYTHON_DIR', None)) - if ipdir is None: - # not set explicitly, use ~/.ipython - ipdir = pjoin(home_dir, ipdir_def) - if xdg_dir: - # Several IPython versions (up to 1.x) defaulted to .config/ipython - # on Linux. We have decided to go back to using .ipython everywhere - xdg_ipdir = pjoin(xdg_dir, 'ipython') - - if _writable_dir(xdg_ipdir): - cu = compress_user - if os.path.exists(ipdir): - warn(('Ignoring {0} in favour of {1}. Remove {0} to ' - 'get rid of this message').format(cu(xdg_ipdir), cu(ipdir))) - elif os.path.islink(xdg_ipdir): - warn(('{0} is deprecated. 
Move link to {1} to ' - 'get rid of this message').format(cu(xdg_ipdir), cu(ipdir))) - else: - warn('Moving {0} to {1}'.format(cu(xdg_ipdir), cu(ipdir))) - shutil.move(xdg_ipdir, ipdir) - - ipdir = os.path.normpath(os.path.expanduser(ipdir)) - - if os.path.exists(ipdir) and not _writable_dir(ipdir): - # ipdir exists, but is not writable - warn("IPython dir '{0}' is not a writable location," - " using a temp directory.".format(ipdir)) - ipdir = tempfile.mkdtemp() - elif not os.path.exists(ipdir): - parent = os.path.dirname(ipdir) - if not _writable_dir(parent): - # ipdir does not exist and parent isn't writable - warn("IPython parent '{0}' is not a writable location," - " using a temp directory.".format(parent)) - ipdir = tempfile.mkdtemp() - else: - os.makedirs(ipdir, exist_ok=True) - assert isinstance(ipdir, str), "all path manipulation should be str(unicode), but are not." - return ipdir - - -def get_ipython_cache_dir() -> str: - """Get the cache directory it is created if it does not exist.""" - xdgdir = get_xdg_cache_dir() - if xdgdir is None: - return get_ipython_dir() - ipdir = os.path.join(xdgdir, "ipython") - if not os.path.exists(ipdir) and _writable_dir(xdgdir): - ensure_dir_exists(ipdir) - elif not _writable_dir(xdgdir): - return get_ipython_dir() - - return ipdir - - -def get_ipython_package_dir() -> str: - """Get the base directory where IPython itself is installed.""" - ipdir = os.path.dirname(IPython.__file__) - assert isinstance(ipdir, str) - return ipdir - - -def get_ipython_module_path(module_str): - """Find the path to an IPython module in this version of IPython. - - This will always find the version of the module that is in this importable - IPython package. This will always return the path to the ``.py`` - version of the module. - """ - if module_str == 'IPython': - return os.path.join(get_ipython_package_dir(), '__init__.py') - mod = import_item(module_str) - the_path = mod.__file__.replace('.pyc', '.py') - the_path = the_path.replace('.pyo', '.py') - return py3compat.cast_unicode(the_path, fs_encoding) - -def locate_profile(profile='default'): - """Find the path to the folder associated with a given profile. - - I.e. find $IPYTHONDIR/profile_whatever. - """ - from IPython.core.profiledir import ProfileDir, ProfileDirError - try: - pd = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile) - except ProfileDirError: - # IOError makes more sense when people are expecting a path - raise IOError("Couldn't find profile %r" % profile) - return pd.location +"""Find files and directories which IPython uses. +""" +import os.path +import shutil +import tempfile +from warnings import warn + +import IPython +from IPython.utils.importstring import import_item +from IPython.utils.path import ( + get_home_dir, get_xdg_dir, get_xdg_cache_dir, compress_user, _writable_dir, + ensure_dir_exists, fs_encoding) +from IPython.utils import py3compat + +def get_ipython_dir() -> str: + """Get the IPython directory for this platform and user. + + This uses the logic in `get_home_dir` to find the home directory + and then adds .ipython to the end of the path. + """ + + env = os.environ + pjoin = os.path.join + + + ipdir_def = '.ipython' + + home_dir = get_home_dir() + xdg_dir = get_xdg_dir() + + if 'IPYTHON_DIR' in env: + warn('The environment variable IPYTHON_DIR is deprecated since IPython 3.0. 
' + 'Please use IPYTHONDIR instead.', DeprecationWarning) + ipdir = env.get('IPYTHONDIR', env.get('IPYTHON_DIR', None)) + if ipdir is None: + # not set explicitly, use ~/.ipython + ipdir = pjoin(home_dir, ipdir_def) + if xdg_dir: + # Several IPython versions (up to 1.x) defaulted to .config/ipython + # on Linux. We have decided to go back to using .ipython everywhere + xdg_ipdir = pjoin(xdg_dir, 'ipython') + + if _writable_dir(xdg_ipdir): + cu = compress_user + if os.path.exists(ipdir): + warn(('Ignoring {0} in favour of {1}. Remove {0} to ' + 'get rid of this message').format(cu(xdg_ipdir), cu(ipdir))) + elif os.path.islink(xdg_ipdir): + warn(('{0} is deprecated. Move link to {1} to ' + 'get rid of this message').format(cu(xdg_ipdir), cu(ipdir))) + else: + warn('Moving {0} to {1}'.format(cu(xdg_ipdir), cu(ipdir))) + shutil.move(xdg_ipdir, ipdir) + + ipdir = os.path.normpath(os.path.expanduser(ipdir)) + + if os.path.exists(ipdir) and not _writable_dir(ipdir): + # ipdir exists, but is not writable + warn("IPython dir '{0}' is not a writable location," + " using a temp directory.".format(ipdir)) + ipdir = tempfile.mkdtemp() + elif not os.path.exists(ipdir): + parent = os.path.dirname(ipdir) + if not _writable_dir(parent): + # ipdir does not exist and parent isn't writable + warn("IPython parent '{0}' is not a writable location," + " using a temp directory.".format(parent)) + ipdir = tempfile.mkdtemp() + else: + os.makedirs(ipdir, exist_ok=True) + assert isinstance(ipdir, str), "all path manipulation should be str(unicode), but are not." + return ipdir + + +def get_ipython_cache_dir() -> str: + """Get the cache directory it is created if it does not exist.""" + xdgdir = get_xdg_cache_dir() + if xdgdir is None: + return get_ipython_dir() + ipdir = os.path.join(xdgdir, "ipython") + if not os.path.exists(ipdir) and _writable_dir(xdgdir): + ensure_dir_exists(ipdir) + elif not _writable_dir(xdgdir): + return get_ipython_dir() + + return ipdir + + +def get_ipython_package_dir() -> str: + """Get the base directory where IPython itself is installed.""" + ipdir = os.path.dirname(IPython.__file__) + assert isinstance(ipdir, str) + return ipdir + + +def get_ipython_module_path(module_str): + """Find the path to an IPython module in this version of IPython. + + This will always find the version of the module that is in this importable + IPython package. This will always return the path to the ``.py`` + version of the module. + """ + if module_str == 'IPython': + return os.path.join(get_ipython_package_dir(), '__init__.py') + mod = import_item(module_str) + the_path = mod.__file__.replace('.pyc', '.py') + the_path = the_path.replace('.pyo', '.py') + return py3compat.cast_unicode(the_path, fs_encoding) + +def locate_profile(profile='default'): + """Find the path to the folder associated with a given profile. + + I.e. find $IPYTHONDIR/profile_whatever. + """ + from IPython.core.profiledir import ProfileDir, ProfileDirError + try: + pd = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile) + except ProfileDirError: + # IOError makes more sense when people are expecting a path + raise IOError("Couldn't find profile %r" % profile) + return pd.location diff --git a/contrib/python/ipython/py3/IPython/qt.py b/contrib/python/ipython/py3/IPython/qt.py index 233b9db0c02..7557a3f3298 100644 --- a/contrib/python/ipython/py3/IPython/qt.py +++ b/contrib/python/ipython/py3/IPython/qt.py @@ -1,24 +1,24 @@ -""" -Shim to maintain backwards compatibility with old IPython.qt imports. 
-""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -from warnings import warn - -from IPython.utils.shimmodule import ShimModule, ShimWarning - -warn("The `IPython.qt` package has been deprecated since IPython 4.0. " - "You should import from qtconsole instead.", ShimWarning) - -# Unconditionally insert the shim into sys.modules so that further import calls -# trigger the custom attribute access above - -_console = sys.modules['IPython.qt.console'] = ShimModule( - src='IPython.qt.console', mirror='qtconsole') - -_qt = ShimModule(src='IPython.qt', mirror='qtconsole') - -_qt.console = _console -sys.modules['IPython.qt'] = _qt +""" +Shim to maintain backwards compatibility with old IPython.qt imports. +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import sys +from warnings import warn + +from IPython.utils.shimmodule import ShimModule, ShimWarning + +warn("The `IPython.qt` package has been deprecated since IPython 4.0. " + "You should import from qtconsole instead.", ShimWarning) + +# Unconditionally insert the shim into sys.modules so that further import calls +# trigger the custom attribute access above + +_console = sys.modules['IPython.qt.console'] = ShimModule( + src='IPython.qt.console', mirror='qtconsole') + +_qt = ShimModule(src='IPython.qt', mirror='qtconsole') + +_qt.console = _console +sys.modules['IPython.qt'] = _qt diff --git a/contrib/python/ipython/py3/IPython/sphinxext/custom_doctests.py b/contrib/python/ipython/py3/IPython/sphinxext/custom_doctests.py index 65d7051f4e3..7678fd6801a 100644 --- a/contrib/python/ipython/py3/IPython/sphinxext/custom_doctests.py +++ b/contrib/python/ipython/py3/IPython/sphinxext/custom_doctests.py @@ -1,155 +1,155 @@ -""" -Handlers for IPythonDirective's @doctest pseudo-decorator. - -The Sphinx extension that provides support for embedded IPython code provides -a pseudo-decorator @doctest, which treats the input/output block as a -doctest, raising a RuntimeError during doc generation if the actual output -(after running the input) does not match the expected output. - -An example usage is: - -.. code-block:: rst - - .. ipython:: - - In [1]: x = 1 - - @doctest - In [2]: x + 2 - Out[3]: 3 - -One can also provide arguments to the decorator. The first argument should be -the name of a custom handler. The specification of any other arguments is -determined by the handler. For example, - -.. code-block:: rst - - .. ipython:: - - @doctest float - In [154]: 0.1 + 0.2 - Out[154]: 0.3 - -allows the actual output ``0.30000000000000004`` to match the expected output -due to a comparison with `np.allclose`. - -This module contains handlers for the @doctest pseudo-decorator. Handlers -should have the following function signature:: - - handler(sphinx_shell, args, input_lines, found, submitted) - -where `sphinx_shell` is the embedded Sphinx shell, `args` contains the list -of arguments that follow: '@doctest handler_name', `input_lines` contains -a list of the lines relevant to the current doctest, `found` is a string -containing the output from the IPython shell, and `submitted` is a string -containing the expected output from the IPython shell. - -Handlers must be registered in the `doctests` dict at the end of this module. - -""" - -def str_to_array(s): - """ - Simplistic converter of strings from repr to float NumPy arrays. - - If the repr representation has ellipsis in it, then this will fail. 
- - Parameters - ---------- - s : str - The repr version of a NumPy array. - - Examples - -------- - >>> s = "array([ 0.3, inf, nan])" - >>> a = str_to_array(s) - - """ - import numpy as np - - # Need to make sure eval() knows about inf and nan. - # This also assumes default printoptions for NumPy. - from numpy import inf, nan - - if s.startswith(u'array'): - # Remove array( and ) - s = s[6:-1] - - if s.startswith(u'['): - a = np.array(eval(s), dtype=float) - else: - # Assume its a regular float. Force 1D so we can index into it. - a = np.atleast_1d(float(s)) - return a - -def float_doctest(sphinx_shell, args, input_lines, found, submitted): - """ - Doctest which allow the submitted output to vary slightly from the input. - - Here is how it might appear in an rst file: - - .. code-block:: rst - - .. ipython:: - - @doctest float - In [1]: 0.1 + 0.2 - Out[1]: 0.3 - - """ - import numpy as np - - if len(args) == 2: - rtol = 1e-05 - atol = 1e-08 - else: - # Both must be specified if any are specified. - try: - rtol = float(args[2]) - atol = float(args[3]) - except IndexError: - e = ("Both `rtol` and `atol` must be specified " - "if either are specified: {0}".format(args)) - raise IndexError(e) - - try: - submitted = str_to_array(submitted) - found = str_to_array(found) - except: - # For example, if the array is huge and there are ellipsis in it. - error = True - else: - found_isnan = np.isnan(found) - submitted_isnan = np.isnan(submitted) - error = not np.allclose(found_isnan, submitted_isnan) - error |= not np.allclose(found[~found_isnan], - submitted[~submitted_isnan], - rtol=rtol, atol=atol) - - TAB = ' ' * 4 - directive = sphinx_shell.directive - if directive is None: - source = 'Unavailable' - content = 'Unavailable' - else: - source = directive.state.document.current_source - # Add tabs and make into a single string. - content = '\n'.join([TAB + line for line in directive.content]) - - if error: - - e = ('doctest float comparison failure\n\n' - 'Document source: {0}\n\n' - 'Raw content: \n{1}\n\n' - 'On input line(s):\n{TAB}{2}\n\n' - 'we found output:\n{TAB}{3}\n\n' - 'instead of the expected:\n{TAB}{4}\n\n') - e = e.format(source, content, '\n'.join(input_lines), repr(found), - repr(submitted), TAB=TAB) - raise RuntimeError(e) - -# dict of allowable doctest handlers. The key represents the first argument -# that must be given to @doctest in order to activate the handler. -doctests = { - 'float': float_doctest, -} +""" +Handlers for IPythonDirective's @doctest pseudo-decorator. + +The Sphinx extension that provides support for embedded IPython code provides +a pseudo-decorator @doctest, which treats the input/output block as a +doctest, raising a RuntimeError during doc generation if the actual output +(after running the input) does not match the expected output. + +An example usage is: + +.. code-block:: rst + + .. ipython:: + + In [1]: x = 1 + + @doctest + In [2]: x + 2 + Out[3]: 3 + +One can also provide arguments to the decorator. The first argument should be +the name of a custom handler. The specification of any other arguments is +determined by the handler. For example, + +.. code-block:: rst + + .. ipython:: + + @doctest float + In [154]: 0.1 + 0.2 + Out[154]: 0.3 + +allows the actual output ``0.30000000000000004`` to match the expected output +due to a comparison with `np.allclose`. + +This module contains handlers for the @doctest pseudo-decorator. 
Handlers +should have the following function signature:: + + handler(sphinx_shell, args, input_lines, found, submitted) + +where `sphinx_shell` is the embedded Sphinx shell, `args` contains the list +of arguments that follow: '@doctest handler_name', `input_lines` contains +a list of the lines relevant to the current doctest, `found` is a string +containing the output from the IPython shell, and `submitted` is a string +containing the expected output from the IPython shell. + +Handlers must be registered in the `doctests` dict at the end of this module. + +""" + +def str_to_array(s): + """ + Simplistic converter of strings from repr to float NumPy arrays. + + If the repr representation has ellipsis in it, then this will fail. + + Parameters + ---------- + s : str + The repr version of a NumPy array. + + Examples + -------- + >>> s = "array([ 0.3, inf, nan])" + >>> a = str_to_array(s) + + """ + import numpy as np + + # Need to make sure eval() knows about inf and nan. + # This also assumes default printoptions for NumPy. + from numpy import inf, nan + + if s.startswith(u'array'): + # Remove array( and ) + s = s[6:-1] + + if s.startswith(u'['): + a = np.array(eval(s), dtype=float) + else: + # Assume its a regular float. Force 1D so we can index into it. + a = np.atleast_1d(float(s)) + return a + +def float_doctest(sphinx_shell, args, input_lines, found, submitted): + """ + Doctest which allow the submitted output to vary slightly from the input. + + Here is how it might appear in an rst file: + + .. code-block:: rst + + .. ipython:: + + @doctest float + In [1]: 0.1 + 0.2 + Out[1]: 0.3 + + """ + import numpy as np + + if len(args) == 2: + rtol = 1e-05 + atol = 1e-08 + else: + # Both must be specified if any are specified. + try: + rtol = float(args[2]) + atol = float(args[3]) + except IndexError: + e = ("Both `rtol` and `atol` must be specified " + "if either are specified: {0}".format(args)) + raise IndexError(e) + + try: + submitted = str_to_array(submitted) + found = str_to_array(found) + except: + # For example, if the array is huge and there are ellipsis in it. + error = True + else: + found_isnan = np.isnan(found) + submitted_isnan = np.isnan(submitted) + error = not np.allclose(found_isnan, submitted_isnan) + error |= not np.allclose(found[~found_isnan], + submitted[~submitted_isnan], + rtol=rtol, atol=atol) + + TAB = ' ' * 4 + directive = sphinx_shell.directive + if directive is None: + source = 'Unavailable' + content = 'Unavailable' + else: + source = directive.state.document.current_source + # Add tabs and make into a single string. + content = '\n'.join([TAB + line for line in directive.content]) + + if error: + + e = ('doctest float comparison failure\n\n' + 'Document source: {0}\n\n' + 'Raw content: \n{1}\n\n' + 'On input line(s):\n{TAB}{2}\n\n' + 'we found output:\n{TAB}{3}\n\n' + 'instead of the expected:\n{TAB}{4}\n\n') + e = e.format(source, content, '\n'.join(input_lines), repr(found), + repr(submitted), TAB=TAB) + raise RuntimeError(e) + +# dict of allowable doctest handlers. The key represents the first argument +# that must be given to @doctest in order to activate the handler. 
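+# A project can register additional handlers the same way, e.g.
+# doctests['myhandler'] = my_handler, where my_handler follows the
+# handler(sphinx_shell, args, input_lines, found, submitted) signature
+# described in the module docstring (the names here are hypothetical).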
+doctests = { + 'float': float_doctest, +} diff --git a/contrib/python/ipython/py3/IPython/sphinxext/ipython_console_highlighting.py b/contrib/python/ipython/py3/IPython/sphinxext/ipython_console_highlighting.py index bc64087797c..b93a151fb3c 100644 --- a/contrib/python/ipython/py3/IPython/sphinxext/ipython_console_highlighting.py +++ b/contrib/python/ipython/py3/IPython/sphinxext/ipython_console_highlighting.py @@ -1,28 +1,28 @@ -""" -reST directive for syntax-highlighting ipython interactive sessions. - -""" - -from sphinx import highlighting -from IPython.lib.lexers import IPyLexer - -def setup(app): - """Setup as a sphinx extension.""" - - # This is only a lexer, so adding it below to pygments appears sufficient. - # But if somebody knows what the right API usage should be to do that via - # sphinx, by all means fix it here. At least having this setup.py - # suppresses the sphinx warning we'd get without it. - metadata = {'parallel_read_safe': True, 'parallel_write_safe': True} - return metadata - -# Register the extension as a valid pygments lexer. -# Alternatively, we could register the lexer with pygments instead. This would -# require using setuptools entrypoints: http://pygments.org/docs/plugins - -ipy2 = IPyLexer(python3=False) -ipy3 = IPyLexer(python3=True) - -highlighting.lexers['ipython'] = ipy2 -highlighting.lexers['ipython2'] = ipy2 -highlighting.lexers['ipython3'] = ipy3 +""" +reST directive for syntax-highlighting ipython interactive sessions. + +""" + +from sphinx import highlighting +from IPython.lib.lexers import IPyLexer + +def setup(app): + """Setup as a sphinx extension.""" + + # This is only a lexer, so adding it below to pygments appears sufficient. + # But if somebody knows what the right API usage should be to do that via + # sphinx, by all means fix it here. At least having this setup.py + # suppresses the sphinx warning we'd get without it. + metadata = {'parallel_read_safe': True, 'parallel_write_safe': True} + return metadata + +# Register the extension as a valid pygments lexer. +# Alternatively, we could register the lexer with pygments instead. This would +# require using setuptools entrypoints: http://pygments.org/docs/plugins + +ipy2 = IPyLexer(python3=False) +ipy3 = IPyLexer(python3=True) + +highlighting.lexers['ipython'] = ipy2 +highlighting.lexers['ipython2'] = ipy2 +highlighting.lexers['ipython3'] = ipy3 diff --git a/contrib/python/ipython/py3/IPython/sphinxext/ipython_directive.py b/contrib/python/ipython/py3/IPython/sphinxext/ipython_directive.py index 426d9f7cef3..ac0964032a5 100644 --- a/contrib/python/ipython/py3/IPython/sphinxext/ipython_directive.py +++ b/contrib/python/ipython/py3/IPython/sphinxext/ipython_directive.py @@ -1,1260 +1,1260 @@ -# -*- coding: utf-8 -*- -""" -Sphinx directive to support embedded IPython code. - -IPython provides an extension for `Sphinx <http://www.sphinx-doc.org/>`_ to -highlight and run code. - -This directive allows pasting of entire interactive IPython sessions, prompts -and all, and their code will actually get re-executed at doc build time, with -all prompts renumbered sequentially. It also allows you to input code as a pure -python input by giving the argument python to the directive. The output looks -like an interactive ipython section. - -Here is an example of how the IPython directive can -**run** python code, at build time. - -.. 
ipython:: - - In [1]: 1+1 - - In [1]: import datetime - ...: datetime.datetime.now() - -It supports IPython construct that plain -Python does not understand (like magics): - -.. ipython:: - - In [0]: import time - - In [0]: %timeit time.sleep(0.05) - -This will also support top-level async when using IPython 7.0+ - -.. ipython:: - - In [2]: import asyncio - ...: print('before') - ...: await asyncio.sleep(1) - ...: print('after') - - -The namespace will persist across multiple code chucks, Let's define a variable: - -.. ipython:: - - In [0]: who = "World" - -And now say hello: - -.. ipython:: - - In [0]: print('Hello,', who) - -If the current section raises an exception, you can add the ``:okexcept:`` flag -to the current block, otherwise the build will fail. - -.. ipython:: - :okexcept: - - In [1]: 1/0 - -IPython Sphinx directive module -=============================== - -To enable this directive, simply list it in your Sphinx ``conf.py`` file -(making sure the directory where you placed it is visible to sphinx, as is -needed for all Sphinx directives). For example, to enable syntax highlighting -and the IPython directive:: - - extensions = ['IPython.sphinxext.ipython_console_highlighting', - 'IPython.sphinxext.ipython_directive'] - -The IPython directive outputs code-blocks with the language 'ipython'. So -if you do not have the syntax highlighting extension enabled as well, then -all rendered code-blocks will be uncolored. By default this directive assumes -that your prompts are unchanged IPython ones, but this can be customized. -The configurable options that can be placed in conf.py are: - -ipython_savefig_dir: - The directory in which to save the figures. This is relative to the - Sphinx source directory. The default is `html_static_path`. -ipython_rgxin: - The compiled regular expression to denote the start of IPython input - lines. The default is ``re.compile('In \\[(\\d+)\\]:\\s?(.*)\\s*')``. You - shouldn't need to change this. -ipython_warning_is_error: [default to True] - Fail the build if something unexpected happen, for example if a block raise - an exception but does not have the `:okexcept:` flag. The exact behavior of - what is considered strict, may change between the sphinx directive version. -ipython_rgxout: - The compiled regular expression to denote the start of IPython output - lines. The default is ``re.compile('Out\\[(\\d+)\\]:\\s?(.*)\\s*')``. You - shouldn't need to change this. -ipython_promptin: - The string to represent the IPython input prompt in the generated ReST. - The default is ``'In [%d]:'``. This expects that the line numbers are used - in the prompt. -ipython_promptout: - The string to represent the IPython prompt in the generated ReST. The - default is ``'Out [%d]:'``. This expects that the line numbers are used - in the prompt. -ipython_mplbackend: - The string which specifies if the embedded Sphinx shell should import - Matplotlib and set the backend. The value specifies a backend that is - passed to `matplotlib.use()` before any lines in `ipython_execlines` are - executed. If not specified in conf.py, then the default value of 'agg' is - used. To use the IPython directive without matplotlib as a dependency, set - the value to `None`. It may end up that matplotlib is still imported - if the user specifies so in `ipython_execlines` or makes use of the - @savefig pseudo decorator. -ipython_execlines: - A list of strings to be exec'd in the embedded Sphinx shell. Typical - usage is to make certain packages always available. 
Set this to an empty - list if you wish to have no imports always available. If specified in - ``conf.py`` as `None`, then it has the effect of making no imports available. - If omitted from conf.py altogether, then the default value of - ['import numpy as np', 'import matplotlib.pyplot as plt'] is used. -ipython_holdcount - When the @suppress pseudo-decorator is used, the execution count can be - incremented or not. The default behavior is to hold the execution count, - corresponding to a value of `True`. Set this to `False` to increment - the execution count after each suppressed command. - -As an example, to use the IPython directive when `matplotlib` is not available, -one sets the backend to `None`:: - - ipython_mplbackend = None - -An example usage of the directive is: - -.. code-block:: rst - - .. ipython:: - - In [1]: x = 1 - - In [2]: y = x**2 - - In [3]: print(y) - -See http://matplotlib.org/sampledoc/ipython_directive.html for additional -documentation. - -Pseudo-Decorators -================= - -Note: Only one decorator is supported per input. If more than one decorator -is specified, then only the last one is used. - -In addition to the Pseudo-Decorators/options described at the above link, -several enhancements have been made. The directive will emit a message to the -console at build-time if code-execution resulted in an exception or warning. -You can suppress these on a per-block basis by specifying the :okexcept: -or :okwarning: options: - -.. code-block:: rst - - .. ipython:: - :okexcept: - :okwarning: - - In [1]: 1/0 - In [2]: # raise warning. - -To Do -===== - -- Turn the ad-hoc test() function into a real test suite. -- Break up ipython-specific functionality from matplotlib stuff into better - separated code. - -""" - -# Authors -# ======= -# -# - John D Hunter: original author. -# - Fernando Perez: refactoring, documentation, cleanups, port to 0.11. -# - VáclavŠmilauer <eudoxos-AT-arcig.cz>: Prompt generalizations. -# - Skipper Seabold, refactoring, cleanups, pure python addition - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -# Stdlib -import atexit -import errno -import os -import pathlib -import re -import sys -import tempfile -import ast -import warnings -import shutil -from io import StringIO - -# Third-party -from docutils.parsers.rst import directives -from docutils.parsers.rst import Directive -from sphinx.util import logging - -# Our own -from traitlets.config import Config -from IPython import InteractiveShell -from IPython.core.profiledir import ProfileDir - -use_matplotlib = False -try: - import matplotlib - use_matplotlib = True -except Exception: - pass - -#----------------------------------------------------------------------------- -# Globals -#----------------------------------------------------------------------------- -# for tokenizing blocks -COMMENT, INPUT, OUTPUT = range(3) - -#----------------------------------------------------------------------------- -# Functions and class declarations -#----------------------------------------------------------------------------- - -def block_parser(part, rgxin, rgxout, fmtin, fmtout): - """ - part is a string of ipython text, comprised of at most one - input, one output, comments, and blank lines. The block parser - parses the text into a list of:: - - blocks = [ (TOKEN0, data0), (TOKEN1, data1), ...] 
- - where TOKEN is one of [COMMENT | INPUT | OUTPUT ] and - data is, depending on the type of token:: - - COMMENT : the comment string - - INPUT: the (DECORATOR, INPUT_LINE, REST) where - DECORATOR: the input decorator (or None) - INPUT_LINE: the input as string (possibly multi-line) - REST : any stdout generated by the input line (not OUTPUT) - - OUTPUT: the output string, possibly multi-line - - """ - block = [] - lines = part.split('\n') - N = len(lines) - i = 0 - decorator = None - while 1: - - if i==N: - # nothing left to parse -- the last line - break - - line = lines[i] - i += 1 - line_stripped = line.strip() - if line_stripped.startswith('#'): - block.append((COMMENT, line)) - continue - - if line_stripped.startswith('@'): - # Here is where we assume there is, at most, one decorator. - # Might need to rethink this. - decorator = line_stripped - continue - - # does this look like an input line? - matchin = rgxin.match(line) - if matchin: - lineno, inputline = int(matchin.group(1)), matchin.group(2) - - # the ....: continuation string - continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2)) - Nc = len(continuation) - # input lines can continue on for more than one line, if - # we have a '\' line continuation char or a function call - # echo line 'print'. The input line can only be - # terminated by the end of the block or an output line, so - # we parse out the rest of the input line if it is - # multiline as well as any echo text - - rest = [] - while i<N: - - # look ahead; if the next line is blank, or a comment, or - # an output line, we're done - - nextline = lines[i] - matchout = rgxout.match(nextline) - #print "nextline=%s, continuation=%s, starts=%s"%(nextline, continuation, nextline.startswith(continuation)) - if matchout or nextline.startswith('#'): - break - elif nextline.startswith(continuation): - # The default ipython_rgx* treat the space following the colon as optional. - # However, If the space is there we must consume it or code - # employing the cython_magic extension will fail to execute. - # - # This works with the default ipython_rgx* patterns, - # If you modify them, YMMV. - nextline = nextline[Nc:] - if nextline and nextline[0] == ' ': - nextline = nextline[1:] - - inputline += '\n' + nextline - else: - rest.append(nextline) - i+= 1 - - block.append((INPUT, (decorator, inputline, '\n'.join(rest)))) - continue - - # if it looks like an output line grab all the text to the end - # of the block - matchout = rgxout.match(line) - if matchout: - lineno, output = int(matchout.group(1)), matchout.group(2) - if i<N-1: - output = '\n'.join([output] + lines[i:]) - - block.append((OUTPUT, output)) - break - - return block - - -class EmbeddedSphinxShell(object): - """An embedded IPython instance to run inside Sphinx""" - - def __init__(self, exec_lines=None): - - self.cout = StringIO() - - if exec_lines is None: - exec_lines = [] - - # Create config object for IPython - config = Config() - config.HistoryManager.hist_file = ':memory:' - config.InteractiveShell.autocall = False - config.InteractiveShell.autoindent = False - config.InteractiveShell.colors = 'NoColor' - - # create a profile so instance history isn't saved - tmp_profile_dir = tempfile.mkdtemp(prefix='profile_') - profname = 'auto_profile_sphinx_build' - pdir = os.path.join(tmp_profile_dir,profname) - profile = ProfileDir.create_profile_dir(pdir) - - # Create and initialize global ipython, but don't start its mainloop. - # This will persist across different EmbeddedSphinxShell instances. 
- IP = InteractiveShell.instance(config=config, profile_dir=profile) - atexit.register(self.cleanup) - - # Store a few parts of IPython we'll need. - self.IP = IP - self.user_ns = self.IP.user_ns - self.user_global_ns = self.IP.user_global_ns - - self.input = '' - self.output = '' - self.tmp_profile_dir = tmp_profile_dir - - self.is_verbatim = False - self.is_doctest = False - self.is_suppress = False - - # Optionally, provide more detailed information to shell. - # this is assigned by the SetUp method of IPythonDirective - # to point at itself. - # - # So, you can access handy things at self.directive.state - self.directive = None - - # on the first call to the savefig decorator, we'll import - # pyplot as plt so we can make a call to the plt.gcf().savefig - self._pyplot_imported = False - - # Prepopulate the namespace. - for line in exec_lines: - self.process_input_line(line, store_history=False) - - def cleanup(self): - shutil.rmtree(self.tmp_profile_dir, ignore_errors=True) - - def clear_cout(self): - self.cout.seek(0) - self.cout.truncate(0) - - def process_input_line(self, line, store_history): - return self.process_input_lines([line], store_history=store_history) - - def process_input_lines(self, lines, store_history=True): - """process the input, capturing stdout""" - stdout = sys.stdout - source_raw = '\n'.join(lines) - try: - sys.stdout = self.cout - self.IP.run_cell(source_raw, store_history=store_history) - finally: - sys.stdout = stdout - - def process_image(self, decorator): - """ - # build out an image directive like - # .. image:: somefile.png - # :width 4in - # - # from an input like - # savefig somefile.png width=4in - """ - savefig_dir = self.savefig_dir - source_dir = self.source_dir - saveargs = decorator.split(' ') - filename = saveargs[1] - # insert relative path to image file in source - # as absolute path for Sphinx - # sphinx expects a posix path, even on Windows - path = pathlib.Path(savefig_dir, filename) - outfile = '/' + path.relative_to(source_dir).as_posix() - - imagerows = ['.. image:: %s' % outfile] - - for kwarg in saveargs[2:]: - arg, val = kwarg.split('=') - arg = arg.strip() - val = val.strip() - imagerows.append(' :%s: %s'%(arg, val)) - - image_file = os.path.basename(outfile) # only return file name - image_directive = '\n'.join(imagerows) - return image_file, image_directive - - # Callbacks for each type of token - def process_input(self, data, input_prompt, lineno): - """ - Process data block for INPUT token. - - """ - decorator, input, rest = data - image_file = None - image_directive = None - - is_verbatim = decorator=='@verbatim' or self.is_verbatim - is_doctest = (decorator is not None and \ - decorator.startswith('@doctest')) or self.is_doctest - is_suppress = decorator=='@suppress' or self.is_suppress - is_okexcept = decorator=='@okexcept' or self.is_okexcept - is_okwarning = decorator=='@okwarning' or self.is_okwarning - is_savefig = decorator is not None and \ - decorator.startswith('@savefig') - - input_lines = input.split('\n') - if len(input_lines) > 1: - if input_lines[-1] != "": - input_lines.append('') # make sure there's a blank line - # so splitter buffer gets reset - - continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2)) - - if is_savefig: - image_file, image_directive = self.process_image(decorator) - - ret = [] - is_semicolon = False - - # Hold the execution count, if requested to do so. 
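- # Running a suppressed block without storing history keeps the visible
- # In [n] numbering unchanged (this is the ipython_holdcount behaviour).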
- if is_suppress and self.hold_count: - store_history = False - else: - store_history = True - - # Note: catch_warnings is not thread safe - with warnings.catch_warnings(record=True) as ws: - if input_lines[0].endswith(';'): - is_semicolon = True - #for i, line in enumerate(input_lines): - - # process the first input line - if is_verbatim: - self.process_input_lines(['']) - self.IP.execution_count += 1 # increment it anyway - else: - # only submit the line in non-verbatim mode - self.process_input_lines(input_lines, store_history=store_history) - - if not is_suppress: - for i, line in enumerate(input_lines): - if i == 0: - formatted_line = '%s %s'%(input_prompt, line) - else: - formatted_line = '%s %s'%(continuation, line) - ret.append(formatted_line) - - if not is_suppress and len(rest.strip()) and is_verbatim: - # The "rest" is the standard output of the input. This needs to be - # added when in verbatim mode. If there is no "rest", then we don't - # add it, as the new line will be added by the processed output. - ret.append(rest) - - # Fetch the processed output. (This is not the submitted output.) - self.cout.seek(0) - processed_output = self.cout.read() - if not is_suppress and not is_semicolon: - # - # In IPythonDirective.run, the elements of `ret` are eventually - # combined such that '' entries correspond to newlines. So if - # `processed_output` is equal to '', then the adding it to `ret` - # ensures that there is a blank line between consecutive inputs - # that have no outputs, as in: - # - # In [1]: x = 4 - # - # In [2]: x = 5 - # - # When there is processed output, it has a '\n' at the tail end. So - # adding the output to `ret` will provide the necessary spacing - # between consecutive input/output blocks, as in: - # - # In [1]: x - # Out[1]: 5 - # - # In [2]: x - # Out[2]: 5 - # - # When there is stdout from the input, it also has a '\n' at the - # tail end, and so this ensures proper spacing as well. E.g.: - # - # In [1]: print x - # 5 - # - # In [2]: x = 5 - # - # When in verbatim mode, `processed_output` is empty (because - # nothing was passed to IP. Sometimes the submitted code block has - # an Out[] portion and sometimes it does not. When it does not, we - # need to ensure proper spacing, so we have to add '' to `ret`. - # However, if there is an Out[] in the submitted code, then we do - # not want to add a newline as `process_output` has stuff to add. - # The difficulty is that `process_input` doesn't know if - # `process_output` will be called---so it doesn't know if there is - # Out[] in the code block. The requires that we include a hack in - # `process_block`. See the comments there. - # - ret.append(processed_output) - elif is_semicolon: - # Make sure there is a newline after the semicolon. - ret.append('') - - # context information - filename = "Unknown" - lineno = 0 - if self.directive.state: - filename = self.directive.state.document.current_source - lineno = self.directive.state.document.current_line - - # Use sphinx logger for warnings - logger = logging.getLogger(__name__) - - # output any exceptions raised during execution to stdout - # unless :okexcept: has been specified. 
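- # Detection is heuristic: the captured output is scanned for
- # "Traceback" or "SyntaxError" rather than catching the exception itself.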
- if not is_okexcept and ( - ("Traceback" in processed_output) or ("SyntaxError" in processed_output) - ): - s = "\n>>>" + ("-" * 73) + "\n" - s += "Exception in %s at block ending on line %s\n" % (filename, lineno) - s += "Specify :okexcept: as an option in the ipython:: block to suppress this message\n" - s += processed_output + "\n" - s += "<<<" + ("-" * 73) - logger.warning(s) - if self.warning_is_error: - raise RuntimeError('Non Expected exception in `{}` line {}'.format(filename, lineno)) - - # output any warning raised during execution to stdout - # unless :okwarning: has been specified. - if not is_okwarning: - for w in ws: - s = "\n>>>" + ("-" * 73) + "\n" - s += "Warning in %s at block ending on line %s\n" % (filename, lineno) - s += "Specify :okwarning: as an option in the ipython:: block to suppress this message\n" - s += ("-" * 76) + "\n" - s += warnings.formatwarning( - w.message, w.category, w.filename, w.lineno, w.line - ) - s += "<<<" + ("-" * 73) - logger.warning(s) - if self.warning_is_error: - raise RuntimeError('Non Expected warning in `{}` line {}'.format(filename, lineno)) - - self.clear_cout() - return (ret, input_lines, processed_output, - is_doctest, decorator, image_file, image_directive) - - - def process_output(self, data, output_prompt, input_lines, output, - is_doctest, decorator, image_file): - """ - Process data block for OUTPUT token. - - """ - # Recall: `data` is the submitted output, and `output` is the processed - # output from `input_lines`. - - TAB = ' ' * 4 - - if is_doctest and output is not None: - - found = output # This is the processed output - found = found.strip() - submitted = data.strip() - - if self.directive is None: - source = 'Unavailable' - content = 'Unavailable' - else: - source = self.directive.state.document.current_source - content = self.directive.content - # Add tabs and join into a single string. - content = '\n'.join([TAB + line for line in content]) - - # Make sure the output contains the output prompt. - ind = found.find(output_prompt) - if ind < 0: - e = ('output does not contain output prompt\n\n' - 'Document source: {0}\n\n' - 'Raw content: \n{1}\n\n' - 'Input line(s):\n{TAB}{2}\n\n' - 'Output line(s):\n{TAB}{3}\n\n') - e = e.format(source, content, '\n'.join(input_lines), - repr(found), TAB=TAB) - raise RuntimeError(e) - found = found[len(output_prompt):].strip() - - # Handle the actual doctest comparison. - if decorator.strip() == '@doctest': - # Standard doctest - if found != submitted: - e = ('doctest failure\n\n' - 'Document source: {0}\n\n' - 'Raw content: \n{1}\n\n' - 'On input line(s):\n{TAB}{2}\n\n' - 'we found output:\n{TAB}{3}\n\n' - 'instead of the expected:\n{TAB}{4}\n\n') - e = e.format(source, content, '\n'.join(input_lines), - repr(found), repr(submitted), TAB=TAB) - raise RuntimeError(e) - else: - self.custom_doctest(decorator, input_lines, found, submitted) - - # When in verbatim mode, this holds additional submitted output - # to be written in the final Sphinx output. - # https://github.com/ipython/ipython/issues/5776 - out_data = [] - - is_verbatim = decorator=='@verbatim' or self.is_verbatim - if is_verbatim and data.strip(): - # Note that `ret` in `process_block` has '' as its last element if - # the code block was in verbatim mode. So if there is no submitted - # output, then we will have proper spacing only if we do not add - # an additional '' to `out_data`. This is why we condition on - # `and data.strip()`. - - # The submitted output has no output prompt. 
If we want the - # prompt and the code to appear, we need to join them now - # instead of adding them separately---as this would create an - # undesired newline. How we do this ultimately depends on the - # format of the output regex. I'll do what works for the default - # prompt for now, and we might have to adjust if it doesn't work - # in other cases. Finally, the submitted output does not have - # a trailing newline, so we must add it manually. - out_data.append("{0} {1}\n".format(output_prompt, data)) - - return out_data - - def process_comment(self, data): - """Process data fPblock for COMMENT token.""" - if not self.is_suppress: - return [data] - - def save_image(self, image_file): - """ - Saves the image file to disk. - """ - self.ensure_pyplot() - command = 'plt.gcf().savefig("%s")'%image_file - #print 'SAVEFIG', command # dbg - self.process_input_line('bookmark ipy_thisdir', store_history=False) - self.process_input_line('cd -b ipy_savedir', store_history=False) - self.process_input_line(command, store_history=False) - self.process_input_line('cd -b ipy_thisdir', store_history=False) - self.process_input_line('bookmark -d ipy_thisdir', store_history=False) - self.clear_cout() - - def process_block(self, block): - """ - process block from the block_parser and return a list of processed lines - """ - ret = [] - output = None - input_lines = None - lineno = self.IP.execution_count - - input_prompt = self.promptin % lineno - output_prompt = self.promptout % lineno - image_file = None - image_directive = None - - found_input = False - for token, data in block: - if token == COMMENT: - out_data = self.process_comment(data) - elif token == INPUT: - found_input = True - (out_data, input_lines, output, is_doctest, - decorator, image_file, image_directive) = \ - self.process_input(data, input_prompt, lineno) - elif token == OUTPUT: - if not found_input: - - TAB = ' ' * 4 - linenumber = 0 - source = 'Unavailable' - content = 'Unavailable' - if self.directive: - linenumber = self.directive.state.document.current_line - source = self.directive.state.document.current_source - content = self.directive.content - # Add tabs and join into a single string. - content = '\n'.join([TAB + line for line in content]) - - e = ('\n\nInvalid block: Block contains an output prompt ' - 'without an input prompt.\n\n' - 'Document source: {0}\n\n' - 'Content begins at line {1}: \n\n{2}\n\n' - 'Problematic block within content: \n\n{TAB}{3}\n\n') - e = e.format(source, linenumber, content, block, TAB=TAB) - - # Write, rather than include in exception, since Sphinx - # will truncate tracebacks. - sys.stdout.write(e) - raise RuntimeError('An invalid block was detected.') - out_data = \ - self.process_output(data, output_prompt, input_lines, - output, is_doctest, decorator, - image_file) - if out_data: - # Then there was user submitted output in verbatim mode. - # We need to remove the last element of `ret` that was - # added in `process_input`, as it is '' and would introduce - # an undesirable newline. - assert(ret[-1] == '') - del ret[-1] - - if out_data: - ret.extend(out_data) - - # save the image files - if image_file is not None: - self.save_image(image_file) - - return ret, image_directive - - def ensure_pyplot(self): - """ - Ensures that pyplot has been imported into the embedded IPython shell. - - Also, makes sure to set the backend appropriately if not set already. - - """ - # We are here if the @figure pseudo decorator was used. 
Thus, it's - # possible that we could be here even if python_mplbackend were set to - # `None`. That's also strange and perhaps worthy of raising an - # exception, but for now, we just set the backend to 'agg'. - - if not self._pyplot_imported: - if 'matplotlib.backends' not in sys.modules: - # Then ipython_matplotlib was set to None but there was a - # call to the @figure decorator (and ipython_execlines did - # not set a backend). - #raise Exception("No backend was set, but @figure was used!") - import matplotlib - matplotlib.use('agg') - - # Always import pyplot into embedded shell. - self.process_input_line('import matplotlib.pyplot as plt', - store_history=False) - self._pyplot_imported = True - - def process_pure_python(self, content): - """ - content is a list of strings. it is unedited directive content - - This runs it line by line in the InteractiveShell, prepends - prompts as needed capturing stderr and stdout, then returns - the content as a list as if it were ipython code - """ - output = [] - savefig = False # keep up with this to clear figure - multiline = False # to handle line continuation - multiline_start = None - fmtin = self.promptin - - ct = 0 - - for lineno, line in enumerate(content): - - line_stripped = line.strip() - if not len(line): - output.append(line) - continue - - # handle decorators - if line_stripped.startswith('@'): - output.extend([line]) - if 'savefig' in line: - savefig = True # and need to clear figure - continue - - # handle comments - if line_stripped.startswith('#'): - output.extend([line]) - continue - - # deal with lines checking for multiline - continuation = u' %s:'% ''.join(['.']*(len(str(ct))+2)) - if not multiline: - modified = u"%s %s" % (fmtin % ct, line_stripped) - output.append(modified) - ct += 1 - try: - ast.parse(line_stripped) - output.append(u'') - except Exception: # on a multiline - multiline = True - multiline_start = lineno - else: # still on a multiline - modified = u'%s %s' % (continuation, line) - output.append(modified) - - # if the next line is indented, it should be part of multiline - if len(content) > lineno + 1: - nextline = content[lineno + 1] - if len(nextline) - len(nextline.lstrip()) > 3: - continue - try: - mod = ast.parse( - '\n'.join(content[multiline_start:lineno+1])) - if isinstance(mod.body[0], ast.FunctionDef): - # check to see if we have the whole function - for element in mod.body[0].body: - if isinstance(element, ast.Return): - multiline = False - else: - output.append(u'') - multiline = False - except Exception: - pass - - if savefig: # clear figure if plotted - self.ensure_pyplot() - self.process_input_line('plt.clf()', store_history=False) - self.clear_cout() - savefig = False - - return output - - def custom_doctest(self, decorator, input_lines, found, submitted): - """ - Perform a specialized doctest. 
- - """ - from .custom_doctests import doctests - - args = decorator.split() - doctest_type = args[1] - if doctest_type in doctests: - doctests[doctest_type](self, args, input_lines, found, submitted) - else: - e = "Invalid option to @doctest: {0}".format(doctest_type) - raise Exception(e) - - -class IPythonDirective(Directive): - - has_content = True - required_arguments = 0 - optional_arguments = 4 # python, suppress, verbatim, doctest - final_argumuent_whitespace = True - option_spec = { 'python': directives.unchanged, - 'suppress' : directives.flag, - 'verbatim' : directives.flag, - 'doctest' : directives.flag, - 'okexcept': directives.flag, - 'okwarning': directives.flag - } - - shell = None - - seen_docs = set() - - def get_config_options(self): - # contains sphinx configuration variables - config = self.state.document.settings.env.config - - # get config variables to set figure output directory - savefig_dir = config.ipython_savefig_dir - source_dir = self.state.document.settings.env.srcdir - savefig_dir = os.path.join(source_dir, savefig_dir) - - # get regex and prompt stuff - rgxin = config.ipython_rgxin - rgxout = config.ipython_rgxout - warning_is_error= config.ipython_warning_is_error - promptin = config.ipython_promptin - promptout = config.ipython_promptout - mplbackend = config.ipython_mplbackend - exec_lines = config.ipython_execlines - hold_count = config.ipython_holdcount - - return (savefig_dir, source_dir, rgxin, rgxout, - promptin, promptout, mplbackend, exec_lines, hold_count, warning_is_error) - - def setup(self): - # Get configuration values. - (savefig_dir, source_dir, rgxin, rgxout, promptin, promptout, - mplbackend, exec_lines, hold_count, warning_is_error) = self.get_config_options() - - try: - os.makedirs(savefig_dir) - except OSError as e: - if e.errno != errno.EEXIST: - raise - - if self.shell is None: - # We will be here many times. However, when the - # EmbeddedSphinxShell is created, its interactive shell member - # is the same for each instance. - - if mplbackend and 'matplotlib.backends' not in sys.modules and use_matplotlib: - import matplotlib - matplotlib.use(mplbackend) - - # Must be called after (potentially) importing matplotlib and - # setting its backend since exec_lines might import pylab. - self.shell = EmbeddedSphinxShell(exec_lines) - - # Store IPython directive to enable better error messages - self.shell.directive = self - - # reset the execution count if we haven't processed this doc - #NOTE: this may be borked if there are multiple seen_doc tmp files - #check time stamp? 
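- # seen_docs is a class-level set, so this reset runs only once per
- # source document per build.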
- if not self.state.document.current_source in self.seen_docs: - self.shell.IP.history_manager.reset() - self.shell.IP.execution_count = 1 - self.seen_docs.add(self.state.document.current_source) - - # and attach to shell so we don't have to pass them around - self.shell.rgxin = rgxin - self.shell.rgxout = rgxout - self.shell.promptin = promptin - self.shell.promptout = promptout - self.shell.savefig_dir = savefig_dir - self.shell.source_dir = source_dir - self.shell.hold_count = hold_count - self.shell.warning_is_error = warning_is_error - - # setup bookmark for saving figures directory - self.shell.process_input_line('bookmark ipy_savedir %s'%savefig_dir, - store_history=False) - self.shell.clear_cout() - - return rgxin, rgxout, promptin, promptout - - def teardown(self): - # delete last bookmark - self.shell.process_input_line('bookmark -d ipy_savedir', - store_history=False) - self.shell.clear_cout() - - def run(self): - debug = False - - #TODO, any reason block_parser can't be a method of embeddable shell - # then we wouldn't have to carry these around - rgxin, rgxout, promptin, promptout = self.setup() - - options = self.options - self.shell.is_suppress = 'suppress' in options - self.shell.is_doctest = 'doctest' in options - self.shell.is_verbatim = 'verbatim' in options - self.shell.is_okexcept = 'okexcept' in options - self.shell.is_okwarning = 'okwarning' in options - - # handle pure python code - if 'python' in self.arguments: - content = self.content - self.content = self.shell.process_pure_python(content) - - # parts consists of all text within the ipython-block. - # Each part is an input/output block. - parts = '\n'.join(self.content).split('\n\n') - - lines = ['.. code-block:: ipython', ''] - figures = [] - - # Use sphinx logger for warnings - logger = logging.getLogger(__name__) - - for part in parts: - block = block_parser(part, rgxin, rgxout, promptin, promptout) - if len(block): - rows, figure = self.shell.process_block(block) - for row in rows: - lines.extend([' {0}'.format(line) - for line in row.split('\n')]) - - if figure is not None: - figures.append(figure) - else: - message = 'Code input with no code at {}, line {}'\ - .format( - self.state.document.current_source, - self.state.document.current_line) - if self.shell.warning_is_error: - raise RuntimeError(message) - else: - logger.warning(message) - - for figure in figures: - lines.append('') - lines.extend(figure.split('\n')) - lines.append('') - - if len(lines) > 2: - if debug: - print('\n'.join(lines)) - else: - # This has to do with input, not output. But if we comment - # these lines out, then no IPython code will appear in the - # final output. 
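- # insert_input feeds the generated '.. code-block:: ipython' lines back
- # into the reST parser so they are rendered in place of the directive.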
- self.state_machine.insert_input( - lines, self.state_machine.input_lines.source(0)) - - # cleanup - self.teardown() - - return [] - -# Enable as a proper Sphinx directive -def setup(app): - setup.app = app - - app.add_directive('ipython', IPythonDirective) - app.add_config_value('ipython_savefig_dir', 'savefig', 'env') - app.add_config_value('ipython_warning_is_error', True, 'env') - app.add_config_value('ipython_rgxin', - re.compile(r'In \[(\d+)\]:\s?(.*)\s*'), 'env') - app.add_config_value('ipython_rgxout', - re.compile(r'Out\[(\d+)\]:\s?(.*)\s*'), 'env') - app.add_config_value('ipython_promptin', 'In [%d]:', 'env') - app.add_config_value('ipython_promptout', 'Out[%d]:', 'env') - - # We could just let matplotlib pick whatever is specified as the default - # backend in the matplotlibrc file, but this would cause issues if the - # backend didn't work in headless environments. For this reason, 'agg' - # is a good default backend choice. - app.add_config_value('ipython_mplbackend', 'agg', 'env') - - # If the user sets this config value to `None`, then EmbeddedSphinxShell's - # __init__ method will treat it as []. - execlines = ['import numpy as np'] - if use_matplotlib: - execlines.append('import matplotlib.pyplot as plt') - app.add_config_value('ipython_execlines', execlines, 'env') - - app.add_config_value('ipython_holdcount', True, 'env') - - metadata = {'parallel_read_safe': True, 'parallel_write_safe': True} - return metadata - -# Simple smoke test, needs to be converted to a proper automatic test. -def test(): - - examples = [ - r""" -In [9]: pwd -Out[9]: '/home/jdhunter/py4science/book' - -In [10]: cd bookdata/ -/home/jdhunter/py4science/book/bookdata - -In [2]: from pylab import * - -In [2]: ion() - -In [3]: im = imread('stinkbug.png') - -@savefig mystinkbug.png width=4in -In [4]: imshow(im) -Out[4]: <matplotlib.image.AxesImage object at 0x39ea850> - -""", - r""" - -In [1]: x = 'hello world' - -# string methods can be -# used to alter the string -@doctest -In [2]: x.upper() -Out[2]: 'HELLO WORLD' - -@verbatim -In [3]: x.st<TAB> -x.startswith x.strip -""", - r""" - -In [130]: url = 'http://ichart.finance.yahoo.com/table.csv?s=CROX\ - .....: &d=9&e=22&f=2009&g=d&a=1&br=8&c=2006&ignore=.csv' - -In [131]: print url.split('&') -['http://ichart.finance.yahoo.com/table.csv?s=CROX', 'd=9', 'e=22', 'f=2009', 'g=d', 'a=1', 'b=8', 'c=2006', 'ignore=.csv'] - -In [60]: import urllib - -""", - r"""\ - -In [133]: import numpy.random - -@suppress -In [134]: numpy.random.seed(2358) - -@doctest -In [135]: numpy.random.rand(10,2) -Out[135]: -array([[ 0.64524308, 0.59943846], - [ 0.47102322, 0.8715456 ], - [ 0.29370834, 0.74776844], - [ 0.99539577, 0.1313423 ], - [ 0.16250302, 0.21103583], - [ 0.81626524, 0.1312433 ], - [ 0.67338089, 0.72302393], - [ 0.7566368 , 0.07033696], - [ 0.22591016, 0.77731835], - [ 0.0072729 , 0.34273127]]) - -""", - - r""" -In [106]: print x -jdh - -In [109]: for i in range(10): - .....: print i - .....: - .....: -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -""", - - r""" - -In [144]: from pylab import * - -In [145]: ion() - -# use a semicolon to suppress the output -@savefig test_hist.png width=4in -In [151]: hist(np.random.randn(10000), 100); - - -@savefig test_plot.png width=4in -In [151]: plot(np.random.randn(10000), 'o'); - """, - - r""" -# use a semicolon to suppress the output -In [151]: plt.clf() - -@savefig plot_simple.png width=4in -In [151]: plot([1,2,3]) - -@savefig hist_simple.png width=4in -In [151]: hist(np.random.randn(10000), 100); - -""", - r""" -# update the current fig 
-In [151]: ylabel('number') - -In [152]: title('normal distribution') - - -@savefig hist_with_text.png -In [153]: grid(True) - -@doctest float -In [154]: 0.1 + 0.2 -Out[154]: 0.3 - -@doctest float -In [155]: np.arange(16).reshape(4,4) -Out[155]: -array([[ 0, 1, 2, 3], - [ 4, 5, 6, 7], - [ 8, 9, 10, 11], - [12, 13, 14, 15]]) - -In [1]: x = np.arange(16, dtype=float).reshape(4,4) - -In [2]: x[0,0] = np.inf - -In [3]: x[0,1] = np.nan - -@doctest float -In [4]: x -Out[4]: -array([[ inf, nan, 2., 3.], - [ 4., 5., 6., 7.], - [ 8., 9., 10., 11.], - [ 12., 13., 14., 15.]]) - - - """, - ] - # skip local-file depending first example: - examples = examples[1:] - - #ipython_directive.DEBUG = True # dbg - #options = dict(suppress=True) # dbg - options = {} - for example in examples: - content = example.split('\n') - IPythonDirective('debug', arguments=None, options=options, - content=content, lineno=0, - content_offset=None, block_text=None, - state=None, state_machine=None, - ) - -# Run test suite as a script -if __name__=='__main__': - if not os.path.isdir('_static'): - os.mkdir('_static') - test() - print('All OK? Check figures in _static/') +# -*- coding: utf-8 -*- +""" +Sphinx directive to support embedded IPython code. + +IPython provides an extension for `Sphinx <http://www.sphinx-doc.org/>`_ to +highlight and run code. + +This directive allows pasting of entire interactive IPython sessions, prompts +and all, and their code will actually get re-executed at doc build time, with +all prompts renumbered sequentially. It also allows you to input code as a pure +python input by giving the argument python to the directive. The output looks +like an interactive ipython section. + +Here is an example of how the IPython directive can +**run** python code, at build time. + +.. ipython:: + + In [1]: 1+1 + + In [1]: import datetime + ...: datetime.datetime.now() + +It supports IPython construct that plain +Python does not understand (like magics): + +.. ipython:: + + In [0]: import time + + In [0]: %timeit time.sleep(0.05) + +This will also support top-level async when using IPython 7.0+ + +.. ipython:: + + In [2]: import asyncio + ...: print('before') + ...: await asyncio.sleep(1) + ...: print('after') + + +The namespace will persist across multiple code chucks, Let's define a variable: + +.. ipython:: + + In [0]: who = "World" + +And now say hello: + +.. ipython:: + + In [0]: print('Hello,', who) + +If the current section raises an exception, you can add the ``:okexcept:`` flag +to the current block, otherwise the build will fail. + +.. ipython:: + :okexcept: + + In [1]: 1/0 + +IPython Sphinx directive module +=============================== + +To enable this directive, simply list it in your Sphinx ``conf.py`` file +(making sure the directory where you placed it is visible to sphinx, as is +needed for all Sphinx directives). For example, to enable syntax highlighting +and the IPython directive:: + + extensions = ['IPython.sphinxext.ipython_console_highlighting', + 'IPython.sphinxext.ipython_directive'] + +The IPython directive outputs code-blocks with the language 'ipython'. So +if you do not have the syntax highlighting extension enabled as well, then +all rendered code-blocks will be uncolored. By default this directive assumes +that your prompts are unchanged IPython ones, but this can be customized. +The configurable options that can be placed in conf.py are: + +ipython_savefig_dir: + The directory in which to save the figures. This is relative to the + Sphinx source directory. 
The default is `html_static_path`. +ipython_rgxin: + The compiled regular expression to denote the start of IPython input + lines. The default is ``re.compile('In \\[(\\d+)\\]:\\s?(.*)\\s*')``. You + shouldn't need to change this. +ipython_warning_is_error: [default to True] + Fail the build if something unexpected happen, for example if a block raise + an exception but does not have the `:okexcept:` flag. The exact behavior of + what is considered strict, may change between the sphinx directive version. +ipython_rgxout: + The compiled regular expression to denote the start of IPython output + lines. The default is ``re.compile('Out\\[(\\d+)\\]:\\s?(.*)\\s*')``. You + shouldn't need to change this. +ipython_promptin: + The string to represent the IPython input prompt in the generated ReST. + The default is ``'In [%d]:'``. This expects that the line numbers are used + in the prompt. +ipython_promptout: + The string to represent the IPython prompt in the generated ReST. The + default is ``'Out [%d]:'``. This expects that the line numbers are used + in the prompt. +ipython_mplbackend: + The string which specifies if the embedded Sphinx shell should import + Matplotlib and set the backend. The value specifies a backend that is + passed to `matplotlib.use()` before any lines in `ipython_execlines` are + executed. If not specified in conf.py, then the default value of 'agg' is + used. To use the IPython directive without matplotlib as a dependency, set + the value to `None`. It may end up that matplotlib is still imported + if the user specifies so in `ipython_execlines` or makes use of the + @savefig pseudo decorator. +ipython_execlines: + A list of strings to be exec'd in the embedded Sphinx shell. Typical + usage is to make certain packages always available. Set this to an empty + list if you wish to have no imports always available. If specified in + ``conf.py`` as `None`, then it has the effect of making no imports available. + If omitted from conf.py altogether, then the default value of + ['import numpy as np', 'import matplotlib.pyplot as plt'] is used. +ipython_holdcount + When the @suppress pseudo-decorator is used, the execution count can be + incremented or not. The default behavior is to hold the execution count, + corresponding to a value of `True`. Set this to `False` to increment + the execution count after each suppressed command. + +As an example, to use the IPython directive when `matplotlib` is not available, +one sets the backend to `None`:: + + ipython_mplbackend = None + +An example usage of the directive is: + +.. code-block:: rst + + .. ipython:: + + In [1]: x = 1 + + In [2]: y = x**2 + + In [3]: print(y) + +See http://matplotlib.org/sampledoc/ipython_directive.html for additional +documentation. + +Pseudo-Decorators +================= + +Note: Only one decorator is supported per input. If more than one decorator +is specified, then only the last one is used. + +In addition to the Pseudo-Decorators/options described at the above link, +several enhancements have been made. The directive will emit a message to the +console at build-time if code-execution resulted in an exception or warning. +You can suppress these on a per-block basis by specifying the :okexcept: +or :okwarning: options: + +.. code-block:: rst + + .. ipython:: + :okexcept: + :okwarning: + + In [1]: 1/0 + In [2]: # raise warning. + +To Do +===== + +- Turn the ad-hoc test() function into a real test suite. +- Break up ipython-specific functionality from matplotlib stuff into better + separated code. 
+ +""" + +# Authors +# ======= +# +# - John D Hunter: original author. +# - Fernando Perez: refactoring, documentation, cleanups, port to 0.11. +# - VáclavŠmilauer <eudoxos-AT-arcig.cz>: Prompt generalizations. +# - Skipper Seabold, refactoring, cleanups, pure python addition + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Stdlib +import atexit +import errno +import os +import pathlib +import re +import sys +import tempfile +import ast +import warnings +import shutil +from io import StringIO + +# Third-party +from docutils.parsers.rst import directives +from docutils.parsers.rst import Directive +from sphinx.util import logging + +# Our own +from traitlets.config import Config +from IPython import InteractiveShell +from IPython.core.profiledir import ProfileDir + +use_matplotlib = False +try: + import matplotlib + use_matplotlib = True +except Exception: + pass + +#----------------------------------------------------------------------------- +# Globals +#----------------------------------------------------------------------------- +# for tokenizing blocks +COMMENT, INPUT, OUTPUT = range(3) + +#----------------------------------------------------------------------------- +# Functions and class declarations +#----------------------------------------------------------------------------- + +def block_parser(part, rgxin, rgxout, fmtin, fmtout): + """ + part is a string of ipython text, comprised of at most one + input, one output, comments, and blank lines. The block parser + parses the text into a list of:: + + blocks = [ (TOKEN0, data0), (TOKEN1, data1), ...] + + where TOKEN is one of [COMMENT | INPUT | OUTPUT ] and + data is, depending on the type of token:: + + COMMENT : the comment string + + INPUT: the (DECORATOR, INPUT_LINE, REST) where + DECORATOR: the input decorator (or None) + INPUT_LINE: the input as string (possibly multi-line) + REST : any stdout generated by the input line (not OUTPUT) + + OUTPUT: the output string, possibly multi-line + + """ + block = [] + lines = part.split('\n') + N = len(lines) + i = 0 + decorator = None + while 1: + + if i==N: + # nothing left to parse -- the last line + break + + line = lines[i] + i += 1 + line_stripped = line.strip() + if line_stripped.startswith('#'): + block.append((COMMENT, line)) + continue + + if line_stripped.startswith('@'): + # Here is where we assume there is, at most, one decorator. + # Might need to rethink this. + decorator = line_stripped + continue + + # does this look like an input line? + matchin = rgxin.match(line) + if matchin: + lineno, inputline = int(matchin.group(1)), matchin.group(2) + + # the ....: continuation string + continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2)) + Nc = len(continuation) + # input lines can continue on for more than one line, if + # we have a '\' line continuation char or a function call + # echo line 'print'. 
The input line can only be + # terminated by the end of the block or an output line, so + # we parse out the rest of the input line if it is + # multiline as well as any echo text + + rest = [] + while i<N: + + # look ahead; if the next line is blank, or a comment, or + # an output line, we're done + + nextline = lines[i] + matchout = rgxout.match(nextline) + #print "nextline=%s, continuation=%s, starts=%s"%(nextline, continuation, nextline.startswith(continuation)) + if matchout or nextline.startswith('#'): + break + elif nextline.startswith(continuation): + # The default ipython_rgx* treat the space following the colon as optional. + # However, If the space is there we must consume it or code + # employing the cython_magic extension will fail to execute. + # + # This works with the default ipython_rgx* patterns, + # If you modify them, YMMV. + nextline = nextline[Nc:] + if nextline and nextline[0] == ' ': + nextline = nextline[1:] + + inputline += '\n' + nextline + else: + rest.append(nextline) + i+= 1 + + block.append((INPUT, (decorator, inputline, '\n'.join(rest)))) + continue + + # if it looks like an output line grab all the text to the end + # of the block + matchout = rgxout.match(line) + if matchout: + lineno, output = int(matchout.group(1)), matchout.group(2) + if i<N-1: + output = '\n'.join([output] + lines[i:]) + + block.append((OUTPUT, output)) + break + + return block + + +class EmbeddedSphinxShell(object): + """An embedded IPython instance to run inside Sphinx""" + + def __init__(self, exec_lines=None): + + self.cout = StringIO() + + if exec_lines is None: + exec_lines = [] + + # Create config object for IPython + config = Config() + config.HistoryManager.hist_file = ':memory:' + config.InteractiveShell.autocall = False + config.InteractiveShell.autoindent = False + config.InteractiveShell.colors = 'NoColor' + + # create a profile so instance history isn't saved + tmp_profile_dir = tempfile.mkdtemp(prefix='profile_') + profname = 'auto_profile_sphinx_build' + pdir = os.path.join(tmp_profile_dir,profname) + profile = ProfileDir.create_profile_dir(pdir) + + # Create and initialize global ipython, but don't start its mainloop. + # This will persist across different EmbeddedSphinxShell instances. + IP = InteractiveShell.instance(config=config, profile_dir=profile) + atexit.register(self.cleanup) + + # Store a few parts of IPython we'll need. + self.IP = IP + self.user_ns = self.IP.user_ns + self.user_global_ns = self.IP.user_global_ns + + self.input = '' + self.output = '' + self.tmp_profile_dir = tmp_profile_dir + + self.is_verbatim = False + self.is_doctest = False + self.is_suppress = False + + # Optionally, provide more detailed information to shell. + # this is assigned by the SetUp method of IPythonDirective + # to point at itself. + # + # So, you can access handy things at self.directive.state + self.directive = None + + # on the first call to the savefig decorator, we'll import + # pyplot as plt so we can make a call to the plt.gcf().savefig + self._pyplot_imported = False + + # Prepopulate the namespace. 
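+ # exec_lines comes from the ipython_execlines config option; the lines
+ # are run without history so prompt numbering still starts at In [1].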
+ for line in exec_lines: + self.process_input_line(line, store_history=False) + + def cleanup(self): + shutil.rmtree(self.tmp_profile_dir, ignore_errors=True) + + def clear_cout(self): + self.cout.seek(0) + self.cout.truncate(0) + + def process_input_line(self, line, store_history): + return self.process_input_lines([line], store_history=store_history) + + def process_input_lines(self, lines, store_history=True): + """process the input, capturing stdout""" + stdout = sys.stdout + source_raw = '\n'.join(lines) + try: + sys.stdout = self.cout + self.IP.run_cell(source_raw, store_history=store_history) + finally: + sys.stdout = stdout + + def process_image(self, decorator): + """ + # build out an image directive like + # .. image:: somefile.png + # :width 4in + # + # from an input like + # savefig somefile.png width=4in + """ + savefig_dir = self.savefig_dir + source_dir = self.source_dir + saveargs = decorator.split(' ') + filename = saveargs[1] + # insert relative path to image file in source + # as absolute path for Sphinx + # sphinx expects a posix path, even on Windows + path = pathlib.Path(savefig_dir, filename) + outfile = '/' + path.relative_to(source_dir).as_posix() + + imagerows = ['.. image:: %s' % outfile] + + for kwarg in saveargs[2:]: + arg, val = kwarg.split('=') + arg = arg.strip() + val = val.strip() + imagerows.append(' :%s: %s'%(arg, val)) + + image_file = os.path.basename(outfile) # only return file name + image_directive = '\n'.join(imagerows) + return image_file, image_directive + + # Callbacks for each type of token + def process_input(self, data, input_prompt, lineno): + """ + Process data block for INPUT token. + + """ + decorator, input, rest = data + image_file = None + image_directive = None + + is_verbatim = decorator=='@verbatim' or self.is_verbatim + is_doctest = (decorator is not None and \ + decorator.startswith('@doctest')) or self.is_doctest + is_suppress = decorator=='@suppress' or self.is_suppress + is_okexcept = decorator=='@okexcept' or self.is_okexcept + is_okwarning = decorator=='@okwarning' or self.is_okwarning + is_savefig = decorator is not None and \ + decorator.startswith('@savefig') + + input_lines = input.split('\n') + if len(input_lines) > 1: + if input_lines[-1] != "": + input_lines.append('') # make sure there's a blank line + # so splitter buffer gets reset + + continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2)) + + if is_savefig: + image_file, image_directive = self.process_image(decorator) + + ret = [] + is_semicolon = False + + # Hold the execution count, if requested to do so. + if is_suppress and self.hold_count: + store_history = False + else: + store_history = True + + # Note: catch_warnings is not thread safe + with warnings.catch_warnings(record=True) as ws: + if input_lines[0].endswith(';'): + is_semicolon = True + #for i, line in enumerate(input_lines): + + # process the first input line + if is_verbatim: + self.process_input_lines(['']) + self.IP.execution_count += 1 # increment it anyway + else: + # only submit the line in non-verbatim mode + self.process_input_lines(input_lines, store_history=store_history) + + if not is_suppress: + for i, line in enumerate(input_lines): + if i == 0: + formatted_line = '%s %s'%(input_prompt, line) + else: + formatted_line = '%s %s'%(continuation, line) + ret.append(formatted_line) + + if not is_suppress and len(rest.strip()) and is_verbatim: + # The "rest" is the standard output of the input. This needs to be + # added when in verbatim mode. 
If there is no "rest", then we don't + # add it, as the new line will be added by the processed output. + ret.append(rest) + + # Fetch the processed output. (This is not the submitted output.) + self.cout.seek(0) + processed_output = self.cout.read() + if not is_suppress and not is_semicolon: + # + # In IPythonDirective.run, the elements of `ret` are eventually + # combined such that '' entries correspond to newlines. So if + # `processed_output` is equal to '', then the adding it to `ret` + # ensures that there is a blank line between consecutive inputs + # that have no outputs, as in: + # + # In [1]: x = 4 + # + # In [2]: x = 5 + # + # When there is processed output, it has a '\n' at the tail end. So + # adding the output to `ret` will provide the necessary spacing + # between consecutive input/output blocks, as in: + # + # In [1]: x + # Out[1]: 5 + # + # In [2]: x + # Out[2]: 5 + # + # When there is stdout from the input, it also has a '\n' at the + # tail end, and so this ensures proper spacing as well. E.g.: + # + # In [1]: print x + # 5 + # + # In [2]: x = 5 + # + # When in verbatim mode, `processed_output` is empty (because + # nothing was passed to IP. Sometimes the submitted code block has + # an Out[] portion and sometimes it does not. When it does not, we + # need to ensure proper spacing, so we have to add '' to `ret`. + # However, if there is an Out[] in the submitted code, then we do + # not want to add a newline as `process_output` has stuff to add. + # The difficulty is that `process_input` doesn't know if + # `process_output` will be called---so it doesn't know if there is + # Out[] in the code block. The requires that we include a hack in + # `process_block`. See the comments there. + # + ret.append(processed_output) + elif is_semicolon: + # Make sure there is a newline after the semicolon. + ret.append('') + + # context information + filename = "Unknown" + lineno = 0 + if self.directive.state: + filename = self.directive.state.document.current_source + lineno = self.directive.state.document.current_line + + # Use sphinx logger for warnings + logger = logging.getLogger(__name__) + + # output any exceptions raised during execution to stdout + # unless :okexcept: has been specified. + if not is_okexcept and ( + ("Traceback" in processed_output) or ("SyntaxError" in processed_output) + ): + s = "\n>>>" + ("-" * 73) + "\n" + s += "Exception in %s at block ending on line %s\n" % (filename, lineno) + s += "Specify :okexcept: as an option in the ipython:: block to suppress this message\n" + s += processed_output + "\n" + s += "<<<" + ("-" * 73) + logger.warning(s) + if self.warning_is_error: + raise RuntimeError('Non Expected exception in `{}` line {}'.format(filename, lineno)) + + # output any warning raised during execution to stdout + # unless :okwarning: has been specified. 
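+ # ws holds the warnings recorded by catch_warnings(record=True) above;
+ # each one is reported through the Sphinx logger.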
+ if not is_okwarning: + for w in ws: + s = "\n>>>" + ("-" * 73) + "\n" + s += "Warning in %s at block ending on line %s\n" % (filename, lineno) + s += "Specify :okwarning: as an option in the ipython:: block to suppress this message\n" + s += ("-" * 76) + "\n" + s += warnings.formatwarning( + w.message, w.category, w.filename, w.lineno, w.line + ) + s += "<<<" + ("-" * 73) + logger.warning(s) + if self.warning_is_error: + raise RuntimeError('Non Expected warning in `{}` line {}'.format(filename, lineno)) + + self.clear_cout() + return (ret, input_lines, processed_output, + is_doctest, decorator, image_file, image_directive) + + + def process_output(self, data, output_prompt, input_lines, output, + is_doctest, decorator, image_file): + """ + Process data block for OUTPUT token. + + """ + # Recall: `data` is the submitted output, and `output` is the processed + # output from `input_lines`. + + TAB = ' ' * 4 + + if is_doctest and output is not None: + + found = output # This is the processed output + found = found.strip() + submitted = data.strip() + + if self.directive is None: + source = 'Unavailable' + content = 'Unavailable' + else: + source = self.directive.state.document.current_source + content = self.directive.content + # Add tabs and join into a single string. + content = '\n'.join([TAB + line for line in content]) + + # Make sure the output contains the output prompt. + ind = found.find(output_prompt) + if ind < 0: + e = ('output does not contain output prompt\n\n' + 'Document source: {0}\n\n' + 'Raw content: \n{1}\n\n' + 'Input line(s):\n{TAB}{2}\n\n' + 'Output line(s):\n{TAB}{3}\n\n') + e = e.format(source, content, '\n'.join(input_lines), + repr(found), TAB=TAB) + raise RuntimeError(e) + found = found[len(output_prompt):].strip() + + # Handle the actual doctest comparison. + if decorator.strip() == '@doctest': + # Standard doctest + if found != submitted: + e = ('doctest failure\n\n' + 'Document source: {0}\n\n' + 'Raw content: \n{1}\n\n' + 'On input line(s):\n{TAB}{2}\n\n' + 'we found output:\n{TAB}{3}\n\n' + 'instead of the expected:\n{TAB}{4}\n\n') + e = e.format(source, content, '\n'.join(input_lines), + repr(found), repr(submitted), TAB=TAB) + raise RuntimeError(e) + else: + self.custom_doctest(decorator, input_lines, found, submitted) + + # When in verbatim mode, this holds additional submitted output + # to be written in the final Sphinx output. + # https://github.com/ipython/ipython/issues/5776 + out_data = [] + + is_verbatim = decorator=='@verbatim' or self.is_verbatim + if is_verbatim and data.strip(): + # Note that `ret` in `process_block` has '' as its last element if + # the code block was in verbatim mode. So if there is no submitted + # output, then we will have proper spacing only if we do not add + # an additional '' to `out_data`. This is why we condition on + # `and data.strip()`. + + # The submitted output has no output prompt. If we want the + # prompt and the code to appear, we need to join them now + # instead of adding them separately---as this would create an + # undesired newline. How we do this ultimately depends on the + # format of the output regex. I'll do what works for the default + # prompt for now, and we might have to adjust if it doesn't work + # in other cases. Finally, the submitted output does not have + # a trailing newline, so we must add it manually. 
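+ # Join the prompt and the submitted text on one line and restore the
+ # trailing newline, e.g. "Out[2]: 'HELLO WORLD'\n".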
+ out_data.append("{0} {1}\n".format(output_prompt, data)) + + return out_data + + def process_comment(self, data): + """Process data fPblock for COMMENT token.""" + if not self.is_suppress: + return [data] + + def save_image(self, image_file): + """ + Saves the image file to disk. + """ + self.ensure_pyplot() + command = 'plt.gcf().savefig("%s")'%image_file + #print 'SAVEFIG', command # dbg + self.process_input_line('bookmark ipy_thisdir', store_history=False) + self.process_input_line('cd -b ipy_savedir', store_history=False) + self.process_input_line(command, store_history=False) + self.process_input_line('cd -b ipy_thisdir', store_history=False) + self.process_input_line('bookmark -d ipy_thisdir', store_history=False) + self.clear_cout() + + def process_block(self, block): + """ + process block from the block_parser and return a list of processed lines + """ + ret = [] + output = None + input_lines = None + lineno = self.IP.execution_count + + input_prompt = self.promptin % lineno + output_prompt = self.promptout % lineno + image_file = None + image_directive = None + + found_input = False + for token, data in block: + if token == COMMENT: + out_data = self.process_comment(data) + elif token == INPUT: + found_input = True + (out_data, input_lines, output, is_doctest, + decorator, image_file, image_directive) = \ + self.process_input(data, input_prompt, lineno) + elif token == OUTPUT: + if not found_input: + + TAB = ' ' * 4 + linenumber = 0 + source = 'Unavailable' + content = 'Unavailable' + if self.directive: + linenumber = self.directive.state.document.current_line + source = self.directive.state.document.current_source + content = self.directive.content + # Add tabs and join into a single string. + content = '\n'.join([TAB + line for line in content]) + + e = ('\n\nInvalid block: Block contains an output prompt ' + 'without an input prompt.\n\n' + 'Document source: {0}\n\n' + 'Content begins at line {1}: \n\n{2}\n\n' + 'Problematic block within content: \n\n{TAB}{3}\n\n') + e = e.format(source, linenumber, content, block, TAB=TAB) + + # Write, rather than include in exception, since Sphinx + # will truncate tracebacks. + sys.stdout.write(e) + raise RuntimeError('An invalid block was detected.') + out_data = \ + self.process_output(data, output_prompt, input_lines, + output, is_doctest, decorator, + image_file) + if out_data: + # Then there was user submitted output in verbatim mode. + # We need to remove the last element of `ret` that was + # added in `process_input`, as it is '' and would introduce + # an undesirable newline. + assert(ret[-1] == '') + del ret[-1] + + if out_data: + ret.extend(out_data) + + # save the image files + if image_file is not None: + self.save_image(image_file) + + return ret, image_directive + + def ensure_pyplot(self): + """ + Ensures that pyplot has been imported into the embedded IPython shell. + + Also, makes sure to set the backend appropriately if not set already. + + """ + # We are here if the @figure pseudo decorator was used. Thus, it's + # possible that we could be here even if python_mplbackend were set to + # `None`. That's also strange and perhaps worthy of raising an + # exception, but for now, we just set the backend to 'agg'. + + if not self._pyplot_imported: + if 'matplotlib.backends' not in sys.modules: + # Then ipython_matplotlib was set to None but there was a + # call to the @figure decorator (and ipython_execlines did + # not set a backend). 
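+ # Fall back to the non-interactive 'agg' backend instead of failing
+ # the build.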
+ #raise Exception("No backend was set, but @figure was used!") + import matplotlib + matplotlib.use('agg') + + # Always import pyplot into embedded shell. + self.process_input_line('import matplotlib.pyplot as plt', + store_history=False) + self._pyplot_imported = True + + def process_pure_python(self, content): + """ + content is a list of strings. it is unedited directive content + + This runs it line by line in the InteractiveShell, prepends + prompts as needed capturing stderr and stdout, then returns + the content as a list as if it were ipython code + """ + output = [] + savefig = False # keep up with this to clear figure + multiline = False # to handle line continuation + multiline_start = None + fmtin = self.promptin + + ct = 0 + + for lineno, line in enumerate(content): + + line_stripped = line.strip() + if not len(line): + output.append(line) + continue + + # handle decorators + if line_stripped.startswith('@'): + output.extend([line]) + if 'savefig' in line: + savefig = True # and need to clear figure + continue + + # handle comments + if line_stripped.startswith('#'): + output.extend([line]) + continue + + # deal with lines checking for multiline + continuation = u' %s:'% ''.join(['.']*(len(str(ct))+2)) + if not multiline: + modified = u"%s %s" % (fmtin % ct, line_stripped) + output.append(modified) + ct += 1 + try: + ast.parse(line_stripped) + output.append(u'') + except Exception: # on a multiline + multiline = True + multiline_start = lineno + else: # still on a multiline + modified = u'%s %s' % (continuation, line) + output.append(modified) + + # if the next line is indented, it should be part of multiline + if len(content) > lineno + 1: + nextline = content[lineno + 1] + if len(nextline) - len(nextline.lstrip()) > 3: + continue + try: + mod = ast.parse( + '\n'.join(content[multiline_start:lineno+1])) + if isinstance(mod.body[0], ast.FunctionDef): + # check to see if we have the whole function + for element in mod.body[0].body: + if isinstance(element, ast.Return): + multiline = False + else: + output.append(u'') + multiline = False + except Exception: + pass + + if savefig: # clear figure if plotted + self.ensure_pyplot() + self.process_input_line('plt.clf()', store_history=False) + self.clear_cout() + savefig = False + + return output + + def custom_doctest(self, decorator, input_lines, found, submitted): + """ + Perform a specialized doctest. 
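+ The decorator has the form ``@doctest <type>`` (for example
+ ``@doctest float``); the handler is looked up in
+ ``custom_doctests.doctests``.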
+ + """ + from .custom_doctests import doctests + + args = decorator.split() + doctest_type = args[1] + if doctest_type in doctests: + doctests[doctest_type](self, args, input_lines, found, submitted) + else: + e = "Invalid option to @doctest: {0}".format(doctest_type) + raise Exception(e) + + +class IPythonDirective(Directive): + + has_content = True + required_arguments = 0 + optional_arguments = 4 # python, suppress, verbatim, doctest + final_argumuent_whitespace = True + option_spec = { 'python': directives.unchanged, + 'suppress' : directives.flag, + 'verbatim' : directives.flag, + 'doctest' : directives.flag, + 'okexcept': directives.flag, + 'okwarning': directives.flag + } + + shell = None + + seen_docs = set() + + def get_config_options(self): + # contains sphinx configuration variables + config = self.state.document.settings.env.config + + # get config variables to set figure output directory + savefig_dir = config.ipython_savefig_dir + source_dir = self.state.document.settings.env.srcdir + savefig_dir = os.path.join(source_dir, savefig_dir) + + # get regex and prompt stuff + rgxin = config.ipython_rgxin + rgxout = config.ipython_rgxout + warning_is_error= config.ipython_warning_is_error + promptin = config.ipython_promptin + promptout = config.ipython_promptout + mplbackend = config.ipython_mplbackend + exec_lines = config.ipython_execlines + hold_count = config.ipython_holdcount + + return (savefig_dir, source_dir, rgxin, rgxout, + promptin, promptout, mplbackend, exec_lines, hold_count, warning_is_error) + + def setup(self): + # Get configuration values. + (savefig_dir, source_dir, rgxin, rgxout, promptin, promptout, + mplbackend, exec_lines, hold_count, warning_is_error) = self.get_config_options() + + try: + os.makedirs(savefig_dir) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + if self.shell is None: + # We will be here many times. However, when the + # EmbeddedSphinxShell is created, its interactive shell member + # is the same for each instance. + + if mplbackend and 'matplotlib.backends' not in sys.modules and use_matplotlib: + import matplotlib + matplotlib.use(mplbackend) + + # Must be called after (potentially) importing matplotlib and + # setting its backend since exec_lines might import pylab. + self.shell = EmbeddedSphinxShell(exec_lines) + + # Store IPython directive to enable better error messages + self.shell.directive = self + + # reset the execution count if we haven't processed this doc + #NOTE: this may be borked if there are multiple seen_doc tmp files + #check time stamp? 
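+ # Resetting history and the execution counter makes prompt numbering
+ # restart at In [1] for every new document.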
+ if not self.state.document.current_source in self.seen_docs: + self.shell.IP.history_manager.reset() + self.shell.IP.execution_count = 1 + self.seen_docs.add(self.state.document.current_source) + + # and attach to shell so we don't have to pass them around + self.shell.rgxin = rgxin + self.shell.rgxout = rgxout + self.shell.promptin = promptin + self.shell.promptout = promptout + self.shell.savefig_dir = savefig_dir + self.shell.source_dir = source_dir + self.shell.hold_count = hold_count + self.shell.warning_is_error = warning_is_error + + # setup bookmark for saving figures directory + self.shell.process_input_line('bookmark ipy_savedir %s'%savefig_dir, + store_history=False) + self.shell.clear_cout() + + return rgxin, rgxout, promptin, promptout + + def teardown(self): + # delete last bookmark + self.shell.process_input_line('bookmark -d ipy_savedir', + store_history=False) + self.shell.clear_cout() + + def run(self): + debug = False + + #TODO, any reason block_parser can't be a method of embeddable shell + # then we wouldn't have to carry these around + rgxin, rgxout, promptin, promptout = self.setup() + + options = self.options + self.shell.is_suppress = 'suppress' in options + self.shell.is_doctest = 'doctest' in options + self.shell.is_verbatim = 'verbatim' in options + self.shell.is_okexcept = 'okexcept' in options + self.shell.is_okwarning = 'okwarning' in options + + # handle pure python code + if 'python' in self.arguments: + content = self.content + self.content = self.shell.process_pure_python(content) + + # parts consists of all text within the ipython-block. + # Each part is an input/output block. + parts = '\n'.join(self.content).split('\n\n') + + lines = ['.. code-block:: ipython', ''] + figures = [] + + # Use sphinx logger for warnings + logger = logging.getLogger(__name__) + + for part in parts: + block = block_parser(part, rgxin, rgxout, promptin, promptout) + if len(block): + rows, figure = self.shell.process_block(block) + for row in rows: + lines.extend([' {0}'.format(line) + for line in row.split('\n')]) + + if figure is not None: + figures.append(figure) + else: + message = 'Code input with no code at {}, line {}'\ + .format( + self.state.document.current_source, + self.state.document.current_line) + if self.shell.warning_is_error: + raise RuntimeError(message) + else: + logger.warning(message) + + for figure in figures: + lines.append('') + lines.extend(figure.split('\n')) + lines.append('') + + if len(lines) > 2: + if debug: + print('\n'.join(lines)) + else: + # This has to do with input, not output. But if we comment + # these lines out, then no IPython code will appear in the + # final output. 
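+ # lines already holds a complete '.. code-block:: ipython' section plus
+ # any collected figure directives.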
+ self.state_machine.insert_input( + lines, self.state_machine.input_lines.source(0)) + + # cleanup + self.teardown() + + return [] + +# Enable as a proper Sphinx directive +def setup(app): + setup.app = app + + app.add_directive('ipython', IPythonDirective) + app.add_config_value('ipython_savefig_dir', 'savefig', 'env') + app.add_config_value('ipython_warning_is_error', True, 'env') + app.add_config_value('ipython_rgxin', + re.compile(r'In \[(\d+)\]:\s?(.*)\s*'), 'env') + app.add_config_value('ipython_rgxout', + re.compile(r'Out\[(\d+)\]:\s?(.*)\s*'), 'env') + app.add_config_value('ipython_promptin', 'In [%d]:', 'env') + app.add_config_value('ipython_promptout', 'Out[%d]:', 'env') + + # We could just let matplotlib pick whatever is specified as the default + # backend in the matplotlibrc file, but this would cause issues if the + # backend didn't work in headless environments. For this reason, 'agg' + # is a good default backend choice. + app.add_config_value('ipython_mplbackend', 'agg', 'env') + + # If the user sets this config value to `None`, then EmbeddedSphinxShell's + # __init__ method will treat it as []. + execlines = ['import numpy as np'] + if use_matplotlib: + execlines.append('import matplotlib.pyplot as plt') + app.add_config_value('ipython_execlines', execlines, 'env') + + app.add_config_value('ipython_holdcount', True, 'env') + + metadata = {'parallel_read_safe': True, 'parallel_write_safe': True} + return metadata + +# Simple smoke test, needs to be converted to a proper automatic test. +def test(): + + examples = [ + r""" +In [9]: pwd +Out[9]: '/home/jdhunter/py4science/book' + +In [10]: cd bookdata/ +/home/jdhunter/py4science/book/bookdata + +In [2]: from pylab import * + +In [2]: ion() + +In [3]: im = imread('stinkbug.png') + +@savefig mystinkbug.png width=4in +In [4]: imshow(im) +Out[4]: <matplotlib.image.AxesImage object at 0x39ea850> + +""", + r""" + +In [1]: x = 'hello world' + +# string methods can be +# used to alter the string +@doctest +In [2]: x.upper() +Out[2]: 'HELLO WORLD' + +@verbatim +In [3]: x.st<TAB> +x.startswith x.strip +""", + r""" + +In [130]: url = 'http://ichart.finance.yahoo.com/table.csv?s=CROX\ + .....: &d=9&e=22&f=2009&g=d&a=1&br=8&c=2006&ignore=.csv' + +In [131]: print url.split('&') +['http://ichart.finance.yahoo.com/table.csv?s=CROX', 'd=9', 'e=22', 'f=2009', 'g=d', 'a=1', 'b=8', 'c=2006', 'ignore=.csv'] + +In [60]: import urllib + +""", + r"""\ + +In [133]: import numpy.random + +@suppress +In [134]: numpy.random.seed(2358) + +@doctest +In [135]: numpy.random.rand(10,2) +Out[135]: +array([[ 0.64524308, 0.59943846], + [ 0.47102322, 0.8715456 ], + [ 0.29370834, 0.74776844], + [ 0.99539577, 0.1313423 ], + [ 0.16250302, 0.21103583], + [ 0.81626524, 0.1312433 ], + [ 0.67338089, 0.72302393], + [ 0.7566368 , 0.07033696], + [ 0.22591016, 0.77731835], + [ 0.0072729 , 0.34273127]]) + +""", + + r""" +In [106]: print x +jdh + +In [109]: for i in range(10): + .....: print i + .....: + .....: +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +""", + + r""" + +In [144]: from pylab import * + +In [145]: ion() + +# use a semicolon to suppress the output +@savefig test_hist.png width=4in +In [151]: hist(np.random.randn(10000), 100); + + +@savefig test_plot.png width=4in +In [151]: plot(np.random.randn(10000), 'o'); + """, + + r""" +# use a semicolon to suppress the output +In [151]: plt.clf() + +@savefig plot_simple.png width=4in +In [151]: plot([1,2,3]) + +@savefig hist_simple.png width=4in +In [151]: hist(np.random.randn(10000), 100); + +""", + r""" +# update the current fig 
+In [151]: ylabel('number') + +In [152]: title('normal distribution') + + +@savefig hist_with_text.png +In [153]: grid(True) + +@doctest float +In [154]: 0.1 + 0.2 +Out[154]: 0.3 + +@doctest float +In [155]: np.arange(16).reshape(4,4) +Out[155]: +array([[ 0, 1, 2, 3], + [ 4, 5, 6, 7], + [ 8, 9, 10, 11], + [12, 13, 14, 15]]) + +In [1]: x = np.arange(16, dtype=float).reshape(4,4) + +In [2]: x[0,0] = np.inf + +In [3]: x[0,1] = np.nan + +@doctest float +In [4]: x +Out[4]: +array([[ inf, nan, 2., 3.], + [ 4., 5., 6., 7.], + [ 8., 9., 10., 11.], + [ 12., 13., 14., 15.]]) + + + """, + ] + # skip local-file depending first example: + examples = examples[1:] + + #ipython_directive.DEBUG = True # dbg + #options = dict(suppress=True) # dbg + options = {} + for example in examples: + content = example.split('\n') + IPythonDirective('debug', arguments=None, options=options, + content=content, lineno=0, + content_offset=None, block_text=None, + state=None, state_machine=None, + ) + +# Run test suite as a script +if __name__=='__main__': + if not os.path.isdir('_static'): + os.mkdir('_static') + test() + print('All OK? Check figures in _static/') diff --git a/contrib/python/ipython/py3/IPython/terminal/console.py b/contrib/python/ipython/py3/IPython/terminal/console.py index ccf3478c3de..65571a7572d 100644 --- a/contrib/python/ipython/py3/IPython/terminal/console.py +++ b/contrib/python/ipython/py3/IPython/terminal/console.py @@ -1,19 +1,19 @@ -""" -Shim to maintain backwards compatibility with old IPython.terminal.console imports. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -from warnings import warn - -from IPython.utils.shimmodule import ShimModule, ShimWarning - -warn("The `IPython.terminal.console` package has been deprecated since IPython 4.0. " - "You should import from jupyter_console instead.", ShimWarning) - -# Unconditionally insert the shim into sys.modules so that further import calls -# trigger the custom attribute access above - -sys.modules['IPython.terminal.console'] = ShimModule( - src='IPython.terminal.console', mirror='jupyter_console') +""" +Shim to maintain backwards compatibility with old IPython.terminal.console imports. +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import sys +from warnings import warn + +from IPython.utils.shimmodule import ShimModule, ShimWarning + +warn("The `IPython.terminal.console` package has been deprecated since IPython 4.0. 
" + "You should import from jupyter_console instead.", ShimWarning) + +# Unconditionally insert the shim into sys.modules so that further import calls +# trigger the custom attribute access above + +sys.modules['IPython.terminal.console'] = ShimModule( + src='IPython.terminal.console', mirror='jupyter_console') diff --git a/contrib/python/ipython/py3/IPython/terminal/debugger.py b/contrib/python/ipython/py3/IPython/terminal/debugger.py index f9a4ca445f8..db8ecac0d26 100644 --- a/contrib/python/ipython/py3/IPython/terminal/debugger.py +++ b/contrib/python/ipython/py3/IPython/terminal/debugger.py @@ -1,146 +1,146 @@ -import asyncio -import signal -import sys - -from IPython.core.debugger import Pdb -from IPython.core.completer import IPCompleter -from .ptutils import IPythonPTCompleter -from .shortcuts import create_ipython_shortcuts, suspend_to_bg, cursor_in_leading_ws - -from prompt_toolkit.enums import DEFAULT_BUFFER -from prompt_toolkit.filters import (Condition, has_focus, has_selection, - vi_insert_mode, emacs_insert_mode) -from prompt_toolkit.key_binding import KeyBindings -from prompt_toolkit.key_binding.bindings.completion import display_completions_like_readline -from pygments.token import Token -from prompt_toolkit.shortcuts.prompt import PromptSession -from prompt_toolkit.enums import EditingMode -from prompt_toolkit.formatted_text import PygmentsTokens -from concurrent.futures import ThreadPoolExecutor - -from prompt_toolkit import __version__ as ptk_version -PTK3 = ptk_version.startswith('3.') - - -class TerminalPdb(Pdb): - """Standalone IPython debugger.""" - - def __init__(self, *args, pt_session_options=None, **kwargs): - Pdb.__init__(self, *args, **kwargs) - self._ptcomp = None - self.pt_init(pt_session_options) - self.thread_executor = ThreadPoolExecutor(1) - - def pt_init(self, pt_session_options=None): - """Initialize the prompt session and the prompt loop - and store them in self.pt_app and self.pt_loop. - - Additional keyword arguments for the PromptSession class - can be specified in pt_session_options. - """ - if pt_session_options is None: - pt_session_options = {} - - def get_prompt_tokens(): - return [(Token.Prompt, self.prompt)] - - if self._ptcomp is None: - compl = IPCompleter(shell=self.shell, - namespace={}, - global_namespace={}, - parent=self.shell, - ) - # add a completer for all the do_ methods - methods_names = [m[3:] for m in dir(self) if m.startswith("do_")] - - def gen_comp(self, text): - return [m for m in methods_names if m.startswith(text)] - import types - newcomp = types.MethodType(gen_comp, compl) - compl.custom_matchers.insert(0, newcomp) - # end add completer. 
- - self._ptcomp = IPythonPTCompleter(compl) - - options = dict( - message=(lambda: PygmentsTokens(get_prompt_tokens())), - editing_mode=getattr(EditingMode, self.shell.editing_mode.upper()), - key_bindings=create_ipython_shortcuts(self.shell), - history=self.shell.debugger_history, - completer=self._ptcomp, - enable_history_search=True, - mouse_support=self.shell.mouse_support, - complete_style=self.shell.pt_complete_style, - style=getattr(self.shell, "style", None), - color_depth=self.shell.color_depth, - ) - - if not PTK3: - options['inputhook'] = self.shell.inputhook - options.update(pt_session_options) - self.pt_loop = asyncio.new_event_loop() - self.pt_app = PromptSession(**options) - - def cmdloop(self, intro=None): - """Repeatedly issue a prompt, accept input, parse an initial prefix - off the received input, and dispatch to action methods, passing them - the remainder of the line as argument. - - override the same methods from cmd.Cmd to provide prompt toolkit replacement. - """ - if not self.use_rawinput: - raise ValueError('Sorry ipdb does not support use_rawinput=False') - - # In order to make sure that prompt, which uses asyncio doesn't - # interfere with applications in which it's used, we always run the - # prompt itself in a different thread (we can't start an event loop - # within an event loop). This new thread won't have any event loop - # running, and here we run our prompt-loop. - self.preloop() - - try: - if intro is not None: - self.intro = intro - if self.intro: - print(self.intro, file=self.stdout) - stop = None - while not stop: - if self.cmdqueue: - line = self.cmdqueue.pop(0) - else: - self._ptcomp.ipy_completer.namespace = self.curframe_locals - self._ptcomp.ipy_completer.global_namespace = self.curframe.f_globals - - # Run the prompt in a different thread. - try: - line = self.thread_executor.submit(self.pt_app.prompt).result() - except EOFError: - line = "EOF" - - line = self.precmd(line) - stop = self.onecmd(line) - stop = self.postcmd(stop, line) - self.postloop() - except Exception: - raise - - -def set_trace(frame=None): - """ - Start debugging from `frame`. - - If frame is not specified, debugging starts from caller's frame. - """ - TerminalPdb().set_trace(frame or sys._getframe().f_back) - - -if __name__ == '__main__': - import pdb - # IPython.core.debugger.Pdb.trace_dispatch shall not catch - # bdb.BdbQuit. When started through __main__ and an exception - # happened after hitting "c", this is needed in order to - # be able to quit the debugging session (see #9950). 
- old_trace_dispatch = pdb.Pdb.trace_dispatch - pdb.Pdb = TerminalPdb - pdb.Pdb.trace_dispatch = old_trace_dispatch - pdb.main() +import asyncio +import signal +import sys + +from IPython.core.debugger import Pdb +from IPython.core.completer import IPCompleter +from .ptutils import IPythonPTCompleter +from .shortcuts import create_ipython_shortcuts, suspend_to_bg, cursor_in_leading_ws + +from prompt_toolkit.enums import DEFAULT_BUFFER +from prompt_toolkit.filters import (Condition, has_focus, has_selection, + vi_insert_mode, emacs_insert_mode) +from prompt_toolkit.key_binding import KeyBindings +from prompt_toolkit.key_binding.bindings.completion import display_completions_like_readline +from pygments.token import Token +from prompt_toolkit.shortcuts.prompt import PromptSession +from prompt_toolkit.enums import EditingMode +from prompt_toolkit.formatted_text import PygmentsTokens +from concurrent.futures import ThreadPoolExecutor + +from prompt_toolkit import __version__ as ptk_version +PTK3 = ptk_version.startswith('3.') + + +class TerminalPdb(Pdb): + """Standalone IPython debugger.""" + + def __init__(self, *args, pt_session_options=None, **kwargs): + Pdb.__init__(self, *args, **kwargs) + self._ptcomp = None + self.pt_init(pt_session_options) + self.thread_executor = ThreadPoolExecutor(1) + + def pt_init(self, pt_session_options=None): + """Initialize the prompt session and the prompt loop + and store them in self.pt_app and self.pt_loop. + + Additional keyword arguments for the PromptSession class + can be specified in pt_session_options. + """ + if pt_session_options is None: + pt_session_options = {} + + def get_prompt_tokens(): + return [(Token.Prompt, self.prompt)] + + if self._ptcomp is None: + compl = IPCompleter(shell=self.shell, + namespace={}, + global_namespace={}, + parent=self.shell, + ) + # add a completer for all the do_ methods + methods_names = [m[3:] for m in dir(self) if m.startswith("do_")] + + def gen_comp(self, text): + return [m for m in methods_names if m.startswith(text)] + import types + newcomp = types.MethodType(gen_comp, compl) + compl.custom_matchers.insert(0, newcomp) + # end add completer. + + self._ptcomp = IPythonPTCompleter(compl) + + options = dict( + message=(lambda: PygmentsTokens(get_prompt_tokens())), + editing_mode=getattr(EditingMode, self.shell.editing_mode.upper()), + key_bindings=create_ipython_shortcuts(self.shell), + history=self.shell.debugger_history, + completer=self._ptcomp, + enable_history_search=True, + mouse_support=self.shell.mouse_support, + complete_style=self.shell.pt_complete_style, + style=getattr(self.shell, "style", None), + color_depth=self.shell.color_depth, + ) + + if not PTK3: + options['inputhook'] = self.shell.inputhook + options.update(pt_session_options) + self.pt_loop = asyncio.new_event_loop() + self.pt_app = PromptSession(**options) + + def cmdloop(self, intro=None): + """Repeatedly issue a prompt, accept input, parse an initial prefix + off the received input, and dispatch to action methods, passing them + the remainder of the line as argument. + + override the same methods from cmd.Cmd to provide prompt toolkit replacement. + """ + if not self.use_rawinput: + raise ValueError('Sorry ipdb does not support use_rawinput=False') + + # In order to make sure that prompt, which uses asyncio doesn't + # interfere with applications in which it's used, we always run the + # prompt itself in a different thread (we can't start an event loop + # within an event loop). 
This new thread won't have any event loop + # running, and here we run our prompt-loop. + self.preloop() + + try: + if intro is not None: + self.intro = intro + if self.intro: + print(self.intro, file=self.stdout) + stop = None + while not stop: + if self.cmdqueue: + line = self.cmdqueue.pop(0) + else: + self._ptcomp.ipy_completer.namespace = self.curframe_locals + self._ptcomp.ipy_completer.global_namespace = self.curframe.f_globals + + # Run the prompt in a different thread. + try: + line = self.thread_executor.submit(self.pt_app.prompt).result() + except EOFError: + line = "EOF" + + line = self.precmd(line) + stop = self.onecmd(line) + stop = self.postcmd(stop, line) + self.postloop() + except Exception: + raise + + +def set_trace(frame=None): + """ + Start debugging from `frame`. + + If frame is not specified, debugging starts from caller's frame. + """ + TerminalPdb().set_trace(frame or sys._getframe().f_back) + + +if __name__ == '__main__': + import pdb + # IPython.core.debugger.Pdb.trace_dispatch shall not catch + # bdb.BdbQuit. When started through __main__ and an exception + # happened after hitting "c", this is needed in order to + # be able to quit the debugging session (see #9950). + old_trace_dispatch = pdb.Pdb.trace_dispatch + pdb.Pdb = TerminalPdb + pdb.Pdb.trace_dispatch = old_trace_dispatch + pdb.main() diff --git a/contrib/python/ipython/py3/IPython/terminal/embed.py b/contrib/python/ipython/py3/IPython/terminal/embed.py index 25ddd53afa7..188844faddb 100644 --- a/contrib/python/ipython/py3/IPython/terminal/embed.py +++ b/contrib/python/ipython/py3/IPython/terminal/embed.py @@ -1,399 +1,399 @@ -# encoding: utf-8 -""" -An embedded IPython shell. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - - -import sys -import warnings - -from IPython.core import ultratb, compilerop -from IPython.core import magic_arguments -from IPython.core.magic import Magics, magics_class, line_magic -from IPython.core.interactiveshell import DummyMod, InteractiveShell -from IPython.terminal.interactiveshell import TerminalInteractiveShell -from IPython.terminal.ipapp import load_default_config - -from traitlets import Bool, CBool, Unicode -from IPython.utils.io import ask_yes_no - -class KillEmbedded(Exception):pass - -# kept for backward compatibility as IPython 6 was released with -# the typo. See https://github.com/ipython/ipython/pull/10706 -KillEmbeded = KillEmbedded - -# This is an additional magic that is exposed in embedded shells. -@magics_class -class EmbeddedMagics(Magics): - - @line_magic - @magic_arguments.magic_arguments() - @magic_arguments.argument('-i', '--instance', action='store_true', - help='Kill instance instead of call location') - @magic_arguments.argument('-x', '--exit', action='store_true', - help='Also exit the current session') - @magic_arguments.argument('-y', '--yes', action='store_true', - help='Do not ask confirmation') - def kill_embedded(self, parameter_s=''): - """%kill_embedded : deactivate for good the current embedded IPython - - This function (after asking for confirmation) sets an internal flag so - that an embedded IPython will never activate again for the given call - location. This is useful to permanently disable a shell that is being - called inside a loop: once you've figured out what you needed from it, - you may then kill it and the program will then continue to run without - the interactive shell interfering again. 
- - - Kill Instance Option: - - If for some reasons you need to kill the location where the instance - is created and not called, for example if you create a single - instance in one place and debug in many locations, you can use the - ``--instance`` option to kill this specific instance. Like for the - ``call location`` killing an "instance" should work even if it is - recreated within a loop. - - .. note:: - - This was the default behavior before IPython 5.2 - - """ - - args = magic_arguments.parse_argstring(self.kill_embedded, parameter_s) - print(args) - if args.instance: - # let no ask - if not args.yes: - kill = ask_yes_no( - "Are you sure you want to kill this embedded instance? [y/N] ", 'n') - else: - kill = True - if kill: - self.shell._disable_init_location() - print("This embedded IPython instance will not reactivate anymore " - "once you exit.") - else: - if not args.yes: - kill = ask_yes_no( - "Are you sure you want to kill this embedded call_location? [y/N] ", 'n') - else: - kill = True - if kill: - self.shell.embedded_active = False - print("This embedded IPython call location will not reactivate anymore " - "once you exit.") - - if args.exit: - # Ask-exit does not really ask, it just set internals flags to exit - # on next loop. - self.shell.ask_exit() - - - @line_magic - def exit_raise(self, parameter_s=''): - """%exit_raise Make the current embedded kernel exit and raise and exception. - - This function sets an internal flag so that an embedded IPython will - raise a `IPython.terminal.embed.KillEmbedded` Exception on exit, and then exit the current I. This is - useful to permanently exit a loop that create IPython embed instance. - """ - - self.shell.should_raise = True - self.shell.ask_exit() - - - -class InteractiveShellEmbed(TerminalInteractiveShell): - - dummy_mode = Bool(False) - exit_msg = Unicode('') - embedded = CBool(True) - should_raise = CBool(False) - # Like the base class display_banner is not configurable, but here it - # is True by default. 
- display_banner = CBool(True) - exit_msg = Unicode() - - # When embedding, by default we don't change the terminal title - term_title = Bool(False, - help="Automatically set the terminal title" - ).tag(config=True) - - _inactive_locations = set() - - @property - def embedded_active(self): - return (self._call_location_id not in InteractiveShellEmbed._inactive_locations)\ - and (self._init_location_id not in InteractiveShellEmbed._inactive_locations) - - def _disable_init_location(self): - """Disable the current Instance creation location""" - InteractiveShellEmbed._inactive_locations.add(self._init_location_id) - - @embedded_active.setter - def embedded_active(self, value): - if value: - InteractiveShellEmbed._inactive_locations.discard( - self._call_location_id) - InteractiveShellEmbed._inactive_locations.discard( - self._init_location_id) - else: - InteractiveShellEmbed._inactive_locations.add( - self._call_location_id) - - def __init__(self, **kw): - if kw.get('user_global_ns', None) is not None: - raise DeprecationWarning( - "Key word argument `user_global_ns` has been replaced by `user_module` since IPython 4.0.") - - clid = kw.pop('_init_location_id', None) - if not clid: - frame = sys._getframe(1) - clid = '%s:%s' % (frame.f_code.co_filename, frame.f_lineno) - self._init_location_id = clid - - super(InteractiveShellEmbed,self).__init__(**kw) - - # don't use the ipython crash handler so that user exceptions aren't - # trapped - sys.excepthook = ultratb.FormattedTB(color_scheme=self.colors, - mode=self.xmode, - call_pdb=self.pdb) - - def init_sys_modules(self): - """ - Explicitly overwrite :mod:`IPython.core.interactiveshell` to do nothing. - """ - pass - - def init_magics(self): - super(InteractiveShellEmbed, self).init_magics() - self.register_magics(EmbeddedMagics) - - def __call__(self, header='', local_ns=None, module=None, dummy=None, - stack_depth=1, global_ns=None, compile_flags=None, **kw): - """Activate the interactive interpreter. - - __call__(self,header='',local_ns=None,module=None,dummy=None) -> Start - the interpreter shell with the given local and global namespaces, and - optionally print a header string at startup. - - The shell can be globally activated/deactivated using the - dummy_mode attribute. This allows you to turn off a shell used - for debugging globally. - - However, *each* time you call the shell you can override the current - state of dummy_mode with the optional keyword parameter 'dummy'. For - example, if you set dummy mode on with IPShell.dummy_mode = True, you - can still have a specific call work by making it as IPShell(dummy=False). - """ - - # we are called, set the underlying interactiveshell not to exit. - self.keep_running = True - - # If the user has turned it off, go away - clid = kw.pop('_call_location_id', None) - if not clid: - frame = sys._getframe(1) - clid = '%s:%s' % (frame.f_code.co_filename, frame.f_lineno) - self._call_location_id = clid - - if not self.embedded_active: - return - - # Normal exits from interactive mode set this flag, so the shell can't - # re-enter (it checks this variable at the start of interactive mode). 
- self.exit_now = False - - # Allow the dummy parameter to override the global __dummy_mode - if dummy or (dummy != 0 and self.dummy_mode): - return - - # self.banner is auto computed - if header: - self.old_banner2 = self.banner2 - self.banner2 = self.banner2 + '\n' + header + '\n' - else: - self.old_banner2 = '' - - if self.display_banner: - self.show_banner() - - # Call the embedding code with a stack depth of 1 so it can skip over - # our call and get the original caller's namespaces. - self.mainloop(local_ns, module, stack_depth=stack_depth, - global_ns=global_ns, compile_flags=compile_flags) - - self.banner2 = self.old_banner2 - - if self.exit_msg is not None: - print(self.exit_msg) - - if self.should_raise: - raise KillEmbedded('Embedded IPython raising error, as user requested.') - - - def mainloop(self, local_ns=None, module=None, stack_depth=0, - display_banner=None, global_ns=None, compile_flags=None): - """Embeds IPython into a running python program. - - Parameters - ---------- - - local_ns, module - Working local namespace (a dict) and module (a module or similar - object). If given as None, they are automatically taken from the scope - where the shell was called, so that program variables become visible. - - stack_depth : int - How many levels in the stack to go to looking for namespaces (when - local_ns or module is None). This allows an intermediate caller to - make sure that this function gets the namespace from the intended - level in the stack. By default (0) it will get its locals and globals - from the immediate caller. - - compile_flags - A bit field identifying the __future__ features - that are enabled, as passed to the builtin :func:`compile` function. - If given as None, they are automatically taken from the scope where - the shell was called. - - """ - - if (global_ns is not None) and (module is None): - raise DeprecationWarning("'global_ns' keyword argument is deprecated, and has been removed in IPython 5.0 use `module` keyword argument instead.") - - if (display_banner is not None): - warnings.warn("The display_banner parameter is deprecated since IPython 4.0", DeprecationWarning) - - # Get locals and globals from caller - if ((local_ns is None or module is None or compile_flags is None) - and self.default_user_namespaces): - call_frame = sys._getframe(stack_depth).f_back - - if local_ns is None: - local_ns = call_frame.f_locals - if module is None: - global_ns = call_frame.f_globals - try: - module = sys.modules[global_ns['__name__']] - except KeyError: - warnings.warn("Failed to get module %s" % \ - global_ns.get('__name__', 'unknown module') - ) - module = DummyMod() - module.__dict__ = global_ns - if compile_flags is None: - compile_flags = (call_frame.f_code.co_flags & - compilerop.PyCF_MASK) - - # Save original namespace and module so we can restore them after - # embedding; otherwise the shell doesn't shut down correctly. - orig_user_module = self.user_module - orig_user_ns = self.user_ns - orig_compile_flags = self.compile.flags - - # Update namespaces and fire up interpreter - - # The global one is easy, we can just throw it in - if module is not None: - self.user_module = module - - # But the user/local one is tricky: ipython needs it to store internal - # data, but we also need the locals. We'll throw our hidden variables - # like _ih and get_ipython() into the local namespace, but delete them - # later. 
- if local_ns is not None: - reentrant_local_ns = {k: v for (k, v) in local_ns.items() if k not in self.user_ns_hidden.keys()} - self.user_ns = reentrant_local_ns - self.init_user_ns() - - # Compiler flags - if compile_flags is not None: - self.compile.flags = compile_flags - - # make sure the tab-completer has the correct frame information, so it - # actually completes using the frame's locals/globals - self.set_completer_frame() - - with self.builtin_trap, self.display_trap: - self.interact() - - # now, purge out the local namespace of IPython's hidden variables. - if local_ns is not None: - local_ns.update({k: v for (k, v) in self.user_ns.items() if k not in self.user_ns_hidden.keys()}) - - - # Restore original namespace so shell can shut down when we exit. - self.user_module = orig_user_module - self.user_ns = orig_user_ns - self.compile.flags = orig_compile_flags - - -def embed(**kwargs): - """Call this to embed IPython at the current point in your program. - - The first invocation of this will create an :class:`InteractiveShellEmbed` - instance and then call it. Consecutive calls just call the already - created instance. - - If you don't want the kernel to initialize the namespace - from the scope of the surrounding function, - and/or you want to load full IPython configuration, - you probably want `IPython.start_ipython()` instead. - - Here is a simple example:: - - from IPython import embed - a = 10 - b = 20 - embed(header='First time') - c = 30 - d = 40 - embed() - - Full customization can be done by passing a :class:`Config` in as the - config argument. - """ - config = kwargs.get('config') - header = kwargs.pop('header', u'') - compile_flags = kwargs.pop('compile_flags', None) - if config is None: - config = load_default_config() - config.InteractiveShellEmbed = config.TerminalInteractiveShell - kwargs['config'] = config - using = kwargs.get('using', 'sync') - if using : - kwargs['config'].update({'TerminalInteractiveShell':{'loop_runner':using, 'colors':'NoColor', 'autoawait': using!='sync'}}) - #save ps1/ps2 if defined - ps1 = None - ps2 = None - try: - ps1 = sys.ps1 - ps2 = sys.ps2 - except AttributeError: - pass - #save previous instance - saved_shell_instance = InteractiveShell._instance - if saved_shell_instance is not None: - cls = type(saved_shell_instance) - cls.clear_instance() - frame = sys._getframe(1) - shell = InteractiveShellEmbed.instance(_init_location_id='%s:%s' % ( - frame.f_code.co_filename, frame.f_lineno), **kwargs) - shell(header=header, stack_depth=2, compile_flags=compile_flags, - _call_location_id='%s:%s' % (frame.f_code.co_filename, frame.f_lineno)) - InteractiveShellEmbed.clear_instance() - #restore previous instance - if saved_shell_instance is not None: - cls = type(saved_shell_instance) - cls.clear_instance() - for subclass in cls._walk_mro(): - subclass._instance = saved_shell_instance - if ps1 is not None: - sys.ps1 = ps1 - sys.ps2 = ps2 +# encoding: utf-8 +""" +An embedded IPython shell. +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
+ + +import sys +import warnings + +from IPython.core import ultratb, compilerop +from IPython.core import magic_arguments +from IPython.core.magic import Magics, magics_class, line_magic +from IPython.core.interactiveshell import DummyMod, InteractiveShell +from IPython.terminal.interactiveshell import TerminalInteractiveShell +from IPython.terminal.ipapp import load_default_config + +from traitlets import Bool, CBool, Unicode +from IPython.utils.io import ask_yes_no + +class KillEmbedded(Exception):pass + +# kept for backward compatibility as IPython 6 was released with +# the typo. See https://github.com/ipython/ipython/pull/10706 +KillEmbeded = KillEmbedded + +# This is an additional magic that is exposed in embedded shells. +@magics_class +class EmbeddedMagics(Magics): + + @line_magic + @magic_arguments.magic_arguments() + @magic_arguments.argument('-i', '--instance', action='store_true', + help='Kill instance instead of call location') + @magic_arguments.argument('-x', '--exit', action='store_true', + help='Also exit the current session') + @magic_arguments.argument('-y', '--yes', action='store_true', + help='Do not ask confirmation') + def kill_embedded(self, parameter_s=''): + """%kill_embedded : deactivate for good the current embedded IPython + + This function (after asking for confirmation) sets an internal flag so + that an embedded IPython will never activate again for the given call + location. This is useful to permanently disable a shell that is being + called inside a loop: once you've figured out what you needed from it, + you may then kill it and the program will then continue to run without + the interactive shell interfering again. + + + Kill Instance Option: + + If for some reasons you need to kill the location where the instance + is created and not called, for example if you create a single + instance in one place and debug in many locations, you can use the + ``--instance`` option to kill this specific instance. Like for the + ``call location`` killing an "instance" should work even if it is + recreated within a loop. + + .. note:: + + This was the default behavior before IPython 5.2 + + """ + + args = magic_arguments.parse_argstring(self.kill_embedded, parameter_s) + print(args) + if args.instance: + # let no ask + if not args.yes: + kill = ask_yes_no( + "Are you sure you want to kill this embedded instance? [y/N] ", 'n') + else: + kill = True + if kill: + self.shell._disable_init_location() + print("This embedded IPython instance will not reactivate anymore " + "once you exit.") + else: + if not args.yes: + kill = ask_yes_no( + "Are you sure you want to kill this embedded call_location? [y/N] ", 'n') + else: + kill = True + if kill: + self.shell.embedded_active = False + print("This embedded IPython call location will not reactivate anymore " + "once you exit.") + + if args.exit: + # Ask-exit does not really ask, it just set internals flags to exit + # on next loop. + self.shell.ask_exit() + + + @line_magic + def exit_raise(self, parameter_s=''): + """%exit_raise Make the current embedded kernel exit and raise and exception. + + This function sets an internal flag so that an embedded IPython will + raise a `IPython.terminal.embed.KillEmbedded` Exception on exit, and then exit the current I. This is + useful to permanently exit a loop that create IPython embed instance. 
+ """ + + self.shell.should_raise = True + self.shell.ask_exit() + + + +class InteractiveShellEmbed(TerminalInteractiveShell): + + dummy_mode = Bool(False) + exit_msg = Unicode('') + embedded = CBool(True) + should_raise = CBool(False) + # Like the base class display_banner is not configurable, but here it + # is True by default. + display_banner = CBool(True) + exit_msg = Unicode() + + # When embedding, by default we don't change the terminal title + term_title = Bool(False, + help="Automatically set the terminal title" + ).tag(config=True) + + _inactive_locations = set() + + @property + def embedded_active(self): + return (self._call_location_id not in InteractiveShellEmbed._inactive_locations)\ + and (self._init_location_id not in InteractiveShellEmbed._inactive_locations) + + def _disable_init_location(self): + """Disable the current Instance creation location""" + InteractiveShellEmbed._inactive_locations.add(self._init_location_id) + + @embedded_active.setter + def embedded_active(self, value): + if value: + InteractiveShellEmbed._inactive_locations.discard( + self._call_location_id) + InteractiveShellEmbed._inactive_locations.discard( + self._init_location_id) + else: + InteractiveShellEmbed._inactive_locations.add( + self._call_location_id) + + def __init__(self, **kw): + if kw.get('user_global_ns', None) is not None: + raise DeprecationWarning( + "Key word argument `user_global_ns` has been replaced by `user_module` since IPython 4.0.") + + clid = kw.pop('_init_location_id', None) + if not clid: + frame = sys._getframe(1) + clid = '%s:%s' % (frame.f_code.co_filename, frame.f_lineno) + self._init_location_id = clid + + super(InteractiveShellEmbed,self).__init__(**kw) + + # don't use the ipython crash handler so that user exceptions aren't + # trapped + sys.excepthook = ultratb.FormattedTB(color_scheme=self.colors, + mode=self.xmode, + call_pdb=self.pdb) + + def init_sys_modules(self): + """ + Explicitly overwrite :mod:`IPython.core.interactiveshell` to do nothing. + """ + pass + + def init_magics(self): + super(InteractiveShellEmbed, self).init_magics() + self.register_magics(EmbeddedMagics) + + def __call__(self, header='', local_ns=None, module=None, dummy=None, + stack_depth=1, global_ns=None, compile_flags=None, **kw): + """Activate the interactive interpreter. + + __call__(self,header='',local_ns=None,module=None,dummy=None) -> Start + the interpreter shell with the given local and global namespaces, and + optionally print a header string at startup. + + The shell can be globally activated/deactivated using the + dummy_mode attribute. This allows you to turn off a shell used + for debugging globally. + + However, *each* time you call the shell you can override the current + state of dummy_mode with the optional keyword parameter 'dummy'. For + example, if you set dummy mode on with IPShell.dummy_mode = True, you + can still have a specific call work by making it as IPShell(dummy=False). + """ + + # we are called, set the underlying interactiveshell not to exit. + self.keep_running = True + + # If the user has turned it off, go away + clid = kw.pop('_call_location_id', None) + if not clid: + frame = sys._getframe(1) + clid = '%s:%s' % (frame.f_code.co_filename, frame.f_lineno) + self._call_location_id = clid + + if not self.embedded_active: + return + + # Normal exits from interactive mode set this flag, so the shell can't + # re-enter (it checks this variable at the start of interactive mode). 
+ self.exit_now = False + + # Allow the dummy parameter to override the global __dummy_mode + if dummy or (dummy != 0 and self.dummy_mode): + return + + # self.banner is auto computed + if header: + self.old_banner2 = self.banner2 + self.banner2 = self.banner2 + '\n' + header + '\n' + else: + self.old_banner2 = '' + + if self.display_banner: + self.show_banner() + + # Call the embedding code with a stack depth of 1 so it can skip over + # our call and get the original caller's namespaces. + self.mainloop(local_ns, module, stack_depth=stack_depth, + global_ns=global_ns, compile_flags=compile_flags) + + self.banner2 = self.old_banner2 + + if self.exit_msg is not None: + print(self.exit_msg) + + if self.should_raise: + raise KillEmbedded('Embedded IPython raising error, as user requested.') + + + def mainloop(self, local_ns=None, module=None, stack_depth=0, + display_banner=None, global_ns=None, compile_flags=None): + """Embeds IPython into a running python program. + + Parameters + ---------- + + local_ns, module + Working local namespace (a dict) and module (a module or similar + object). If given as None, they are automatically taken from the scope + where the shell was called, so that program variables become visible. + + stack_depth : int + How many levels in the stack to go to looking for namespaces (when + local_ns or module is None). This allows an intermediate caller to + make sure that this function gets the namespace from the intended + level in the stack. By default (0) it will get its locals and globals + from the immediate caller. + + compile_flags + A bit field identifying the __future__ features + that are enabled, as passed to the builtin :func:`compile` function. + If given as None, they are automatically taken from the scope where + the shell was called. + + """ + + if (global_ns is not None) and (module is None): + raise DeprecationWarning("'global_ns' keyword argument is deprecated, and has been removed in IPython 5.0 use `module` keyword argument instead.") + + if (display_banner is not None): + warnings.warn("The display_banner parameter is deprecated since IPython 4.0", DeprecationWarning) + + # Get locals and globals from caller + if ((local_ns is None or module is None or compile_flags is None) + and self.default_user_namespaces): + call_frame = sys._getframe(stack_depth).f_back + + if local_ns is None: + local_ns = call_frame.f_locals + if module is None: + global_ns = call_frame.f_globals + try: + module = sys.modules[global_ns['__name__']] + except KeyError: + warnings.warn("Failed to get module %s" % \ + global_ns.get('__name__', 'unknown module') + ) + module = DummyMod() + module.__dict__ = global_ns + if compile_flags is None: + compile_flags = (call_frame.f_code.co_flags & + compilerop.PyCF_MASK) + + # Save original namespace and module so we can restore them after + # embedding; otherwise the shell doesn't shut down correctly. + orig_user_module = self.user_module + orig_user_ns = self.user_ns + orig_compile_flags = self.compile.flags + + # Update namespaces and fire up interpreter + + # The global one is easy, we can just throw it in + if module is not None: + self.user_module = module + + # But the user/local one is tricky: ipython needs it to store internal + # data, but we also need the locals. We'll throw our hidden variables + # like _ih and get_ipython() into the local namespace, but delete them + # later. 
+ if local_ns is not None: + reentrant_local_ns = {k: v for (k, v) in local_ns.items() if k not in self.user_ns_hidden.keys()} + self.user_ns = reentrant_local_ns + self.init_user_ns() + + # Compiler flags + if compile_flags is not None: + self.compile.flags = compile_flags + + # make sure the tab-completer has the correct frame information, so it + # actually completes using the frame's locals/globals + self.set_completer_frame() + + with self.builtin_trap, self.display_trap: + self.interact() + + # now, purge out the local namespace of IPython's hidden variables. + if local_ns is not None: + local_ns.update({k: v for (k, v) in self.user_ns.items() if k not in self.user_ns_hidden.keys()}) + + + # Restore original namespace so shell can shut down when we exit. + self.user_module = orig_user_module + self.user_ns = orig_user_ns + self.compile.flags = orig_compile_flags + + +def embed(**kwargs): + """Call this to embed IPython at the current point in your program. + + The first invocation of this will create an :class:`InteractiveShellEmbed` + instance and then call it. Consecutive calls just call the already + created instance. + + If you don't want the kernel to initialize the namespace + from the scope of the surrounding function, + and/or you want to load full IPython configuration, + you probably want `IPython.start_ipython()` instead. + + Here is a simple example:: + + from IPython import embed + a = 10 + b = 20 + embed(header='First time') + c = 30 + d = 40 + embed() + + Full customization can be done by passing a :class:`Config` in as the + config argument. + """ + config = kwargs.get('config') + header = kwargs.pop('header', u'') + compile_flags = kwargs.pop('compile_flags', None) + if config is None: + config = load_default_config() + config.InteractiveShellEmbed = config.TerminalInteractiveShell + kwargs['config'] = config + using = kwargs.get('using', 'sync') + if using : + kwargs['config'].update({'TerminalInteractiveShell':{'loop_runner':using, 'colors':'NoColor', 'autoawait': using!='sync'}}) + #save ps1/ps2 if defined + ps1 = None + ps2 = None + try: + ps1 = sys.ps1 + ps2 = sys.ps2 + except AttributeError: + pass + #save previous instance + saved_shell_instance = InteractiveShell._instance + if saved_shell_instance is not None: + cls = type(saved_shell_instance) + cls.clear_instance() + frame = sys._getframe(1) + shell = InteractiveShellEmbed.instance(_init_location_id='%s:%s' % ( + frame.f_code.co_filename, frame.f_lineno), **kwargs) + shell(header=header, stack_depth=2, compile_flags=compile_flags, + _call_location_id='%s:%s' % (frame.f_code.co_filename, frame.f_lineno)) + InteractiveShellEmbed.clear_instance() + #restore previous instance + if saved_shell_instance is not None: + cls = type(saved_shell_instance) + cls.clear_instance() + for subclass in cls._walk_mro(): + subclass._instance = saved_shell_instance + if ps1 is not None: + sys.ps1 = ps1 + sys.ps2 = ps2 diff --git a/contrib/python/ipython/py3/IPython/terminal/interactiveshell.py b/contrib/python/ipython/py3/IPython/terminal/interactiveshell.py index f3b697b9930..4e35aadd616 100644 --- a/contrib/python/ipython/py3/IPython/terminal/interactiveshell.py +++ b/contrib/python/ipython/py3/IPython/terminal/interactiveshell.py @@ -1,648 +1,648 @@ -"""IPython terminal interface using prompt_toolkit""" - -import asyncio -import os -import sys -import warnings -from warnings import warn - -from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC -from IPython.utils import io -from 
IPython.utils.py3compat import input -from IPython.utils.terminal import toggle_set_term_title, set_term_title, restore_term_title -from IPython.utils.process import abbrev_cwd -from traitlets import ( - Bool, Unicode, Dict, Integer, observe, Instance, Type, default, Enum, Union, - Any, validate -) - -from prompt_toolkit.enums import DEFAULT_BUFFER, EditingMode -from prompt_toolkit.filters import (HasFocus, Condition, IsDone) -from prompt_toolkit.formatted_text import PygmentsTokens -from prompt_toolkit.history import InMemoryHistory -from prompt_toolkit.layout.processors import ConditionalProcessor, HighlightMatchingBracketProcessor -from prompt_toolkit.output import ColorDepth -from prompt_toolkit.patch_stdout import patch_stdout -from prompt_toolkit.shortcuts import PromptSession, CompleteStyle, print_formatted_text -from prompt_toolkit.styles import DynamicStyle, merge_styles -from prompt_toolkit.styles.pygments import style_from_pygments_cls, style_from_pygments_dict -from prompt_toolkit import __version__ as ptk_version - -from pygments.styles import get_style_by_name -from pygments.style import Style -from pygments.token import Token - -from .debugger import TerminalPdb, Pdb -from .magics import TerminalMagics -from .pt_inputhooks import get_inputhook_name_and_func -from .prompts import Prompts, ClassicPrompts, RichPromptDisplayHook -from .ptutils import IPythonPTCompleter, IPythonPTLexer -from .shortcuts import create_ipython_shortcuts - -DISPLAY_BANNER_DEPRECATED = object() -PTK3 = ptk_version.startswith('3.') - - -class _NoStyle(Style): pass - - - -_style_overrides_light_bg = { - Token.Prompt: '#0000ff', - Token.PromptNum: '#0000ee bold', - Token.OutPrompt: '#cc0000', - Token.OutPromptNum: '#bb0000 bold', -} - -_style_overrides_linux = { - Token.Prompt: '#00cc00', - Token.PromptNum: '#00bb00 bold', - Token.OutPrompt: '#cc0000', - Token.OutPromptNum: '#bb0000 bold', -} - -def get_default_editor(): - try: - return os.environ['EDITOR'] - except KeyError: - pass - except UnicodeError: - warn("$EDITOR environment variable is not pure ASCII. Using platform " - "default editor.") - - if os.name == 'posix': - return 'vi' # the only one guaranteed to be there! - else: - return 'notepad' # same in Windows! - -# conservatively check for tty -# overridden streams can result in things like: -# - sys.stdin = None -# - no isatty method -for _name in ('stdin', 'stdout', 'stderr'): - _stream = getattr(sys, _name) - if not _stream or not hasattr(_stream, 'isatty') or not _stream.isatty(): - _is_tty = False - break -else: - _is_tty = True - - -_use_simple_prompt = ('IPY_TEST_SIMPLE_PROMPT' in os.environ) or (not _is_tty) - -def black_reformat_handler(text_before_cursor): - import black - formatted_text = black.format_str(text_before_cursor, mode=black.FileMode()) - if not text_before_cursor.endswith('\n') and formatted_text.endswith('\n'): - formatted_text = formatted_text[:-1] - return formatted_text - - -class TerminalInteractiveShell(InteractiveShell): - mime_renderers = Dict().tag(config=True) - - space_for_menu = Integer(6, help='Number of line at the bottom of the screen ' - 'to reserve for the tab completion menu, ' - 'search history, ...etc, the height of ' - 'these menus will at most this value. ' - 'Increase it is you prefer long and skinny ' - 'menus, decrease for short and wide.' - ).tag(config=True) - - pt_app = None - debugger_history = None - - simple_prompt = Bool(_use_simple_prompt, - help="""Use `raw_input` for the REPL, without completion and prompt colors. 
- - Useful when controlling IPython as a subprocess, and piping STDIN/OUT/ERR. Known usage are: - IPython own testing machinery, and emacs inferior-shell integration through elpy. - - This mode default to `True` if the `IPY_TEST_SIMPLE_PROMPT` - environment variable is set, or the current terminal is not a tty.""" - ).tag(config=True) - - @property - def debugger_cls(self): - return Pdb if self.simple_prompt else TerminalPdb - - confirm_exit = Bool(True, - help=""" - Set to confirm when you try to exit IPython with an EOF (Control-D - in Unix, Control-Z/Enter in Windows). By typing 'exit' or 'quit', - you can force a direct exit without any confirmation.""", - ).tag(config=True) - - editing_mode = Unicode('emacs', - help="Shortcut style to use at the prompt. 'vi' or 'emacs'.", - ).tag(config=True) - - autoformatter = Unicode(None, - help="Autoformatter to reformat Terminal code. Can be `'black'` or `None`", - allow_none=True - ).tag(config=True) - - mouse_support = Bool(False, - help="Enable mouse support in the prompt\n(Note: prevents selecting text with the mouse)" - ).tag(config=True) - - # We don't load the list of styles for the help string, because loading - # Pygments plugins takes time and can cause unexpected errors. - highlighting_style = Union([Unicode('legacy'), Type(klass=Style)], - help="""The name or class of a Pygments style to use for syntax - highlighting. To see available styles, run `pygmentize -L styles`.""" - ).tag(config=True) - - @validate('editing_mode') - def _validate_editing_mode(self, proposal): - if proposal['value'].lower() == 'vim': - proposal['value']= 'vi' - elif proposal['value'].lower() == 'default': - proposal['value']= 'emacs' - - if hasattr(EditingMode, proposal['value'].upper()): - return proposal['value'].lower() - - return self.editing_mode - - - @observe('editing_mode') - def _editing_mode(self, change): - if self.pt_app: - self.pt_app.editing_mode = getattr(EditingMode, change.new.upper()) - - @observe('autoformatter') - def _autoformatter_changed(self, change): - formatter = change.new - if formatter is None: - self.reformat_handler = lambda x:x - elif formatter == 'black': - self.reformat_handler = black_reformat_handler - else: - raise ValueError - - @observe('highlighting_style') - @observe('colors') - def _highlighting_style_changed(self, change): - self.refresh_style() - - def refresh_style(self): - self._style = self._make_style_from_name_or_cls(self.highlighting_style) - - - highlighting_style_overrides = Dict( - help="Override highlighting format for specific tokens" - ).tag(config=True) - - true_color = Bool(False, - help=("Use 24bit colors instead of 256 colors in prompt highlighting. " - "If your terminal supports true color, the following command " - "should print 'TRUECOLOR' in orange: " - "printf \"\\x1b[38;2;255;100;0mTRUECOLOR\\x1b[0m\\n\"") - ).tag(config=True) - - editor = Unicode(get_default_editor(), - help="Set the editor used by IPython (default to $EDITOR/vi/notepad)." 
- ).tag(config=True) - - prompts_class = Type(Prompts, help='Class used to generate Prompt token for prompt_toolkit').tag(config=True) - - prompts = Instance(Prompts) - - @default('prompts') - def _prompts_default(self): - return self.prompts_class(self) - -# @observe('prompts') -# def _(self, change): -# self._update_layout() - - @default('displayhook_class') - def _displayhook_class_default(self): - return RichPromptDisplayHook - - term_title = Bool(True, - help="Automatically set the terminal title" - ).tag(config=True) - - term_title_format = Unicode("IPython: {cwd}", - help="Customize the terminal title format. This is a python format string. " + - "Available substitutions are: {cwd}." - ).tag(config=True) - - display_completions = Enum(('column', 'multicolumn','readlinelike'), - help= ( "Options for displaying tab completions, 'column', 'multicolumn', and " - "'readlinelike'. These options are for `prompt_toolkit`, see " - "`prompt_toolkit` documentation for more information." - ), - default_value='multicolumn').tag(config=True) - - highlight_matching_brackets = Bool(True, - help="Highlight matching brackets.", - ).tag(config=True) - - extra_open_editor_shortcuts = Bool(False, - help="Enable vi (v) or Emacs (C-X C-E) shortcuts to open an external editor. " - "This is in addition to the F2 binding, which is always enabled." - ).tag(config=True) - - handle_return = Any(None, - help="Provide an alternative handler to be called when the user presses " - "Return. This is an advanced option intended for debugging, which " - "may be changed or removed in later releases." - ).tag(config=True) - - enable_history_search = Bool(True, - help="Allows to enable/disable the prompt toolkit history search" - ).tag(config=True) - - prompt_includes_vi_mode = Bool(True, - help="Display the current vi mode (when using vi editing mode)." - ).tag(config=True) - - @observe('term_title') - def init_term_title(self, change=None): - # Enable or disable the terminal title. - if self.term_title: - toggle_set_term_title(True) - set_term_title(self.term_title_format.format(cwd=abbrev_cwd())) - else: - toggle_set_term_title(False) - - def restore_term_title(self): - if self.term_title: - restore_term_title() - - def init_display_formatter(self): - super(TerminalInteractiveShell, self).init_display_formatter() - # terminal only supports plain text - self.display_formatter.active_types = ['text/plain'] - # disable `_ipython_display_` - self.display_formatter.ipython_display_formatter.enabled = False - - def init_prompt_toolkit_cli(self): - if self.simple_prompt: - # Fall back to plain non-interactive output for tests. - # This is very limited. 
- def prompt(): - prompt_text = "".join(x[1] for x in self.prompts.in_prompt_tokens()) - lines = [input(prompt_text)] - prompt_continuation = "".join(x[1] for x in self.prompts.continuation_prompt_tokens()) - while self.check_complete('\n'.join(lines))[0] == 'incomplete': - lines.append( input(prompt_continuation) ) - return '\n'.join(lines) - self.prompt_for_code = prompt - return - - # Set up keyboard shortcuts - key_bindings = create_ipython_shortcuts(self) - - # Pre-populate history from IPython's history database - history = InMemoryHistory() - last_cell = u"" - for __, ___, cell in self.history_manager.get_tail(self.history_load_length, - include_latest=True): - # Ignore blank lines and consecutive duplicates - cell = cell.rstrip() - if cell and (cell != last_cell): - history.append_string(cell) - last_cell = cell - - self._style = self._make_style_from_name_or_cls(self.highlighting_style) - self.style = DynamicStyle(lambda: self._style) - - editing_mode = getattr(EditingMode, self.editing_mode.upper()) - - self.pt_loop = asyncio.new_event_loop() - self.pt_app = PromptSession( - editing_mode=editing_mode, - key_bindings=key_bindings, - history=history, - completer=IPythonPTCompleter(shell=self), - enable_history_search = self.enable_history_search, - style=self.style, - include_default_pygments_style=False, - mouse_support=self.mouse_support, - enable_open_in_editor=self.extra_open_editor_shortcuts, - color_depth=self.color_depth, - tempfile_suffix=".py", - **self._extra_prompt_options()) - - def _make_style_from_name_or_cls(self, name_or_cls): - """ - Small wrapper that make an IPython compatible style from a style name - - We need that to add style for prompt ... etc. - """ - style_overrides = {} - if name_or_cls == 'legacy': - legacy = self.colors.lower() - if legacy == 'linux': - style_cls = get_style_by_name('monokai') - style_overrides = _style_overrides_linux - elif legacy == 'lightbg': - style_overrides = _style_overrides_light_bg - style_cls = get_style_by_name('pastie') - elif legacy == 'neutral': - # The default theme needs to be visible on both a dark background - # and a light background, because we can't tell what the terminal - # looks like. These tweaks to the default theme help with that. 
- style_cls = get_style_by_name('default') - style_overrides.update({ - Token.Number: '#007700', - Token.Operator: 'noinherit', - Token.String: '#BB6622', - Token.Name.Function: '#2080D0', - Token.Name.Class: 'bold #2080D0', - Token.Name.Namespace: 'bold #2080D0', - Token.Name.Variable.Magic: '#ansiblue', - Token.Prompt: '#009900', - Token.PromptNum: '#ansibrightgreen bold', - Token.OutPrompt: '#990000', - Token.OutPromptNum: '#ansibrightred bold', - }) - - # Hack: Due to limited color support on the Windows console - # the prompt colors will be wrong without this - if os.name == 'nt': - style_overrides.update({ - Token.Prompt: '#ansidarkgreen', - Token.PromptNum: '#ansigreen bold', - Token.OutPrompt: '#ansidarkred', - Token.OutPromptNum: '#ansired bold', - }) - elif legacy =='nocolor': - style_cls=_NoStyle - style_overrides = {} - else : - raise ValueError('Got unknown colors: ', legacy) - else : - if isinstance(name_or_cls, str): - style_cls = get_style_by_name(name_or_cls) - else: - style_cls = name_or_cls - style_overrides = { - Token.Prompt: '#009900', - Token.PromptNum: '#ansibrightgreen bold', - Token.OutPrompt: '#990000', - Token.OutPromptNum: '#ansibrightred bold', - } - style_overrides.update(self.highlighting_style_overrides) - style = merge_styles([ - style_from_pygments_cls(style_cls), - style_from_pygments_dict(style_overrides), - ]) - - return style - - @property - def pt_complete_style(self): - return { - 'multicolumn': CompleteStyle.MULTI_COLUMN, - 'column': CompleteStyle.COLUMN, - 'readlinelike': CompleteStyle.READLINE_LIKE, - }[self.display_completions] - - @property - def color_depth(self): - return (ColorDepth.TRUE_COLOR if self.true_color else None) - - def _extra_prompt_options(self): - """ - Return the current layout option for the current Terminal InteractiveShell - """ - def get_message(): - return PygmentsTokens(self.prompts.in_prompt_tokens()) - - if self.editing_mode == 'emacs': - # with emacs mode the prompt is (usually) static, so we call only - # the function once. With VI mode it can toggle between [ins] and - # [nor] so we can't precompute. - # here I'm going to favor the default keybinding which almost - # everybody uses to decrease CPU usage. - # if we have issues with users with custom Prompts we can see how to - # work around this. - get_message = get_message() - - options = { - 'complete_in_thread': False, - 'lexer':IPythonPTLexer(), - 'reserve_space_for_menu':self.space_for_menu, - 'message': get_message, - 'prompt_continuation': ( - lambda width, lineno, is_soft_wrap: - PygmentsTokens(self.prompts.continuation_prompt_tokens(width))), - 'multiline': True, - 'complete_style': self.pt_complete_style, - - # Highlight matching brackets, but only when this setting is - # enabled, and only when the DEFAULT_BUFFER has the focus. - 'input_processors': [ConditionalProcessor( - processor=HighlightMatchingBracketProcessor(chars='[](){}'), - filter=HasFocus(DEFAULT_BUFFER) & ~IsDone() & - Condition(lambda: self.highlight_matching_brackets))], - } - if not PTK3: - options['inputhook'] = self.inputhook - - return options - - def prompt_for_code(self): - if self.rl_next_input: - default = self.rl_next_input - self.rl_next_input = None - else: - default = '' - - # In order to make sure that asyncio code written in the - # interactive shell doesn't interfere with the prompt, we run the - # prompt in a different event loop. - # If we don't do this, people could spawn coroutine with a - # while/true inside which will freeze the prompt. 
- - policy = asyncio.get_event_loop_policy() - try: - old_loop = policy.get_event_loop() - except RuntimeError: - # This happens when the the event loop is closed, - # e.g. by calling `asyncio.run()`. - old_loop = None - - policy.set_event_loop(self.pt_loop) - try: - with patch_stdout(raw=True): - text = self.pt_app.prompt( - default=default, - **self._extra_prompt_options()) - finally: - # Restore the original event loop. - if old_loop is not None: - policy.set_event_loop(old_loop) - - return text - - def enable_win_unicode_console(self): - # Since IPython 7.10 doesn't support python < 3.6 and PEP 528, Python uses the unicode APIs for the Windows - # console by default, so WUC shouldn't be needed. - from warnings import warn - warn("`enable_win_unicode_console` is deprecated since IPython 7.10, does not do anything and will be removed in the future", - DeprecationWarning, - stacklevel=2) - - def init_io(self): - if sys.platform not in {'win32', 'cli'}: - return - - import colorama - colorama.init() - - # For some reason we make these wrappers around stdout/stderr. - # For now, we need to reset them so all output gets coloured. - # https://github.com/ipython/ipython/issues/8669 - # io.std* are deprecated, but don't show our own deprecation warnings - # during initialization of the deprecated API. - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - io.stdout = io.IOStream(sys.stdout) - io.stderr = io.IOStream(sys.stderr) - - def init_magics(self): - super(TerminalInteractiveShell, self).init_magics() - self.register_magics(TerminalMagics) - - def init_alias(self): - # The parent class defines aliases that can be safely used with any - # frontend. - super(TerminalInteractiveShell, self).init_alias() - - # Now define aliases that only make sense on the terminal, because they - # need direct access to the console in a way that we can't emulate in - # GUI or web frontend - if os.name == 'posix': - for cmd in ('clear', 'more', 'less', 'man'): - self.alias_manager.soft_define_alias(cmd, cmd) - - - def __init__(self, *args, **kwargs): - super(TerminalInteractiveShell, self).__init__(*args, **kwargs) - self.init_prompt_toolkit_cli() - self.init_term_title() - self.keep_running = True - - self.debugger_history = InMemoryHistory() - - def ask_exit(self): - self.keep_running = False - - rl_next_input = None - - def interact(self, display_banner=DISPLAY_BANNER_DEPRECATED): - - if display_banner is not DISPLAY_BANNER_DEPRECATED: - warn('interact `display_banner` argument is deprecated since IPython 5.0. Call `show_banner()` if needed.', DeprecationWarning, stacklevel=2) - - self.keep_running = True - while self.keep_running: - print(self.separate_in, end='') - - try: - code = self.prompt_for_code() - except EOFError: - if (not self.confirm_exit) \ - or self.ask_yes_no('Do you really want to exit ([y]/n)?','y','n'): - self.ask_exit() - - else: - if code: - self.run_cell(code, store_history=True) - - def mainloop(self, display_banner=DISPLAY_BANNER_DEPRECATED): - # An extra layer of protection in case someone mashing Ctrl-C breaks - # out of our internal code. - if display_banner is not DISPLAY_BANNER_DEPRECATED: - warn('mainloop `display_banner` argument is deprecated since IPython 5.0. 
Call `show_banner()` if needed.', DeprecationWarning, stacklevel=2) - while True: - try: - self.interact() - break - except KeyboardInterrupt as e: - print("\n%s escaped interact()\n" % type(e).__name__) - finally: - # An interrupt during the eventloop will mess up the - # internal state of the prompt_toolkit library. - # Stopping the eventloop fixes this, see - # https://github.com/ipython/ipython/pull/9867 - if hasattr(self, '_eventloop'): - self._eventloop.stop() - - self.restore_term_title() - - - _inputhook = None - def inputhook(self, context): - if self._inputhook is not None: - self._inputhook(context) - - active_eventloop = None - def enable_gui(self, gui=None): - if gui and (gui != 'inline') : - self.active_eventloop, self._inputhook =\ - get_inputhook_name_and_func(gui) - else: - self.active_eventloop = self._inputhook = None - - # For prompt_toolkit 3.0. We have to create an asyncio event loop with - # this inputhook. - if PTK3: - import asyncio - from prompt_toolkit.eventloop import new_eventloop_with_inputhook - - if gui == 'asyncio': - # When we integrate the asyncio event loop, run the UI in the - # same event loop as the rest of the code. don't use an actual - # input hook. (Asyncio is not made for nesting event loops.) - self.pt_loop = asyncio.get_event_loop() - - elif self._inputhook: - # If an inputhook was set, create a new asyncio event loop with - # this inputhook for the prompt. - self.pt_loop = new_eventloop_with_inputhook(self._inputhook) - else: - # When there's no inputhook, run the prompt in a separate - # asyncio event loop. - self.pt_loop = asyncio.new_event_loop() - - # Run !system commands directly, not through pipes, so terminal programs - # work correctly. - system = InteractiveShell.system_raw - - def auto_rewrite_input(self, cmd): - """Overridden from the parent class to use fancy rewriting prompt""" - if not self.show_rewritten_input: - return - - tokens = self.prompts.rewrite_prompt_tokens() - if self.pt_app: - print_formatted_text(PygmentsTokens(tokens), end='', - style=self.pt_app.app.style) - print(cmd) - else: - prompt = ''.join(s for t, s in tokens) - print(prompt, cmd, sep='') - - _prompts_before = None - def switch_doctest_mode(self, mode): - """Switch prompts to classic for %doctest_mode""" - if mode: - self._prompts_before = self.prompts - self.prompts = ClassicPrompts(self) - elif self._prompts_before: - self.prompts = self._prompts_before - self._prompts_before = None -# self._update_layout() - - -InteractiveShellABC.register(TerminalInteractiveShell) - -if __name__ == '__main__': - TerminalInteractiveShell.instance().interact() +"""IPython terminal interface using prompt_toolkit""" + +import asyncio +import os +import sys +import warnings +from warnings import warn + +from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC +from IPython.utils import io +from IPython.utils.py3compat import input +from IPython.utils.terminal import toggle_set_term_title, set_term_title, restore_term_title +from IPython.utils.process import abbrev_cwd +from traitlets import ( + Bool, Unicode, Dict, Integer, observe, Instance, Type, default, Enum, Union, + Any, validate +) + +from prompt_toolkit.enums import DEFAULT_BUFFER, EditingMode +from prompt_toolkit.filters import (HasFocus, Condition, IsDone) +from prompt_toolkit.formatted_text import PygmentsTokens +from prompt_toolkit.history import InMemoryHistory +from prompt_toolkit.layout.processors import ConditionalProcessor, HighlightMatchingBracketProcessor +from prompt_toolkit.output 
import ColorDepth +from prompt_toolkit.patch_stdout import patch_stdout +from prompt_toolkit.shortcuts import PromptSession, CompleteStyle, print_formatted_text +from prompt_toolkit.styles import DynamicStyle, merge_styles +from prompt_toolkit.styles.pygments import style_from_pygments_cls, style_from_pygments_dict +from prompt_toolkit import __version__ as ptk_version + +from pygments.styles import get_style_by_name +from pygments.style import Style +from pygments.token import Token + +from .debugger import TerminalPdb, Pdb +from .magics import TerminalMagics +from .pt_inputhooks import get_inputhook_name_and_func +from .prompts import Prompts, ClassicPrompts, RichPromptDisplayHook +from .ptutils import IPythonPTCompleter, IPythonPTLexer +from .shortcuts import create_ipython_shortcuts + +DISPLAY_BANNER_DEPRECATED = object() +PTK3 = ptk_version.startswith('3.') + + +class _NoStyle(Style): pass + + + +_style_overrides_light_bg = { + Token.Prompt: '#0000ff', + Token.PromptNum: '#0000ee bold', + Token.OutPrompt: '#cc0000', + Token.OutPromptNum: '#bb0000 bold', +} + +_style_overrides_linux = { + Token.Prompt: '#00cc00', + Token.PromptNum: '#00bb00 bold', + Token.OutPrompt: '#cc0000', + Token.OutPromptNum: '#bb0000 bold', +} + +def get_default_editor(): + try: + return os.environ['EDITOR'] + except KeyError: + pass + except UnicodeError: + warn("$EDITOR environment variable is not pure ASCII. Using platform " + "default editor.") + + if os.name == 'posix': + return 'vi' # the only one guaranteed to be there! + else: + return 'notepad' # same in Windows! + +# conservatively check for tty +# overridden streams can result in things like: +# - sys.stdin = None +# - no isatty method +for _name in ('stdin', 'stdout', 'stderr'): + _stream = getattr(sys, _name) + if not _stream or not hasattr(_stream, 'isatty') or not _stream.isatty(): + _is_tty = False + break +else: + _is_tty = True + + +_use_simple_prompt = ('IPY_TEST_SIMPLE_PROMPT' in os.environ) or (not _is_tty) + +def black_reformat_handler(text_before_cursor): + import black + formatted_text = black.format_str(text_before_cursor, mode=black.FileMode()) + if not text_before_cursor.endswith('\n') and formatted_text.endswith('\n'): + formatted_text = formatted_text[:-1] + return formatted_text + + +class TerminalInteractiveShell(InteractiveShell): + mime_renderers = Dict().tag(config=True) + + space_for_menu = Integer(6, help='Number of line at the bottom of the screen ' + 'to reserve for the tab completion menu, ' + 'search history, ...etc, the height of ' + 'these menus will at most this value. ' + 'Increase it is you prefer long and skinny ' + 'menus, decrease for short and wide.' + ).tag(config=True) + + pt_app = None + debugger_history = None + + simple_prompt = Bool(_use_simple_prompt, + help="""Use `raw_input` for the REPL, without completion and prompt colors. + + Useful when controlling IPython as a subprocess, and piping STDIN/OUT/ERR. Known usage are: + IPython own testing machinery, and emacs inferior-shell integration through elpy. + + This mode default to `True` if the `IPY_TEST_SIMPLE_PROMPT` + environment variable is set, or the current terminal is not a tty.""" + ).tag(config=True) + + @property + def debugger_cls(self): + return Pdb if self.simple_prompt else TerminalPdb + + confirm_exit = Bool(True, + help=""" + Set to confirm when you try to exit IPython with an EOF (Control-D + in Unix, Control-Z/Enter in Windows). 
By typing 'exit' or 'quit', + you can force a direct exit without any confirmation.""", + ).tag(config=True) + + editing_mode = Unicode('emacs', + help="Shortcut style to use at the prompt. 'vi' or 'emacs'.", + ).tag(config=True) + + autoformatter = Unicode(None, + help="Autoformatter to reformat Terminal code. Can be `'black'` or `None`", + allow_none=True + ).tag(config=True) + + mouse_support = Bool(False, + help="Enable mouse support in the prompt\n(Note: prevents selecting text with the mouse)" + ).tag(config=True) + + # We don't load the list of styles for the help string, because loading + # Pygments plugins takes time and can cause unexpected errors. + highlighting_style = Union([Unicode('legacy'), Type(klass=Style)], + help="""The name or class of a Pygments style to use for syntax + highlighting. To see available styles, run `pygmentize -L styles`.""" + ).tag(config=True) + + @validate('editing_mode') + def _validate_editing_mode(self, proposal): + if proposal['value'].lower() == 'vim': + proposal['value']= 'vi' + elif proposal['value'].lower() == 'default': + proposal['value']= 'emacs' + + if hasattr(EditingMode, proposal['value'].upper()): + return proposal['value'].lower() + + return self.editing_mode + + + @observe('editing_mode') + def _editing_mode(self, change): + if self.pt_app: + self.pt_app.editing_mode = getattr(EditingMode, change.new.upper()) + + @observe('autoformatter') + def _autoformatter_changed(self, change): + formatter = change.new + if formatter is None: + self.reformat_handler = lambda x:x + elif formatter == 'black': + self.reformat_handler = black_reformat_handler + else: + raise ValueError + + @observe('highlighting_style') + @observe('colors') + def _highlighting_style_changed(self, change): + self.refresh_style() + + def refresh_style(self): + self._style = self._make_style_from_name_or_cls(self.highlighting_style) + + + highlighting_style_overrides = Dict( + help="Override highlighting format for specific tokens" + ).tag(config=True) + + true_color = Bool(False, + help=("Use 24bit colors instead of 256 colors in prompt highlighting. " + "If your terminal supports true color, the following command " + "should print 'TRUECOLOR' in orange: " + "printf \"\\x1b[38;2;255;100;0mTRUECOLOR\\x1b[0m\\n\"") + ).tag(config=True) + + editor = Unicode(get_default_editor(), + help="Set the editor used by IPython (default to $EDITOR/vi/notepad)." + ).tag(config=True) + + prompts_class = Type(Prompts, help='Class used to generate Prompt token for prompt_toolkit').tag(config=True) + + prompts = Instance(Prompts) + + @default('prompts') + def _prompts_default(self): + return self.prompts_class(self) + +# @observe('prompts') +# def _(self, change): +# self._update_layout() + + @default('displayhook_class') + def _displayhook_class_default(self): + return RichPromptDisplayHook + + term_title = Bool(True, + help="Automatically set the terminal title" + ).tag(config=True) + + term_title_format = Unicode("IPython: {cwd}", + help="Customize the terminal title format. This is a python format string. " + + "Available substitutions are: {cwd}." + ).tag(config=True) + + display_completions = Enum(('column', 'multicolumn','readlinelike'), + help= ( "Options for displaying tab completions, 'column', 'multicolumn', and " + "'readlinelike'. These options are for `prompt_toolkit`, see " + "`prompt_toolkit` documentation for more information." 
+ ), + default_value='multicolumn').tag(config=True) + + highlight_matching_brackets = Bool(True, + help="Highlight matching brackets.", + ).tag(config=True) + + extra_open_editor_shortcuts = Bool(False, + help="Enable vi (v) or Emacs (C-X C-E) shortcuts to open an external editor. " + "This is in addition to the F2 binding, which is always enabled." + ).tag(config=True) + + handle_return = Any(None, + help="Provide an alternative handler to be called when the user presses " + "Return. This is an advanced option intended for debugging, which " + "may be changed or removed in later releases." + ).tag(config=True) + + enable_history_search = Bool(True, + help="Allows to enable/disable the prompt toolkit history search" + ).tag(config=True) + + prompt_includes_vi_mode = Bool(True, + help="Display the current vi mode (when using vi editing mode)." + ).tag(config=True) + + @observe('term_title') + def init_term_title(self, change=None): + # Enable or disable the terminal title. + if self.term_title: + toggle_set_term_title(True) + set_term_title(self.term_title_format.format(cwd=abbrev_cwd())) + else: + toggle_set_term_title(False) + + def restore_term_title(self): + if self.term_title: + restore_term_title() + + def init_display_formatter(self): + super(TerminalInteractiveShell, self).init_display_formatter() + # terminal only supports plain text + self.display_formatter.active_types = ['text/plain'] + # disable `_ipython_display_` + self.display_formatter.ipython_display_formatter.enabled = False + + def init_prompt_toolkit_cli(self): + if self.simple_prompt: + # Fall back to plain non-interactive output for tests. + # This is very limited. + def prompt(): + prompt_text = "".join(x[1] for x in self.prompts.in_prompt_tokens()) + lines = [input(prompt_text)] + prompt_continuation = "".join(x[1] for x in self.prompts.continuation_prompt_tokens()) + while self.check_complete('\n'.join(lines))[0] == 'incomplete': + lines.append( input(prompt_continuation) ) + return '\n'.join(lines) + self.prompt_for_code = prompt + return + + # Set up keyboard shortcuts + key_bindings = create_ipython_shortcuts(self) + + # Pre-populate history from IPython's history database + history = InMemoryHistory() + last_cell = u"" + for __, ___, cell in self.history_manager.get_tail(self.history_load_length, + include_latest=True): + # Ignore blank lines and consecutive duplicates + cell = cell.rstrip() + if cell and (cell != last_cell): + history.append_string(cell) + last_cell = cell + + self._style = self._make_style_from_name_or_cls(self.highlighting_style) + self.style = DynamicStyle(lambda: self._style) + + editing_mode = getattr(EditingMode, self.editing_mode.upper()) + + self.pt_loop = asyncio.new_event_loop() + self.pt_app = PromptSession( + editing_mode=editing_mode, + key_bindings=key_bindings, + history=history, + completer=IPythonPTCompleter(shell=self), + enable_history_search = self.enable_history_search, + style=self.style, + include_default_pygments_style=False, + mouse_support=self.mouse_support, + enable_open_in_editor=self.extra_open_editor_shortcuts, + color_depth=self.color_depth, + tempfile_suffix=".py", + **self._extra_prompt_options()) + + def _make_style_from_name_or_cls(self, name_or_cls): + """ + Small wrapper that make an IPython compatible style from a style name + + We need that to add style for prompt ... etc. 
+ """ + style_overrides = {} + if name_or_cls == 'legacy': + legacy = self.colors.lower() + if legacy == 'linux': + style_cls = get_style_by_name('monokai') + style_overrides = _style_overrides_linux + elif legacy == 'lightbg': + style_overrides = _style_overrides_light_bg + style_cls = get_style_by_name('pastie') + elif legacy == 'neutral': + # The default theme needs to be visible on both a dark background + # and a light background, because we can't tell what the terminal + # looks like. These tweaks to the default theme help with that. + style_cls = get_style_by_name('default') + style_overrides.update({ + Token.Number: '#007700', + Token.Operator: 'noinherit', + Token.String: '#BB6622', + Token.Name.Function: '#2080D0', + Token.Name.Class: 'bold #2080D0', + Token.Name.Namespace: 'bold #2080D0', + Token.Name.Variable.Magic: '#ansiblue', + Token.Prompt: '#009900', + Token.PromptNum: '#ansibrightgreen bold', + Token.OutPrompt: '#990000', + Token.OutPromptNum: '#ansibrightred bold', + }) + + # Hack: Due to limited color support on the Windows console + # the prompt colors will be wrong without this + if os.name == 'nt': + style_overrides.update({ + Token.Prompt: '#ansidarkgreen', + Token.PromptNum: '#ansigreen bold', + Token.OutPrompt: '#ansidarkred', + Token.OutPromptNum: '#ansired bold', + }) + elif legacy =='nocolor': + style_cls=_NoStyle + style_overrides = {} + else : + raise ValueError('Got unknown colors: ', legacy) + else : + if isinstance(name_or_cls, str): + style_cls = get_style_by_name(name_or_cls) + else: + style_cls = name_or_cls + style_overrides = { + Token.Prompt: '#009900', + Token.PromptNum: '#ansibrightgreen bold', + Token.OutPrompt: '#990000', + Token.OutPromptNum: '#ansibrightred bold', + } + style_overrides.update(self.highlighting_style_overrides) + style = merge_styles([ + style_from_pygments_cls(style_cls), + style_from_pygments_dict(style_overrides), + ]) + + return style + + @property + def pt_complete_style(self): + return { + 'multicolumn': CompleteStyle.MULTI_COLUMN, + 'column': CompleteStyle.COLUMN, + 'readlinelike': CompleteStyle.READLINE_LIKE, + }[self.display_completions] + + @property + def color_depth(self): + return (ColorDepth.TRUE_COLOR if self.true_color else None) + + def _extra_prompt_options(self): + """ + Return the current layout option for the current Terminal InteractiveShell + """ + def get_message(): + return PygmentsTokens(self.prompts.in_prompt_tokens()) + + if self.editing_mode == 'emacs': + # with emacs mode the prompt is (usually) static, so we call only + # the function once. With VI mode it can toggle between [ins] and + # [nor] so we can't precompute. + # here I'm going to favor the default keybinding which almost + # everybody uses to decrease CPU usage. + # if we have issues with users with custom Prompts we can see how to + # work around this. + get_message = get_message() + + options = { + 'complete_in_thread': False, + 'lexer':IPythonPTLexer(), + 'reserve_space_for_menu':self.space_for_menu, + 'message': get_message, + 'prompt_continuation': ( + lambda width, lineno, is_soft_wrap: + PygmentsTokens(self.prompts.continuation_prompt_tokens(width))), + 'multiline': True, + 'complete_style': self.pt_complete_style, + + # Highlight matching brackets, but only when this setting is + # enabled, and only when the DEFAULT_BUFFER has the focus. 
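# [Editor's aside, not part of the diff] `Condition` wraps a plain Python
# callable and is re-evaluated on every redraw, which is what lets the
# `highlight_matching_brackets` traitlet toggle the processor at runtime.
# A standalone sketch of the same composition (`show_brackets` is made up):
from prompt_toolkit.enums import DEFAULT_BUFFER
from prompt_toolkit.filters import Condition, HasFocus, IsDone
from prompt_toolkit.layout.processors import (
    ConditionalProcessor, HighlightMatchingBracketProcessor)

show_brackets = True        # imagine a config flag flipped while running

bracket_processor = ConditionalProcessor(
    processor=HighlightMatchingBracketProcessor(chars='[](){}'),
    # active only while the main buffer has focus, input is not yet
    # accepted, and the flag is currently True
    filter=(HasFocus(DEFAULT_BUFFER) & ~IsDone()
            & Condition(lambda: show_brackets)),
)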
+ 'input_processors': [ConditionalProcessor( + processor=HighlightMatchingBracketProcessor(chars='[](){}'), + filter=HasFocus(DEFAULT_BUFFER) & ~IsDone() & + Condition(lambda: self.highlight_matching_brackets))], + } + if not PTK3: + options['inputhook'] = self.inputhook + + return options + + def prompt_for_code(self): + if self.rl_next_input: + default = self.rl_next_input + self.rl_next_input = None + else: + default = '' + + # In order to make sure that asyncio code written in the + # interactive shell doesn't interfere with the prompt, we run the + # prompt in a different event loop. + # If we don't do this, people could spawn coroutine with a + # while/true inside which will freeze the prompt. + + policy = asyncio.get_event_loop_policy() + try: + old_loop = policy.get_event_loop() + except RuntimeError: + # This happens when the the event loop is closed, + # e.g. by calling `asyncio.run()`. + old_loop = None + + policy.set_event_loop(self.pt_loop) + try: + with patch_stdout(raw=True): + text = self.pt_app.prompt( + default=default, + **self._extra_prompt_options()) + finally: + # Restore the original event loop. + if old_loop is not None: + policy.set_event_loop(old_loop) + + return text + + def enable_win_unicode_console(self): + # Since IPython 7.10 doesn't support python < 3.6 and PEP 528, Python uses the unicode APIs for the Windows + # console by default, so WUC shouldn't be needed. + from warnings import warn + warn("`enable_win_unicode_console` is deprecated since IPython 7.10, does not do anything and will be removed in the future", + DeprecationWarning, + stacklevel=2) + + def init_io(self): + if sys.platform not in {'win32', 'cli'}: + return + + import colorama + colorama.init() + + # For some reason we make these wrappers around stdout/stderr. + # For now, we need to reset them so all output gets coloured. + # https://github.com/ipython/ipython/issues/8669 + # io.std* are deprecated, but don't show our own deprecation warnings + # during initialization of the deprecated API. + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + io.stdout = io.IOStream(sys.stdout) + io.stderr = io.IOStream(sys.stderr) + + def init_magics(self): + super(TerminalInteractiveShell, self).init_magics() + self.register_magics(TerminalMagics) + + def init_alias(self): + # The parent class defines aliases that can be safely used with any + # frontend. + super(TerminalInteractiveShell, self).init_alias() + + # Now define aliases that only make sense on the terminal, because they + # need direct access to the console in a way that we can't emulate in + # GUI or web frontend + if os.name == 'posix': + for cmd in ('clear', 'more', 'less', 'man'): + self.alias_manager.soft_define_alias(cmd, cmd) + + + def __init__(self, *args, **kwargs): + super(TerminalInteractiveShell, self).__init__(*args, **kwargs) + self.init_prompt_toolkit_cli() + self.init_term_title() + self.keep_running = True + + self.debugger_history = InMemoryHistory() + + def ask_exit(self): + self.keep_running = False + + rl_next_input = None + + def interact(self, display_banner=DISPLAY_BANNER_DEPRECATED): + + if display_banner is not DISPLAY_BANNER_DEPRECATED: + warn('interact `display_banner` argument is deprecated since IPython 5.0. 
Call `show_banner()` if needed.', DeprecationWarning, stacklevel=2) + + self.keep_running = True + while self.keep_running: + print(self.separate_in, end='') + + try: + code = self.prompt_for_code() + except EOFError: + if (not self.confirm_exit) \ + or self.ask_yes_no('Do you really want to exit ([y]/n)?','y','n'): + self.ask_exit() + + else: + if code: + self.run_cell(code, store_history=True) + + def mainloop(self, display_banner=DISPLAY_BANNER_DEPRECATED): + # An extra layer of protection in case someone mashing Ctrl-C breaks + # out of our internal code. + if display_banner is not DISPLAY_BANNER_DEPRECATED: + warn('mainloop `display_banner` argument is deprecated since IPython 5.0. Call `show_banner()` if needed.', DeprecationWarning, stacklevel=2) + while True: + try: + self.interact() + break + except KeyboardInterrupt as e: + print("\n%s escaped interact()\n" % type(e).__name__) + finally: + # An interrupt during the eventloop will mess up the + # internal state of the prompt_toolkit library. + # Stopping the eventloop fixes this, see + # https://github.com/ipython/ipython/pull/9867 + if hasattr(self, '_eventloop'): + self._eventloop.stop() + + self.restore_term_title() + + + _inputhook = None + def inputhook(self, context): + if self._inputhook is not None: + self._inputhook(context) + + active_eventloop = None + def enable_gui(self, gui=None): + if gui and (gui != 'inline') : + self.active_eventloop, self._inputhook =\ + get_inputhook_name_and_func(gui) + else: + self.active_eventloop = self._inputhook = None + + # For prompt_toolkit 3.0. We have to create an asyncio event loop with + # this inputhook. + if PTK3: + import asyncio + from prompt_toolkit.eventloop import new_eventloop_with_inputhook + + if gui == 'asyncio': + # When we integrate the asyncio event loop, run the UI in the + # same event loop as the rest of the code. don't use an actual + # input hook. (Asyncio is not made for nesting event loops.) + self.pt_loop = asyncio.get_event_loop() + + elif self._inputhook: + # If an inputhook was set, create a new asyncio event loop with + # this inputhook for the prompt. + self.pt_loop = new_eventloop_with_inputhook(self._inputhook) + else: + # When there's no inputhook, run the prompt in a separate + # asyncio event loop. + self.pt_loop = asyncio.new_event_loop() + + # Run !system commands directly, not through pipes, so terminal programs + # work correctly. 
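# [Editor's aside, not part of the diff] Why "directly, not through pipes":
# capturing output forces the child process onto pipes, so full-screen
# programs such as `less` or `vim` cannot drive the terminal, while
# inheriting the parent's stdin/stdout keeps them working. A small sketch
# of the two behaviours:
import subprocess

def run_piped(cmd):
    # output goes to a pipe: fine for `ls`, breaks curses-style programs
    return subprocess.run(cmd, shell=True, capture_output=True, text=True).stdout

def run_attached(cmd):
    # child inherits the real terminal, roughly what system_raw provides
    return subprocess.run(cmd, shell=True).returncode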
+ system = InteractiveShell.system_raw + + def auto_rewrite_input(self, cmd): + """Overridden from the parent class to use fancy rewriting prompt""" + if not self.show_rewritten_input: + return + + tokens = self.prompts.rewrite_prompt_tokens() + if self.pt_app: + print_formatted_text(PygmentsTokens(tokens), end='', + style=self.pt_app.app.style) + print(cmd) + else: + prompt = ''.join(s for t, s in tokens) + print(prompt, cmd, sep='') + + _prompts_before = None + def switch_doctest_mode(self, mode): + """Switch prompts to classic for %doctest_mode""" + if mode: + self._prompts_before = self.prompts + self.prompts = ClassicPrompts(self) + elif self._prompts_before: + self.prompts = self._prompts_before + self._prompts_before = None +# self._update_layout() + + +InteractiveShellABC.register(TerminalInteractiveShell) + +if __name__ == '__main__': + TerminalInteractiveShell.instance().interact() diff --git a/contrib/python/ipython/py3/IPython/terminal/ipapp.py b/contrib/python/ipython/py3/IPython/terminal/ipapp.py index 180cec11582..defe3e79fa8 100644 --- a/contrib/python/ipython/py3/IPython/terminal/ipapp.py +++ b/contrib/python/ipython/py3/IPython/terminal/ipapp.py @@ -1,380 +1,380 @@ -#!/usr/bin/env python -# encoding: utf-8 -""" -The :class:`~IPython.core.application.Application` object for the command -line :command:`ipython` program. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - - -import logging -import os -import sys -import warnings - -from traitlets.config.loader import Config -from traitlets.config.application import boolean_flag, catch_config_error -from IPython.core import release -from IPython.core import usage -from IPython.core.completer import IPCompleter -from IPython.core.crashhandler import CrashHandler -from IPython.core.formatters import PlainTextFormatter -from IPython.core.history import HistoryManager -from IPython.core.application import ( - ProfileDir, BaseIPythonApplication, base_flags, base_aliases -) -from IPython.core.magics import ( - ScriptMagics, LoggingMagics -) -from IPython.core.shellapp import ( - InteractiveShellApp, shell_flags, shell_aliases -) -from IPython.extensions.storemagic import StoreMagics -from .interactiveshell import TerminalInteractiveShell -from IPython.paths import get_ipython_dir -from traitlets import ( - Bool, List, default, observe, Type -) - -#----------------------------------------------------------------------------- -# Globals, utilities and helpers -#----------------------------------------------------------------------------- - -_examples = """ -ipython --matplotlib # enable matplotlib integration -ipython --matplotlib=qt # enable matplotlib integration with qt4 backend - -ipython --log-level=DEBUG # set logging to DEBUG -ipython --profile=foo # start with profile foo - -ipython profile create foo # create profile foo w/ default config files -ipython help profile # show the help for the profile subcmd - -ipython locate # print the path to the IPython directory -ipython locate profile foo # print the path to the directory for profile `foo` -""" - -#----------------------------------------------------------------------------- -# Crash handler for this application -#----------------------------------------------------------------------------- - -class IPAppCrashHandler(CrashHandler): - """sys.excepthook for IPython itself, leaves a detailed report on disk.""" - - def __init__(self, app): - contact_name = release.author - contact_email = release.author_email - 
bug_tracker = 'https://github.com/ipython/ipython/issues' - super(IPAppCrashHandler,self).__init__( - app, contact_name, contact_email, bug_tracker - ) - - def make_report(self,traceback): - """Return a string containing a crash report.""" - - sec_sep = self.section_sep - # Start with parent report - report = [super(IPAppCrashHandler, self).make_report(traceback)] - # Add interactive-specific info we may have - rpt_add = report.append - try: - rpt_add(sec_sep+"History of session input:") - for line in self.app.shell.user_ns['_ih']: - rpt_add(line) - rpt_add('\n*** Last line of input (may not be in above history):\n') - rpt_add(self.app.shell._last_input_line+'\n') - except: - pass - - return ''.join(report) - -#----------------------------------------------------------------------------- -# Aliases and Flags -#----------------------------------------------------------------------------- -flags = dict(base_flags) -flags.update(shell_flags) -frontend_flags = {} -addflag = lambda *args: frontend_flags.update(boolean_flag(*args)) -addflag('autoedit-syntax', 'TerminalInteractiveShell.autoedit_syntax', - 'Turn on auto editing of files with syntax errors.', - 'Turn off auto editing of files with syntax errors.' -) -addflag('simple-prompt', 'TerminalInteractiveShell.simple_prompt', - "Force simple minimal prompt using `raw_input`", - "Use a rich interactive prompt with prompt_toolkit", -) - -addflag('banner', 'TerminalIPythonApp.display_banner', - "Display a banner upon starting IPython.", - "Don't display a banner upon starting IPython." -) -addflag('confirm-exit', 'TerminalInteractiveShell.confirm_exit', - """Set to confirm when you try to exit IPython with an EOF (Control-D - in Unix, Control-Z/Enter in Windows). By typing 'exit' or 'quit', - you can force a direct exit without any confirmation.""", - "Don't prompt the user when exiting." -) -addflag('term-title', 'TerminalInteractiveShell.term_title', - "Enable auto setting the terminal title.", - "Disable auto setting the terminal title." -) -classic_config = Config() -classic_config.InteractiveShell.cache_size = 0 -classic_config.PlainTextFormatter.pprint = False -classic_config.TerminalInteractiveShell.prompts_class='IPython.terminal.prompts.ClassicPrompts' -classic_config.InteractiveShell.separate_in = '' -classic_config.InteractiveShell.separate_out = '' -classic_config.InteractiveShell.separate_out2 = '' -classic_config.InteractiveShell.colors = 'NoColor' -classic_config.InteractiveShell.xmode = 'Plain' - -frontend_flags['classic']=( - classic_config, - "Gives IPython a similar feel to the classic Python prompt." -) -# # log doesn't make so much sense this way anymore -# paa('--log','-l', -# action='store_true', dest='InteractiveShell.logstart', -# help="Start logging to the default log file (./ipython_log.py).") -# -# # quick is harder to implement -frontend_flags['quick']=( - {'TerminalIPythonApp' : {'quick' : True}}, - "Enable quick startup with no config files." -) - -frontend_flags['i'] = ( - {'TerminalIPythonApp' : {'force_interact' : True}}, - """If running code from the command line, become interactive afterwards. - It is often useful to follow this with `--` to treat remaining flags as - script arguments. 
- """ -) -flags.update(frontend_flags) - -aliases = dict(base_aliases) -aliases.update(shell_aliases) - -#----------------------------------------------------------------------------- -# Main classes and functions -#----------------------------------------------------------------------------- - - -class LocateIPythonApp(BaseIPythonApplication): - description = """print the path to the IPython dir""" - subcommands = dict( - profile=('IPython.core.profileapp.ProfileLocate', - "print the path to an IPython profile directory", - ), - ) - def start(self): - if self.subapp is not None: - return self.subapp.start() - else: - print(self.ipython_dir) - - -class TerminalIPythonApp(BaseIPythonApplication, InteractiveShellApp): - name = u'ipython' - description = usage.cl_usage - crash_handler_class = IPAppCrashHandler - examples = _examples - - flags = flags - aliases = aliases - classes = List() - - interactive_shell_class = Type( - klass=object, # use default_value otherwise which only allow subclasses. - default_value=TerminalInteractiveShell, - help="Class to use to instantiate the TerminalInteractiveShell object. Useful for custom Frontends" - ).tag(config=True) - - @default('classes') - def _classes_default(self): - """This has to be in a method, for TerminalIPythonApp to be available.""" - return [ - InteractiveShellApp, # ShellApp comes before TerminalApp, because - self.__class__, # it will also affect subclasses (e.g. QtConsole) - TerminalInteractiveShell, - HistoryManager, - ProfileDir, - PlainTextFormatter, - IPCompleter, - ScriptMagics, - LoggingMagics, - StoreMagics, - ] - - deprecated_subcommands = dict( - qtconsole=('qtconsole.qtconsoleapp.JupyterQtConsoleApp', - """DEPRECATED, Will be removed in IPython 6.0 : Launch the Jupyter Qt Console.""" - ), - notebook=('notebook.notebookapp.NotebookApp', - """DEPRECATED, Will be removed in IPython 6.0 : Launch the Jupyter HTML Notebook Server.""" - ), - console=('jupyter_console.app.ZMQTerminalIPythonApp', - """DEPRECATED, Will be removed in IPython 6.0 : Launch the Jupyter terminal-based Console.""" - ), - nbconvert=('nbconvert.nbconvertapp.NbConvertApp', - "DEPRECATED, Will be removed in IPython 6.0 : Convert notebooks to/from other formats." - ), - trust=('nbformat.sign.TrustNotebookApp', - "DEPRECATED, Will be removed in IPython 6.0 : Sign notebooks to trust their potentially unsafe contents at load." - ), - kernelspec=('jupyter_client.kernelspecapp.KernelSpecApp', - "DEPRECATED, Will be removed in IPython 6.0 : Manage Jupyter kernel specifications." - ), - ) - subcommands = dict( - profile = ("IPython.core.profileapp.ProfileApp", - "Create and manage IPython profiles." - ), - kernel = ("ipykernel.kernelapp.IPKernelApp", - "Start a kernel without an attached frontend." - ), - locate=('IPython.terminal.ipapp.LocateIPythonApp', - LocateIPythonApp.description - ), - history=('IPython.core.historyapp.HistoryApp', - "Manage the IPython history database." - ), - ) - deprecated_subcommands['install-nbextension'] = ( - "notebook.nbextensions.InstallNBExtensionApp", - "DEPRECATED, Will be removed in IPython 6.0 : Install Jupyter notebook extension files" - ) - subcommands.update(deprecated_subcommands) - - # *do* autocreate requested profile, but don't create the config file. 
- auto_create=Bool(True) - # configurables - quick = Bool(False, - help="""Start IPython quickly by skipping the loading of config files.""" - ).tag(config=True) - @observe('quick') - def _quick_changed(self, change): - if change['new']: - self.load_config_file = lambda *a, **kw: None - - display_banner = Bool(True, - help="Whether to display a banner upon starting IPython." - ).tag(config=True) - - # if there is code of files to run from the cmd line, don't interact - # unless the --i flag (App.force_interact) is true. - force_interact = Bool(False, - help="""If a command or file is given via the command-line, - e.g. 'ipython foo.py', start an interactive shell after executing the - file or command.""" - ).tag(config=True) - @observe('force_interact') - def _force_interact_changed(self, change): - if change['new']: - self.interact = True - - @observe('file_to_run', 'code_to_run', 'module_to_run') - def _file_to_run_changed(self, change): - new = change['new'] - if new: - self.something_to_run = True - if new and not self.force_interact: - self.interact = False - - # internal, not-configurable - something_to_run=Bool(False) - - def parse_command_line(self, argv=None): - """override to allow old '-pylab' flag with deprecation warning""" - - argv = sys.argv[1:] if argv is None else argv - - if '-pylab' in argv: - # deprecated `-pylab` given, - # warn and transform into current syntax - argv = argv[:] # copy, don't clobber - idx = argv.index('-pylab') - warnings.warn("`-pylab` flag has been deprecated.\n" - " Use `--matplotlib <backend>` and import pylab manually.") - argv[idx] = '--pylab' - - return super(TerminalIPythonApp, self).parse_command_line(argv) - - @catch_config_error - def initialize(self, argv=None): - """Do actions after construct, but before starting the app.""" - super(TerminalIPythonApp, self).initialize(argv) - if self.subapp is not None: - # don't bother initializing further, starting subapp - return - # print self.extra_args - if self.extra_args and not self.something_to_run: - self.file_to_run = self.extra_args[0] - self.init_path() - # create the shell - self.init_shell() - # and draw the banner - self.init_banner() - # Now a variety of things that happen after the banner is printed. - self.init_gui_pylab() - self.init_extensions() - self.init_code() - - def init_shell(self): - """initialize the InteractiveShell instance""" - # Create an InteractiveShell instance. - # shell.display_banner should always be False for the terminal - # based app, because we call shell.show_banner() by hand below - # so the banner shows *before* all extension loading stuff. - self.shell = self.interactive_shell_class.instance(parent=self, - profile_dir=self.profile_dir, - ipython_dir=self.ipython_dir, user_ns=self.user_ns) - self.shell.configurables.append(self) - - def init_banner(self): - """optionally display the banner""" - if self.display_banner and self.interact: - self.shell.show_banner() - # Make sure there is a space below the banner. 
- if self.log_level <= logging.INFO: print() - - def _pylab_changed(self, name, old, new): - """Replace --pylab='inline' with --pylab='auto'""" - if new == 'inline': - warnings.warn("'inline' not available as pylab backend, " - "using 'auto' instead.") - self.pylab = 'auto' - - def start(self): - if self.subapp is not None: - return self.subapp.start() - # perform any prexec steps: - if self.interact: - self.log.debug("Starting IPython's mainloop...") - self.shell.mainloop() - else: - self.log.debug("IPython not interactive...") - if not self.shell.last_execution_succeeded: - sys.exit(1) - -def load_default_config(ipython_dir=None): - """Load the default config file from the default ipython_dir. - - This is useful for embedded shells. - """ - if ipython_dir is None: - ipython_dir = get_ipython_dir() - - profile_dir = os.path.join(ipython_dir, 'profile_default') - app = TerminalIPythonApp() - app.config_file_paths.append(profile_dir) - app.load_config_file() - return app.config - -launch_new_instance = TerminalIPythonApp.launch_instance - - -if __name__ == '__main__': - launch_new_instance() +#!/usr/bin/env python +# encoding: utf-8 +""" +The :class:`~IPython.core.application.Application` object for the command +line :command:`ipython` program. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + + +import logging +import os +import sys +import warnings + +from traitlets.config.loader import Config +from traitlets.config.application import boolean_flag, catch_config_error +from IPython.core import release +from IPython.core import usage +from IPython.core.completer import IPCompleter +from IPython.core.crashhandler import CrashHandler +from IPython.core.formatters import PlainTextFormatter +from IPython.core.history import HistoryManager +from IPython.core.application import ( + ProfileDir, BaseIPythonApplication, base_flags, base_aliases +) +from IPython.core.magics import ( + ScriptMagics, LoggingMagics +) +from IPython.core.shellapp import ( + InteractiveShellApp, shell_flags, shell_aliases +) +from IPython.extensions.storemagic import StoreMagics +from .interactiveshell import TerminalInteractiveShell +from IPython.paths import get_ipython_dir +from traitlets import ( + Bool, List, default, observe, Type +) + +#----------------------------------------------------------------------------- +# Globals, utilities and helpers +#----------------------------------------------------------------------------- + +_examples = """ +ipython --matplotlib # enable matplotlib integration +ipython --matplotlib=qt # enable matplotlib integration with qt4 backend + +ipython --log-level=DEBUG # set logging to DEBUG +ipython --profile=foo # start with profile foo + +ipython profile create foo # create profile foo w/ default config files +ipython help profile # show the help for the profile subcmd + +ipython locate # print the path to the IPython directory +ipython locate profile foo # print the path to the directory for profile `foo` +""" + +#----------------------------------------------------------------------------- +# Crash handler for this application +#----------------------------------------------------------------------------- + +class IPAppCrashHandler(CrashHandler): + """sys.excepthook for IPython itself, leaves a detailed report on disk.""" + + def __init__(self, app): + contact_name = release.author + contact_email = release.author_email + bug_tracker = 'https://github.com/ipython/ipython/issues' + super(IPAppCrashHandler,self).__init__( 
+ app, contact_name, contact_email, bug_tracker + ) + + def make_report(self,traceback): + """Return a string containing a crash report.""" + + sec_sep = self.section_sep + # Start with parent report + report = [super(IPAppCrashHandler, self).make_report(traceback)] + # Add interactive-specific info we may have + rpt_add = report.append + try: + rpt_add(sec_sep+"History of session input:") + for line in self.app.shell.user_ns['_ih']: + rpt_add(line) + rpt_add('\n*** Last line of input (may not be in above history):\n') + rpt_add(self.app.shell._last_input_line+'\n') + except: + pass + + return ''.join(report) + +#----------------------------------------------------------------------------- +# Aliases and Flags +#----------------------------------------------------------------------------- +flags = dict(base_flags) +flags.update(shell_flags) +frontend_flags = {} +addflag = lambda *args: frontend_flags.update(boolean_flag(*args)) +addflag('autoedit-syntax', 'TerminalInteractiveShell.autoedit_syntax', + 'Turn on auto editing of files with syntax errors.', + 'Turn off auto editing of files with syntax errors.' +) +addflag('simple-prompt', 'TerminalInteractiveShell.simple_prompt', + "Force simple minimal prompt using `raw_input`", + "Use a rich interactive prompt with prompt_toolkit", +) + +addflag('banner', 'TerminalIPythonApp.display_banner', + "Display a banner upon starting IPython.", + "Don't display a banner upon starting IPython." +) +addflag('confirm-exit', 'TerminalInteractiveShell.confirm_exit', + """Set to confirm when you try to exit IPython with an EOF (Control-D + in Unix, Control-Z/Enter in Windows). By typing 'exit' or 'quit', + you can force a direct exit without any confirmation.""", + "Don't prompt the user when exiting." +) +addflag('term-title', 'TerminalInteractiveShell.term_title', + "Enable auto setting the terminal title.", + "Disable auto setting the terminal title." +) +classic_config = Config() +classic_config.InteractiveShell.cache_size = 0 +classic_config.PlainTextFormatter.pprint = False +classic_config.TerminalInteractiveShell.prompts_class='IPython.terminal.prompts.ClassicPrompts' +classic_config.InteractiveShell.separate_in = '' +classic_config.InteractiveShell.separate_out = '' +classic_config.InteractiveShell.separate_out2 = '' +classic_config.InteractiveShell.colors = 'NoColor' +classic_config.InteractiveShell.xmode = 'Plain' + +frontend_flags['classic']=( + classic_config, + "Gives IPython a similar feel to the classic Python prompt." +) +# # log doesn't make so much sense this way anymore +# paa('--log','-l', +# action='store_true', dest='InteractiveShell.logstart', +# help="Start logging to the default log file (./ipython_log.py).") +# +# # quick is harder to implement +frontend_flags['quick']=( + {'TerminalIPythonApp' : {'quick' : True}}, + "Enable quick startup with no config files." +) + +frontend_flags['i'] = ( + {'TerminalIPythonApp' : {'force_interact' : True}}, + """If running code from the command line, become interactive afterwards. + It is often useful to follow this with `--` to treat remaining flags as + script arguments. 
+ """ +) +flags.update(frontend_flags) + +aliases = dict(base_aliases) +aliases.update(shell_aliases) + +#----------------------------------------------------------------------------- +# Main classes and functions +#----------------------------------------------------------------------------- + + +class LocateIPythonApp(BaseIPythonApplication): + description = """print the path to the IPython dir""" + subcommands = dict( + profile=('IPython.core.profileapp.ProfileLocate', + "print the path to an IPython profile directory", + ), + ) + def start(self): + if self.subapp is not None: + return self.subapp.start() + else: + print(self.ipython_dir) + + +class TerminalIPythonApp(BaseIPythonApplication, InteractiveShellApp): + name = u'ipython' + description = usage.cl_usage + crash_handler_class = IPAppCrashHandler + examples = _examples + + flags = flags + aliases = aliases + classes = List() + + interactive_shell_class = Type( + klass=object, # use default_value otherwise which only allow subclasses. + default_value=TerminalInteractiveShell, + help="Class to use to instantiate the TerminalInteractiveShell object. Useful for custom Frontends" + ).tag(config=True) + + @default('classes') + def _classes_default(self): + """This has to be in a method, for TerminalIPythonApp to be available.""" + return [ + InteractiveShellApp, # ShellApp comes before TerminalApp, because + self.__class__, # it will also affect subclasses (e.g. QtConsole) + TerminalInteractiveShell, + HistoryManager, + ProfileDir, + PlainTextFormatter, + IPCompleter, + ScriptMagics, + LoggingMagics, + StoreMagics, + ] + + deprecated_subcommands = dict( + qtconsole=('qtconsole.qtconsoleapp.JupyterQtConsoleApp', + """DEPRECATED, Will be removed in IPython 6.0 : Launch the Jupyter Qt Console.""" + ), + notebook=('notebook.notebookapp.NotebookApp', + """DEPRECATED, Will be removed in IPython 6.0 : Launch the Jupyter HTML Notebook Server.""" + ), + console=('jupyter_console.app.ZMQTerminalIPythonApp', + """DEPRECATED, Will be removed in IPython 6.0 : Launch the Jupyter terminal-based Console.""" + ), + nbconvert=('nbconvert.nbconvertapp.NbConvertApp', + "DEPRECATED, Will be removed in IPython 6.0 : Convert notebooks to/from other formats." + ), + trust=('nbformat.sign.TrustNotebookApp', + "DEPRECATED, Will be removed in IPython 6.0 : Sign notebooks to trust their potentially unsafe contents at load." + ), + kernelspec=('jupyter_client.kernelspecapp.KernelSpecApp', + "DEPRECATED, Will be removed in IPython 6.0 : Manage Jupyter kernel specifications." + ), + ) + subcommands = dict( + profile = ("IPython.core.profileapp.ProfileApp", + "Create and manage IPython profiles." + ), + kernel = ("ipykernel.kernelapp.IPKernelApp", + "Start a kernel without an attached frontend." + ), + locate=('IPython.terminal.ipapp.LocateIPythonApp', + LocateIPythonApp.description + ), + history=('IPython.core.historyapp.HistoryApp', + "Manage the IPython history database." + ), + ) + deprecated_subcommands['install-nbextension'] = ( + "notebook.nbextensions.InstallNBExtensionApp", + "DEPRECATED, Will be removed in IPython 6.0 : Install Jupyter notebook extension files" + ) + subcommands.update(deprecated_subcommands) + + # *do* autocreate requested profile, but don't create the config file. 
+ auto_create=Bool(True) + # configurables + quick = Bool(False, + help="""Start IPython quickly by skipping the loading of config files.""" + ).tag(config=True) + @observe('quick') + def _quick_changed(self, change): + if change['new']: + self.load_config_file = lambda *a, **kw: None + + display_banner = Bool(True, + help="Whether to display a banner upon starting IPython." + ).tag(config=True) + + # if there is code of files to run from the cmd line, don't interact + # unless the --i flag (App.force_interact) is true. + force_interact = Bool(False, + help="""If a command or file is given via the command-line, + e.g. 'ipython foo.py', start an interactive shell after executing the + file or command.""" + ).tag(config=True) + @observe('force_interact') + def _force_interact_changed(self, change): + if change['new']: + self.interact = True + + @observe('file_to_run', 'code_to_run', 'module_to_run') + def _file_to_run_changed(self, change): + new = change['new'] + if new: + self.something_to_run = True + if new and not self.force_interact: + self.interact = False + + # internal, not-configurable + something_to_run=Bool(False) + + def parse_command_line(self, argv=None): + """override to allow old '-pylab' flag with deprecation warning""" + + argv = sys.argv[1:] if argv is None else argv + + if '-pylab' in argv: + # deprecated `-pylab` given, + # warn and transform into current syntax + argv = argv[:] # copy, don't clobber + idx = argv.index('-pylab') + warnings.warn("`-pylab` flag has been deprecated.\n" + " Use `--matplotlib <backend>` and import pylab manually.") + argv[idx] = '--pylab' + + return super(TerminalIPythonApp, self).parse_command_line(argv) + + @catch_config_error + def initialize(self, argv=None): + """Do actions after construct, but before starting the app.""" + super(TerminalIPythonApp, self).initialize(argv) + if self.subapp is not None: + # don't bother initializing further, starting subapp + return + # print self.extra_args + if self.extra_args and not self.something_to_run: + self.file_to_run = self.extra_args[0] + self.init_path() + # create the shell + self.init_shell() + # and draw the banner + self.init_banner() + # Now a variety of things that happen after the banner is printed. + self.init_gui_pylab() + self.init_extensions() + self.init_code() + + def init_shell(self): + """initialize the InteractiveShell instance""" + # Create an InteractiveShell instance. + # shell.display_banner should always be False for the terminal + # based app, because we call shell.show_banner() by hand below + # so the banner shows *before* all extension loading stuff. + self.shell = self.interactive_shell_class.instance(parent=self, + profile_dir=self.profile_dir, + ipython_dir=self.ipython_dir, user_ns=self.user_ns) + self.shell.configurables.append(self) + + def init_banner(self): + """optionally display the banner""" + if self.display_banner and self.interact: + self.shell.show_banner() + # Make sure there is a space below the banner. 
+ if self.log_level <= logging.INFO: print() + + def _pylab_changed(self, name, old, new): + """Replace --pylab='inline' with --pylab='auto'""" + if new == 'inline': + warnings.warn("'inline' not available as pylab backend, " + "using 'auto' instead.") + self.pylab = 'auto' + + def start(self): + if self.subapp is not None: + return self.subapp.start() + # perform any prexec steps: + if self.interact: + self.log.debug("Starting IPython's mainloop...") + self.shell.mainloop() + else: + self.log.debug("IPython not interactive...") + if not self.shell.last_execution_succeeded: + sys.exit(1) + +def load_default_config(ipython_dir=None): + """Load the default config file from the default ipython_dir. + + This is useful for embedded shells. + """ + if ipython_dir is None: + ipython_dir = get_ipython_dir() + + profile_dir = os.path.join(ipython_dir, 'profile_default') + app = TerminalIPythonApp() + app.config_file_paths.append(profile_dir) + app.load_config_file() + return app.config + +launch_new_instance = TerminalIPythonApp.launch_instance + + +if __name__ == '__main__': + launch_new_instance() diff --git a/contrib/python/ipython/py3/IPython/terminal/magics.py b/contrib/python/ipython/py3/IPython/terminal/magics.py index d6f9c1d701d..42231c3f803 100644 --- a/contrib/python/ipython/py3/IPython/terminal/magics.py +++ b/contrib/python/ipython/py3/IPython/terminal/magics.py @@ -1,213 +1,213 @@ -"""Extra magics for terminal use.""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - - -from logging import error -import os -import sys - -from IPython.core.error import TryNext, UsageError -from IPython.core.magic import Magics, magics_class, line_magic -from IPython.lib.clipboard import ClipboardEmpty -from IPython.utils.text import SList, strip_email_quotes -from IPython.utils import py3compat - -def get_pasted_lines(sentinel, l_input=py3compat.input, quiet=False): - """ Yield pasted lines until the user enters the given sentinel value. - """ - if not quiet: - print("Pasting code; enter '%s' alone on the line to stop or use Ctrl-D." \ - % sentinel) - prompt = ":" - else: - prompt = "" - while True: - try: - l = l_input(prompt) - if l == sentinel: - return - else: - yield l - except EOFError: - print('<EOF>') - return - - -@magics_class -class TerminalMagics(Magics): - def __init__(self, shell): - super(TerminalMagics, self).__init__(shell) - - def store_or_execute(self, block, name): - """ Execute a block, or store it in a variable, per the user's request. - """ - if name: - # If storing it for further editing - self.shell.user_ns[name] = SList(block.splitlines()) - print("Block assigned to '%s'" % name) - else: - b = self.preclean_input(block) - self.shell.user_ns['pasted_block'] = b - self.shell.using_paste_magics = True - try: - self.shell.run_cell(b) - finally: - self.shell.using_paste_magics = False - - def preclean_input(self, block): - lines = block.splitlines() - while lines and not lines[0].strip(): - lines = lines[1:] - return strip_email_quotes('\n'.join(lines)) - - def rerun_pasted(self, name='pasted_block'): - """ Rerun a previously pasted command. - """ - b = self.shell.user_ns.get(name) - - # Sanity checks - if b is None: - raise UsageError('No previous pasted block available') - if not isinstance(b, str): - raise UsageError( - "Variable 'pasted_block' is not a string, can't execute") - - print("Re-executing '%s...' 
(%d chars)"% (b.split('\n',1)[0], len(b))) - self.shell.run_cell(b) - - @line_magic - def autoindent(self, parameter_s = ''): - """Toggle autoindent on/off (deprecated)""" - self.shell.set_autoindent() - print("Automatic indentation is:",['OFF','ON'][self.shell.autoindent]) - - @line_magic - def cpaste(self, parameter_s=''): - """Paste & execute a pre-formatted code block from clipboard. - - You must terminate the block with '--' (two minus-signs) or Ctrl-D - alone on the line. You can also provide your own sentinel with '%paste - -s %%' ('%%' is the new sentinel for this operation). - - The block is dedented prior to execution to enable execution of method - definitions. '>' and '+' characters at the beginning of a line are - ignored, to allow pasting directly from e-mails, diff files and - doctests (the '...' continuation prompt is also stripped). The - executed block is also assigned to variable named 'pasted_block' for - later editing with '%edit pasted_block'. - - You can also pass a variable name as an argument, e.g. '%cpaste foo'. - This assigns the pasted block to variable 'foo' as string, without - dedenting or executing it (preceding >>> and + is still stripped) - - '%cpaste -r' re-executes the block previously entered by cpaste. - '%cpaste -q' suppresses any additional output messages. - - Do not be alarmed by garbled output on Windows (it's a readline bug). - Just press enter and type -- (and press enter again) and the block - will be what was just pasted. - - Shell escapes are not supported (yet). - - See also - -------- - paste: automatically pull code from clipboard. - - Examples - -------- - :: - - In [8]: %cpaste - Pasting code; enter '--' alone on the line to stop. - :>>> a = ["world!", "Hello"] - :>>> print(" ".join(sorted(a))) - :-- - Hello world! - - :: - In [8]: %cpaste - Pasting code; enter '--' alone on the line to stop. - :>>> %alias_magic t timeit - :>>> %t -n1 pass - :-- - Created `%t` as an alias for `%timeit`. - Created `%%t` as an alias for `%%timeit`. - 354 ns ± 224 ns per loop (mean ± std. dev. of 7 runs, 1 loop each) - """ - opts, name = self.parse_options(parameter_s, 'rqs:', mode='string') - if 'r' in opts: - self.rerun_pasted() - return - - quiet = ('q' in opts) - - sentinel = opts.get('s', u'--') - block = '\n'.join(get_pasted_lines(sentinel, quiet=quiet)) - self.store_or_execute(block, name) - - @line_magic - def paste(self, parameter_s=''): - """Paste & execute a pre-formatted code block from clipboard. - - The text is pulled directly from the clipboard without user - intervention and printed back on the screen before execution (unless - the -q flag is given to force quiet mode). - - The block is dedented prior to execution to enable execution of method - definitions. '>' and '+' characters at the beginning of a line are - ignored, to allow pasting directly from e-mails, diff files and - doctests (the '...' continuation prompt is also stripped). The - executed block is also assigned to variable named 'pasted_block' for - later editing with '%edit pasted_block'. - - You can also pass a variable name as an argument, e.g. '%paste foo'. - This assigns the pasted block to variable 'foo' as string, without - executing it (preceding >>> and + is still stripped). - - Options: - - -r: re-executes the block previously entered by cpaste. - - -q: quiet mode: do not echo the pasted text back to the terminal. - - IPython statements (magics, shell escapes) are not supported (yet). 
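# [Editor's aside, not part of the diff] %paste pulls its text from the
# `clipboard_get` hook, so a frontend without a usable clipboard can plug in
# its own source. A hedged sketch (the scratch-file path is made up, and
# raising TryNext hands control back to the remaining clipboard hooks):
from IPython.core.error import TryNext

def clipboard_from_file(shell):
    """Treat a scratch file as the clipboard; defer if it does not exist."""
    try:
        with open("/tmp/ipython_clipboard.txt") as f:
            return f.read()
    except FileNotFoundError:
        raise TryNext

# Registration would look roughly like:
#     get_ipython().set_hook('clipboard_get', clipboard_from_file)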
- - See also - -------- - cpaste: manually paste code into terminal until you mark its end. - """ - opts, name = self.parse_options(parameter_s, 'rq', mode='string') - if 'r' in opts: - self.rerun_pasted() - return - try: - block = self.shell.hooks.clipboard_get() - except TryNext as clipboard_exc: - message = getattr(clipboard_exc, 'args') - if message: - error(message[0]) - else: - error('Could not get text from the clipboard.') - return - except ClipboardEmpty: - raise UsageError("The clipboard appears to be empty") - - # By default, echo back to terminal unless quiet mode is requested - if 'q' not in opts: - write = self.shell.write - write(self.shell.pycolorize(block)) - if not block.endswith('\n'): - write('\n') - write("## -- End pasted text --\n") - - self.store_or_execute(block, name) - - # Class-level: add a '%cls' magic only on Windows - if sys.platform == 'win32': - @line_magic - def cls(self, s): - """Clear screen. - """ - os.system("cls") +"""Extra magics for terminal use.""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + + +from logging import error +import os +import sys + +from IPython.core.error import TryNext, UsageError +from IPython.core.magic import Magics, magics_class, line_magic +from IPython.lib.clipboard import ClipboardEmpty +from IPython.utils.text import SList, strip_email_quotes +from IPython.utils import py3compat + +def get_pasted_lines(sentinel, l_input=py3compat.input, quiet=False): + """ Yield pasted lines until the user enters the given sentinel value. + """ + if not quiet: + print("Pasting code; enter '%s' alone on the line to stop or use Ctrl-D." \ + % sentinel) + prompt = ":" + else: + prompt = "" + while True: + try: + l = l_input(prompt) + if l == sentinel: + return + else: + yield l + except EOFError: + print('<EOF>') + return + + +@magics_class +class TerminalMagics(Magics): + def __init__(self, shell): + super(TerminalMagics, self).__init__(shell) + + def store_or_execute(self, block, name): + """ Execute a block, or store it in a variable, per the user's request. + """ + if name: + # If storing it for further editing + self.shell.user_ns[name] = SList(block.splitlines()) + print("Block assigned to '%s'" % name) + else: + b = self.preclean_input(block) + self.shell.user_ns['pasted_block'] = b + self.shell.using_paste_magics = True + try: + self.shell.run_cell(b) + finally: + self.shell.using_paste_magics = False + + def preclean_input(self, block): + lines = block.splitlines() + while lines and not lines[0].strip(): + lines = lines[1:] + return strip_email_quotes('\n'.join(lines)) + + def rerun_pasted(self, name='pasted_block'): + """ Rerun a previously pasted command. + """ + b = self.shell.user_ns.get(name) + + # Sanity checks + if b is None: + raise UsageError('No previous pasted block available') + if not isinstance(b, str): + raise UsageError( + "Variable 'pasted_block' is not a string, can't execute") + + print("Re-executing '%s...' (%d chars)"% (b.split('\n',1)[0], len(b))) + self.shell.run_cell(b) + + @line_magic + def autoindent(self, parameter_s = ''): + """Toggle autoindent on/off (deprecated)""" + self.shell.set_autoindent() + print("Automatic indentation is:",['OFF','ON'][self.shell.autoindent]) + + @line_magic + def cpaste(self, parameter_s=''): + """Paste & execute a pre-formatted code block from clipboard. + + You must terminate the block with '--' (two minus-signs) or Ctrl-D + alone on the line. 
You can also provide your own sentinel with '%paste + -s %%' ('%%' is the new sentinel for this operation). + + The block is dedented prior to execution to enable execution of method + definitions. '>' and '+' characters at the beginning of a line are + ignored, to allow pasting directly from e-mails, diff files and + doctests (the '...' continuation prompt is also stripped). The + executed block is also assigned to variable named 'pasted_block' for + later editing with '%edit pasted_block'. + + You can also pass a variable name as an argument, e.g. '%cpaste foo'. + This assigns the pasted block to variable 'foo' as string, without + dedenting or executing it (preceding >>> and + is still stripped) + + '%cpaste -r' re-executes the block previously entered by cpaste. + '%cpaste -q' suppresses any additional output messages. + + Do not be alarmed by garbled output on Windows (it's a readline bug). + Just press enter and type -- (and press enter again) and the block + will be what was just pasted. + + Shell escapes are not supported (yet). + + See also + -------- + paste: automatically pull code from clipboard. + + Examples + -------- + :: + + In [8]: %cpaste + Pasting code; enter '--' alone on the line to stop. + :>>> a = ["world!", "Hello"] + :>>> print(" ".join(sorted(a))) + :-- + Hello world! + + :: + In [8]: %cpaste + Pasting code; enter '--' alone on the line to stop. + :>>> %alias_magic t timeit + :>>> %t -n1 pass + :-- + Created `%t` as an alias for `%timeit`. + Created `%%t` as an alias for `%%timeit`. + 354 ns ± 224 ns per loop (mean ± std. dev. of 7 runs, 1 loop each) + """ + opts, name = self.parse_options(parameter_s, 'rqs:', mode='string') + if 'r' in opts: + self.rerun_pasted() + return + + quiet = ('q' in opts) + + sentinel = opts.get('s', u'--') + block = '\n'.join(get_pasted_lines(sentinel, quiet=quiet)) + self.store_or_execute(block, name) + + @line_magic + def paste(self, parameter_s=''): + """Paste & execute a pre-formatted code block from clipboard. + + The text is pulled directly from the clipboard without user + intervention and printed back on the screen before execution (unless + the -q flag is given to force quiet mode). + + The block is dedented prior to execution to enable execution of method + definitions. '>' and '+' characters at the beginning of a line are + ignored, to allow pasting directly from e-mails, diff files and + doctests (the '...' continuation prompt is also stripped). The + executed block is also assigned to variable named 'pasted_block' for + later editing with '%edit pasted_block'. + + You can also pass a variable name as an argument, e.g. '%paste foo'. + This assigns the pasted block to variable 'foo' as string, without + executing it (preceding >>> and + is still stripped). + + Options: + + -r: re-executes the block previously entered by cpaste. + + -q: quiet mode: do not echo the pasted text back to the terminal. + + IPython statements (magics, shell escapes) are not supported (yet). + + See also + -------- + cpaste: manually paste code into terminal until you mark its end. 
+ """ + opts, name = self.parse_options(parameter_s, 'rq', mode='string') + if 'r' in opts: + self.rerun_pasted() + return + try: + block = self.shell.hooks.clipboard_get() + except TryNext as clipboard_exc: + message = getattr(clipboard_exc, 'args') + if message: + error(message[0]) + else: + error('Could not get text from the clipboard.') + return + except ClipboardEmpty: + raise UsageError("The clipboard appears to be empty") + + # By default, echo back to terminal unless quiet mode is requested + if 'q' not in opts: + write = self.shell.write + write(self.shell.pycolorize(block)) + if not block.endswith('\n'): + write('\n') + write("## -- End pasted text --\n") + + self.store_or_execute(block, name) + + # Class-level: add a '%cls' magic only on Windows + if sys.platform == 'win32': + @line_magic + def cls(self, s): + """Clear screen. + """ + os.system("cls") diff --git a/contrib/python/ipython/py3/IPython/terminal/prompts.py b/contrib/python/ipython/py3/IPython/terminal/prompts.py index 55e90ccdd34..3f5c07b980e 100644 --- a/contrib/python/ipython/py3/IPython/terminal/prompts.py +++ b/contrib/python/ipython/py3/IPython/terminal/prompts.py @@ -1,108 +1,108 @@ -"""Terminal input and output prompts.""" - -from pygments.token import Token -import sys - -from IPython.core.displayhook import DisplayHook - -from prompt_toolkit.formatted_text import fragment_list_width, PygmentsTokens -from prompt_toolkit.shortcuts import print_formatted_text -from prompt_toolkit.enums import EditingMode - - -class Prompts(object): - def __init__(self, shell): - self.shell = shell - - def vi_mode(self): - if (getattr(self.shell.pt_app, 'editing_mode', None) == EditingMode.VI - and self.shell.prompt_includes_vi_mode): - mode = str(self.shell.pt_app.app.vi_state.input_mode) - if mode.startswith('InputMode.'): - mode = mode[10:13].lower() - elif mode.startswith('vi-'): - mode = mode[3:6] - return '['+mode+'] ' - return '' - - - def in_prompt_tokens(self): - return [ - (Token.Prompt, self.vi_mode() ), - (Token.Prompt, 'In ['), - (Token.PromptNum, str(self.shell.execution_count)), - (Token.Prompt, ']: '), - ] - - def _width(self): - return fragment_list_width(self.in_prompt_tokens()) - - def continuation_prompt_tokens(self, width=None): - if width is None: - width = self._width() - return [ - (Token.Prompt, (' ' * (width - 5)) + '...: '), - ] - - def rewrite_prompt_tokens(self): - width = self._width() - return [ - (Token.Prompt, ('-' * (width - 2)) + '> '), - ] - - def out_prompt_tokens(self): - return [ - (Token.OutPrompt, 'Out['), - (Token.OutPromptNum, str(self.shell.execution_count)), - (Token.OutPrompt, ']: '), - ] - -class ClassicPrompts(Prompts): - def in_prompt_tokens(self): - return [ - (Token.Prompt, '>>> '), - ] - - def continuation_prompt_tokens(self, width=None): - return [ - (Token.Prompt, '... ') - ] - - def rewrite_prompt_tokens(self): - return [] - - def out_prompt_tokens(self): - return [] - -class RichPromptDisplayHook(DisplayHook): - """Subclass of base display hook using coloured prompt""" - def write_output_prompt(self): - sys.stdout.write(self.shell.separate_out) - # If we're not displaying a prompt, it effectively ends with a newline, - # because the output will be left-aligned. 
- self.prompt_end_newline = True - - if self.do_full_cache: - tokens = self.shell.prompts.out_prompt_tokens() - prompt_txt = ''.join(s for t, s in tokens) - if prompt_txt and not prompt_txt.endswith('\n'): - # Ask for a newline before multiline output - self.prompt_end_newline = False - - if self.shell.pt_app: - print_formatted_text(PygmentsTokens(tokens), - style=self.shell.pt_app.app.style, end='', - ) - else: - sys.stdout.write(prompt_txt) - - def write_format_data(self, format_dict, md_dict=None) -> None: - if self.shell.mime_renderers: - - for mime, handler in self.shell.mime_renderers.items(): - if mime in format_dict: - handler(format_dict[mime], None) - return - - super().write_format_data(format_dict, md_dict) - +"""Terminal input and output prompts.""" + +from pygments.token import Token +import sys + +from IPython.core.displayhook import DisplayHook + +from prompt_toolkit.formatted_text import fragment_list_width, PygmentsTokens +from prompt_toolkit.shortcuts import print_formatted_text +from prompt_toolkit.enums import EditingMode + + +class Prompts(object): + def __init__(self, shell): + self.shell = shell + + def vi_mode(self): + if (getattr(self.shell.pt_app, 'editing_mode', None) == EditingMode.VI + and self.shell.prompt_includes_vi_mode): + mode = str(self.shell.pt_app.app.vi_state.input_mode) + if mode.startswith('InputMode.'): + mode = mode[10:13].lower() + elif mode.startswith('vi-'): + mode = mode[3:6] + return '['+mode+'] ' + return '' + + + def in_prompt_tokens(self): + return [ + (Token.Prompt, self.vi_mode() ), + (Token.Prompt, 'In ['), + (Token.PromptNum, str(self.shell.execution_count)), + (Token.Prompt, ']: '), + ] + + def _width(self): + return fragment_list_width(self.in_prompt_tokens()) + + def continuation_prompt_tokens(self, width=None): + if width is None: + width = self._width() + return [ + (Token.Prompt, (' ' * (width - 5)) + '...: '), + ] + + def rewrite_prompt_tokens(self): + width = self._width() + return [ + (Token.Prompt, ('-' * (width - 2)) + '> '), + ] + + def out_prompt_tokens(self): + return [ + (Token.OutPrompt, 'Out['), + (Token.OutPromptNum, str(self.shell.execution_count)), + (Token.OutPrompt, ']: '), + ] + +class ClassicPrompts(Prompts): + def in_prompt_tokens(self): + return [ + (Token.Prompt, '>>> '), + ] + + def continuation_prompt_tokens(self, width=None): + return [ + (Token.Prompt, '... ') + ] + + def rewrite_prompt_tokens(self): + return [] + + def out_prompt_tokens(self): + return [] + +class RichPromptDisplayHook(DisplayHook): + """Subclass of base display hook using coloured prompt""" + def write_output_prompt(self): + sys.stdout.write(self.shell.separate_out) + # If we're not displaying a prompt, it effectively ends with a newline, + # because the output will be left-aligned. 
+ self.prompt_end_newline = True + + if self.do_full_cache: + tokens = self.shell.prompts.out_prompt_tokens() + prompt_txt = ''.join(s for t, s in tokens) + if prompt_txt and not prompt_txt.endswith('\n'): + # Ask for a newline before multiline output + self.prompt_end_newline = False + + if self.shell.pt_app: + print_formatted_text(PygmentsTokens(tokens), + style=self.shell.pt_app.app.style, end='', + ) + else: + sys.stdout.write(prompt_txt) + + def write_format_data(self, format_dict, md_dict=None) -> None: + if self.shell.mime_renderers: + + for mime, handler in self.shell.mime_renderers.items(): + if mime in format_dict: + handler(format_dict[mime], None) + return + + super().write_format_data(format_dict, md_dict) + diff --git a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/__init__.py b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/__init__.py index e41e26a5f58..69ff0ba1e6a 100644 --- a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/__init__.py +++ b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/__init__.py @@ -1,62 +1,62 @@ -import importlib -import os - -aliases = { - 'qt4': 'qt', - 'gtk2': 'gtk', -} - -backends = [ - "qt", - "qt4", - "qt5", - "qt6", - "gtk", - "gtk2", - "gtk3", - "gtk4", - "tk", - "wx", - "pyglet", - "glut", - "osx", - "asyncio", -] - -registered = {} - -def register(name, inputhook): - """Register the function *inputhook* as an event loop integration.""" - registered[name] = inputhook - - -class UnknownBackend(KeyError): - def __init__(self, name): - self.name = name - - def __str__(self): - return ("No event loop integration for {!r}. " - "Supported event loops are: {}").format(self.name, - ', '.join(backends + sorted(registered))) - - -def get_inputhook_name_and_func(gui): - if gui in registered: - return gui, registered[gui] - - if gui not in backends: - raise UnknownBackend(gui) - - if gui in aliases: - return get_inputhook_name_and_func(aliases[gui]) - - gui_mod = gui - if gui == "qt5": - os.environ["QT_API"] = "pyqt5" - gui_mod = "qt" - elif gui == "qt6": - os.environ["QT_API"] = "pyqt6" - gui_mod = "qt" - - mod = importlib.import_module('IPython.terminal.pt_inputhooks.'+gui_mod) - return gui, mod.inputhook +import importlib +import os + +aliases = { + 'qt4': 'qt', + 'gtk2': 'gtk', +} + +backends = [ + "qt", + "qt4", + "qt5", + "qt6", + "gtk", + "gtk2", + "gtk3", + "gtk4", + "tk", + "wx", + "pyglet", + "glut", + "osx", + "asyncio", +] + +registered = {} + +def register(name, inputhook): + """Register the function *inputhook* as an event loop integration.""" + registered[name] = inputhook + + +class UnknownBackend(KeyError): + def __init__(self, name): + self.name = name + + def __str__(self): + return ("No event loop integration for {!r}. 
" + "Supported event loops are: {}").format(self.name, + ', '.join(backends + sorted(registered))) + + +def get_inputhook_name_and_func(gui): + if gui in registered: + return gui, registered[gui] + + if gui not in backends: + raise UnknownBackend(gui) + + if gui in aliases: + return get_inputhook_name_and_func(aliases[gui]) + + gui_mod = gui + if gui == "qt5": + os.environ["QT_API"] = "pyqt5" + gui_mod = "qt" + elif gui == "qt6": + os.environ["QT_API"] = "pyqt6" + gui_mod = "qt" + + mod = importlib.import_module('IPython.terminal.pt_inputhooks.'+gui_mod) + return gui, mod.inputhook diff --git a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/asyncio.py b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/asyncio.py index 44d480527d6..95cf194f866 100644 --- a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/asyncio.py +++ b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/asyncio.py @@ -1,64 +1,64 @@ -""" -Inputhook for running the original asyncio event loop while we're waiting for -input. - -By default, in IPython, we run the prompt with a different asyncio event loop, -because otherwise we risk that people are freezing the prompt by scheduling bad -coroutines. E.g., a coroutine that does a while/true and never yield back -control to the loop. We can't cancel that. - -However, sometimes we want the asyncio loop to keep running while waiting for -a prompt. - -The following example will print the numbers from 1 to 10 above the prompt, -while we are waiting for input. (This works also because we use -prompt_toolkit`s `patch_stdout`):: - - In [1]: import asyncio - - In [2]: %gui asyncio - - In [3]: async def f(): - ...: for i in range(10): - ...: await asyncio.sleep(1) - ...: print(i) - - - In [4]: asyncio.ensure_future(f()) - -""" -import asyncio -from prompt_toolkit import __version__ as ptk_version - -PTK3 = ptk_version.startswith('3.') - - -# Keep reference to the original asyncio loop, because getting the event loop -# within the input hook would return the other loop. -loop = asyncio.get_event_loop() - - -def inputhook(context): - """ - Inputhook for asyncio event loop integration. - """ - # For prompt_toolkit 3.0, this input hook literally doesn't do anything. - # The event loop integration here is implemented in `interactiveshell.py` - # by running the prompt itself in the current asyncio loop. The main reason - # for this is that nesting asyncio event loops is unreliable. - if PTK3: - return - - # For prompt_toolkit 2.0, we can run the current asyncio event loop, - # because prompt_toolkit 2.0 uses a different event loop internally. - - def stop(): - loop.stop() - - fileno = context.fileno() - loop.add_reader(fileno, stop) - try: - loop.run_forever() - finally: - loop.remove_reader(fileno) - +""" +Inputhook for running the original asyncio event loop while we're waiting for +input. + +By default, in IPython, we run the prompt with a different asyncio event loop, +because otherwise we risk that people are freezing the prompt by scheduling bad +coroutines. E.g., a coroutine that does a while/true and never yield back +control to the loop. We can't cancel that. + +However, sometimes we want the asyncio loop to keep running while waiting for +a prompt. + +The following example will print the numbers from 1 to 10 above the prompt, +while we are waiting for input. 
(This works also because we use +prompt_toolkit`s `patch_stdout`):: + + In [1]: import asyncio + + In [2]: %gui asyncio + + In [3]: async def f(): + ...: for i in range(10): + ...: await asyncio.sleep(1) + ...: print(i) + + + In [4]: asyncio.ensure_future(f()) + +""" +import asyncio +from prompt_toolkit import __version__ as ptk_version + +PTK3 = ptk_version.startswith('3.') + + +# Keep reference to the original asyncio loop, because getting the event loop +# within the input hook would return the other loop. +loop = asyncio.get_event_loop() + + +def inputhook(context): + """ + Inputhook for asyncio event loop integration. + """ + # For prompt_toolkit 3.0, this input hook literally doesn't do anything. + # The event loop integration here is implemented in `interactiveshell.py` + # by running the prompt itself in the current asyncio loop. The main reason + # for this is that nesting asyncio event loops is unreliable. + if PTK3: + return + + # For prompt_toolkit 2.0, we can run the current asyncio event loop, + # because prompt_toolkit 2.0 uses a different event loop internally. + + def stop(): + loop.stop() + + fileno = context.fileno() + loop.add_reader(fileno, stop) + try: + loop.run_forever() + finally: + loop.remove_reader(fileno) + diff --git a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/glut.py b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/glut.py index 6715c77fd80..f6d54a55b4d 100644 --- a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/glut.py +++ b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/glut.py @@ -1,140 +1,140 @@ -"""GLUT Input hook for interactive use with prompt_toolkit -""" - - -# GLUT is quite an old library and it is difficult to ensure proper -# integration within IPython since original GLUT does not allow to handle -# events one by one. Instead, it requires for the mainloop to be entered -# and never returned (there is not even a function to exit he -# mainloop). Fortunately, there are alternatives such as freeglut -# (available for linux and windows) and the OSX implementation gives -# access to a glutCheckLoop() function that blocks itself until a new -# event is received. This means we have to setup the idle callback to -# ensure we got at least one event that will unblock the function. -# -# Furthermore, it is not possible to install these handlers without a window -# being first created. We choose to make this window invisible. This means that -# display mode options are set at this level and user won't be able to change -# them later without modifying the code. This should probably be made available -# via IPython options system. 
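Editor's note: the pt_inputhooks backends in this diff (asyncio, glut, gtk, qt, tk, wx, ...) all implement the same small contract that pt_inputhooks/__init__.py resolves by name: an `inputhook(context)` callable that keeps the toolkit's event loop turning until `context.input_is_ready()` reports pending terminal input. As a hedged illustration (not part of the committed code), a third-party integration could be plugged into the same registry through the `register()` helper shown above; the backend name `myloop` and the `pump_pending_events()` function below are invented purely for the example.

    # Hypothetical sketch only: "myloop" and pump_pending_events() are made up.
    import time
    from IPython.terminal.pt_inputhooks import register

    def pump_pending_events():
        # Stand-in for a real toolkit's "process all pending events" call.
        pass

    def my_inputhook(context):
        # Same contract as the hooks in this commit: keep the GUI alive until
        # prompt_toolkit reports that terminal input is ready to be read.
        while not context.input_is_ready():
            pump_pending_events()
            time.sleep(0.01)  # brief sleep so an idle prompt does not spin the CPU

    register("myloop", my_inputhook)
    # get_inputhook_name_and_func("myloop") now resolves to my_inputhook,
    # so `%gui myloop` would select this integration in a terminal session.
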
- -import sys -import time -import signal -import OpenGL.GLUT as glut -import OpenGL.platform as platform -from timeit import default_timer as clock - -# Frame per second : 60 -# Should probably be an IPython option -glut_fps = 60 - -# Display mode : double buffeed + rgba + depth -# Should probably be an IPython option -glut_display_mode = (glut.GLUT_DOUBLE | - glut.GLUT_RGBA | - glut.GLUT_DEPTH) - -glutMainLoopEvent = None -if sys.platform == 'darwin': - try: - glutCheckLoop = platform.createBaseFunction( - 'glutCheckLoop', dll=platform.GLUT, resultType=None, - argTypes=[], - doc='glutCheckLoop( ) -> None', - argNames=(), - ) - except AttributeError: - raise RuntimeError( - '''Your glut implementation does not allow interactive sessions''' - '''Consider installing freeglut.''') - glutMainLoopEvent = glutCheckLoop -elif glut.HAVE_FREEGLUT: - glutMainLoopEvent = glut.glutMainLoopEvent -else: - raise RuntimeError( - '''Your glut implementation does not allow interactive sessions. ''' - '''Consider installing freeglut.''') - - -def glut_display(): - # Dummy display function - pass - -def glut_idle(): - # Dummy idle function - pass - -def glut_close(): - # Close function only hides the current window - glut.glutHideWindow() - glutMainLoopEvent() - -def glut_int_handler(signum, frame): - # Catch sigint and print the defaultipyt message - signal.signal(signal.SIGINT, signal.default_int_handler) - print('\nKeyboardInterrupt') - # Need to reprint the prompt at this stage - -# Initialisation code -glut.glutInit( sys.argv ) -glut.glutInitDisplayMode( glut_display_mode ) -# This is specific to freeglut -if bool(glut.glutSetOption): - glut.glutSetOption( glut.GLUT_ACTION_ON_WINDOW_CLOSE, - glut.GLUT_ACTION_GLUTMAINLOOP_RETURNS ) -glut.glutCreateWindow( b'ipython' ) -glut.glutReshapeWindow( 1, 1 ) -glut.glutHideWindow( ) -glut.glutWMCloseFunc( glut_close ) -glut.glutDisplayFunc( glut_display ) -glut.glutIdleFunc( glut_idle ) - - -def inputhook(context): - """Run the pyglet event loop by processing pending events only. - - This keeps processing pending events until stdin is ready. After - processing all pending events, a call to time.sleep is inserted. This is - needed, otherwise, CPU usage is at 100%. This sleep time should be tuned - though for best performance. - """ - # We need to protect against a user pressing Control-C when IPython is - # idle and this is running. We trap KeyboardInterrupt and pass. - - signal.signal(signal.SIGINT, glut_int_handler) - - try: - t = clock() - - # Make sure the default window is set after a window has been closed - if glut.glutGetWindow() == 0: - glut.glutSetWindow( 1 ) - glutMainLoopEvent() - return 0 - - while not context.input_is_ready(): - glutMainLoopEvent() - # We need to sleep at this point to keep the idle CPU load - # low. However, if sleep to long, GUI response is poor. As - # a compromise, we watch how often GUI events are being processed - # and switch between a short and long sleep time. Here are some - # stats useful in helping to tune this. 
- # time CPU load - # 0.001 13% - # 0.005 3% - # 0.01 1.5% - # 0.05 0.5% - used_time = clock() - t - if used_time > 10.0: - # print 'Sleep for 1 s' # dbg - time.sleep(1.0) - elif used_time > 0.1: - # Few GUI events coming in, so we can sleep longer - # print 'Sleep for 0.05 s' # dbg - time.sleep(0.05) - else: - # Many GUI events coming in, so sleep only very little - time.sleep(0.001) - except KeyboardInterrupt: - pass +"""GLUT Input hook for interactive use with prompt_toolkit +""" + + +# GLUT is quite an old library and it is difficult to ensure proper +# integration within IPython since original GLUT does not allow to handle +# events one by one. Instead, it requires for the mainloop to be entered +# and never returned (there is not even a function to exit he +# mainloop). Fortunately, there are alternatives such as freeglut +# (available for linux and windows) and the OSX implementation gives +# access to a glutCheckLoop() function that blocks itself until a new +# event is received. This means we have to setup the idle callback to +# ensure we got at least one event that will unblock the function. +# +# Furthermore, it is not possible to install these handlers without a window +# being first created. We choose to make this window invisible. This means that +# display mode options are set at this level and user won't be able to change +# them later without modifying the code. This should probably be made available +# via IPython options system. + +import sys +import time +import signal +import OpenGL.GLUT as glut +import OpenGL.platform as platform +from timeit import default_timer as clock + +# Frame per second : 60 +# Should probably be an IPython option +glut_fps = 60 + +# Display mode : double buffeed + rgba + depth +# Should probably be an IPython option +glut_display_mode = (glut.GLUT_DOUBLE | + glut.GLUT_RGBA | + glut.GLUT_DEPTH) + +glutMainLoopEvent = None +if sys.platform == 'darwin': + try: + glutCheckLoop = platform.createBaseFunction( + 'glutCheckLoop', dll=platform.GLUT, resultType=None, + argTypes=[], + doc='glutCheckLoop( ) -> None', + argNames=(), + ) + except AttributeError: + raise RuntimeError( + '''Your glut implementation does not allow interactive sessions''' + '''Consider installing freeglut.''') + glutMainLoopEvent = glutCheckLoop +elif glut.HAVE_FREEGLUT: + glutMainLoopEvent = glut.glutMainLoopEvent +else: + raise RuntimeError( + '''Your glut implementation does not allow interactive sessions. ''' + '''Consider installing freeglut.''') + + +def glut_display(): + # Dummy display function + pass + +def glut_idle(): + # Dummy idle function + pass + +def glut_close(): + # Close function only hides the current window + glut.glutHideWindow() + glutMainLoopEvent() + +def glut_int_handler(signum, frame): + # Catch sigint and print the defaultipyt message + signal.signal(signal.SIGINT, signal.default_int_handler) + print('\nKeyboardInterrupt') + # Need to reprint the prompt at this stage + +# Initialisation code +glut.glutInit( sys.argv ) +glut.glutInitDisplayMode( glut_display_mode ) +# This is specific to freeglut +if bool(glut.glutSetOption): + glut.glutSetOption( glut.GLUT_ACTION_ON_WINDOW_CLOSE, + glut.GLUT_ACTION_GLUTMAINLOOP_RETURNS ) +glut.glutCreateWindow( b'ipython' ) +glut.glutReshapeWindow( 1, 1 ) +glut.glutHideWindow( ) +glut.glutWMCloseFunc( glut_close ) +glut.glutDisplayFunc( glut_display ) +glut.glutIdleFunc( glut_idle ) + + +def inputhook(context): + """Run the pyglet event loop by processing pending events only. 
+ + This keeps processing pending events until stdin is ready. After + processing all pending events, a call to time.sleep is inserted. This is + needed, otherwise, CPU usage is at 100%. This sleep time should be tuned + though for best performance. + """ + # We need to protect against a user pressing Control-C when IPython is + # idle and this is running. We trap KeyboardInterrupt and pass. + + signal.signal(signal.SIGINT, glut_int_handler) + + try: + t = clock() + + # Make sure the default window is set after a window has been closed + if glut.glutGetWindow() == 0: + glut.glutSetWindow( 1 ) + glutMainLoopEvent() + return 0 + + while not context.input_is_ready(): + glutMainLoopEvent() + # We need to sleep at this point to keep the idle CPU load + # low. However, if sleep to long, GUI response is poor. As + # a compromise, we watch how often GUI events are being processed + # and switch between a short and long sleep time. Here are some + # stats useful in helping to tune this. + # time CPU load + # 0.001 13% + # 0.005 3% + # 0.01 1.5% + # 0.05 0.5% + used_time = clock() - t + if used_time > 10.0: + # print 'Sleep for 1 s' # dbg + time.sleep(1.0) + elif used_time > 0.1: + # Few GUI events coming in, so we can sleep longer + # print 'Sleep for 0.05 s' # dbg + time.sleep(0.05) + else: + # Many GUI events coming in, so sleep only very little + time.sleep(0.001) + except KeyboardInterrupt: + pass diff --git a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/gtk.py b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/gtk.py index 13ce1f7d1e4..6e246ba8377 100644 --- a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/gtk.py +++ b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/gtk.py @@ -1,58 +1,58 @@ -# Code borrowed from python-prompt-toolkit examples -# https://github.com/jonathanslenders/python-prompt-toolkit/blob/77cdcfbc7f4b4c34a9d2f9a34d422d7152f16209/examples/inputhook.py - -# Copyright (c) 2014, Jonathan Slenders -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, this -# list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above copyright notice, this -# list of conditions and the following disclaimer in the documentation and/or -# other materials provided with the distribution. -# -# * Neither the name of the {organization} nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR -# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -""" -PyGTK input hook for prompt_toolkit. 
- -Listens on the pipe prompt_toolkit sets up for a notification that it should -return control to the terminal event loop. -""" - -import gtk, gobject - -# Enable threading in GTK. (Otherwise, GTK will keep the GIL.) -gtk.gdk.threads_init() - -def inputhook(context): - """ - When the eventloop of prompt-toolkit is idle, call this inputhook. - - This will run the GTK main loop until the file descriptor - `context.fileno()` becomes ready. - - :param context: An `InputHookContext` instance. - """ - def _main_quit(*a, **kw): - gtk.main_quit() - return False - - gobject.io_add_watch(context.fileno(), gobject.IO_IN, _main_quit) - gtk.main() +# Code borrowed from python-prompt-toolkit examples +# https://github.com/jonathanslenders/python-prompt-toolkit/blob/77cdcfbc7f4b4c34a9d2f9a34d422d7152f16209/examples/inputhook.py + +# Copyright (c) 2014, Jonathan Slenders +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, this +# list of conditions and the following disclaimer in the documentation and/or +# other materials provided with the distribution. +# +# * Neither the name of the {organization} nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +""" +PyGTK input hook for prompt_toolkit. + +Listens on the pipe prompt_toolkit sets up for a notification that it should +return control to the terminal event loop. +""" + +import gtk, gobject + +# Enable threading in GTK. (Otherwise, GTK will keep the GIL.) +gtk.gdk.threads_init() + +def inputhook(context): + """ + When the eventloop of prompt-toolkit is idle, call this inputhook. + + This will run the GTK main loop until the file descriptor + `context.fileno()` becomes ready. + + :param context: An `InputHookContext` instance. 
+ """ + def _main_quit(*a, **kw): + gtk.main_quit() + return False + + gobject.io_add_watch(context.fileno(), gobject.IO_IN, _main_quit) + gtk.main() diff --git a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/gtk3.py b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/gtk3.py index 2c6243c0725..ae82b4edaaa 100644 --- a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/gtk3.py +++ b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/gtk3.py @@ -1,12 +1,12 @@ -"""prompt_toolkit input hook for GTK 3 -""" - -from gi.repository import Gtk, GLib - -def _main_quit(*args, **kwargs): - Gtk.main_quit() - return False - -def inputhook(context): - GLib.io_add_watch(context.fileno(), GLib.PRIORITY_DEFAULT, GLib.IO_IN, _main_quit) - Gtk.main() +"""prompt_toolkit input hook for GTK 3 +""" + +from gi.repository import Gtk, GLib + +def _main_quit(*args, **kwargs): + Gtk.main_quit() + return False + +def inputhook(context): + GLib.io_add_watch(context.fileno(), GLib.PRIORITY_DEFAULT, GLib.IO_IN, _main_quit) + Gtk.main() diff --git a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/gtk4.py b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/gtk4.py index 38fce01f8c3..009fbf12126 100644 --- a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/gtk4.py +++ b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/gtk4.py @@ -1,27 +1,27 @@ -""" -prompt_toolkit input hook for GTK 4. -""" - -from gi.repository import GLib - - -class _InputHook: - def __init__(self, context): - self._quit = False - GLib.io_add_watch( - context.fileno(), GLib.PRIORITY_DEFAULT, GLib.IO_IN, self.quit - ) - - def quit(self, *args, **kwargs): - self._quit = True - return False - - def run(self): - context = GLib.MainContext.default() - while not self._quit: - context.iteration(True) - - -def inputhook(context): - hook = _InputHook(context) - hook.run() +""" +prompt_toolkit input hook for GTK 4. +""" + +from gi.repository import GLib + + +class _InputHook: + def __init__(self, context): + self._quit = False + GLib.io_add_watch( + context.fileno(), GLib.PRIORITY_DEFAULT, GLib.IO_IN, self.quit + ) + + def quit(self, *args, **kwargs): + self._quit = True + return False + + def run(self): + context = GLib.MainContext.default() + while not self._quit: + context.iteration(True) + + +def inputhook(context): + hook = _InputHook(context) + hook.run() diff --git a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/osx.py b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/osx.py index 4b48d0d1e34..80440196fb4 100644 --- a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/osx.py +++ b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/osx.py @@ -1,157 +1,157 @@ -"""Inputhook for OS X - -Calls NSApp / CoreFoundation APIs via ctypes. 
-""" - -# obj-c boilerplate from appnope, used under BSD 2-clause - -import ctypes -import ctypes.util -from threading import Event - -objc = ctypes.cdll.LoadLibrary(ctypes.util.find_library('objc')) - -void_p = ctypes.c_void_p - -objc.objc_getClass.restype = void_p -objc.sel_registerName.restype = void_p -objc.objc_msgSend.restype = void_p -objc.objc_msgSend.argtypes = [void_p, void_p] - -msg = objc.objc_msgSend - -def _utf8(s): - """ensure utf8 bytes""" - if not isinstance(s, bytes): - s = s.encode('utf8') - return s - -def n(name): - """create a selector name (for ObjC methods)""" - return objc.sel_registerName(_utf8(name)) - -def C(classname): - """get an ObjC Class by name""" - return objc.objc_getClass(_utf8(classname)) - -# end obj-c boilerplate from appnope - -# CoreFoundation C-API calls we will use: -CoreFoundation = ctypes.cdll.LoadLibrary(ctypes.util.find_library('CoreFoundation')) - -CFFileDescriptorCreate = CoreFoundation.CFFileDescriptorCreate -CFFileDescriptorCreate.restype = void_p -CFFileDescriptorCreate.argtypes = [void_p, ctypes.c_int, ctypes.c_bool, void_p, void_p] - -CFFileDescriptorGetNativeDescriptor = CoreFoundation.CFFileDescriptorGetNativeDescriptor -CFFileDescriptorGetNativeDescriptor.restype = ctypes.c_int -CFFileDescriptorGetNativeDescriptor.argtypes = [void_p] - -CFFileDescriptorEnableCallBacks = CoreFoundation.CFFileDescriptorEnableCallBacks -CFFileDescriptorEnableCallBacks.restype = None -CFFileDescriptorEnableCallBacks.argtypes = [void_p, ctypes.c_ulong] - -CFFileDescriptorCreateRunLoopSource = CoreFoundation.CFFileDescriptorCreateRunLoopSource -CFFileDescriptorCreateRunLoopSource.restype = void_p -CFFileDescriptorCreateRunLoopSource.argtypes = [void_p, void_p, void_p] - -CFRunLoopGetCurrent = CoreFoundation.CFRunLoopGetCurrent -CFRunLoopGetCurrent.restype = void_p - -CFRunLoopAddSource = CoreFoundation.CFRunLoopAddSource -CFRunLoopAddSource.restype = None -CFRunLoopAddSource.argtypes = [void_p, void_p, void_p] - -CFRelease = CoreFoundation.CFRelease -CFRelease.restype = None -CFRelease.argtypes = [void_p] - -CFFileDescriptorInvalidate = CoreFoundation.CFFileDescriptorInvalidate -CFFileDescriptorInvalidate.restype = None -CFFileDescriptorInvalidate.argtypes = [void_p] - -# From CFFileDescriptor.h -kCFFileDescriptorReadCallBack = 1 -kCFRunLoopCommonModes = void_p.in_dll(CoreFoundation, 'kCFRunLoopCommonModes') - - -def _NSApp(): - """Return the global NSApplication instance (NSApp)""" - objc.objc_msgSend.argtypes = [void_p, void_p] - return msg(C('NSApplication'), n('sharedApplication')) - - -def _wake(NSApp): - """Wake the Application""" - objc.objc_msgSend.argtypes = [ - void_p, - void_p, - void_p, - void_p, - void_p, - void_p, - void_p, - void_p, - void_p, - void_p, - void_p, - ] - event = msg( - C("NSEvent"), - n( - "otherEventWithType:location:modifierFlags:" - "timestamp:windowNumber:context:subtype:data1:data2:" - ), - 15, # Type - 0, # location - 0, # flags - 0, # timestamp - 0, # window - None, # context - 0, # subtype - 0, # data1 - 0, # data2 - ) - objc.objc_msgSend.argtypes = [void_p, void_p, void_p, void_p] - msg(NSApp, n('postEvent:atStart:'), void_p(event), True) - - -_triggered = Event() - -def _input_callback(fdref, flags, info): - """Callback to fire when there's input to be read""" - _triggered.set() - CFFileDescriptorInvalidate(fdref) - CFRelease(fdref) - NSApp = _NSApp() - objc.objc_msgSend.argtypes = [void_p, void_p, void_p] - msg(NSApp, n('stop:'), NSApp) - _wake(NSApp) - -_c_callback_func_type = ctypes.CFUNCTYPE(None, void_p, void_p, 
void_p) -_c_input_callback = _c_callback_func_type(_input_callback) - - -def _stop_on_read(fd): - """Register callback to stop eventloop when there's data on fd""" - _triggered.clear() - fdref = CFFileDescriptorCreate(None, fd, False, _c_input_callback, None) - CFFileDescriptorEnableCallBacks(fdref, kCFFileDescriptorReadCallBack) - source = CFFileDescriptorCreateRunLoopSource(None, fdref, 0) - loop = CFRunLoopGetCurrent() - CFRunLoopAddSource(loop, source, kCFRunLoopCommonModes) - CFRelease(source) - - -def inputhook(context): - """Inputhook for Cocoa (NSApp)""" - NSApp = _NSApp() - _stop_on_read(context.fileno()) - objc.objc_msgSend.argtypes = [void_p, void_p] - msg(NSApp, n('run')) - if not _triggered.is_set(): - # app closed without firing callback, - # probably due to last window being closed. - # Run the loop manually in this case, - # since there may be events still to process (#9734) - CoreFoundation.CFRunLoopRun() +"""Inputhook for OS X + +Calls NSApp / CoreFoundation APIs via ctypes. +""" + +# obj-c boilerplate from appnope, used under BSD 2-clause + +import ctypes +import ctypes.util +from threading import Event + +objc = ctypes.cdll.LoadLibrary(ctypes.util.find_library('objc')) + +void_p = ctypes.c_void_p + +objc.objc_getClass.restype = void_p +objc.sel_registerName.restype = void_p +objc.objc_msgSend.restype = void_p +objc.objc_msgSend.argtypes = [void_p, void_p] + +msg = objc.objc_msgSend + +def _utf8(s): + """ensure utf8 bytes""" + if not isinstance(s, bytes): + s = s.encode('utf8') + return s + +def n(name): + """create a selector name (for ObjC methods)""" + return objc.sel_registerName(_utf8(name)) + +def C(classname): + """get an ObjC Class by name""" + return objc.objc_getClass(_utf8(classname)) + +# end obj-c boilerplate from appnope + +# CoreFoundation C-API calls we will use: +CoreFoundation = ctypes.cdll.LoadLibrary(ctypes.util.find_library('CoreFoundation')) + +CFFileDescriptorCreate = CoreFoundation.CFFileDescriptorCreate +CFFileDescriptorCreate.restype = void_p +CFFileDescriptorCreate.argtypes = [void_p, ctypes.c_int, ctypes.c_bool, void_p, void_p] + +CFFileDescriptorGetNativeDescriptor = CoreFoundation.CFFileDescriptorGetNativeDescriptor +CFFileDescriptorGetNativeDescriptor.restype = ctypes.c_int +CFFileDescriptorGetNativeDescriptor.argtypes = [void_p] + +CFFileDescriptorEnableCallBacks = CoreFoundation.CFFileDescriptorEnableCallBacks +CFFileDescriptorEnableCallBacks.restype = None +CFFileDescriptorEnableCallBacks.argtypes = [void_p, ctypes.c_ulong] + +CFFileDescriptorCreateRunLoopSource = CoreFoundation.CFFileDescriptorCreateRunLoopSource +CFFileDescriptorCreateRunLoopSource.restype = void_p +CFFileDescriptorCreateRunLoopSource.argtypes = [void_p, void_p, void_p] + +CFRunLoopGetCurrent = CoreFoundation.CFRunLoopGetCurrent +CFRunLoopGetCurrent.restype = void_p + +CFRunLoopAddSource = CoreFoundation.CFRunLoopAddSource +CFRunLoopAddSource.restype = None +CFRunLoopAddSource.argtypes = [void_p, void_p, void_p] + +CFRelease = CoreFoundation.CFRelease +CFRelease.restype = None +CFRelease.argtypes = [void_p] + +CFFileDescriptorInvalidate = CoreFoundation.CFFileDescriptorInvalidate +CFFileDescriptorInvalidate.restype = None +CFFileDescriptorInvalidate.argtypes = [void_p] + +# From CFFileDescriptor.h +kCFFileDescriptorReadCallBack = 1 +kCFRunLoopCommonModes = void_p.in_dll(CoreFoundation, 'kCFRunLoopCommonModes') + + +def _NSApp(): + """Return the global NSApplication instance (NSApp)""" + objc.objc_msgSend.argtypes = [void_p, void_p] + return msg(C('NSApplication'), 
n('sharedApplication')) + + +def _wake(NSApp): + """Wake the Application""" + objc.objc_msgSend.argtypes = [ + void_p, + void_p, + void_p, + void_p, + void_p, + void_p, + void_p, + void_p, + void_p, + void_p, + void_p, + ] + event = msg( + C("NSEvent"), + n( + "otherEventWithType:location:modifierFlags:" + "timestamp:windowNumber:context:subtype:data1:data2:" + ), + 15, # Type + 0, # location + 0, # flags + 0, # timestamp + 0, # window + None, # context + 0, # subtype + 0, # data1 + 0, # data2 + ) + objc.objc_msgSend.argtypes = [void_p, void_p, void_p, void_p] + msg(NSApp, n('postEvent:atStart:'), void_p(event), True) + + +_triggered = Event() + +def _input_callback(fdref, flags, info): + """Callback to fire when there's input to be read""" + _triggered.set() + CFFileDescriptorInvalidate(fdref) + CFRelease(fdref) + NSApp = _NSApp() + objc.objc_msgSend.argtypes = [void_p, void_p, void_p] + msg(NSApp, n('stop:'), NSApp) + _wake(NSApp) + +_c_callback_func_type = ctypes.CFUNCTYPE(None, void_p, void_p, void_p) +_c_input_callback = _c_callback_func_type(_input_callback) + + +def _stop_on_read(fd): + """Register callback to stop eventloop when there's data on fd""" + _triggered.clear() + fdref = CFFileDescriptorCreate(None, fd, False, _c_input_callback, None) + CFFileDescriptorEnableCallBacks(fdref, kCFFileDescriptorReadCallBack) + source = CFFileDescriptorCreateRunLoopSource(None, fdref, 0) + loop = CFRunLoopGetCurrent() + CFRunLoopAddSource(loop, source, kCFRunLoopCommonModes) + CFRelease(source) + + +def inputhook(context): + """Inputhook for Cocoa (NSApp)""" + NSApp = _NSApp() + _stop_on_read(context.fileno()) + objc.objc_msgSend.argtypes = [void_p, void_p] + msg(NSApp, n('run')) + if not _triggered.is_set(): + # app closed without firing callback, + # probably due to last window being closed. + # Run the loop manually in this case, + # since there may be events still to process (#9734) + CoreFoundation.CFRunLoopRun() diff --git a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/pyglet.py b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/pyglet.py index f34ed85845d..49ec86d2237 100644 --- a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/pyglet.py +++ b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/pyglet.py @@ -1,66 +1,66 @@ -"""Enable pyglet to be used interactively with prompt_toolkit -""" - -import sys -import time -from timeit import default_timer as clock -import pyglet - -# On linux only, window.flip() has a bug that causes an AttributeError on -# window close. For details, see: -# http://groups.google.com/group/pyglet-users/browse_thread/thread/47c1aab9aa4a3d23/c22f9e819826799e?#c22f9e819826799e - -if sys.platform.startswith('linux'): - def flip(window): - try: - window.flip() - except AttributeError: - pass -else: - def flip(window): - window.flip() - - -def inputhook(context): - """Run the pyglet event loop by processing pending events only. - - This keeps processing pending events until stdin is ready. After - processing all pending events, a call to time.sleep is inserted. This is - needed, otherwise, CPU usage is at 100%. This sleep time should be tuned - though for best performance. - """ - # We need to protect against a user pressing Control-C when IPython is - # idle and this is running. We trap KeyboardInterrupt and pass. 
- try: - t = clock() - while not context.input_is_ready(): - pyglet.clock.tick() - for window in pyglet.app.windows: - window.switch_to() - window.dispatch_events() - window.dispatch_event('on_draw') - flip(window) - - # We need to sleep at this point to keep the idle CPU load - # low. However, if sleep to long, GUI response is poor. As - # a compromise, we watch how often GUI events are being processed - # and switch between a short and long sleep time. Here are some - # stats useful in helping to tune this. - # time CPU load - # 0.001 13% - # 0.005 3% - # 0.01 1.5% - # 0.05 0.5% - used_time = clock() - t - if used_time > 10.0: - # print 'Sleep for 1 s' # dbg - time.sleep(1.0) - elif used_time > 0.1: - # Few GUI events coming in, so we can sleep longer - # print 'Sleep for 0.05 s' # dbg - time.sleep(0.05) - else: - # Many GUI events coming in, so sleep only very little - time.sleep(0.001) - except KeyboardInterrupt: - pass +"""Enable pyglet to be used interactively with prompt_toolkit +""" + +import sys +import time +from timeit import default_timer as clock +import pyglet + +# On linux only, window.flip() has a bug that causes an AttributeError on +# window close. For details, see: +# http://groups.google.com/group/pyglet-users/browse_thread/thread/47c1aab9aa4a3d23/c22f9e819826799e?#c22f9e819826799e + +if sys.platform.startswith('linux'): + def flip(window): + try: + window.flip() + except AttributeError: + pass +else: + def flip(window): + window.flip() + + +def inputhook(context): + """Run the pyglet event loop by processing pending events only. + + This keeps processing pending events until stdin is ready. After + processing all pending events, a call to time.sleep is inserted. This is + needed, otherwise, CPU usage is at 100%. This sleep time should be tuned + though for best performance. + """ + # We need to protect against a user pressing Control-C when IPython is + # idle and this is running. We trap KeyboardInterrupt and pass. + try: + t = clock() + while not context.input_is_ready(): + pyglet.clock.tick() + for window in pyglet.app.windows: + window.switch_to() + window.dispatch_events() + window.dispatch_event('on_draw') + flip(window) + + # We need to sleep at this point to keep the idle CPU load + # low. However, if sleep to long, GUI response is poor. As + # a compromise, we watch how often GUI events are being processed + # and switch between a short and long sleep time. Here are some + # stats useful in helping to tune this. + # time CPU load + # 0.001 13% + # 0.005 3% + # 0.01 1.5% + # 0.05 0.5% + used_time = clock() - t + if used_time > 10.0: + # print 'Sleep for 1 s' # dbg + time.sleep(1.0) + elif used_time > 0.1: + # Few GUI events coming in, so we can sleep longer + # print 'Sleep for 0.05 s' # dbg + time.sleep(0.05) + else: + # Many GUI events coming in, so sleep only very little + time.sleep(0.001) + except KeyboardInterrupt: + pass diff --git a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/qt.py b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/qt.py index dde9238236a..b999f5aa173 100644 --- a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/qt.py +++ b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/qt.py @@ -1,83 +1,83 @@ -import sys -import os -from IPython.external.qt_for_kernel import QtCore, QtGui, enum_helper -from IPython import get_ipython - -# If we create a QApplication, keep a reference to it so that it doesn't get -# garbage collected. 
-_appref = None -_already_warned = False - - -def _exec(obj): - # exec on PyQt6, exec_ elsewhere. - obj.exec() if hasattr(obj, "exec") else obj.exec_() - - -def _reclaim_excepthook(): - shell = get_ipython() - if shell is not None: - sys.excepthook = shell.excepthook - - -def inputhook(context): - global _appref - app = QtCore.QCoreApplication.instance() - if not app: - if sys.platform == 'linux': - if not os.environ.get('DISPLAY') \ - and not os.environ.get('WAYLAND_DISPLAY'): - import warnings - global _already_warned - if not _already_warned: - _already_warned = True - warnings.warn( - 'The DISPLAY or WAYLAND_DISPLAY environment variable is ' - 'not set or empty and Qt5 requires this environment ' - 'variable. Deactivate Qt5 code.' - ) - return - try: - QtCore.QApplication.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling) - except AttributeError: # Only for Qt>=5.6, <6. - pass - try: - QtCore.QApplication.setHighDpiScaleFactorRoundingPolicy( - QtCore.Qt.HighDpiScaleFactorRoundingPolicy.PassThrough - ) - except AttributeError: # Only for Qt>=5.14. - pass - _appref = app = QtGui.QApplication([" "]) - - # "reclaim" IPython sys.excepthook after event loop starts - # without this, it defaults back to BaseIPythonApplication.excepthook - # and exceptions in the Qt event loop are rendered without traceback - # formatting and look like "bug in IPython". - QtCore.QTimer.singleShot(0, _reclaim_excepthook) - - event_loop = QtCore.QEventLoop(app) - - if sys.platform == 'win32': - # The QSocketNotifier method doesn't appear to work on Windows. - # Use polling instead. - timer = QtCore.QTimer() - timer.timeout.connect(event_loop.quit) - while not context.input_is_ready(): - timer.start(50) # 50 ms - _exec(event_loop) - timer.stop() - else: - # On POSIX platforms, we can use a file descriptor to quit the event - # loop when there is input ready to read. - notifier = QtCore.QSocketNotifier( - context.fileno(), enum_helper("QtCore.QSocketNotifier.Type").Read - ) - try: - # connect the callback we care about before we turn it on - notifier.activated.connect(lambda: event_loop.exit()) - notifier.setEnabled(True) - # only start the event loop we are not already flipped - if not context.input_is_ready(): - _exec(event_loop) - finally: - notifier.setEnabled(False) +import sys +import os +from IPython.external.qt_for_kernel import QtCore, QtGui, enum_helper +from IPython import get_ipython + +# If we create a QApplication, keep a reference to it so that it doesn't get +# garbage collected. +_appref = None +_already_warned = False + + +def _exec(obj): + # exec on PyQt6, exec_ elsewhere. + obj.exec() if hasattr(obj, "exec") else obj.exec_() + + +def _reclaim_excepthook(): + shell = get_ipython() + if shell is not None: + sys.excepthook = shell.excepthook + + +def inputhook(context): + global _appref + app = QtCore.QCoreApplication.instance() + if not app: + if sys.platform == 'linux': + if not os.environ.get('DISPLAY') \ + and not os.environ.get('WAYLAND_DISPLAY'): + import warnings + global _already_warned + if not _already_warned: + _already_warned = True + warnings.warn( + 'The DISPLAY or WAYLAND_DISPLAY environment variable is ' + 'not set or empty and Qt5 requires this environment ' + 'variable. Deactivate Qt5 code.' + ) + return + try: + QtCore.QApplication.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling) + except AttributeError: # Only for Qt>=5.6, <6. 
+ pass + try: + QtCore.QApplication.setHighDpiScaleFactorRoundingPolicy( + QtCore.Qt.HighDpiScaleFactorRoundingPolicy.PassThrough + ) + except AttributeError: # Only for Qt>=5.14. + pass + _appref = app = QtGui.QApplication([" "]) + + # "reclaim" IPython sys.excepthook after event loop starts + # without this, it defaults back to BaseIPythonApplication.excepthook + # and exceptions in the Qt event loop are rendered without traceback + # formatting and look like "bug in IPython". + QtCore.QTimer.singleShot(0, _reclaim_excepthook) + + event_loop = QtCore.QEventLoop(app) + + if sys.platform == 'win32': + # The QSocketNotifier method doesn't appear to work on Windows. + # Use polling instead. + timer = QtCore.QTimer() + timer.timeout.connect(event_loop.quit) + while not context.input_is_ready(): + timer.start(50) # 50 ms + _exec(event_loop) + timer.stop() + else: + # On POSIX platforms, we can use a file descriptor to quit the event + # loop when there is input ready to read. + notifier = QtCore.QSocketNotifier( + context.fileno(), enum_helper("QtCore.QSocketNotifier.Type").Read + ) + try: + # connect the callback we care about before we turn it on + notifier.activated.connect(lambda: event_loop.exit()) + notifier.setEnabled(True) + # only start the event loop we are not already flipped + if not context.input_is_ready(): + _exec(event_loop) + finally: + notifier.setEnabled(False) diff --git a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/tk.py b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/tk.py index 6afc276747d..2715505f1f0 100644 --- a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/tk.py +++ b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/tk.py @@ -1,90 +1,90 @@ -# Code borrowed from ptpython -# https://github.com/jonathanslenders/ptpython/blob/86b71a89626114b18898a0af463978bdb32eeb70/ptpython/eventloop.py - -# Copyright (c) 2015, Jonathan Slenders -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, this -# list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above copyright notice, this -# list of conditions and the following disclaimer in the documentation and/or -# other materials provided with the distribution. -# -# * Neither the name of the {organization} nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR -# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
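Editor's note: for the Qt hook above, the usual entry point is the `%gui` line magic (or the `--gui` command-line flag); the name is resolved through `get_inputhook_name_and_func()`, which for `qt6` also exports `QT_API` before importing the shared `qt` hook module. A hedged usage sketch, assuming PyQt6 is installed:

    In [1]: %gui qt6      # selects the shared "qt" hook; QT_API is set first

    In [2]: import os

    In [3]: os.environ["QT_API"]
    Out[3]: 'pyqt6'
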
- -""" -Wrapper around the eventloop that gives some time to the Tkinter GUI to process -events when it's loaded and while we are waiting for input at the REPL. This -way we don't block the UI of for instance ``turtle`` and other Tk libraries. - -(Normally Tkinter registers it's callbacks in ``PyOS_InputHook`` to integrate -in readline. ``prompt-toolkit`` doesn't understand that input hook, but this -will fix it for Tk.) -""" -import time - -import _tkinter -import tkinter - -def inputhook(inputhook_context): - """ - Inputhook for Tk. - Run the Tk eventloop until prompt-toolkit needs to process the next input. - """ - # Get the current TK application. - root = tkinter._default_root - - def wait_using_filehandler(): - """ - Run the TK eventloop until the file handler that we got from the - inputhook becomes readable. - """ - # Add a handler that sets the stop flag when `prompt-toolkit` has input - # to process. - stop = [False] - def done(*a): - stop[0] = True - - root.createfilehandler(inputhook_context.fileno(), _tkinter.READABLE, done) - - # Run the TK event loop as long as we don't receive input. - while root.dooneevent(_tkinter.ALL_EVENTS): - if stop[0]: - break - - root.deletefilehandler(inputhook_context.fileno()) - - def wait_using_polling(): - """ - Windows TK doesn't support 'createfilehandler'. - So, run the TK eventloop and poll until input is ready. - """ - while not inputhook_context.input_is_ready(): - while root.dooneevent(_tkinter.ALL_EVENTS | _tkinter.DONT_WAIT): - pass - # Sleep to make the CPU idle, but not too long, so that the UI - # stays responsive. - time.sleep(.01) - - if root is not None: - if hasattr(root, 'createfilehandler'): - wait_using_filehandler() - else: - wait_using_polling() +# Code borrowed from ptpython +# https://github.com/jonathanslenders/ptpython/blob/86b71a89626114b18898a0af463978bdb32eeb70/ptpython/eventloop.py + +# Copyright (c) 2015, Jonathan Slenders +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, this +# list of conditions and the following disclaimer in the documentation and/or +# other materials provided with the distribution. +# +# * Neither the name of the {organization} nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +""" +Wrapper around the eventloop that gives some time to the Tkinter GUI to process +events when it's loaded and while we are waiting for input at the REPL. This +way we don't block the UI of for instance ``turtle`` and other Tk libraries. + +(Normally Tkinter registers it's callbacks in ``PyOS_InputHook`` to integrate +in readline. ``prompt-toolkit`` doesn't understand that input hook, but this +will fix it for Tk.) +""" +import time + +import _tkinter +import tkinter + +def inputhook(inputhook_context): + """ + Inputhook for Tk. + Run the Tk eventloop until prompt-toolkit needs to process the next input. + """ + # Get the current TK application. + root = tkinter._default_root + + def wait_using_filehandler(): + """ + Run the TK eventloop until the file handler that we got from the + inputhook becomes readable. + """ + # Add a handler that sets the stop flag when `prompt-toolkit` has input + # to process. + stop = [False] + def done(*a): + stop[0] = True + + root.createfilehandler(inputhook_context.fileno(), _tkinter.READABLE, done) + + # Run the TK event loop as long as we don't receive input. + while root.dooneevent(_tkinter.ALL_EVENTS): + if stop[0]: + break + + root.deletefilehandler(inputhook_context.fileno()) + + def wait_using_polling(): + """ + Windows TK doesn't support 'createfilehandler'. + So, run the TK eventloop and poll until input is ready. + """ + while not inputhook_context.input_is_ready(): + while root.dooneevent(_tkinter.ALL_EVENTS | _tkinter.DONT_WAIT): + pass + # Sleep to make the CPU idle, but not too long, so that the UI + # stays responsive. + time.sleep(.01) + + if root is not None: + if hasattr(root, 'createfilehandler'): + wait_using_filehandler() + else: + wait_using_polling() diff --git a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/wx.py b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/wx.py index 3b7b0e0a33b..a0f4442c771 100644 --- a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/wx.py +++ b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/wx.py @@ -1,219 +1,219 @@ -"""Enable wxPython to be used interactively in prompt_toolkit -""" - -import sys -import signal -import time -from timeit import default_timer as clock -import wx - - -def ignore_keyboardinterrupts(func): - """Decorator which causes KeyboardInterrupt exceptions to be ignored during - execution of the decorated function. - - This is used by the inputhook functions to handle the event where the user - presses CTRL+C while IPython is idle, and the inputhook loop is running. In - this case, we want to ignore interrupts. - """ - def wrapper(*args, **kwargs): - try: - func(*args, **kwargs) - except KeyboardInterrupt: - pass - return wrapper - - -@ignore_keyboardinterrupts -def inputhook_wx1(context): - """Run the wx event loop by processing pending events only. - - This approach seems to work, but its performance is not great as it - relies on having PyOS_InputHook called regularly. - """ - app = wx.GetApp() - if app is not None: - assert wx.Thread_IsMain() - - # Make a temporary event loop and process system events until - # there are no more waiting, then allow idle events (which - # will also deal with pending or posted wx events.) 
- evtloop = wx.EventLoop() - ea = wx.EventLoopActivator(evtloop) - while evtloop.Pending(): - evtloop.Dispatch() - app.ProcessIdle() - del ea - return 0 - - -class EventLoopTimer(wx.Timer): - - def __init__(self, func): - self.func = func - wx.Timer.__init__(self) - - def Notify(self): - self.func() - - -class EventLoopRunner(object): - - def Run(self, time, input_is_ready): - self.input_is_ready = input_is_ready - self.evtloop = wx.EventLoop() - self.timer = EventLoopTimer(self.check_stdin) - self.timer.Start(time) - self.evtloop.Run() - - def check_stdin(self): - if self.input_is_ready(): - self.timer.Stop() - self.evtloop.Exit() - - -@ignore_keyboardinterrupts -def inputhook_wx2(context): - """Run the wx event loop, polling for stdin. - - This version runs the wx eventloop for an undetermined amount of time, - during which it periodically checks to see if anything is ready on - stdin. If anything is ready on stdin, the event loop exits. - - The argument to elr.Run controls how often the event loop looks at stdin. - This determines the responsiveness at the keyboard. A setting of 1000 - enables a user to type at most 1 char per second. I have found that a - setting of 10 gives good keyboard response. We can shorten it further, - but eventually performance would suffer from calling select/kbhit too - often. - """ - app = wx.GetApp() - if app is not None: - assert wx.Thread_IsMain() - elr = EventLoopRunner() - # As this time is made shorter, keyboard response improves, but idle - # CPU load goes up. 10 ms seems like a good compromise. - elr.Run(time=10, # CHANGE time here to control polling interval - input_is_ready=context.input_is_ready) - return 0 - - -@ignore_keyboardinterrupts -def inputhook_wx3(context): - """Run the wx event loop by processing pending events only. - - This is like inputhook_wx1, but it keeps processing pending events - until stdin is ready. After processing all pending events, a call to - time.sleep is inserted. This is needed, otherwise, CPU usage is at 100%. - This sleep time should be tuned though for best performance. - """ - app = wx.GetApp() - if app is not None: - assert wx.Thread_IsMain() - - # The import of wx on Linux sets the handler for signal.SIGINT - # to 0. This is a bug in wx or gtk. We fix by just setting it - # back to the Python default. - if not callable(signal.getsignal(signal.SIGINT)): - signal.signal(signal.SIGINT, signal.default_int_handler) - - evtloop = wx.EventLoop() - ea = wx.EventLoopActivator(evtloop) - t = clock() - while not context.input_is_ready(): - while evtloop.Pending(): - t = clock() - evtloop.Dispatch() - app.ProcessIdle() - # We need to sleep at this point to keep the idle CPU load - # low. However, if sleep to long, GUI response is poor. As - # a compromise, we watch how often GUI events are being processed - # and switch between a short and long sleep time. Here are some - # stats useful in helping to tune this. - # time CPU load - # 0.001 13% - # 0.005 3% - # 0.01 1.5% - # 0.05 0.5% - used_time = clock() - t - if used_time > 10.0: - # print 'Sleep for 1 s' # dbg - time.sleep(1.0) - elif used_time > 0.1: - # Few GUI events coming in, so we can sleep longer - # print 'Sleep for 0.05 s' # dbg - time.sleep(0.05) - else: - # Many GUI events coming in, so sleep only very little - time.sleep(0.001) - del ea - return 0 - - -@ignore_keyboardinterrupts -def inputhook_wxphoenix(context): - """Run the wx event loop until the user provides more input. - - This input hook is suitable for use with wxPython >= 4 (a.k.a. Phoenix). 
- - It uses the same approach to that used in - ipykernel.eventloops.loop_wx. The wx.MainLoop is executed, and a wx.Timer - is used to periodically poll the context for input. As soon as input is - ready, the wx.MainLoop is stopped. - """ - - app = wx.GetApp() - - if app is None: - return - - if context.input_is_ready(): - return - - assert wx.IsMainThread() - - # Wx uses milliseconds - poll_interval = 100 - - # Use a wx.Timer to periodically check whether input is ready - as soon as - # it is, we exit the main loop - timer = wx.Timer() - - def poll(ev): - if context.input_is_ready(): - timer.Stop() - app.ExitMainLoop() - - timer.Start(poll_interval) - timer.Bind(wx.EVT_TIMER, poll) - - # The import of wx on Linux sets the handler for signal.SIGINT to 0. This - # is a bug in wx or gtk. We fix by just setting it back to the Python - # default. - if not callable(signal.getsignal(signal.SIGINT)): - signal.signal(signal.SIGINT, signal.default_int_handler) - - # The SetExitOnFrameDelete call allows us to run the wx mainloop without - # having a frame open. - app.SetExitOnFrameDelete(False) - app.MainLoop() - - -# Get the major wx version number to figure out what input hook we should use. -major_version = 3 - -try: - major_version = int(wx.__version__[0]) -except Exception: - pass - -# Use the phoenix hook on all platforms for wxpython >= 4 -if major_version >= 4: - inputhook = inputhook_wxphoenix -# On OSX, evtloop.Pending() always returns True, regardless of there being -# any events pending. As such we can't use implementations 1 or 3 of the -# inputhook as those depend on a pending/dispatch loop. -elif sys.platform == 'darwin': - inputhook = inputhook_wx2 -else: - inputhook = inputhook_wx3 +"""Enable wxPython to be used interactively in prompt_toolkit +""" + +import sys +import signal +import time +from timeit import default_timer as clock +import wx + + +def ignore_keyboardinterrupts(func): + """Decorator which causes KeyboardInterrupt exceptions to be ignored during + execution of the decorated function. + + This is used by the inputhook functions to handle the event where the user + presses CTRL+C while IPython is idle, and the inputhook loop is running. In + this case, we want to ignore interrupts. + """ + def wrapper(*args, **kwargs): + try: + func(*args, **kwargs) + except KeyboardInterrupt: + pass + return wrapper + + +@ignore_keyboardinterrupts +def inputhook_wx1(context): + """Run the wx event loop by processing pending events only. + + This approach seems to work, but its performance is not great as it + relies on having PyOS_InputHook called regularly. + """ + app = wx.GetApp() + if app is not None: + assert wx.Thread_IsMain() + + # Make a temporary event loop and process system events until + # there are no more waiting, then allow idle events (which + # will also deal with pending or posted wx events.) 
+ evtloop = wx.EventLoop() + ea = wx.EventLoopActivator(evtloop) + while evtloop.Pending(): + evtloop.Dispatch() + app.ProcessIdle() + del ea + return 0 + + +class EventLoopTimer(wx.Timer): + + def __init__(self, func): + self.func = func + wx.Timer.__init__(self) + + def Notify(self): + self.func() + + +class EventLoopRunner(object): + + def Run(self, time, input_is_ready): + self.input_is_ready = input_is_ready + self.evtloop = wx.EventLoop() + self.timer = EventLoopTimer(self.check_stdin) + self.timer.Start(time) + self.evtloop.Run() + + def check_stdin(self): + if self.input_is_ready(): + self.timer.Stop() + self.evtloop.Exit() + + +@ignore_keyboardinterrupts +def inputhook_wx2(context): + """Run the wx event loop, polling for stdin. + + This version runs the wx eventloop for an undetermined amount of time, + during which it periodically checks to see if anything is ready on + stdin. If anything is ready on stdin, the event loop exits. + + The argument to elr.Run controls how often the event loop looks at stdin. + This determines the responsiveness at the keyboard. A setting of 1000 + enables a user to type at most 1 char per second. I have found that a + setting of 10 gives good keyboard response. We can shorten it further, + but eventually performance would suffer from calling select/kbhit too + often. + """ + app = wx.GetApp() + if app is not None: + assert wx.Thread_IsMain() + elr = EventLoopRunner() + # As this time is made shorter, keyboard response improves, but idle + # CPU load goes up. 10 ms seems like a good compromise. + elr.Run(time=10, # CHANGE time here to control polling interval + input_is_ready=context.input_is_ready) + return 0 + + +@ignore_keyboardinterrupts +def inputhook_wx3(context): + """Run the wx event loop by processing pending events only. + + This is like inputhook_wx1, but it keeps processing pending events + until stdin is ready. After processing all pending events, a call to + time.sleep is inserted. This is needed, otherwise, CPU usage is at 100%. + This sleep time should be tuned though for best performance. + """ + app = wx.GetApp() + if app is not None: + assert wx.Thread_IsMain() + + # The import of wx on Linux sets the handler for signal.SIGINT + # to 0. This is a bug in wx or gtk. We fix by just setting it + # back to the Python default. + if not callable(signal.getsignal(signal.SIGINT)): + signal.signal(signal.SIGINT, signal.default_int_handler) + + evtloop = wx.EventLoop() + ea = wx.EventLoopActivator(evtloop) + t = clock() + while not context.input_is_ready(): + while evtloop.Pending(): + t = clock() + evtloop.Dispatch() + app.ProcessIdle() + # We need to sleep at this point to keep the idle CPU load + # low. However, if sleep to long, GUI response is poor. As + # a compromise, we watch how often GUI events are being processed + # and switch between a short and long sleep time. Here are some + # stats useful in helping to tune this. + # time CPU load + # 0.001 13% + # 0.005 3% + # 0.01 1.5% + # 0.05 0.5% + used_time = clock() - t + if used_time > 10.0: + # print 'Sleep for 1 s' # dbg + time.sleep(1.0) + elif used_time > 0.1: + # Few GUI events coming in, so we can sleep longer + # print 'Sleep for 0.05 s' # dbg + time.sleep(0.05) + else: + # Many GUI events coming in, so sleep only very little + time.sleep(0.001) + del ea + return 0 + + +@ignore_keyboardinterrupts +def inputhook_wxphoenix(context): + """Run the wx event loop until the user provides more input. + + This input hook is suitable for use with wxPython >= 4 (a.k.a. Phoenix). 
+ + It uses the same approach to that used in + ipykernel.eventloops.loop_wx. The wx.MainLoop is executed, and a wx.Timer + is used to periodically poll the context for input. As soon as input is + ready, the wx.MainLoop is stopped. + """ + + app = wx.GetApp() + + if app is None: + return + + if context.input_is_ready(): + return + + assert wx.IsMainThread() + + # Wx uses milliseconds + poll_interval = 100 + + # Use a wx.Timer to periodically check whether input is ready - as soon as + # it is, we exit the main loop + timer = wx.Timer() + + def poll(ev): + if context.input_is_ready(): + timer.Stop() + app.ExitMainLoop() + + timer.Start(poll_interval) + timer.Bind(wx.EVT_TIMER, poll) + + # The import of wx on Linux sets the handler for signal.SIGINT to 0. This + # is a bug in wx or gtk. We fix by just setting it back to the Python + # default. + if not callable(signal.getsignal(signal.SIGINT)): + signal.signal(signal.SIGINT, signal.default_int_handler) + + # The SetExitOnFrameDelete call allows us to run the wx mainloop without + # having a frame open. + app.SetExitOnFrameDelete(False) + app.MainLoop() + + +# Get the major wx version number to figure out what input hook we should use. +major_version = 3 + +try: + major_version = int(wx.__version__[0]) +except Exception: + pass + +# Use the phoenix hook on all platforms for wxpython >= 4 +if major_version >= 4: + inputhook = inputhook_wxphoenix +# On OSX, evtloop.Pending() always returns True, regardless of there being +# any events pending. As such we can't use implementations 1 or 3 of the +# inputhook as those depend on a pending/dispatch loop. +elif sys.platform == 'darwin': + inputhook = inputhook_wx2 +else: + inputhook = inputhook_wx3 diff --git a/contrib/python/ipython/py3/IPython/terminal/ptshell.py b/contrib/python/ipython/py3/IPython/terminal/ptshell.py index aad111b041d..666d3c5b514 100644 --- a/contrib/python/ipython/py3/IPython/terminal/ptshell.py +++ b/contrib/python/ipython/py3/IPython/terminal/ptshell.py @@ -1,8 +1,8 @@ -raise DeprecationWarning("""DEPRECATED: - -After Popular request and decision from the BDFL: -`IPython.terminal.ptshell` has been moved back to `IPython.terminal.interactiveshell` -during the beta cycle (after IPython 5.0.beta3) Sorry about that. - -This file will be removed in 5.0 rc or final. -""") +raise DeprecationWarning("""DEPRECATED: + +After Popular request and decision from the BDFL: +`IPython.terminal.ptshell` has been moved back to `IPython.terminal.interactiveshell` +during the beta cycle (after IPython 5.0.beta3) Sorry about that. + +This file will be removed in 5.0 rc or final. +""") diff --git a/contrib/python/ipython/py3/IPython/terminal/ptutils.py b/contrib/python/ipython/py3/IPython/terminal/ptutils.py index 380087dc296..3e5d3c5c770 100644 --- a/contrib/python/ipython/py3/IPython/terminal/ptutils.py +++ b/contrib/python/ipython/py3/IPython/terminal/ptutils.py @@ -1,197 +1,197 @@ -"""prompt-toolkit utilities - -Everything in this module is a private API, -not to be used outside IPython. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. 
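# --- Illustrative sketch, not part of the diff above ----------------------
# Each wx hook above is wrapped in ignore_keyboardinterrupts so that Ctrl+C
# pressed while IPython is idle does not abort the event-loop pump.  This is
# a condensed, standalone version of that decorator; the functools.wraps call
# is an addition for this sketch and is not in the original.
import functools


def ignore_keyboardinterrupts(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except KeyboardInterrupt:
            pass
    return wrapper


@ignore_keyboardinterrupts
def pump_events():
    raise KeyboardInterrupt  # simulates Ctrl+C arriving while the loop runs


pump_events()                # no traceback: the interrupt is swallowed
print("continued after Ctrl+C")
# ---------------------------------------------------------------------------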
- -import unicodedata -from wcwidth import wcwidth -import sys -import traceback - -from IPython.core.completer import ( - provisionalcompleter, cursor_to_position, - _deduplicate_completions) -from prompt_toolkit.completion import Completer, Completion -from prompt_toolkit.lexers import Lexer -from prompt_toolkit.lexers import PygmentsLexer -from prompt_toolkit.patch_stdout import patch_stdout - -import pygments.lexers as pygments_lexers -import os - -_completion_sentinel = object() - -def _elide_point(string:str, *, min_elide=30)->str: - """ - If a string is long enough, and has at least 3 dots, - replace the middle part with ellipses. - - If a string naming a file is long enough, and has at least 3 slashes, - replace the middle part with ellipses. - - If three consecutive dots, or two consecutive dots are encountered these are - replaced by the equivalents HORIZONTAL ELLIPSIS or TWO DOT LEADER unicode - equivalents - """ - string = string.replace('...','\N{HORIZONTAL ELLIPSIS}') - string = string.replace('..','\N{TWO DOT LEADER}') - if len(string) < min_elide: - return string - - object_parts = string.split('.') - file_parts = string.split(os.sep) - if file_parts[-1] == '': - file_parts.pop() - - if len(object_parts) > 3: - return '{}.{}\N{HORIZONTAL ELLIPSIS}{}.{}'.format(object_parts[0], object_parts[1][0], object_parts[-2][-1], object_parts[-1]) - - elif len(file_parts) > 3: - return ('{}' + os.sep + '{}\N{HORIZONTAL ELLIPSIS}{}' + os.sep + '{}').format(file_parts[0], file_parts[1][0], file_parts[-2][-1], file_parts[-1]) - - return string - -def _elide_typed(string:str, typed:str, *, min_elide:int=30)->str: - """ - Elide the middle of a long string if the beginning has already been typed. - """ - - if len(string) < min_elide: - return string - cut_how_much = len(typed)-3 - if cut_how_much < 7: - return string - if string.startswith(typed) and len(string)> len(typed): - return f"{string[:3]}\N{HORIZONTAL ELLIPSIS}{string[cut_how_much:]}" - return string - -def _elide(string:str, typed:str, min_elide=30)->str: - return _elide_typed( - _elide_point(string, min_elide=min_elide), - typed, min_elide=min_elide) - - - -def _adjust_completion_text_based_on_context(text, body, offset): - if text.endswith('=') and len(body) > offset and body[offset] == '=': - return text[:-1] - else: - return text - - -class IPythonPTCompleter(Completer): - """Adaptor to provide IPython completions to prompt_toolkit""" - def __init__(self, ipy_completer=None, shell=None): - if shell is None and ipy_completer is None: - raise TypeError("Please pass shell=an InteractiveShell instance.") - self._ipy_completer = ipy_completer - self.shell = shell - - @property - def ipy_completer(self): - if self._ipy_completer: - return self._ipy_completer - else: - return self.shell.Completer - - def get_completions(self, document, complete_event): - if not document.current_line.strip(): - return - # Some bits of our completion system may print stuff (e.g. if a module - # is imported). This context manager ensures that doesn't interfere with - # the prompt. 
- - with patch_stdout(), provisionalcompleter(): - body = document.text - cursor_row = document.cursor_position_row - cursor_col = document.cursor_position_col - cursor_position = document.cursor_position - offset = cursor_to_position(body, cursor_row, cursor_col) - try: - yield from self._get_completions(body, offset, cursor_position, self.ipy_completer) - except Exception as e: - try: - exc_type, exc_value, exc_tb = sys.exc_info() - traceback.print_exception(exc_type, exc_value, exc_tb) - except AttributeError: - print('Unrecoverable Error in completions') - - @staticmethod - def _get_completions(body, offset, cursor_position, ipyc): - """ - Private equivalent of get_completions() use only for unit_testing. - """ - debug = getattr(ipyc, 'debug', False) - completions = _deduplicate_completions( - body, ipyc.completions(body, offset)) - for c in completions: - if not c.text: - # Guard against completion machinery giving us an empty string. - continue - text = unicodedata.normalize('NFC', c.text) - # When the first character of the completion has a zero length, - # then it's probably a decomposed unicode character. E.g. caused by - # the "\dot" completion. Try to compose again with the previous - # character. - if wcwidth(text[0]) == 0: - if cursor_position + c.start > 0: - char_before = body[c.start - 1] - fixed_text = unicodedata.normalize( - 'NFC', char_before + text) - - # Yield the modified completion instead, if this worked. - if wcwidth(text[0:1]) == 1: - yield Completion(fixed_text, start_position=c.start - offset - 1) - continue - - # TODO: Use Jedi to determine meta_text - # (Jedi currently has a bug that results in incorrect information.) - # meta_text = '' - # yield Completion(m, start_position=start_pos, - # display_meta=meta_text) - display_text = c.text - - adjusted_text = _adjust_completion_text_based_on_context(c.text, body, offset) - if c.type == 'function': - yield Completion(adjusted_text, start_position=c.start - offset, display=_elide(display_text+'()', body[c.start:c.end]), display_meta=c.type+c.signature) - else: - yield Completion(adjusted_text, start_position=c.start - offset, display=_elide(display_text, body[c.start:c.end]), display_meta=c.type) - -class IPythonPTLexer(Lexer): - """ - Wrapper around PythonLexer and BashLexer. - """ - def __init__(self): - l = pygments_lexers - self.python_lexer = PygmentsLexer(l.Python3Lexer) - self.shell_lexer = PygmentsLexer(l.BashLexer) - - self.magic_lexers = { - 'HTML': PygmentsLexer(l.HtmlLexer), - 'html': PygmentsLexer(l.HtmlLexer), - 'javascript': PygmentsLexer(l.JavascriptLexer), - 'js': PygmentsLexer(l.JavascriptLexer), - 'perl': PygmentsLexer(l.PerlLexer), - 'ruby': PygmentsLexer(l.RubyLexer), - 'latex': PygmentsLexer(l.TexLexer), - } - - def lex_document(self, document): - text = document.text.lstrip() - - lexer = self.python_lexer - - if text.startswith('!') or text.startswith('%%bash'): - lexer = self.shell_lexer - - elif text.startswith('%%'): - for magic, l in self.magic_lexers.items(): - if text.startswith('%%' + magic): - lexer = l - break - - return lexer.lex_document(document) +"""prompt-toolkit utilities + +Everything in this module is a private API, +not to be used outside IPython. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
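# --- Illustrative sketch, not part of the diff above ----------------------
# ptutils elides long completions so the popup stays narrow: _elide_point
# keeps the first and last components of a long dotted name and replaces the
# middle with a single ellipsis.  This is a condensed re-statement of that
# idea; the function name and example string below are invented.
def elide_dotted(name, min_elide=30):
    if len(name) < min_elide:
        return name
    parts = name.split(".")
    if len(parts) <= 3:
        return name
    return "{}.{}\N{HORIZONTAL ELLIPSIS}{}.{}".format(
        parts[0], parts[1][0], parts[-2][-1], parts[-1]
    )


print(elide_dotted("package.subpackage.module.submodule.attribute"))
# -> package.s…e.attribute
# ---------------------------------------------------------------------------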
+ +import unicodedata +from wcwidth import wcwidth +import sys +import traceback + +from IPython.core.completer import ( + provisionalcompleter, cursor_to_position, + _deduplicate_completions) +from prompt_toolkit.completion import Completer, Completion +from prompt_toolkit.lexers import Lexer +from prompt_toolkit.lexers import PygmentsLexer +from prompt_toolkit.patch_stdout import patch_stdout + +import pygments.lexers as pygments_lexers +import os + +_completion_sentinel = object() + +def _elide_point(string:str, *, min_elide=30)->str: + """ + If a string is long enough, and has at least 3 dots, + replace the middle part with ellipses. + + If a string naming a file is long enough, and has at least 3 slashes, + replace the middle part with ellipses. + + If three consecutive dots, or two consecutive dots are encountered these are + replaced by the equivalents HORIZONTAL ELLIPSIS or TWO DOT LEADER unicode + equivalents + """ + string = string.replace('...','\N{HORIZONTAL ELLIPSIS}') + string = string.replace('..','\N{TWO DOT LEADER}') + if len(string) < min_elide: + return string + + object_parts = string.split('.') + file_parts = string.split(os.sep) + if file_parts[-1] == '': + file_parts.pop() + + if len(object_parts) > 3: + return '{}.{}\N{HORIZONTAL ELLIPSIS}{}.{}'.format(object_parts[0], object_parts[1][0], object_parts[-2][-1], object_parts[-1]) + + elif len(file_parts) > 3: + return ('{}' + os.sep + '{}\N{HORIZONTAL ELLIPSIS}{}' + os.sep + '{}').format(file_parts[0], file_parts[1][0], file_parts[-2][-1], file_parts[-1]) + + return string + +def _elide_typed(string:str, typed:str, *, min_elide:int=30)->str: + """ + Elide the middle of a long string if the beginning has already been typed. + """ + + if len(string) < min_elide: + return string + cut_how_much = len(typed)-3 + if cut_how_much < 7: + return string + if string.startswith(typed) and len(string)> len(typed): + return f"{string[:3]}\N{HORIZONTAL ELLIPSIS}{string[cut_how_much:]}" + return string + +def _elide(string:str, typed:str, min_elide=30)->str: + return _elide_typed( + _elide_point(string, min_elide=min_elide), + typed, min_elide=min_elide) + + + +def _adjust_completion_text_based_on_context(text, body, offset): + if text.endswith('=') and len(body) > offset and body[offset] == '=': + return text[:-1] + else: + return text + + +class IPythonPTCompleter(Completer): + """Adaptor to provide IPython completions to prompt_toolkit""" + def __init__(self, ipy_completer=None, shell=None): + if shell is None and ipy_completer is None: + raise TypeError("Please pass shell=an InteractiveShell instance.") + self._ipy_completer = ipy_completer + self.shell = shell + + @property + def ipy_completer(self): + if self._ipy_completer: + return self._ipy_completer + else: + return self.shell.Completer + + def get_completions(self, document, complete_event): + if not document.current_line.strip(): + return + # Some bits of our completion system may print stuff (e.g. if a module + # is imported). This context manager ensures that doesn't interfere with + # the prompt. 
+ + with patch_stdout(), provisionalcompleter(): + body = document.text + cursor_row = document.cursor_position_row + cursor_col = document.cursor_position_col + cursor_position = document.cursor_position + offset = cursor_to_position(body, cursor_row, cursor_col) + try: + yield from self._get_completions(body, offset, cursor_position, self.ipy_completer) + except Exception as e: + try: + exc_type, exc_value, exc_tb = sys.exc_info() + traceback.print_exception(exc_type, exc_value, exc_tb) + except AttributeError: + print('Unrecoverable Error in completions') + + @staticmethod + def _get_completions(body, offset, cursor_position, ipyc): + """ + Private equivalent of get_completions() use only for unit_testing. + """ + debug = getattr(ipyc, 'debug', False) + completions = _deduplicate_completions( + body, ipyc.completions(body, offset)) + for c in completions: + if not c.text: + # Guard against completion machinery giving us an empty string. + continue + text = unicodedata.normalize('NFC', c.text) + # When the first character of the completion has a zero length, + # then it's probably a decomposed unicode character. E.g. caused by + # the "\dot" completion. Try to compose again with the previous + # character. + if wcwidth(text[0]) == 0: + if cursor_position + c.start > 0: + char_before = body[c.start - 1] + fixed_text = unicodedata.normalize( + 'NFC', char_before + text) + + # Yield the modified completion instead, if this worked. + if wcwidth(text[0:1]) == 1: + yield Completion(fixed_text, start_position=c.start - offset - 1) + continue + + # TODO: Use Jedi to determine meta_text + # (Jedi currently has a bug that results in incorrect information.) + # meta_text = '' + # yield Completion(m, start_position=start_pos, + # display_meta=meta_text) + display_text = c.text + + adjusted_text = _adjust_completion_text_based_on_context(c.text, body, offset) + if c.type == 'function': + yield Completion(adjusted_text, start_position=c.start - offset, display=_elide(display_text+'()', body[c.start:c.end]), display_meta=c.type+c.signature) + else: + yield Completion(adjusted_text, start_position=c.start - offset, display=_elide(display_text, body[c.start:c.end]), display_meta=c.type) + +class IPythonPTLexer(Lexer): + """ + Wrapper around PythonLexer and BashLexer. 
+ """ + def __init__(self): + l = pygments_lexers + self.python_lexer = PygmentsLexer(l.Python3Lexer) + self.shell_lexer = PygmentsLexer(l.BashLexer) + + self.magic_lexers = { + 'HTML': PygmentsLexer(l.HtmlLexer), + 'html': PygmentsLexer(l.HtmlLexer), + 'javascript': PygmentsLexer(l.JavascriptLexer), + 'js': PygmentsLexer(l.JavascriptLexer), + 'perl': PygmentsLexer(l.PerlLexer), + 'ruby': PygmentsLexer(l.RubyLexer), + 'latex': PygmentsLexer(l.TexLexer), + } + + def lex_document(self, document): + text = document.text.lstrip() + + lexer = self.python_lexer + + if text.startswith('!') or text.startswith('%%bash'): + lexer = self.shell_lexer + + elif text.startswith('%%'): + for magic, l in self.magic_lexers.items(): + if text.startswith('%%' + magic): + lexer = l + break + + return lexer.lex_document(document) diff --git a/contrib/python/ipython/py3/IPython/terminal/shortcuts.py b/contrib/python/ipython/py3/IPython/terminal/shortcuts.py index 6f4468b3a54..a23fa091a0e 100644 --- a/contrib/python/ipython/py3/IPython/terminal/shortcuts.py +++ b/contrib/python/ipython/py3/IPython/terminal/shortcuts.py @@ -1,276 +1,276 @@ -""" -Module to define and register Terminal IPython shortcuts with -:mod:`prompt_toolkit` -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import warnings -import signal -import sys -from typing import Callable - - -from prompt_toolkit.application.current import get_app -from prompt_toolkit.enums import DEFAULT_BUFFER, SEARCH_BUFFER -from prompt_toolkit.filters import (has_focus, has_selection, Condition, - vi_insert_mode, emacs_insert_mode, has_completions, vi_mode) -from prompt_toolkit.key_binding.bindings.completion import display_completions_like_readline -from prompt_toolkit.key_binding import KeyBindings - -from IPython.utils.decorators import undoc - -@undoc -@Condition -def cursor_in_leading_ws(): - before = get_app().current_buffer.document.current_line_before_cursor - return (not before) or before.isspace() - - -def create_ipython_shortcuts(shell): - """Set up the prompt_toolkit keyboard shortcuts for IPython""" - - kb = KeyBindings() - insert_mode = vi_insert_mode | emacs_insert_mode - - if getattr(shell, 'handle_return', None): - return_handler = shell.handle_return(shell) - else: - return_handler = newline_or_execute_outer(shell) - - kb.add('enter', filter=(has_focus(DEFAULT_BUFFER) - & ~has_selection - & insert_mode - ))(return_handler) - - def reformat_and_execute(event): - reformat_text_before_cursor(event.current_buffer, event.current_buffer.document, shell) - event.current_buffer.validate_and_handle() - - kb.add('escape', 'enter', filter=(has_focus(DEFAULT_BUFFER) - & ~has_selection - & insert_mode - ))(reformat_and_execute) - - kb.add('c-\\')(force_exit) - - kb.add('c-p', filter=(vi_insert_mode & has_focus(DEFAULT_BUFFER)) - )(previous_history_or_previous_completion) - - kb.add('c-n', filter=(vi_insert_mode & has_focus(DEFAULT_BUFFER)) - )(next_history_or_next_completion) - - kb.add('c-g', filter=(has_focus(DEFAULT_BUFFER) & has_completions) - )(dismiss_completion) - - kb.add('c-c', filter=has_focus(DEFAULT_BUFFER))(reset_buffer) - - kb.add('c-c', filter=has_focus(SEARCH_BUFFER))(reset_search_buffer) - - supports_suspend = Condition(lambda: hasattr(signal, 'SIGTSTP')) - kb.add('c-z', filter=supports_suspend)(suspend_to_bg) - - # Ctrl+I == Tab - kb.add('tab', filter=(has_focus(DEFAULT_BUFFER) - & ~has_selection - & insert_mode - & cursor_in_leading_ws - ))(indent_buffer) - kb.add('c-o', 
filter=(has_focus(DEFAULT_BUFFER) & emacs_insert_mode) - )(newline_autoindent_outer(shell.input_transformer_manager)) - - kb.add('f2', filter=has_focus(DEFAULT_BUFFER))(open_input_in_editor) - - if shell.display_completions == 'readlinelike': - kb.add('c-i', filter=(has_focus(DEFAULT_BUFFER) - & ~has_selection - & insert_mode - & ~cursor_in_leading_ws - ))(display_completions_like_readline) - - if sys.platform == 'win32': - kb.add('c-v', filter=(has_focus(DEFAULT_BUFFER) & ~vi_mode))(win_paste) - - return kb - - -def reformat_text_before_cursor(buffer, document, shell): - text = buffer.delete_before_cursor(len(document.text[:document.cursor_position])) - try: - formatted_text = shell.reformat_handler(text) - buffer.insert_text(formatted_text) - except Exception as e: - buffer.insert_text(text) - - -def newline_or_execute_outer(shell): - - def newline_or_execute(event): - """When the user presses return, insert a newline or execute the code.""" - b = event.current_buffer - d = b.document - - if b.complete_state: - cc = b.complete_state.current_completion - if cc: - b.apply_completion(cc) - else: - b.cancel_completion() - return - - # If there's only one line, treat it as if the cursor is at the end. - # See https://github.com/ipython/ipython/issues/10425 - if d.line_count == 1: - check_text = d.text - else: - check_text = d.text[:d.cursor_position] - status, indent = shell.check_complete(check_text) - - # if all we have after the cursor is whitespace: reformat current text - # before cursor - after_cursor = d.text[d.cursor_position:] - reformatted = False - if not after_cursor.strip(): - reformat_text_before_cursor(b, d, shell) - reformatted = True - if not (d.on_last_line or - d.cursor_position_row >= d.line_count - d.empty_line_count_at_the_end() - ): - if shell.autoindent: - b.insert_text('\n' + indent) - else: - b.insert_text('\n') - return - - if (status != 'incomplete') and b.accept_handler: - if not reformatted: - reformat_text_before_cursor(b, d, shell) - b.validate_and_handle() - else: - if shell.autoindent: - b.insert_text('\n' + indent) - else: - b.insert_text('\n') - return newline_or_execute - - -def previous_history_or_previous_completion(event): - """ - Control-P in vi edit mode on readline is history next, unlike default prompt toolkit. - - If completer is open this still select previous completion. - """ - event.current_buffer.auto_up() - - -def next_history_or_next_completion(event): - """ - Control-N in vi edit mode on readline is history previous, unlike default prompt toolkit. - - If completer is open this still select next completion. - """ - event.current_buffer.auto_down() - - -def dismiss_completion(event): - b = event.current_buffer - if b.complete_state: - b.cancel_completion() - - -def reset_buffer(event): - b = event.current_buffer - if b.complete_state: - b.cancel_completion() - else: - b.reset() - - -def reset_search_buffer(event): - if event.current_buffer.document.text: - event.current_buffer.reset() - else: - event.app.layout.focus(DEFAULT_BUFFER) - -def suspend_to_bg(event): - event.app.suspend_to_background() - -def force_exit(event): - """ - Force exit (with a non-zero return value) - """ - sys.exit("Quit") - -def indent_buffer(event): - event.current_buffer.insert_text(' ' * 4) - -@undoc -def newline_with_copy_margin(event): - """ - DEPRECATED since IPython 6.0 - - See :any:`newline_autoindent_outer` for a replacement. 
- - Preserve margin and cursor position when using - Control-O to insert a newline in EMACS mode - """ - warnings.warn("`newline_with_copy_margin(event)` is deprecated since IPython 6.0. " - "see `newline_autoindent_outer(shell)(event)` for a replacement.", - DeprecationWarning, stacklevel=2) - - b = event.current_buffer - cursor_start_pos = b.document.cursor_position_col - b.newline(copy_margin=True) - b.cursor_up(count=1) - cursor_end_pos = b.document.cursor_position_col - if cursor_start_pos != cursor_end_pos: - pos_diff = cursor_start_pos - cursor_end_pos - b.cursor_right(count=pos_diff) - -def newline_autoindent_outer(inputsplitter) -> Callable[..., None]: - """ - Return a function suitable for inserting a indented newline after the cursor. - - Fancier version of deprecated ``newline_with_copy_margin`` which should - compute the correct indentation of the inserted line. That is to say, indent - by 4 extra space after a function definition, class definition, context - manager... And dedent by 4 space after ``pass``, ``return``, ``raise ...``. - """ - - def newline_autoindent(event): - """insert a newline after the cursor indented appropriately.""" - b = event.current_buffer - d = b.document - - if b.complete_state: - b.cancel_completion() - text = d.text[:d.cursor_position] + '\n' - _, indent = inputsplitter.check_complete(text) - b.insert_text('\n' + (' ' * (indent or 0)), move_cursor=False) - - return newline_autoindent - - -def open_input_in_editor(event): - event.app.current_buffer.open_in_editor() - - -if sys.platform == 'win32': - from IPython.core.error import TryNext - from IPython.lib.clipboard import (ClipboardEmpty, - win32_clipboard_get, - tkinter_clipboard_get) - - @undoc - def win_paste(event): - try: - text = win32_clipboard_get() - except TryNext: - try: - text = tkinter_clipboard_get() - except (TryNext, ClipboardEmpty): - return - except ClipboardEmpty: - return - event.current_buffer.insert_text(text.replace('\t', ' ' * 4)) +""" +Module to define and register Terminal IPython shortcuts with +:mod:`prompt_toolkit` +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
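# --- Illustrative sketch, not part of the diff above ----------------------
# create_ipython_shortcuts() above builds a prompt_toolkit KeyBindings object
# and attaches each handler behind a filter (has_focus, insert_mode, custom
# Condition objects).  The minimal example below uses the same API surface
# that the file itself imports; the Ctrl-T binding and the `enabled` flag are
# invented for illustration and are not IPython shortcuts.
from prompt_toolkit.filters import Condition
from prompt_toolkit.key_binding import KeyBindings

enabled = True
kb = KeyBindings()


@kb.add("c-t", filter=Condition(lambda: enabled))
def clear_buffer(event):
    """Wipe the current input when Ctrl-T is pressed (illustration only)."""
    event.current_buffer.reset()

# The KeyBindings object would then be handed to a PromptSession/Application
# via key_bindings=kb; nothing runs at import time in this sketch.
# ---------------------------------------------------------------------------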
+ +import warnings +import signal +import sys +from typing import Callable + + +from prompt_toolkit.application.current import get_app +from prompt_toolkit.enums import DEFAULT_BUFFER, SEARCH_BUFFER +from prompt_toolkit.filters import (has_focus, has_selection, Condition, + vi_insert_mode, emacs_insert_mode, has_completions, vi_mode) +from prompt_toolkit.key_binding.bindings.completion import display_completions_like_readline +from prompt_toolkit.key_binding import KeyBindings + +from IPython.utils.decorators import undoc + +@undoc +@Condition +def cursor_in_leading_ws(): + before = get_app().current_buffer.document.current_line_before_cursor + return (not before) or before.isspace() + + +def create_ipython_shortcuts(shell): + """Set up the prompt_toolkit keyboard shortcuts for IPython""" + + kb = KeyBindings() + insert_mode = vi_insert_mode | emacs_insert_mode + + if getattr(shell, 'handle_return', None): + return_handler = shell.handle_return(shell) + else: + return_handler = newline_or_execute_outer(shell) + + kb.add('enter', filter=(has_focus(DEFAULT_BUFFER) + & ~has_selection + & insert_mode + ))(return_handler) + + def reformat_and_execute(event): + reformat_text_before_cursor(event.current_buffer, event.current_buffer.document, shell) + event.current_buffer.validate_and_handle() + + kb.add('escape', 'enter', filter=(has_focus(DEFAULT_BUFFER) + & ~has_selection + & insert_mode + ))(reformat_and_execute) + + kb.add('c-\\')(force_exit) + + kb.add('c-p', filter=(vi_insert_mode & has_focus(DEFAULT_BUFFER)) + )(previous_history_or_previous_completion) + + kb.add('c-n', filter=(vi_insert_mode & has_focus(DEFAULT_BUFFER)) + )(next_history_or_next_completion) + + kb.add('c-g', filter=(has_focus(DEFAULT_BUFFER) & has_completions) + )(dismiss_completion) + + kb.add('c-c', filter=has_focus(DEFAULT_BUFFER))(reset_buffer) + + kb.add('c-c', filter=has_focus(SEARCH_BUFFER))(reset_search_buffer) + + supports_suspend = Condition(lambda: hasattr(signal, 'SIGTSTP')) + kb.add('c-z', filter=supports_suspend)(suspend_to_bg) + + # Ctrl+I == Tab + kb.add('tab', filter=(has_focus(DEFAULT_BUFFER) + & ~has_selection + & insert_mode + & cursor_in_leading_ws + ))(indent_buffer) + kb.add('c-o', filter=(has_focus(DEFAULT_BUFFER) & emacs_insert_mode) + )(newline_autoindent_outer(shell.input_transformer_manager)) + + kb.add('f2', filter=has_focus(DEFAULT_BUFFER))(open_input_in_editor) + + if shell.display_completions == 'readlinelike': + kb.add('c-i', filter=(has_focus(DEFAULT_BUFFER) + & ~has_selection + & insert_mode + & ~cursor_in_leading_ws + ))(display_completions_like_readline) + + if sys.platform == 'win32': + kb.add('c-v', filter=(has_focus(DEFAULT_BUFFER) & ~vi_mode))(win_paste) + + return kb + + +def reformat_text_before_cursor(buffer, document, shell): + text = buffer.delete_before_cursor(len(document.text[:document.cursor_position])) + try: + formatted_text = shell.reformat_handler(text) + buffer.insert_text(formatted_text) + except Exception as e: + buffer.insert_text(text) + + +def newline_or_execute_outer(shell): + + def newline_or_execute(event): + """When the user presses return, insert a newline or execute the code.""" + b = event.current_buffer + d = b.document + + if b.complete_state: + cc = b.complete_state.current_completion + if cc: + b.apply_completion(cc) + else: + b.cancel_completion() + return + + # If there's only one line, treat it as if the cursor is at the end. 
+ # See https://github.com/ipython/ipython/issues/10425 + if d.line_count == 1: + check_text = d.text + else: + check_text = d.text[:d.cursor_position] + status, indent = shell.check_complete(check_text) + + # if all we have after the cursor is whitespace: reformat current text + # before cursor + after_cursor = d.text[d.cursor_position:] + reformatted = False + if not after_cursor.strip(): + reformat_text_before_cursor(b, d, shell) + reformatted = True + if not (d.on_last_line or + d.cursor_position_row >= d.line_count - d.empty_line_count_at_the_end() + ): + if shell.autoindent: + b.insert_text('\n' + indent) + else: + b.insert_text('\n') + return + + if (status != 'incomplete') and b.accept_handler: + if not reformatted: + reformat_text_before_cursor(b, d, shell) + b.validate_and_handle() + else: + if shell.autoindent: + b.insert_text('\n' + indent) + else: + b.insert_text('\n') + return newline_or_execute + + +def previous_history_or_previous_completion(event): + """ + Control-P in vi edit mode on readline is history next, unlike default prompt toolkit. + + If completer is open this still select previous completion. + """ + event.current_buffer.auto_up() + + +def next_history_or_next_completion(event): + """ + Control-N in vi edit mode on readline is history previous, unlike default prompt toolkit. + + If completer is open this still select next completion. + """ + event.current_buffer.auto_down() + + +def dismiss_completion(event): + b = event.current_buffer + if b.complete_state: + b.cancel_completion() + + +def reset_buffer(event): + b = event.current_buffer + if b.complete_state: + b.cancel_completion() + else: + b.reset() + + +def reset_search_buffer(event): + if event.current_buffer.document.text: + event.current_buffer.reset() + else: + event.app.layout.focus(DEFAULT_BUFFER) + +def suspend_to_bg(event): + event.app.suspend_to_background() + +def force_exit(event): + """ + Force exit (with a non-zero return value) + """ + sys.exit("Quit") + +def indent_buffer(event): + event.current_buffer.insert_text(' ' * 4) + +@undoc +def newline_with_copy_margin(event): + """ + DEPRECATED since IPython 6.0 + + See :any:`newline_autoindent_outer` for a replacement. + + Preserve margin and cursor position when using + Control-O to insert a newline in EMACS mode + """ + warnings.warn("`newline_with_copy_margin(event)` is deprecated since IPython 6.0. " + "see `newline_autoindent_outer(shell)(event)` for a replacement.", + DeprecationWarning, stacklevel=2) + + b = event.current_buffer + cursor_start_pos = b.document.cursor_position_col + b.newline(copy_margin=True) + b.cursor_up(count=1) + cursor_end_pos = b.document.cursor_position_col + if cursor_start_pos != cursor_end_pos: + pos_diff = cursor_start_pos - cursor_end_pos + b.cursor_right(count=pos_diff) + +def newline_autoindent_outer(inputsplitter) -> Callable[..., None]: + """ + Return a function suitable for inserting a indented newline after the cursor. + + Fancier version of deprecated ``newline_with_copy_margin`` which should + compute the correct indentation of the inserted line. That is to say, indent + by 4 extra space after a function definition, class definition, context + manager... And dedent by 4 space after ``pass``, ``return``, ``raise ...``. 
+ """ + + def newline_autoindent(event): + """insert a newline after the cursor indented appropriately.""" + b = event.current_buffer + d = b.document + + if b.complete_state: + b.cancel_completion() + text = d.text[:d.cursor_position] + '\n' + _, indent = inputsplitter.check_complete(text) + b.insert_text('\n' + (' ' * (indent or 0)), move_cursor=False) + + return newline_autoindent + + +def open_input_in_editor(event): + event.app.current_buffer.open_in_editor() + + +if sys.platform == 'win32': + from IPython.core.error import TryNext + from IPython.lib.clipboard import (ClipboardEmpty, + win32_clipboard_get, + tkinter_clipboard_get) + + @undoc + def win_paste(event): + try: + text = win32_clipboard_get() + except TryNext: + try: + text = tkinter_clipboard_get() + except (TryNext, ClipboardEmpty): + return + except ClipboardEmpty: + return + event.current_buffer.insert_text(text.replace('\t', ' ' * 4)) diff --git a/contrib/python/ipython/py3/IPython/testing/__init__.py b/contrib/python/ipython/py3/IPython/testing/__init__.py index 1f7f74540de..552608792d9 100644 --- a/contrib/python/ipython/py3/IPython/testing/__init__.py +++ b/contrib/python/ipython/py3/IPython/testing/__init__.py @@ -1,49 +1,49 @@ -"""Testing support (tools to test IPython itself). -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2009-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - - -import os - -#----------------------------------------------------------------------------- -# Functions -#----------------------------------------------------------------------------- - -# User-level entry point for testing -def test(**kwargs): - """Run the entire IPython test suite. - - Any of the options for run_iptestall() may be passed as keyword arguments. - - For example:: - - IPython.test(testgroups=['lib', 'config', 'utils'], fast=2) - - will run those three sections of the test suite, using two processes. - """ - - # Do the import internally, so that this function doesn't increase total - # import time - from .iptestcontroller import run_iptestall, default_options - options = default_options() - for name, val in kwargs.items(): - setattr(options, name, val) - run_iptestall(options) - -#----------------------------------------------------------------------------- -# Constants -#----------------------------------------------------------------------------- - -# We scale all timeouts via this factor, slow machines can increase it -IPYTHON_TESTING_TIMEOUT_SCALE = float(os.getenv( - 'IPYTHON_TESTING_TIMEOUT_SCALE', 1)) - -# So nose doesn't try to run this as a test itself and we end up with an -# infinite test loop -test.__test__ = False +"""Testing support (tools to test IPython itself). +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2009-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
+#----------------------------------------------------------------------------- + + +import os + +#----------------------------------------------------------------------------- +# Functions +#----------------------------------------------------------------------------- + +# User-level entry point for testing +def test(**kwargs): + """Run the entire IPython test suite. + + Any of the options for run_iptestall() may be passed as keyword arguments. + + For example:: + + IPython.test(testgroups=['lib', 'config', 'utils'], fast=2) + + will run those three sections of the test suite, using two processes. + """ + + # Do the import internally, so that this function doesn't increase total + # import time + from .iptestcontroller import run_iptestall, default_options + options = default_options() + for name, val in kwargs.items(): + setattr(options, name, val) + run_iptestall(options) + +#----------------------------------------------------------------------------- +# Constants +#----------------------------------------------------------------------------- + +# We scale all timeouts via this factor, slow machines can increase it +IPYTHON_TESTING_TIMEOUT_SCALE = float(os.getenv( + 'IPYTHON_TESTING_TIMEOUT_SCALE', 1)) + +# So nose doesn't try to run this as a test itself and we end up with an +# infinite test loop +test.__test__ = False diff --git a/contrib/python/ipython/py3/IPython/testing/__main__.py b/contrib/python/ipython/py3/IPython/testing/__main__.py index 179ec6f6997..4b0bb8ba9ca 100644 --- a/contrib/python/ipython/py3/IPython/testing/__main__.py +++ b/contrib/python/ipython/py3/IPython/testing/__main__.py @@ -1,3 +1,3 @@ -if __name__ == '__main__': - from IPython.testing import iptestcontroller - iptestcontroller.main() +if __name__ == '__main__': + from IPython.testing import iptestcontroller + iptestcontroller.main() diff --git a/contrib/python/ipython/py3/IPython/testing/decorators.py b/contrib/python/ipython/py3/IPython/testing/decorators.py index 6b1a20a2c05..4539a72a8ca 100644 --- a/contrib/python/ipython/py3/IPython/testing/decorators.py +++ b/contrib/python/ipython/py3/IPython/testing/decorators.py @@ -1,383 +1,383 @@ -# -*- coding: utf-8 -*- -"""Decorators for labeling test objects. - -Decorators that merely return a modified version of the original function -object are straightforward. Decorators that return a new function object need -to use nose.tools.make_decorator(original_function)(decorator) in returning the -decorator, in order to preserve metadata such as function name, setup and -teardown functions and so on - see nose.tools for more information. - -This module provides a set of useful decorators meant to be ready to use in -your own tests. See the bottom of the file for the ready-made ones, and if you -find yourself writing a new one that may be of generic use, add it here. - -Included decorators: - - -Lightweight testing that remains unittest-compatible. - -- An @as_unittest decorator can be used to tag any normal parameter-less - function as a unittest TestCase. Then, both nose and normal unittest will - recognize it as such. This will make it easier to migrate away from Nose if - we ever need/want to while maintaining very lightweight tests. - -NOTE: This file contains IPython-specific decorators. Using the machinery in -IPython.external.decorators, we import either numpy.testing.decorators if numpy is -available, OR use equivalent code in IPython.external._decorators, which -we've copied verbatim from numpy. - -""" - -# Copyright (c) IPython Development Team. 
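# --- Illustrative sketch, not part of the diff above ----------------------
# IPython.testing above exposes IPYTHON_TESTING_TIMEOUT_SCALE so slow machines
# can stretch every test timeout via an environment variable.  A minimal
# sketch of how such a knob is applied; scaled_timeout is an invented helper,
# not part of IPython.
import os

IPYTHON_TESTING_TIMEOUT_SCALE = float(
    os.getenv("IPYTHON_TESTING_TIMEOUT_SCALE", 1)
)


def scaled_timeout(seconds):
    """Return a timeout adjusted by the environment-provided scale factor."""
    return seconds * IPYTHON_TESTING_TIMEOUT_SCALE


print(scaled_timeout(10))  # 10.0 unless the environment variable overrides it
# ---------------------------------------------------------------------------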
-# Distributed under the terms of the Modified BSD License. - -import os -import shutil -import sys -import tempfile -import unittest -import warnings -from importlib import import_module - -from decorator import decorator - -# Expose the unittest-driven decorators -from .ipunittest import ipdoctest, ipdocstring - -# Grab the numpy-specific decorators which we keep in a file that we -# occasionally update from upstream: decorators.py is a copy of -# numpy.testing.decorators, we expose all of it here. -from IPython.external.decorators import knownfailureif - -#----------------------------------------------------------------------------- -# Classes and functions -#----------------------------------------------------------------------------- - -# Simple example of the basic idea -def as_unittest(func): - """Decorator to make a simple function into a normal test via unittest.""" - class Tester(unittest.TestCase): - def test(self): - func() - - Tester.__name__ = func.__name__ - - return Tester - -# Utility functions - -def apply_wrapper(wrapper, func): - """Apply a wrapper to a function for decoration. - - This mixes Michele Simionato's decorator tool with nose's make_decorator, - to apply a wrapper in a decorator so that all nose attributes, as well as - function signature and other properties, survive the decoration cleanly. - This will ensure that wrapped functions can still be well introspected via - IPython, for example. - """ - warnings.warn("The function `apply_wrapper` is deprecated since IPython 4.0", - DeprecationWarning, stacklevel=2) - import nose.tools - - return decorator(wrapper,nose.tools.make_decorator(func)(wrapper)) - - -def make_label_dec(label, ds=None): - """Factory function to create a decorator that applies one or more labels. - - Parameters - ---------- - label : string or sequence - One or more labels that will be applied by the decorator to the functions - it decorates. Labels are attributes of the decorated function with their - value set to True. - - ds : string - An optional docstring for the resulting decorator. If not given, a - default docstring is auto-generated. - - Returns - ------- - A decorator. - - Examples - -------- - - A simple labeling decorator: - - >>> slow = make_label_dec('slow') - >>> slow.__doc__ - "Labels a test as 'slow'." - - And one that uses multiple labels and a custom docstring: - - >>> rare = make_label_dec(['slow','hard'], - ... "Mix labels 'slow' and 'hard' for rare tests.") - >>> rare.__doc__ - "Mix labels 'slow' and 'hard' for rare tests." - - Now, let's test using this one: - >>> @rare - ... def f(): pass - ... - >>> - >>> f.slow - True - >>> f.hard - True - """ - - warnings.warn("The function `make_label_dec` is deprecated since IPython 4.0", - DeprecationWarning, stacklevel=2) - if isinstance(label, str): - labels = [label] - else: - labels = label - - # Validate that the given label(s) are OK for use in setattr() by doing a - # dry run on a dummy function. - tmp = lambda : None - for label in labels: - setattr(tmp,label,True) - - # This is the actual decorator we'll return - def decor(f): - for label in labels: - setattr(f,label,True) - return f - - # Apply the user's docstring, or autogenerate a basic one - if ds is None: - ds = "Labels a test as %r." % label - decor.__doc__ = ds - - return decor - - -# Inspired by numpy's skipif, but uses the full apply_wrapper utility to -# preserve function metadata better and allows the skip condition to be a -# callable. 
-def skipif(skip_condition, msg=None): - ''' Make function raise SkipTest exception if skip_condition is true - - Parameters - ---------- - - skip_condition : bool or callable - Flag to determine whether to skip test. If the condition is a - callable, it is used at runtime to dynamically make the decision. This - is useful for tests that may require costly imports, to delay the cost - until the test suite is actually executed. - msg : string - Message to give on raising a SkipTest exception. - - Returns - ------- - decorator : function - Decorator, which, when applied to a function, causes SkipTest - to be raised when the skip_condition was True, and the function - to be called normally otherwise. - - Notes - ----- - You will see from the code that we had to further decorate the - decorator with the nose.tools.make_decorator function in order to - transmit function name, and various other metadata. - ''' - - def skip_decorator(f): - # Local import to avoid a hard nose dependency and only incur the - # import time overhead at actual test-time. - import nose - - # Allow for both boolean or callable skip conditions. - if callable(skip_condition): - skip_val = skip_condition - else: - skip_val = lambda : skip_condition - - def get_msg(func,msg=None): - """Skip message with information about function being skipped.""" - if msg is None: out = 'Test skipped due to test condition.' - else: out = msg - return "Skipping test: %s. %s" % (func.__name__,out) - - # We need to define *two* skippers because Python doesn't allow both - # return with value and yield inside the same function. - def skipper_func(*args, **kwargs): - """Skipper for normal test functions.""" - if skip_val(): - raise nose.SkipTest(get_msg(f,msg)) - else: - return f(*args, **kwargs) - - def skipper_gen(*args, **kwargs): - """Skipper for test generators.""" - if skip_val(): - raise nose.SkipTest(get_msg(f,msg)) - else: - for x in f(*args, **kwargs): - yield x - - # Choose the right skipper to use when building the actual generator. - if nose.util.isgenerator(f): - skipper = skipper_gen - else: - skipper = skipper_func - - return nose.tools.make_decorator(f)(skipper) - - return skip_decorator - -# A version with the condition set to true, common case just to attach a message -# to a skip decorator -def skip(msg=None): - """Decorator factory - mark a test function for skipping from test suite. - - Parameters - ---------- - msg : string - Optional message to be added. - - Returns - ------- - decorator : function - Decorator, which, when applied to a function, causes SkipTest - to be raised, with the optional message added. - """ - if msg and not isinstance(msg, str): - raise ValueError('invalid object passed to `@skip` decorator, did you ' - 'meant `@skip()` with brackets ?') - return skipif(True, msg) - - -def onlyif(condition, msg): - """The reverse from skipif, see skipif for details.""" - - if callable(condition): - skip_condition = lambda : not condition() - else: - skip_condition = lambda : not condition - - return skipif(skip_condition, msg) - -#----------------------------------------------------------------------------- -# Utility functions for decorators -def module_not_available(module): - """Can module be imported? Returns true if module does NOT import. - - This is used to make a decorator to skip tests that require module to be - available, but delay the 'import numpy' to test execution time. 
- """ - try: - mod = import_module(module) - mod_not_avail = False - except ImportError: - mod_not_avail = True - - return mod_not_avail - - -def decorated_dummy(dec, name): - """Return a dummy function decorated with dec, with the given name. - - Examples - -------- - import IPython.testing.decorators as dec - setup = dec.decorated_dummy(dec.skip_if_no_x11, __name__) - """ - warnings.warn("The function `decorated_dummy` is deprecated since IPython 4.0", - DeprecationWarning, stacklevel=2) - dummy = lambda: None - dummy.__name__ = name - return dec(dummy) - -#----------------------------------------------------------------------------- -# Decorators for public use - -# Decorators to skip certain tests on specific platforms. -skip_win32 = skipif(sys.platform == 'win32', - "This test does not run under Windows") -skip_linux = skipif(sys.platform.startswith('linux'), - "This test does not run under Linux") -skip_osx = skipif(sys.platform == 'darwin',"This test does not run under OS X") - - -# Decorators to skip tests if not on specific platforms. -skip_if_not_win32 = skipif(sys.platform != 'win32', - "This test only runs under Windows") -skip_if_not_linux = skipif(not sys.platform.startswith('linux'), - "This test only runs under Linux") -skip_if_not_osx = skipif(sys.platform != 'darwin', - "This test only runs under OSX") - - -_x11_skip_cond = (sys.platform not in ('darwin', 'win32') and - os.environ.get('DISPLAY', '') == '') -_x11_skip_msg = "Skipped under *nix when X11/XOrg not available" - -skip_if_no_x11 = skipif(_x11_skip_cond, _x11_skip_msg) - - -# Decorators to skip certain tests on specific platform/python combinations -skip_win32_py38 = skipif(sys.version_info > (3,8) and os.name == 'nt') - - -# not a decorator itself, returns a dummy function to be used as setup -def skip_file_no_x11(name): - warnings.warn("The function `skip_file_no_x11` is deprecated since IPython 4.0", - DeprecationWarning, stacklevel=2) - return decorated_dummy(skip_if_no_x11, name) if _x11_skip_cond else None - -# Other skip decorators - -# generic skip without module -skip_without = lambda mod: skipif(module_not_available(mod), "This test requires %s" % mod) - -skipif_not_numpy = skip_without('numpy') - -skipif_not_matplotlib = skip_without('matplotlib') - -skipif_not_sympy = skip_without('sympy') - -skip_known_failure = knownfailureif(True,'This test is known to fail') - -# A null 'decorator', useful to make more readable code that needs to pick -# between different decorators based on OS or other conditions -null_deco = lambda f: f - -# Some tests only run where we can use unicode paths. Note that we can't just -# check os.path.supports_unicode_filenames, which is always False on Linux. -try: - f = tempfile.NamedTemporaryFile(prefix=u"tmp€") -except UnicodeEncodeError: - unicode_paths = False -else: - unicode_paths = True - f.close() - -onlyif_unicode_paths = onlyif(unicode_paths, ("This test is only applicable " - "where we can use unicode in filenames.")) - - -def onlyif_cmds_exist(*commands): - """ - Decorator to skip test when at least one of `commands` is not found. - """ - for cmd in commands: - if not shutil.which(cmd): - return skip("This test runs only if command '{0}' " - "is installed".format(cmd)) - return null_deco - -def onlyif_any_cmd_exists(*commands): - """ - Decorator to skip test unless at least one of `commands` is found. 
- """ - warnings.warn("The function `onlyif_any_cmd_exists` is deprecated since IPython 4.0", - DeprecationWarning, stacklevel=2) - for cmd in commands: - if shutil.which(cmd): - return null_deco - return skip("This test runs only if one of the commands {0} " - "is installed".format(commands)) +# -*- coding: utf-8 -*- +"""Decorators for labeling test objects. + +Decorators that merely return a modified version of the original function +object are straightforward. Decorators that return a new function object need +to use nose.tools.make_decorator(original_function)(decorator) in returning the +decorator, in order to preserve metadata such as function name, setup and +teardown functions and so on - see nose.tools for more information. + +This module provides a set of useful decorators meant to be ready to use in +your own tests. See the bottom of the file for the ready-made ones, and if you +find yourself writing a new one that may be of generic use, add it here. + +Included decorators: + + +Lightweight testing that remains unittest-compatible. + +- An @as_unittest decorator can be used to tag any normal parameter-less + function as a unittest TestCase. Then, both nose and normal unittest will + recognize it as such. This will make it easier to migrate away from Nose if + we ever need/want to while maintaining very lightweight tests. + +NOTE: This file contains IPython-specific decorators. Using the machinery in +IPython.external.decorators, we import either numpy.testing.decorators if numpy is +available, OR use equivalent code in IPython.external._decorators, which +we've copied verbatim from numpy. + +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import os +import shutil +import sys +import tempfile +import unittest +import warnings +from importlib import import_module + +from decorator import decorator + +# Expose the unittest-driven decorators +from .ipunittest import ipdoctest, ipdocstring + +# Grab the numpy-specific decorators which we keep in a file that we +# occasionally update from upstream: decorators.py is a copy of +# numpy.testing.decorators, we expose all of it here. +from IPython.external.decorators import knownfailureif + +#----------------------------------------------------------------------------- +# Classes and functions +#----------------------------------------------------------------------------- + +# Simple example of the basic idea +def as_unittest(func): + """Decorator to make a simple function into a normal test via unittest.""" + class Tester(unittest.TestCase): + def test(self): + func() + + Tester.__name__ = func.__name__ + + return Tester + +# Utility functions + +def apply_wrapper(wrapper, func): + """Apply a wrapper to a function for decoration. + + This mixes Michele Simionato's decorator tool with nose's make_decorator, + to apply a wrapper in a decorator so that all nose attributes, as well as + function signature and other properties, survive the decoration cleanly. + This will ensure that wrapped functions can still be well introspected via + IPython, for example. + """ + warnings.warn("The function `apply_wrapper` is deprecated since IPython 4.0", + DeprecationWarning, stacklevel=2) + import nose.tools + + return decorator(wrapper,nose.tools.make_decorator(func)(wrapper)) + + +def make_label_dec(label, ds=None): + """Factory function to create a decorator that applies one or more labels. 
+ + Parameters + ---------- + label : string or sequence + One or more labels that will be applied by the decorator to the functions + it decorates. Labels are attributes of the decorated function with their + value set to True. + + ds : string + An optional docstring for the resulting decorator. If not given, a + default docstring is auto-generated. + + Returns + ------- + A decorator. + + Examples + -------- + + A simple labeling decorator: + + >>> slow = make_label_dec('slow') + >>> slow.__doc__ + "Labels a test as 'slow'." + + And one that uses multiple labels and a custom docstring: + + >>> rare = make_label_dec(['slow','hard'], + ... "Mix labels 'slow' and 'hard' for rare tests.") + >>> rare.__doc__ + "Mix labels 'slow' and 'hard' for rare tests." + + Now, let's test using this one: + >>> @rare + ... def f(): pass + ... + >>> + >>> f.slow + True + >>> f.hard + True + """ + + warnings.warn("The function `make_label_dec` is deprecated since IPython 4.0", + DeprecationWarning, stacklevel=2) + if isinstance(label, str): + labels = [label] + else: + labels = label + + # Validate that the given label(s) are OK for use in setattr() by doing a + # dry run on a dummy function. + tmp = lambda : None + for label in labels: + setattr(tmp,label,True) + + # This is the actual decorator we'll return + def decor(f): + for label in labels: + setattr(f,label,True) + return f + + # Apply the user's docstring, or autogenerate a basic one + if ds is None: + ds = "Labels a test as %r." % label + decor.__doc__ = ds + + return decor + + +# Inspired by numpy's skipif, but uses the full apply_wrapper utility to +# preserve function metadata better and allows the skip condition to be a +# callable. +def skipif(skip_condition, msg=None): + ''' Make function raise SkipTest exception if skip_condition is true + + Parameters + ---------- + + skip_condition : bool or callable + Flag to determine whether to skip test. If the condition is a + callable, it is used at runtime to dynamically make the decision. This + is useful for tests that may require costly imports, to delay the cost + until the test suite is actually executed. + msg : string + Message to give on raising a SkipTest exception. + + Returns + ------- + decorator : function + Decorator, which, when applied to a function, causes SkipTest + to be raised when the skip_condition was True, and the function + to be called normally otherwise. + + Notes + ----- + You will see from the code that we had to further decorate the + decorator with the nose.tools.make_decorator function in order to + transmit function name, and various other metadata. + ''' + + def skip_decorator(f): + # Local import to avoid a hard nose dependency and only incur the + # import time overhead at actual test-time. + import nose + + # Allow for both boolean or callable skip conditions. + if callable(skip_condition): + skip_val = skip_condition + else: + skip_val = lambda : skip_condition + + def get_msg(func,msg=None): + """Skip message with information about function being skipped.""" + if msg is None: out = 'Test skipped due to test condition.' + else: out = msg + return "Skipping test: %s. %s" % (func.__name__,out) + + # We need to define *two* skippers because Python doesn't allow both + # return with value and yield inside the same function. 
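As the skipif docstring above notes, the condition may be a callable, so an expensive import is only paid when the suite actually runs rather than at collection time. A minimal sketch of that form; the helper and test names are illustrative::

    from IPython.testing.decorators import skipif

    def _no_matplotlib():
        # Evaluated when the test runs, so importing matplotlib is deferred.
        try:
            import matplotlib  # noqa: F401
        except ImportError:
            return True
        return False

    @skipif(_no_matplotlib, "matplotlib is required for this test")
    def test_inline_figures():
        ...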
+ def skipper_func(*args, **kwargs): + """Skipper for normal test functions.""" + if skip_val(): + raise nose.SkipTest(get_msg(f,msg)) + else: + return f(*args, **kwargs) + + def skipper_gen(*args, **kwargs): + """Skipper for test generators.""" + if skip_val(): + raise nose.SkipTest(get_msg(f,msg)) + else: + for x in f(*args, **kwargs): + yield x + + # Choose the right skipper to use when building the actual generator. + if nose.util.isgenerator(f): + skipper = skipper_gen + else: + skipper = skipper_func + + return nose.tools.make_decorator(f)(skipper) + + return skip_decorator + +# A version with the condition set to true, common case just to attach a message +# to a skip decorator +def skip(msg=None): + """Decorator factory - mark a test function for skipping from test suite. + + Parameters + ---------- + msg : string + Optional message to be added. + + Returns + ------- + decorator : function + Decorator, which, when applied to a function, causes SkipTest + to be raised, with the optional message added. + """ + if msg and not isinstance(msg, str): + raise ValueError('invalid object passed to `@skip` decorator, did you ' + 'meant `@skip()` with brackets ?') + return skipif(True, msg) + + +def onlyif(condition, msg): + """The reverse from skipif, see skipif for details.""" + + if callable(condition): + skip_condition = lambda : not condition() + else: + skip_condition = lambda : not condition + + return skipif(skip_condition, msg) + +#----------------------------------------------------------------------------- +# Utility functions for decorators +def module_not_available(module): + """Can module be imported? Returns true if module does NOT import. + + This is used to make a decorator to skip tests that require module to be + available, but delay the 'import numpy' to test execution time. + """ + try: + mod = import_module(module) + mod_not_avail = False + except ImportError: + mod_not_avail = True + + return mod_not_avail + + +def decorated_dummy(dec, name): + """Return a dummy function decorated with dec, with the given name. + + Examples + -------- + import IPython.testing.decorators as dec + setup = dec.decorated_dummy(dec.skip_if_no_x11, __name__) + """ + warnings.warn("The function `decorated_dummy` is deprecated since IPython 4.0", + DeprecationWarning, stacklevel=2) + dummy = lambda: None + dummy.__name__ = name + return dec(dummy) + +#----------------------------------------------------------------------------- +# Decorators for public use + +# Decorators to skip certain tests on specific platforms. +skip_win32 = skipif(sys.platform == 'win32', + "This test does not run under Windows") +skip_linux = skipif(sys.platform.startswith('linux'), + "This test does not run under Linux") +skip_osx = skipif(sys.platform == 'darwin',"This test does not run under OS X") + + +# Decorators to skip tests if not on specific platforms. 
+skip_if_not_win32 = skipif(sys.platform != 'win32', + "This test only runs under Windows") +skip_if_not_linux = skipif(not sys.platform.startswith('linux'), + "This test only runs under Linux") +skip_if_not_osx = skipif(sys.platform != 'darwin', + "This test only runs under OSX") + + +_x11_skip_cond = (sys.platform not in ('darwin', 'win32') and + os.environ.get('DISPLAY', '') == '') +_x11_skip_msg = "Skipped under *nix when X11/XOrg not available" + +skip_if_no_x11 = skipif(_x11_skip_cond, _x11_skip_msg) + + +# Decorators to skip certain tests on specific platform/python combinations +skip_win32_py38 = skipif(sys.version_info > (3,8) and os.name == 'nt') + + +# not a decorator itself, returns a dummy function to be used as setup +def skip_file_no_x11(name): + warnings.warn("The function `skip_file_no_x11` is deprecated since IPython 4.0", + DeprecationWarning, stacklevel=2) + return decorated_dummy(skip_if_no_x11, name) if _x11_skip_cond else None + +# Other skip decorators + +# generic skip without module +skip_without = lambda mod: skipif(module_not_available(mod), "This test requires %s" % mod) + +skipif_not_numpy = skip_without('numpy') + +skipif_not_matplotlib = skip_without('matplotlib') + +skipif_not_sympy = skip_without('sympy') + +skip_known_failure = knownfailureif(True,'This test is known to fail') + +# A null 'decorator', useful to make more readable code that needs to pick +# between different decorators based on OS or other conditions +null_deco = lambda f: f + +# Some tests only run where we can use unicode paths. Note that we can't just +# check os.path.supports_unicode_filenames, which is always False on Linux. +try: + f = tempfile.NamedTemporaryFile(prefix=u"tmp€") +except UnicodeEncodeError: + unicode_paths = False +else: + unicode_paths = True + f.close() + +onlyif_unicode_paths = onlyif(unicode_paths, ("This test is only applicable " + "where we can use unicode in filenames.")) + + +def onlyif_cmds_exist(*commands): + """ + Decorator to skip test when at least one of `commands` is not found. + """ + for cmd in commands: + if not shutil.which(cmd): + return skip("This test runs only if command '{0}' " + "is installed".format(cmd)) + return null_deco + +def onlyif_any_cmd_exists(*commands): + """ + Decorator to skip test unless at least one of `commands` is found. + """ + warnings.warn("The function `onlyif_any_cmd_exists` is deprecated since IPython 4.0", + DeprecationWarning, stacklevel=2) + for cmd in commands: + if shutil.which(cmd): + return null_deco + return skip("This test runs only if one of the commands {0} " + "is installed".format(commands)) diff --git a/contrib/python/ipython/py3/IPython/testing/globalipapp.py b/contrib/python/ipython/py3/IPython/testing/globalipapp.py index b3128281dd5..c435f9d0873 100644 --- a/contrib/python/ipython/py3/IPython/testing/globalipapp.py +++ b/contrib/python/ipython/py3/IPython/testing/globalipapp.py @@ -1,137 +1,137 @@ -"""Global IPython app to support test running. - -We must start our own ipython object and heavily muck with it so that all the -modifications IPython makes to system behavior don't send the doctest machinery -into a fit. This code should be considered a gross hack, but it gets the job -done. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import builtins as builtin_mod -import sys -import types -import warnings - -from . 
import tools - -from IPython.core import page -from IPython.utils import io -from IPython.terminal.interactiveshell import TerminalInteractiveShell - - -class StreamProxy(io.IOStream): - """Proxy for sys.stdout/err. This will request the stream *at call time* - allowing for nose's Capture plugin's redirection of sys.stdout/err. - - Parameters - ---------- - name : str - The name of the stream. This will be requested anew at every call - """ - - def __init__(self, name): - warnings.warn("StreamProxy is deprecated and unused as of IPython 5", DeprecationWarning, - stacklevel=2, - ) - self.name=name - - @property - def stream(self): - return getattr(sys, self.name) - - def flush(self): - self.stream.flush() - - -def get_ipython(): - # This will get replaced by the real thing once we start IPython below - return start_ipython() - - -# A couple of methods to override those in the running IPython to interact -# better with doctest (doctest captures on raw stdout, so we need to direct -# various types of output there otherwise it will miss them). - -def xsys(self, cmd): - """Replace the default system call with a capturing one for doctest. - """ - # We use getoutput, but we need to strip it because pexpect captures - # the trailing newline differently from commands.getoutput - print(self.getoutput(cmd, split=False, depth=1).rstrip(), end='', file=sys.stdout) - sys.stdout.flush() - - -def _showtraceback(self, etype, evalue, stb): - """Print the traceback purely on stdout for doctest to capture it. - """ - print(self.InteractiveTB.stb2text(stb), file=sys.stdout) - - -def start_ipython(): - """Start a global IPython shell, which we need for IPython-specific syntax. - """ - global get_ipython - - # This function should only ever run once! - if hasattr(start_ipython, 'already_called'): - return - start_ipython.already_called = True - - # Store certain global objects that IPython modifies - _displayhook = sys.displayhook - _excepthook = sys.excepthook - _main = sys.modules.get('__main__') - - # Create custom argv and namespaces for our IPython to be test-friendly - config = tools.default_config() - config.TerminalInteractiveShell.simple_prompt = True - - # Create and initialize our test-friendly IPython instance. - shell = TerminalInteractiveShell.instance(config=config, - ) - - # A few more tweaks needed for playing nicely with doctests... - - # remove history file - shell.tempfiles.append(config.HistoryManager.hist_file) - - # These traps are normally only active for interactive use, set them - # permanently since we'll be mocking interactive sessions. - shell.builtin_trap.activate() - - # Modify the IPython system call with one that uses getoutput, so that we - # can capture subcommands and print them to Python's stdout, otherwise the - # doctest machinery would miss them. - shell.system = types.MethodType(xsys, shell) - - shell._showtraceback = types.MethodType(_showtraceback, shell) - - # IPython is ready, now clean up some global state... - - # Deactivate the various python system hooks added by ipython for - # interactive convenience so we don't confuse the doctest system - sys.modules['__main__'] = _main - sys.displayhook = _displayhook - sys.excepthook = _excepthook - - # So that ipython magics and aliases can be doctested (they work by making - # a call into a global _ip object). Also make the top-level get_ipython - # now return this without recursively calling here again. 
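Outside the doctest machinery, the same global shell can also be driven directly: the first call to start_ipython() builds the test-friendly shell and returns it, and later code reaches it through the get_ipython/_ip names installed into builtins. A small sketch; the executed cell is illustrative::

    from IPython.testing import globalipapp

    ip = globalipapp.start_ipython()   # first call creates and returns the shell
    ip.run_cell("x = 2 + 2")
    assert ip.user_ns["x"] == 4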
- _ip = shell - get_ipython = _ip.get_ipython - builtin_mod._ip = _ip - builtin_mod.ip = _ip - builtin_mod.get_ipython = get_ipython - - # Override paging, so we don't require user interaction during the tests. - def nopage(strng, start=0, screen_lines=0, pager_cmd=None): - if isinstance(strng, dict): - strng = strng.get('text/plain', '') - print(strng) - - page.orig_page = page.pager_page - page.pager_page = nopage - - return _ip +"""Global IPython app to support test running. + +We must start our own ipython object and heavily muck with it so that all the +modifications IPython makes to system behavior don't send the doctest machinery +into a fit. This code should be considered a gross hack, but it gets the job +done. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import builtins as builtin_mod +import sys +import types +import warnings + +from . import tools + +from IPython.core import page +from IPython.utils import io +from IPython.terminal.interactiveshell import TerminalInteractiveShell + + +class StreamProxy(io.IOStream): + """Proxy for sys.stdout/err. This will request the stream *at call time* + allowing for nose's Capture plugin's redirection of sys.stdout/err. + + Parameters + ---------- + name : str + The name of the stream. This will be requested anew at every call + """ + + def __init__(self, name): + warnings.warn("StreamProxy is deprecated and unused as of IPython 5", DeprecationWarning, + stacklevel=2, + ) + self.name=name + + @property + def stream(self): + return getattr(sys, self.name) + + def flush(self): + self.stream.flush() + + +def get_ipython(): + # This will get replaced by the real thing once we start IPython below + return start_ipython() + + +# A couple of methods to override those in the running IPython to interact +# better with doctest (doctest captures on raw stdout, so we need to direct +# various types of output there otherwise it will miss them). + +def xsys(self, cmd): + """Replace the default system call with a capturing one for doctest. + """ + # We use getoutput, but we need to strip it because pexpect captures + # the trailing newline differently from commands.getoutput + print(self.getoutput(cmd, split=False, depth=1).rstrip(), end='', file=sys.stdout) + sys.stdout.flush() + + +def _showtraceback(self, etype, evalue, stb): + """Print the traceback purely on stdout for doctest to capture it. + """ + print(self.InteractiveTB.stb2text(stb), file=sys.stdout) + + +def start_ipython(): + """Start a global IPython shell, which we need for IPython-specific syntax. + """ + global get_ipython + + # This function should only ever run once! + if hasattr(start_ipython, 'already_called'): + return + start_ipython.already_called = True + + # Store certain global objects that IPython modifies + _displayhook = sys.displayhook + _excepthook = sys.excepthook + _main = sys.modules.get('__main__') + + # Create custom argv and namespaces for our IPython to be test-friendly + config = tools.default_config() + config.TerminalInteractiveShell.simple_prompt = True + + # Create and initialize our test-friendly IPython instance. + shell = TerminalInteractiveShell.instance(config=config, + ) + + # A few more tweaks needed for playing nicely with doctests... + + # remove history file + shell.tempfiles.append(config.HistoryManager.hist_file) + + # These traps are normally only active for interactive use, set them + # permanently since we'll be mocking interactive sessions. 
+ shell.builtin_trap.activate() + + # Modify the IPython system call with one that uses getoutput, so that we + # can capture subcommands and print them to Python's stdout, otherwise the + # doctest machinery would miss them. + shell.system = types.MethodType(xsys, shell) + + shell._showtraceback = types.MethodType(_showtraceback, shell) + + # IPython is ready, now clean up some global state... + + # Deactivate the various python system hooks added by ipython for + # interactive convenience so we don't confuse the doctest system + sys.modules['__main__'] = _main + sys.displayhook = _displayhook + sys.excepthook = _excepthook + + # So that ipython magics and aliases can be doctested (they work by making + # a call into a global _ip object). Also make the top-level get_ipython + # now return this without recursively calling here again. + _ip = shell + get_ipython = _ip.get_ipython + builtin_mod._ip = _ip + builtin_mod.ip = _ip + builtin_mod.get_ipython = get_ipython + + # Override paging, so we don't require user interaction during the tests. + def nopage(strng, start=0, screen_lines=0, pager_cmd=None): + if isinstance(strng, dict): + strng = strng.get('text/plain', '') + print(strng) + + page.orig_page = page.pager_page + page.pager_page = nopage + + return _ip diff --git a/contrib/python/ipython/py3/IPython/testing/iptest.py b/contrib/python/ipython/py3/IPython/testing/iptest.py index b50e0058b02..8efcc97201c 100644 --- a/contrib/python/ipython/py3/IPython/testing/iptest.py +++ b/contrib/python/ipython/py3/IPython/testing/iptest.py @@ -1,460 +1,460 @@ -# -*- coding: utf-8 -*- -"""IPython Test Suite Runner. - -This module provides a main entry point to a user script to test IPython -itself from the command line. There are two ways of running this script: - -1. With the syntax `iptest all`. This runs our entire test suite by - calling this script (with different arguments) recursively. This - causes modules and package to be tested in different processes, using nose - or trial where appropriate. -2. With the regular nose syntax, like `iptest IPython -- -vvs`. In this form - the script simply calls nose, but with special command line flags and - plugins loaded. Options after `--` are passed to nose. - -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. 
- - -import glob -from io import BytesIO -import os -import os.path as path -import sys -from threading import Thread, Lock, Event -import warnings - -import nose.plugins.builtin -from nose.plugins.xunit import Xunit -from nose import SkipTest -from nose.core import TestProgram -from nose.plugins import Plugin -from nose.util import safe_str - -from IPython import version_info -from IPython.utils.py3compat import decode -from IPython.utils.importstring import import_item -from IPython.testing.plugin.ipdoctest import IPythonDoctest -from IPython.external.decorators import KnownFailure, knownfailureif - -pjoin = path.join - - -# Enable printing all warnings raise by IPython's modules -warnings.filterwarnings('ignore', message='.*Matplotlib is building the font cache.*', category=UserWarning, module='.*') -warnings.filterwarnings('error', message='.*', category=ResourceWarning, module='.*') -warnings.filterwarnings('error', message=".*{'config': True}.*", category=DeprecationWarning, module='IPy.*') -warnings.filterwarnings('default', message='.*', category=Warning, module='IPy.*') - -warnings.filterwarnings('error', message='.*apply_wrapper.*', category=DeprecationWarning, module='.*') -warnings.filterwarnings('error', message='.*make_label_dec', category=DeprecationWarning, module='.*') -warnings.filterwarnings('error', message='.*decorated_dummy.*', category=DeprecationWarning, module='.*') -warnings.filterwarnings('error', message='.*skip_file_no_x11.*', category=DeprecationWarning, module='.*') -warnings.filterwarnings('error', message='.*onlyif_any_cmd_exists.*', category=DeprecationWarning, module='.*') - -warnings.filterwarnings('error', message='.*disable_gui.*', category=DeprecationWarning, module='.*') - -warnings.filterwarnings('error', message='.*ExceptionColors global is deprecated.*', category=DeprecationWarning, module='.*') - -# Jedi older versions -warnings.filterwarnings( - 'error', message='.*elementwise != comparison failed and.*', category=FutureWarning, module='.*') - -if version_info < (6,): - # nose.tools renames all things from `camelCase` to `snake_case` which raise an - # warning with the runner they also import from standard import library. (as of Dec 2015) - # Ignore, let's revisit that in a couple of years for IPython 6. - warnings.filterwarnings( - 'ignore', message='.*Please use assertEqual instead', category=Warning, module='IPython.*') - -if version_info < (8,): - warnings.filterwarnings('ignore', message='.*Completer.complete.*', - category=PendingDeprecationWarning, module='.*') -else: - warnings.warn( - 'Completer.complete was pending deprecation and should be changed to Deprecated', FutureWarning) - - - -# ------------------------------------------------------------------------------ -# Monkeypatch Xunit to count known failures as skipped. -# ------------------------------------------------------------------------------ -def monkeypatch_xunit(): - try: - dec.knownfailureif(True)(lambda: None)() - except Exception as e: - KnownFailureTest = type(e) - - def addError(self, test, err, capt=None): - if issubclass(err[0], KnownFailureTest): - err = (SkipTest,) + err[1:] - return self.orig_addError(test, err, capt) - - Xunit.orig_addError = Xunit.addError - Xunit.addError = addError - -#----------------------------------------------------------------------------- -# Check which dependencies are installed and greater than minimum version. 
-#----------------------------------------------------------------------------- -def extract_version(mod): - return mod.__version__ - -def test_for(item, min_version=None, callback=extract_version): - """Test to see if item is importable, and optionally check against a minimum - version. - - If min_version is given, the default behavior is to check against the - `__version__` attribute of the item, but specifying `callback` allows you to - extract the value you are interested in. e.g:: - - In [1]: import sys - - In [2]: from IPython.testing.iptest import test_for - - In [3]: test_for('sys', (2,6), callback=lambda sys: sys.version_info) - Out[3]: True - - """ - try: - check = import_item(item) - except (ImportError, RuntimeError): - # GTK reports Runtime error if it can't be initialized even if it's - # importable. - return False - else: - if min_version: - if callback: - # extra processing step to get version to compare - check = callback(check) - - return check >= min_version - else: - return True - -# Global dict where we can store information on what we have and what we don't -# have available at test run time -have = {'matplotlib': test_for('matplotlib'), - 'pygments': test_for('pygments'), - 'sqlite3': test_for('sqlite3')} - -#----------------------------------------------------------------------------- -# Test suite definitions -#----------------------------------------------------------------------------- - -test_group_names = ['core', - 'extensions', 'lib', 'terminal', 'testing', 'utils', - ] - -class TestSection(object): - def __init__(self, name, includes): - self.name = name - self.includes = includes - self.excludes = [] - self.dependencies = [] - self.enabled = True - - def exclude(self, module): - if not module.startswith('IPython'): - module = self.includes[0] + "." + module - self.excludes.append(module.replace('.', os.sep)) - - def requires(self, *packages): - self.dependencies.extend(packages) - - @property - def will_run(self): - return self.enabled and all(have[p] for p in self.dependencies) - -# Name -> (include, exclude, dependencies_met) -test_sections = {n:TestSection(n, ['IPython.%s' % n]) for n in test_group_names} - - -# Exclusions and dependencies -# --------------------------- - -# core: -sec = test_sections['core'] -if not have['sqlite3']: - sec.exclude('tests.test_history') - sec.exclude('history') -if not have['matplotlib']: - sec.exclude('pylabtools'), - sec.exclude('tests.test_pylabtools') - -# lib: -sec = test_sections['lib'] -sec.exclude('kernel') -if not have['pygments']: - sec.exclude('tests.test_lexers') -# We do this unconditionally, so that the test suite doesn't import -# gtk, changing the default encoding and masking some unicode bugs. -sec.exclude('inputhookgtk') -# We also do this unconditionally, because wx can interfere with Unix signals. -# There are currently no tests for it anyway. -sec.exclude('inputhookwx') -# Testing inputhook will need a lot of thought, to figure out -# how to have tests that don't lock up with the gui event -# loops in the picture -sec.exclude('inputhook') - -# testing: -sec = test_sections['testing'] -# These have to be skipped on win32 because they use echo, rm, cd, etc. 
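A new test group is wired up by registering a TestSection and declaring what it depends on; dependencies are looked up in the `have` dict when will_run is evaluated. A sketch with a hypothetical section and package name::

    from IPython.testing.iptest import TestSection, test_for, test_sections, have

    have['pandas'] = test_for('pandas')            # record an optional dependency

    sec = TestSection('mysection', ['IPython.mysection'])   # hypothetical package
    sec.requires('pandas')           # the whole section is skipped without pandas
    sec.exclude('tests.test_slow')   # stored as a path-style exclusion pattern
    test_sections['mysection'] = sec
    print(sec.will_run)              # False unless pandas imported successfully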
-# See ticket https://github.com/ipython/ipython/issues/87 -if sys.platform == 'win32': - sec.exclude('plugin.test_exampleip') - sec.exclude('plugin.dtexample') - -# don't run jupyter_console tests found via shim -test_sections['terminal'].exclude('console') - -# extensions: -sec = test_sections['extensions'] -# This is deprecated in favour of rpy2 -sec.exclude('rmagic') -# autoreload does some strange stuff, so move it to its own test section -sec.exclude('autoreload') -sec.exclude('tests.test_autoreload') -test_sections['autoreload'] = TestSection('autoreload', - ['IPython.extensions.autoreload', 'IPython.extensions.tests.test_autoreload']) -test_group_names.append('autoreload') - - -#----------------------------------------------------------------------------- -# Functions and classes -#----------------------------------------------------------------------------- - -def check_exclusions_exist(): - from IPython.paths import get_ipython_package_dir - from warnings import warn - parent = os.path.dirname(get_ipython_package_dir()) - for sec in test_sections: - for pattern in sec.exclusions: - fullpath = pjoin(parent, pattern) - if not os.path.exists(fullpath) and not glob.glob(fullpath + '.*'): - warn("Excluding nonexistent file: %r" % pattern) - - -class ExclusionPlugin(Plugin): - """A nose plugin to effect our exclusions of files and directories. - """ - name = 'exclusions' - score = 3000 # Should come before any other plugins - - def __init__(self, exclude_patterns=None): - """ - Parameters - ---------- - - exclude_patterns : sequence of strings, optional - Filenames containing these patterns (as raw strings, not as regular - expressions) are excluded from the tests. - """ - self.exclude_patterns = exclude_patterns or [] - super(ExclusionPlugin, self).__init__() - - def options(self, parser, env=os.environ): - Plugin.options(self, parser, env) - - def configure(self, options, config): - Plugin.configure(self, options, config) - # Override nose trying to disable plugin. - self.enabled = True - - def wantFile(self, filename): - """Return whether the given filename should be scanned for tests. - """ - if any(pat in filename for pat in self.exclude_patterns): - return False - return None - - def wantDirectory(self, directory): - """Return whether the given directory should be scanned for tests. 
- """ - if any(pat in directory for pat in self.exclude_patterns): - return False - return None - - -class StreamCapturer(Thread): - daemon = True # Don't hang if main thread crashes - started = False - def __init__(self, echo=False): - super(StreamCapturer, self).__init__() - self.echo = echo - self.streams = [] - self.buffer = BytesIO() - self.readfd, self.writefd = os.pipe() - self.buffer_lock = Lock() - self.stop = Event() - - def run(self): - self.started = True - - while not self.stop.is_set(): - chunk = os.read(self.readfd, 1024) - - with self.buffer_lock: - self.buffer.write(chunk) - if self.echo: - sys.stdout.write(decode(chunk)) - - os.close(self.readfd) - os.close(self.writefd) - - def reset_buffer(self): - with self.buffer_lock: - self.buffer.truncate(0) - self.buffer.seek(0) - - def get_buffer(self): - with self.buffer_lock: - return self.buffer.getvalue() - - def ensure_started(self): - if not self.started: - self.start() - - def halt(self): - """Safely stop the thread.""" - if not self.started: - return - - self.stop.set() - os.write(self.writefd, b'\0') # Ensure we're not locked in a read() - self.join() - -class SubprocessStreamCapturePlugin(Plugin): - name='subprocstreams' - def __init__(self): - Plugin.__init__(self) - self.stream_capturer = StreamCapturer() - self.destination = os.environ.get('IPTEST_SUBPROC_STREAMS', 'capture') - # This is ugly, but distant parts of the test machinery need to be able - # to redirect streams, so we make the object globally accessible. - nose.iptest_stdstreams_fileno = self.get_write_fileno - - def get_write_fileno(self): - if self.destination == 'capture': - self.stream_capturer.ensure_started() - return self.stream_capturer.writefd - elif self.destination == 'discard': - return os.open(os.devnull, os.O_WRONLY) - else: - return sys.__stdout__.fileno() - - def configure(self, options, config): - Plugin.configure(self, options, config) - # Override nose trying to disable plugin. - if self.destination == 'capture': - self.enabled = True - - def startTest(self, test): - # Reset log capture - self.stream_capturer.reset_buffer() - - def formatFailure(self, test, err): - # Show output - ec, ev, tb = err - captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace') - if captured.strip(): - ev = safe_str(ev) - out = [ev, '>> begin captured subprocess output <<', - captured, - '>> end captured subprocess output <<'] - return ec, '\n'.join(out), tb - - return err - - formatError = formatFailure - - def finalize(self, result): - self.stream_capturer.halt() - - -def run_iptest(): - """Run the IPython test suite using nose. - - This function is called when this script is **not** called with the form - `iptest all`. It simply calls nose with appropriate command line flags - and accepts all of the standard nose arguments. 
- """ - # Apply our monkeypatch to Xunit - if '--with-xunit' in sys.argv and not hasattr(Xunit, 'orig_addError'): - monkeypatch_xunit() - - arg1 = sys.argv[1] - if arg1.startswith('IPython/'): - if arg1.endswith('.py'): - arg1 = arg1[:-3] - sys.argv[1] = arg1.replace('/', '.') - - arg1 = sys.argv[1] - if arg1 in test_sections: - section = test_sections[arg1] - sys.argv[1:2] = section.includes - elif arg1.startswith('IPython.') and arg1[8:] in test_sections: - section = test_sections[arg1[8:]] - sys.argv[1:2] = section.includes - else: - section = TestSection(arg1, includes=[arg1]) - - - argv = sys.argv + [ '--detailed-errors', # extra info in tracebacks - # We add --exe because of setuptools' imbecility (it - # blindly does chmod +x on ALL files). Nose does the - # right thing and it tries to avoid executables, - # setuptools unfortunately forces our hand here. This - # has been discussed on the distutils list and the - # setuptools devs refuse to fix this problem! - '--exe', - ] - if '-a' not in argv and '-A' not in argv: - argv = argv + ['-a', '!crash'] - - if nose.__version__ >= '0.11': - # I don't fully understand why we need this one, but depending on what - # directory the test suite is run from, if we don't give it, 0 tests - # get run. Specifically, if the test suite is run from the source dir - # with an argument (like 'iptest.py IPython.core', 0 tests are run, - # even if the same call done in this directory works fine). It appears - # that if the requested package is in the current dir, nose bails early - # by default. Since it's otherwise harmless, leave it in by default - # for nose >= 0.11, though unfortunately nose 0.10 doesn't support it. - argv.append('--traverse-namespace') - - plugins = [ ExclusionPlugin(section.excludes), KnownFailure(), - SubprocessStreamCapturePlugin() ] - - # we still have some vestigial doctests in core - if (section.name.startswith(('core', 'IPython.core', 'IPython.utils'))): - plugins.append(IPythonDoctest()) - argv.extend([ - '--with-ipdoctest', - '--ipdoctest-tests', - '--ipdoctest-extension=txt', - ]) - - - # Use working directory set by parent process (see iptestcontroller) - if 'IPTEST_WORKING_DIR' in os.environ: - os.chdir(os.environ['IPTEST_WORKING_DIR']) - - # We need a global ipython running in this process, but the special - # in-process group spawns its own IPython kernels, so for *that* group we - # must avoid also opening the global one (otherwise there's a conflict of - # singletons). Ultimately the solution to this problem is to refactor our - # assumptions about what needs to be a singleton and what doesn't (app - # objects should, individual shells shouldn't). But for now, this - # workaround allows the test suite for the inprocess module to complete. - if 'kernel.inprocess' not in section.name: - from IPython.testing import globalipapp - globalipapp.start_ipython() - - # Now nose can run - TestProgram(argv=argv, addplugins=plugins) - -if __name__ == '__main__': - run_iptest() +# -*- coding: utf-8 -*- +"""IPython Test Suite Runner. + +This module provides a main entry point to a user script to test IPython +itself from the command line. There are two ways of running this script: + +1. With the syntax `iptest all`. This runs our entire test suite by + calling this script (with different arguments) recursively. This + causes modules and package to be tested in different processes, using nose + or trial where appropriate. +2. With the regular nose syntax, like `iptest IPython -- -vvs`. 
In this form + the script simply calls nose, but with special command line flags and + plugins loaded. Options after `--` are passed to nose. + +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + + +import glob +from io import BytesIO +import os +import os.path as path +import sys +from threading import Thread, Lock, Event +import warnings + +import nose.plugins.builtin +from nose.plugins.xunit import Xunit +from nose import SkipTest +from nose.core import TestProgram +from nose.plugins import Plugin +from nose.util import safe_str + +from IPython import version_info +from IPython.utils.py3compat import decode +from IPython.utils.importstring import import_item +from IPython.testing.plugin.ipdoctest import IPythonDoctest +from IPython.external.decorators import KnownFailure, knownfailureif + +pjoin = path.join + + +# Enable printing all warnings raise by IPython's modules +warnings.filterwarnings('ignore', message='.*Matplotlib is building the font cache.*', category=UserWarning, module='.*') +warnings.filterwarnings('error', message='.*', category=ResourceWarning, module='.*') +warnings.filterwarnings('error', message=".*{'config': True}.*", category=DeprecationWarning, module='IPy.*') +warnings.filterwarnings('default', message='.*', category=Warning, module='IPy.*') + +warnings.filterwarnings('error', message='.*apply_wrapper.*', category=DeprecationWarning, module='.*') +warnings.filterwarnings('error', message='.*make_label_dec', category=DeprecationWarning, module='.*') +warnings.filterwarnings('error', message='.*decorated_dummy.*', category=DeprecationWarning, module='.*') +warnings.filterwarnings('error', message='.*skip_file_no_x11.*', category=DeprecationWarning, module='.*') +warnings.filterwarnings('error', message='.*onlyif_any_cmd_exists.*', category=DeprecationWarning, module='.*') + +warnings.filterwarnings('error', message='.*disable_gui.*', category=DeprecationWarning, module='.*') + +warnings.filterwarnings('error', message='.*ExceptionColors global is deprecated.*', category=DeprecationWarning, module='.*') + +# Jedi older versions +warnings.filterwarnings( + 'error', message='.*elementwise != comparison failed and.*', category=FutureWarning, module='.*') + +if version_info < (6,): + # nose.tools renames all things from `camelCase` to `snake_case` which raise an + # warning with the runner they also import from standard import library. (as of Dec 2015) + # Ignore, let's revisit that in a couple of years for IPython 6. + warnings.filterwarnings( + 'ignore', message='.*Please use assertEqual instead', category=Warning, module='IPython.*') + +if version_info < (8,): + warnings.filterwarnings('ignore', message='.*Completer.complete.*', + category=PendingDeprecationWarning, module='.*') +else: + warnings.warn( + 'Completer.complete was pending deprecation and should be changed to Deprecated', FutureWarning) + + + +# ------------------------------------------------------------------------------ +# Monkeypatch Xunit to count known failures as skipped. 
+# ------------------------------------------------------------------------------ +def monkeypatch_xunit(): + try: + dec.knownfailureif(True)(lambda: None)() + except Exception as e: + KnownFailureTest = type(e) + + def addError(self, test, err, capt=None): + if issubclass(err[0], KnownFailureTest): + err = (SkipTest,) + err[1:] + return self.orig_addError(test, err, capt) + + Xunit.orig_addError = Xunit.addError + Xunit.addError = addError + +#----------------------------------------------------------------------------- +# Check which dependencies are installed and greater than minimum version. +#----------------------------------------------------------------------------- +def extract_version(mod): + return mod.__version__ + +def test_for(item, min_version=None, callback=extract_version): + """Test to see if item is importable, and optionally check against a minimum + version. + + If min_version is given, the default behavior is to check against the + `__version__` attribute of the item, but specifying `callback` allows you to + extract the value you are interested in. e.g:: + + In [1]: import sys + + In [2]: from IPython.testing.iptest import test_for + + In [3]: test_for('sys', (2,6), callback=lambda sys: sys.version_info) + Out[3]: True + + """ + try: + check = import_item(item) + except (ImportError, RuntimeError): + # GTK reports Runtime error if it can't be initialized even if it's + # importable. + return False + else: + if min_version: + if callback: + # extra processing step to get version to compare + check = callback(check) + + return check >= min_version + else: + return True + +# Global dict where we can store information on what we have and what we don't +# have available at test run time +have = {'matplotlib': test_for('matplotlib'), + 'pygments': test_for('pygments'), + 'sqlite3': test_for('sqlite3')} + +#----------------------------------------------------------------------------- +# Test suite definitions +#----------------------------------------------------------------------------- + +test_group_names = ['core', + 'extensions', 'lib', 'terminal', 'testing', 'utils', + ] + +class TestSection(object): + def __init__(self, name, includes): + self.name = name + self.includes = includes + self.excludes = [] + self.dependencies = [] + self.enabled = True + + def exclude(self, module): + if not module.startswith('IPython'): + module = self.includes[0] + "." + module + self.excludes.append(module.replace('.', os.sep)) + + def requires(self, *packages): + self.dependencies.extend(packages) + + @property + def will_run(self): + return self.enabled and all(have[p] for p in self.dependencies) + +# Name -> (include, exclude, dependencies_met) +test_sections = {n:TestSection(n, ['IPython.%s' % n]) for n in test_group_names} + + +# Exclusions and dependencies +# --------------------------- + +# core: +sec = test_sections['core'] +if not have['sqlite3']: + sec.exclude('tests.test_history') + sec.exclude('history') +if not have['matplotlib']: + sec.exclude('pylabtools'), + sec.exclude('tests.test_pylabtools') + +# lib: +sec = test_sections['lib'] +sec.exclude('kernel') +if not have['pygments']: + sec.exclude('tests.test_lexers') +# We do this unconditionally, so that the test suite doesn't import +# gtk, changing the default encoding and masking some unicode bugs. +sec.exclude('inputhookgtk') +# We also do this unconditionally, because wx can interfere with Unix signals. +# There are currently no tests for it anyway. 
+sec.exclude('inputhookwx') +# Testing inputhook will need a lot of thought, to figure out +# how to have tests that don't lock up with the gui event +# loops in the picture +sec.exclude('inputhook') + +# testing: +sec = test_sections['testing'] +# These have to be skipped on win32 because they use echo, rm, cd, etc. +# See ticket https://github.com/ipython/ipython/issues/87 +if sys.platform == 'win32': + sec.exclude('plugin.test_exampleip') + sec.exclude('plugin.dtexample') + +# don't run jupyter_console tests found via shim +test_sections['terminal'].exclude('console') + +# extensions: +sec = test_sections['extensions'] +# This is deprecated in favour of rpy2 +sec.exclude('rmagic') +# autoreload does some strange stuff, so move it to its own test section +sec.exclude('autoreload') +sec.exclude('tests.test_autoreload') +test_sections['autoreload'] = TestSection('autoreload', + ['IPython.extensions.autoreload', 'IPython.extensions.tests.test_autoreload']) +test_group_names.append('autoreload') + + +#----------------------------------------------------------------------------- +# Functions and classes +#----------------------------------------------------------------------------- + +def check_exclusions_exist(): + from IPython.paths import get_ipython_package_dir + from warnings import warn + parent = os.path.dirname(get_ipython_package_dir()) + for sec in test_sections: + for pattern in sec.exclusions: + fullpath = pjoin(parent, pattern) + if not os.path.exists(fullpath) and not glob.glob(fullpath + '.*'): + warn("Excluding nonexistent file: %r" % pattern) + + +class ExclusionPlugin(Plugin): + """A nose plugin to effect our exclusions of files and directories. + """ + name = 'exclusions' + score = 3000 # Should come before any other plugins + + def __init__(self, exclude_patterns=None): + """ + Parameters + ---------- + + exclude_patterns : sequence of strings, optional + Filenames containing these patterns (as raw strings, not as regular + expressions) are excluded from the tests. + """ + self.exclude_patterns = exclude_patterns or [] + super(ExclusionPlugin, self).__init__() + + def options(self, parser, env=os.environ): + Plugin.options(self, parser, env) + + def configure(self, options, config): + Plugin.configure(self, options, config) + # Override nose trying to disable plugin. + self.enabled = True + + def wantFile(self, filename): + """Return whether the given filename should be scanned for tests. + """ + if any(pat in filename for pat in self.exclude_patterns): + return False + return None + + def wantDirectory(self, directory): + """Return whether the given directory should be scanned for tests. 
+ """ + if any(pat in directory for pat in self.exclude_patterns): + return False + return None + + +class StreamCapturer(Thread): + daemon = True # Don't hang if main thread crashes + started = False + def __init__(self, echo=False): + super(StreamCapturer, self).__init__() + self.echo = echo + self.streams = [] + self.buffer = BytesIO() + self.readfd, self.writefd = os.pipe() + self.buffer_lock = Lock() + self.stop = Event() + + def run(self): + self.started = True + + while not self.stop.is_set(): + chunk = os.read(self.readfd, 1024) + + with self.buffer_lock: + self.buffer.write(chunk) + if self.echo: + sys.stdout.write(decode(chunk)) + + os.close(self.readfd) + os.close(self.writefd) + + def reset_buffer(self): + with self.buffer_lock: + self.buffer.truncate(0) + self.buffer.seek(0) + + def get_buffer(self): + with self.buffer_lock: + return self.buffer.getvalue() + + def ensure_started(self): + if not self.started: + self.start() + + def halt(self): + """Safely stop the thread.""" + if not self.started: + return + + self.stop.set() + os.write(self.writefd, b'\0') # Ensure we're not locked in a read() + self.join() + +class SubprocessStreamCapturePlugin(Plugin): + name='subprocstreams' + def __init__(self): + Plugin.__init__(self) + self.stream_capturer = StreamCapturer() + self.destination = os.environ.get('IPTEST_SUBPROC_STREAMS', 'capture') + # This is ugly, but distant parts of the test machinery need to be able + # to redirect streams, so we make the object globally accessible. + nose.iptest_stdstreams_fileno = self.get_write_fileno + + def get_write_fileno(self): + if self.destination == 'capture': + self.stream_capturer.ensure_started() + return self.stream_capturer.writefd + elif self.destination == 'discard': + return os.open(os.devnull, os.O_WRONLY) + else: + return sys.__stdout__.fileno() + + def configure(self, options, config): + Plugin.configure(self, options, config) + # Override nose trying to disable plugin. + if self.destination == 'capture': + self.enabled = True + + def startTest(self, test): + # Reset log capture + self.stream_capturer.reset_buffer() + + def formatFailure(self, test, err): + # Show output + ec, ev, tb = err + captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace') + if captured.strip(): + ev = safe_str(ev) + out = [ev, '>> begin captured subprocess output <<', + captured, + '>> end captured subprocess output <<'] + return ec, '\n'.join(out), tb + + return err + + formatError = formatFailure + + def finalize(self, result): + self.stream_capturer.halt() + + +def run_iptest(): + """Run the IPython test suite using nose. + + This function is called when this script is **not** called with the form + `iptest all`. It simply calls nose with appropriate command line flags + and accepts all of the standard nose arguments. 
+ """ + # Apply our monkeypatch to Xunit + if '--with-xunit' in sys.argv and not hasattr(Xunit, 'orig_addError'): + monkeypatch_xunit() + + arg1 = sys.argv[1] + if arg1.startswith('IPython/'): + if arg1.endswith('.py'): + arg1 = arg1[:-3] + sys.argv[1] = arg1.replace('/', '.') + + arg1 = sys.argv[1] + if arg1 in test_sections: + section = test_sections[arg1] + sys.argv[1:2] = section.includes + elif arg1.startswith('IPython.') and arg1[8:] in test_sections: + section = test_sections[arg1[8:]] + sys.argv[1:2] = section.includes + else: + section = TestSection(arg1, includes=[arg1]) + + + argv = sys.argv + [ '--detailed-errors', # extra info in tracebacks + # We add --exe because of setuptools' imbecility (it + # blindly does chmod +x on ALL files). Nose does the + # right thing and it tries to avoid executables, + # setuptools unfortunately forces our hand here. This + # has been discussed on the distutils list and the + # setuptools devs refuse to fix this problem! + '--exe', + ] + if '-a' not in argv and '-A' not in argv: + argv = argv + ['-a', '!crash'] + + if nose.__version__ >= '0.11': + # I don't fully understand why we need this one, but depending on what + # directory the test suite is run from, if we don't give it, 0 tests + # get run. Specifically, if the test suite is run from the source dir + # with an argument (like 'iptest.py IPython.core', 0 tests are run, + # even if the same call done in this directory works fine). It appears + # that if the requested package is in the current dir, nose bails early + # by default. Since it's otherwise harmless, leave it in by default + # for nose >= 0.11, though unfortunately nose 0.10 doesn't support it. + argv.append('--traverse-namespace') + + plugins = [ ExclusionPlugin(section.excludes), KnownFailure(), + SubprocessStreamCapturePlugin() ] + + # we still have some vestigial doctests in core + if (section.name.startswith(('core', 'IPython.core', 'IPython.utils'))): + plugins.append(IPythonDoctest()) + argv.extend([ + '--with-ipdoctest', + '--ipdoctest-tests', + '--ipdoctest-extension=txt', + ]) + + + # Use working directory set by parent process (see iptestcontroller) + if 'IPTEST_WORKING_DIR' in os.environ: + os.chdir(os.environ['IPTEST_WORKING_DIR']) + + # We need a global ipython running in this process, but the special + # in-process group spawns its own IPython kernels, so for *that* group we + # must avoid also opening the global one (otherwise there's a conflict of + # singletons). Ultimately the solution to this problem is to refactor our + # assumptions about what needs to be a singleton and what doesn't (app + # objects should, individual shells shouldn't). But for now, this + # workaround allows the test suite for the inprocess module to complete. + if 'kernel.inprocess' not in section.name: + from IPython.testing import globalipapp + globalipapp.start_ipython() + + # Now nose can run + TestProgram(argv=argv, addplugins=plugins) + +if __name__ == '__main__': + run_iptest() diff --git a/contrib/python/ipython/py3/IPython/testing/iptestcontroller.py b/contrib/python/ipython/py3/IPython/testing/iptestcontroller.py index 0ca4647e5fd..b522f60f376 100644 --- a/contrib/python/ipython/py3/IPython/testing/iptestcontroller.py +++ b/contrib/python/ipython/py3/IPython/testing/iptestcontroller.py @@ -1,491 +1,491 @@ -# -*- coding: utf-8 -*- -"""IPython Test Process Controller - -This module runs one or more subprocesses which will actually run the IPython -test suite. - -""" - -# Copyright (c) IPython Development Team. 
-# Distributed under the terms of the Modified BSD License. - - -import argparse -import multiprocessing.pool -import os -import stat -import shutil -import signal -import sys -import subprocess -import time - -from .iptest import ( - have, test_group_names as py_test_group_names, test_sections, StreamCapturer, -) -from IPython.utils.path import compress_user -from IPython.utils.py3compat import decode -from IPython.utils.sysinfo import get_sys_info -from IPython.utils.tempdir import TemporaryDirectory - -class TestController: - """Run tests in a subprocess - """ - #: str, IPython test suite to be executed. - section = None - #: list, command line arguments to be executed - cmd = None - #: dict, extra environment variables to set for the subprocess - env = None - #: list, TemporaryDirectory instances to clear up when the process finishes - dirs = None - #: subprocess.Popen instance - process = None - #: str, process stdout+stderr - stdout = None - - def __init__(self): - self.cmd = [] - self.env = {} - self.dirs = [] - - def setUp(self): - """Create temporary directories etc. - - This is only called when we know the test group will be run. Things - created here may be cleaned up by self.cleanup(). - """ - pass - - def launch(self, buffer_output=False, capture_output=False): - # print('*** ENV:', self.env) # dbg - # print('*** CMD:', self.cmd) # dbg - env = os.environ.copy() - env.update(self.env) - if buffer_output: - capture_output = True - self.stdout_capturer = c = StreamCapturer(echo=not buffer_output) - c.start() - stdout = c.writefd if capture_output else None - stderr = subprocess.STDOUT if capture_output else None - self.process = subprocess.Popen(self.cmd, stdout=stdout, - stderr=stderr, env=env) - - def wait(self): - self.process.wait() - self.stdout_capturer.halt() - self.stdout = self.stdout_capturer.get_buffer() - return self.process.returncode - - def cleanup_process(self): - """Cleanup on exit by killing any leftover processes.""" - subp = self.process - if subp is None or (subp.poll() is not None): - return # Process doesn't exist, or is already dead. - - try: - print('Cleaning up stale PID: %d' % subp.pid) - subp.kill() - except: # (OSError, WindowsError) ? - # This is just a best effort, if we fail or the process was - # really gone, ignore it. - pass - else: - for i in range(10): - if subp.poll() is None: - time.sleep(0.1) - else: - break - - if subp.poll() is None: - # The process did not die... - print('... failed. 
Manual cleanup may be required.') - - def cleanup(self): - "Kill process if it's still alive, and clean up temporary directories" - self.cleanup_process() - for td in self.dirs: - td.cleanup() - - __del__ = cleanup - - -class PyTestController(TestController): - """Run Python tests using IPython.testing.iptest""" - #: str, Python command to execute in subprocess - pycmd = None - - def __init__(self, section, options): - """Create new test runner.""" - TestController.__init__(self) - self.section = section - # pycmd is put into cmd[2] in PyTestController.launch() - self.cmd = [sys.executable, '-c', None, section] - self.pycmd = "from IPython.testing.iptest import run_iptest; run_iptest()" - self.options = options - - def setup(self): - ipydir = TemporaryDirectory() - self.dirs.append(ipydir) - self.env['IPYTHONDIR'] = ipydir.name - self.workingdir = workingdir = TemporaryDirectory() - self.dirs.append(workingdir) - self.env['IPTEST_WORKING_DIR'] = workingdir.name - # This means we won't get odd effects from our own matplotlib config - self.env['MPLCONFIGDIR'] = workingdir.name - # For security reasons (http://bugs.python.org/issue16202), use - # a temporary directory to which other users have no access. - self.env['TMPDIR'] = workingdir.name - - # Add a non-accessible directory to PATH (see gh-7053) - noaccess = os.path.join(self.workingdir.name, "_no_access_") - self.noaccess = noaccess - os.mkdir(noaccess, 0) - - PATH = os.environ.get('PATH', '') - if PATH: - PATH = noaccess + os.pathsep + PATH - else: - PATH = noaccess - self.env['PATH'] = PATH - - # From options: - if self.options.xunit: - self.add_xunit() - if self.options.coverage: - self.add_coverage() - self.env['IPTEST_SUBPROC_STREAMS'] = self.options.subproc_streams - self.cmd.extend(self.options.extra_args) - - def cleanup(self): - """ - Make the non-accessible directory created in setup() accessible - again, otherwise deleting the workingdir will fail. - """ - os.chmod(self.noaccess, stat.S_IRWXU) - TestController.cleanup(self) - - @property - def will_run(self): - try: - return test_sections[self.section].will_run - except KeyError: - return True - - def add_xunit(self): - xunit_file = os.path.abspath(self.section + '.xunit.xml') - self.cmd.extend(['--with-xunit', '--xunit-file', xunit_file]) - - def add_coverage(self): - try: - sources = test_sections[self.section].includes - except KeyError: - sources = ['IPython'] - - coverage_rc = ("[run]\n" - "data_file = {data_file}\n" - "source =\n" - " {source}\n" - ).format(data_file=os.path.abspath('.coverage.'+self.section), - source="\n ".join(sources)) - config_file = os.path.join(self.workingdir.name, '.coveragerc') - with open(config_file, 'w') as f: - f.write(coverage_rc) - - self.env['COVERAGE_PROCESS_START'] = config_file - self.pycmd = "import coverage; coverage.process_startup(); " + self.pycmd - - def launch(self, buffer_output=False): - self.cmd[2] = self.pycmd - super(PyTestController, self).launch(buffer_output=buffer_output) - - -def prepare_controllers(options): - """Returns two lists of TestController instances, those to run, and those - not to run.""" - testgroups = options.testgroups - if not testgroups: - testgroups = py_test_group_names - - controllers = [PyTestController(name, options) for name in testgroups] - - to_run = [c for c in controllers if c.will_run] - not_run = [c for c in controllers if not c.will_run] - return to_run, not_run - -def do_run(controller, buffer_output=True): - """Setup and run a test controller. 
- - If buffer_output is True, no output is displayed, to avoid it appearing - interleaved. In this case, the caller is responsible for displaying test - output on failure. - - Returns - ------- - controller : TestController - The same controller as passed in, as a convenience for using map() type - APIs. - exitcode : int - The exit code of the test subprocess. Non-zero indicates failure. - """ - try: - try: - controller.setup() - controller.launch(buffer_output=buffer_output) - except Exception: - import traceback - traceback.print_exc() - return controller, 1 # signal failure - - exitcode = controller.wait() - return controller, exitcode - - except KeyboardInterrupt: - return controller, -signal.SIGINT - finally: - controller.cleanup() - -def report(): - """Return a string with a summary report of test-related variables.""" - inf = get_sys_info() - out = [] - def _add(name, value): - out.append((name, value)) - - _add('IPython version', inf['ipython_version']) - _add('IPython commit', "{} ({})".format(inf['commit_hash'], inf['commit_source'])) - _add('IPython package', compress_user(inf['ipython_path'])) - _add('Python version', inf['sys_version'].replace('\n','')) - _add('sys.executable', compress_user(inf['sys_executable'])) - _add('Platform', inf['platform']) - - width = max(len(n) for (n,v) in out) - out = ["{:<{width}}: {}\n".format(n, v, width=width) for (n,v) in out] - - avail = [] - not_avail = [] - - for k, is_avail in have.items(): - if is_avail: - avail.append(k) - else: - not_avail.append(k) - - if avail: - out.append('\nTools and libraries available at test time:\n') - avail.sort() - out.append(' ' + ' '.join(avail)+'\n') - - if not_avail: - out.append('\nTools and libraries NOT available at test time:\n') - not_avail.sort() - out.append(' ' + ' '.join(not_avail)+'\n') - - return ''.join(out) - -def run_iptestall(options): - """Run the entire IPython test suite by calling nose and trial. - - This function constructs :class:`IPTester` instances for all IPython - modules and package and then runs each of them. This causes the modules - and packages of IPython to be tested each in their own subprocess using - nose. - - Parameters - ---------- - - All parameters are passed as attributes of the options object. - - testgroups : list of str - Run only these sections of the test suite. If empty, run all the available - sections. - - fast : int or None - Run the test suite in parallel, using n simultaneous processes. If None - is passed, one process is used per CPU core. Default 1 (i.e. sequential) - - inc_slow : bool - Include slow tests. By default, these tests aren't run. - - url : unicode - Address:port to use when running the JS tests. - - xunit : bool - Produce Xunit XML output. This is written to multiple foo.xunit.xml files. - - coverage : bool or str - Measure code coverage from tests. True will store the raw coverage data, - or pass 'html' or 'xml' to get reports. - - extra_args : list - Extra arguments to pass to the test subprocesses, e.g. '-v' - """ - to_run, not_run = prepare_controllers(options) - - def justify(ltext, rtext, width=70, fill='-'): - ltext += ' ' - rtext = (' ' + rtext).rjust(width - len(ltext), fill) - return ltext + rtext - - # Run all test runners, tracking execution time - failed = [] - t_start = time.time() - - print() - if options.fast == 1: - # This actually means sequential, i.e. 
with 1 job - for controller in to_run: - print('Test group:', controller.section) - sys.stdout.flush() # Show in correct order when output is piped - controller, res = do_run(controller, buffer_output=False) - if res: - failed.append(controller) - if res == -signal.SIGINT: - print("Interrupted") - break - print() - - else: - # Run tests concurrently - try: - pool = multiprocessing.pool.ThreadPool(options.fast) - for (controller, res) in pool.imap_unordered(do_run, to_run): - res_string = 'OK' if res == 0 else 'FAILED' - print(justify('Test group: ' + controller.section, res_string)) - if res: - print(decode(controller.stdout)) - failed.append(controller) - if res == -signal.SIGINT: - print("Interrupted") - break - except KeyboardInterrupt: - return - - for controller in not_run: - print(justify('Test group: ' + controller.section, 'NOT RUN')) - - t_end = time.time() - t_tests = t_end - t_start - nrunners = len(to_run) - nfail = len(failed) - # summarize results - print('_'*70) - print('Test suite completed for system with the following information:') - print(report()) - took = "Took %.3fs." % t_tests - print('Status: ', end='') - if not failed: - print('OK (%d test groups).' % nrunners, took) - else: - # If anything went wrong, point out what command to rerun manually to - # see the actual errors and individual summary - failed_sections = [c.section for c in failed] - print('ERROR - {} out of {} test groups failed ({}).'.format(nfail, - nrunners, ', '.join(failed_sections)), took) - print() - print('You may wish to rerun these, with:') - print(' iptest', *failed_sections) - print() - - if options.coverage: - from coverage import coverage, CoverageException - cov = coverage(data_file='.coverage') - cov.combine() - cov.save() - - # Coverage HTML report - if options.coverage == 'html': - html_dir = 'ipy_htmlcov' - shutil.rmtree(html_dir, ignore_errors=True) - print("Writing HTML coverage report to %s/ ... " % html_dir, end="") - sys.stdout.flush() - - # Custom HTML reporter to clean up module names. - from coverage.html import HtmlReporter - class CustomHtmlReporter(HtmlReporter): - def find_code_units(self, morfs): - super(CustomHtmlReporter, self).find_code_units(morfs) - for cu in self.code_units: - nameparts = cu.name.split(os.sep) - if 'IPython' not in nameparts: - continue - ix = nameparts.index('IPython') - cu.name = '.'.join(nameparts[ix:]) - - # Reimplement the html_report method with our custom reporter - cov.get_data() - cov.config.from_args(omit='*{0}tests{0}*'.format(os.sep), html_dir=html_dir, - html_title='IPython test coverage', - ) - reporter = CustomHtmlReporter(cov, cov.config) - reporter.report(None) - print('done.') - - # Coverage XML report - elif options.coverage == 'xml': - try: - cov.xml_report(outfile='ipy_coverage.xml') - except CoverageException as e: - print('Generating coverage report failed. Are you running javascript tests only?') - import traceback - traceback.print_exc() - - if failed: - # Ensure that our exit code indicates failure - sys.exit(1) - -argparser = argparse.ArgumentParser(description='Run IPython test suite') -argparser.add_argument('testgroups', nargs='*', - help='Run specified groups of tests. If omitted, run ' - 'all tests.') -argparser.add_argument('--all', action='store_true', - help='Include slow tests not run by default.') -argparser.add_argument('-j', '--fast', nargs='?', const=None, default=1, type=int, - help='Run test sections in parallel. 
This starts as many ' - 'processes as you have cores, or you can specify a number.') -argparser.add_argument('--xunit', action='store_true', - help='Produce Xunit XML results') -argparser.add_argument('--coverage', nargs='?', const=True, default=False, - help="Measure test coverage. Specify 'html' or " - "'xml' to get reports.") -argparser.add_argument('--subproc-streams', default='capture', - help="What to do with stdout/stderr from subprocesses. " - "'capture' (default), 'show' and 'discard' are the options.") - -def default_options(): - """Get an argparse Namespace object with the default arguments, to pass to - :func:`run_iptestall`. - """ - options = argparser.parse_args([]) - options.extra_args = [] - return options - -def main(): - # iptest doesn't work correctly if the working directory is the - # root of the IPython source tree. Tell the user to avoid - # frustration. - if os.path.exists(os.path.join(os.getcwd(), - 'IPython', 'testing', '__main__.py')): - print("Don't run iptest from the IPython source directory", - file=sys.stderr) - sys.exit(1) - # Arguments after -- should be passed through to nose. Argparse treats - # everything after -- as regular positional arguments, so we separate them - # first. - try: - ix = sys.argv.index('--') - except ValueError: - to_parse = sys.argv[1:] - extra_args = [] - else: - to_parse = sys.argv[1:ix] - extra_args = sys.argv[ix+1:] - - options = argparser.parse_args(to_parse) - options.extra_args = extra_args - - run_iptestall(options) - - -if __name__ == '__main__': - main() +# -*- coding: utf-8 -*- +"""IPython Test Process Controller + +This module runs one or more subprocesses which will actually run the IPython +test suite. + +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + + +import argparse +import multiprocessing.pool +import os +import stat +import shutil +import signal +import sys +import subprocess +import time + +from .iptest import ( + have, test_group_names as py_test_group_names, test_sections, StreamCapturer, +) +from IPython.utils.path import compress_user +from IPython.utils.py3compat import decode +from IPython.utils.sysinfo import get_sys_info +from IPython.utils.tempdir import TemporaryDirectory + +class TestController: + """Run tests in a subprocess + """ + #: str, IPython test suite to be executed. + section = None + #: list, command line arguments to be executed + cmd = None + #: dict, extra environment variables to set for the subprocess + env = None + #: list, TemporaryDirectory instances to clear up when the process finishes + dirs = None + #: subprocess.Popen instance + process = None + #: str, process stdout+stderr + stdout = None + + def __init__(self): + self.cmd = [] + self.env = {} + self.dirs = [] + + def setUp(self): + """Create temporary directories etc. + + This is only called when we know the test group will be run. Things + created here may be cleaned up by self.cleanup(). 
+ """ + pass + + def launch(self, buffer_output=False, capture_output=False): + # print('*** ENV:', self.env) # dbg + # print('*** CMD:', self.cmd) # dbg + env = os.environ.copy() + env.update(self.env) + if buffer_output: + capture_output = True + self.stdout_capturer = c = StreamCapturer(echo=not buffer_output) + c.start() + stdout = c.writefd if capture_output else None + stderr = subprocess.STDOUT if capture_output else None + self.process = subprocess.Popen(self.cmd, stdout=stdout, + stderr=stderr, env=env) + + def wait(self): + self.process.wait() + self.stdout_capturer.halt() + self.stdout = self.stdout_capturer.get_buffer() + return self.process.returncode + + def cleanup_process(self): + """Cleanup on exit by killing any leftover processes.""" + subp = self.process + if subp is None or (subp.poll() is not None): + return # Process doesn't exist, or is already dead. + + try: + print('Cleaning up stale PID: %d' % subp.pid) + subp.kill() + except: # (OSError, WindowsError) ? + # This is just a best effort, if we fail or the process was + # really gone, ignore it. + pass + else: + for i in range(10): + if subp.poll() is None: + time.sleep(0.1) + else: + break + + if subp.poll() is None: + # The process did not die... + print('... failed. Manual cleanup may be required.') + + def cleanup(self): + "Kill process if it's still alive, and clean up temporary directories" + self.cleanup_process() + for td in self.dirs: + td.cleanup() + + __del__ = cleanup + + +class PyTestController(TestController): + """Run Python tests using IPython.testing.iptest""" + #: str, Python command to execute in subprocess + pycmd = None + + def __init__(self, section, options): + """Create new test runner.""" + TestController.__init__(self) + self.section = section + # pycmd is put into cmd[2] in PyTestController.launch() + self.cmd = [sys.executable, '-c', None, section] + self.pycmd = "from IPython.testing.iptest import run_iptest; run_iptest()" + self.options = options + + def setup(self): + ipydir = TemporaryDirectory() + self.dirs.append(ipydir) + self.env['IPYTHONDIR'] = ipydir.name + self.workingdir = workingdir = TemporaryDirectory() + self.dirs.append(workingdir) + self.env['IPTEST_WORKING_DIR'] = workingdir.name + # This means we won't get odd effects from our own matplotlib config + self.env['MPLCONFIGDIR'] = workingdir.name + # For security reasons (http://bugs.python.org/issue16202), use + # a temporary directory to which other users have no access. + self.env['TMPDIR'] = workingdir.name + + # Add a non-accessible directory to PATH (see gh-7053) + noaccess = os.path.join(self.workingdir.name, "_no_access_") + self.noaccess = noaccess + os.mkdir(noaccess, 0) + + PATH = os.environ.get('PATH', '') + if PATH: + PATH = noaccess + os.pathsep + PATH + else: + PATH = noaccess + self.env['PATH'] = PATH + + # From options: + if self.options.xunit: + self.add_xunit() + if self.options.coverage: + self.add_coverage() + self.env['IPTEST_SUBPROC_STREAMS'] = self.options.subproc_streams + self.cmd.extend(self.options.extra_args) + + def cleanup(self): + """ + Make the non-accessible directory created in setup() accessible + again, otherwise deleting the workingdir will fail. 
+ """ + os.chmod(self.noaccess, stat.S_IRWXU) + TestController.cleanup(self) + + @property + def will_run(self): + try: + return test_sections[self.section].will_run + except KeyError: + return True + + def add_xunit(self): + xunit_file = os.path.abspath(self.section + '.xunit.xml') + self.cmd.extend(['--with-xunit', '--xunit-file', xunit_file]) + + def add_coverage(self): + try: + sources = test_sections[self.section].includes + except KeyError: + sources = ['IPython'] + + coverage_rc = ("[run]\n" + "data_file = {data_file}\n" + "source =\n" + " {source}\n" + ).format(data_file=os.path.abspath('.coverage.'+self.section), + source="\n ".join(sources)) + config_file = os.path.join(self.workingdir.name, '.coveragerc') + with open(config_file, 'w') as f: + f.write(coverage_rc) + + self.env['COVERAGE_PROCESS_START'] = config_file + self.pycmd = "import coverage; coverage.process_startup(); " + self.pycmd + + def launch(self, buffer_output=False): + self.cmd[2] = self.pycmd + super(PyTestController, self).launch(buffer_output=buffer_output) + + +def prepare_controllers(options): + """Returns two lists of TestController instances, those to run, and those + not to run.""" + testgroups = options.testgroups + if not testgroups: + testgroups = py_test_group_names + + controllers = [PyTestController(name, options) for name in testgroups] + + to_run = [c for c in controllers if c.will_run] + not_run = [c for c in controllers if not c.will_run] + return to_run, not_run + +def do_run(controller, buffer_output=True): + """Setup and run a test controller. + + If buffer_output is True, no output is displayed, to avoid it appearing + interleaved. In this case, the caller is responsible for displaying test + output on failure. + + Returns + ------- + controller : TestController + The same controller as passed in, as a convenience for using map() type + APIs. + exitcode : int + The exit code of the test subprocess. Non-zero indicates failure. + """ + try: + try: + controller.setup() + controller.launch(buffer_output=buffer_output) + except Exception: + import traceback + traceback.print_exc() + return controller, 1 # signal failure + + exitcode = controller.wait() + return controller, exitcode + + except KeyboardInterrupt: + return controller, -signal.SIGINT + finally: + controller.cleanup() + +def report(): + """Return a string with a summary report of test-related variables.""" + inf = get_sys_info() + out = [] + def _add(name, value): + out.append((name, value)) + + _add('IPython version', inf['ipython_version']) + _add('IPython commit', "{} ({})".format(inf['commit_hash'], inf['commit_source'])) + _add('IPython package', compress_user(inf['ipython_path'])) + _add('Python version', inf['sys_version'].replace('\n','')) + _add('sys.executable', compress_user(inf['sys_executable'])) + _add('Platform', inf['platform']) + + width = max(len(n) for (n,v) in out) + out = ["{:<{width}}: {}\n".format(n, v, width=width) for (n,v) in out] + + avail = [] + not_avail = [] + + for k, is_avail in have.items(): + if is_avail: + avail.append(k) + else: + not_avail.append(k) + + if avail: + out.append('\nTools and libraries available at test time:\n') + avail.sort() + out.append(' ' + ' '.join(avail)+'\n') + + if not_avail: + out.append('\nTools and libraries NOT available at test time:\n') + not_avail.sort() + out.append(' ' + ' '.join(not_avail)+'\n') + + return ''.join(out) + +def run_iptestall(options): + """Run the entire IPython test suite by calling nose and trial. 
+ + This function constructs :class:`IPTester` instances for all IPython + modules and package and then runs each of them. This causes the modules + and packages of IPython to be tested each in their own subprocess using + nose. + + Parameters + ---------- + + All parameters are passed as attributes of the options object. + + testgroups : list of str + Run only these sections of the test suite. If empty, run all the available + sections. + + fast : int or None + Run the test suite in parallel, using n simultaneous processes. If None + is passed, one process is used per CPU core. Default 1 (i.e. sequential) + + inc_slow : bool + Include slow tests. By default, these tests aren't run. + + url : unicode + Address:port to use when running the JS tests. + + xunit : bool + Produce Xunit XML output. This is written to multiple foo.xunit.xml files. + + coverage : bool or str + Measure code coverage from tests. True will store the raw coverage data, + or pass 'html' or 'xml' to get reports. + + extra_args : list + Extra arguments to pass to the test subprocesses, e.g. '-v' + """ + to_run, not_run = prepare_controllers(options) + + def justify(ltext, rtext, width=70, fill='-'): + ltext += ' ' + rtext = (' ' + rtext).rjust(width - len(ltext), fill) + return ltext + rtext + + # Run all test runners, tracking execution time + failed = [] + t_start = time.time() + + print() + if options.fast == 1: + # This actually means sequential, i.e. with 1 job + for controller in to_run: + print('Test group:', controller.section) + sys.stdout.flush() # Show in correct order when output is piped + controller, res = do_run(controller, buffer_output=False) + if res: + failed.append(controller) + if res == -signal.SIGINT: + print("Interrupted") + break + print() + + else: + # Run tests concurrently + try: + pool = multiprocessing.pool.ThreadPool(options.fast) + for (controller, res) in pool.imap_unordered(do_run, to_run): + res_string = 'OK' if res == 0 else 'FAILED' + print(justify('Test group: ' + controller.section, res_string)) + if res: + print(decode(controller.stdout)) + failed.append(controller) + if res == -signal.SIGINT: + print("Interrupted") + break + except KeyboardInterrupt: + return + + for controller in not_run: + print(justify('Test group: ' + controller.section, 'NOT RUN')) + + t_end = time.time() + t_tests = t_end - t_start + nrunners = len(to_run) + nfail = len(failed) + # summarize results + print('_'*70) + print('Test suite completed for system with the following information:') + print(report()) + took = "Took %.3fs." % t_tests + print('Status: ', end='') + if not failed: + print('OK (%d test groups).' % nrunners, took) + else: + # If anything went wrong, point out what command to rerun manually to + # see the actual errors and individual summary + failed_sections = [c.section for c in failed] + print('ERROR - {} out of {} test groups failed ({}).'.format(nfail, + nrunners, ', '.join(failed_sections)), took) + print() + print('You may wish to rerun these, with:') + print(' iptest', *failed_sections) + print() + + if options.coverage: + from coverage import coverage, CoverageException + cov = coverage(data_file='.coverage') + cov.combine() + cov.save() + + # Coverage HTML report + if options.coverage == 'html': + html_dir = 'ipy_htmlcov' + shutil.rmtree(html_dir, ignore_errors=True) + print("Writing HTML coverage report to %s/ ... " % html_dir, end="") + sys.stdout.flush() + + # Custom HTML reporter to clean up module names. 
+ from coverage.html import HtmlReporter + class CustomHtmlReporter(HtmlReporter): + def find_code_units(self, morfs): + super(CustomHtmlReporter, self).find_code_units(morfs) + for cu in self.code_units: + nameparts = cu.name.split(os.sep) + if 'IPython' not in nameparts: + continue + ix = nameparts.index('IPython') + cu.name = '.'.join(nameparts[ix:]) + + # Reimplement the html_report method with our custom reporter + cov.get_data() + cov.config.from_args(omit='*{0}tests{0}*'.format(os.sep), html_dir=html_dir, + html_title='IPython test coverage', + ) + reporter = CustomHtmlReporter(cov, cov.config) + reporter.report(None) + print('done.') + + # Coverage XML report + elif options.coverage == 'xml': + try: + cov.xml_report(outfile='ipy_coverage.xml') + except CoverageException as e: + print('Generating coverage report failed. Are you running javascript tests only?') + import traceback + traceback.print_exc() + + if failed: + # Ensure that our exit code indicates failure + sys.exit(1) + +argparser = argparse.ArgumentParser(description='Run IPython test suite') +argparser.add_argument('testgroups', nargs='*', + help='Run specified groups of tests. If omitted, run ' + 'all tests.') +argparser.add_argument('--all', action='store_true', + help='Include slow tests not run by default.') +argparser.add_argument('-j', '--fast', nargs='?', const=None, default=1, type=int, + help='Run test sections in parallel. This starts as many ' + 'processes as you have cores, or you can specify a number.') +argparser.add_argument('--xunit', action='store_true', + help='Produce Xunit XML results') +argparser.add_argument('--coverage', nargs='?', const=True, default=False, + help="Measure test coverage. Specify 'html' or " + "'xml' to get reports.") +argparser.add_argument('--subproc-streams', default='capture', + help="What to do with stdout/stderr from subprocesses. " + "'capture' (default), 'show' and 'discard' are the options.") + +def default_options(): + """Get an argparse Namespace object with the default arguments, to pass to + :func:`run_iptestall`. + """ + options = argparser.parse_args([]) + options.extra_args = [] + return options + +def main(): + # iptest doesn't work correctly if the working directory is the + # root of the IPython source tree. Tell the user to avoid + # frustration. + if os.path.exists(os.path.join(os.getcwd(), + 'IPython', 'testing', '__main__.py')): + print("Don't run iptest from the IPython source directory", + file=sys.stderr) + sys.exit(1) + # Arguments after -- should be passed through to nose. Argparse treats + # everything after -- as regular positional arguments, so we separate them + # first. + try: + ix = sys.argv.index('--') + except ValueError: + to_parse = sys.argv[1:] + extra_args = [] + else: + to_parse = sys.argv[1:ix] + extra_args = sys.argv[ix+1:] + + options = argparser.parse_args(to_parse) + options.extra_args = extra_args + + run_iptestall(options) + + +if __name__ == '__main__': + main() diff --git a/contrib/python/ipython/py3/IPython/testing/ipunittest.py b/contrib/python/ipython/py3/IPython/testing/ipunittest.py index ab12201d2a1..5a940a5fe91 100644 --- a/contrib/python/ipython/py3/IPython/testing/ipunittest.py +++ b/contrib/python/ipython/py3/IPython/testing/ipunittest.py @@ -1,178 +1,178 @@ -"""Experimental code for cleaner support of IPython syntax with unittest. - -In IPython up until 0.10, we've used very hacked up nose machinery for running -tests with IPython special syntax, and this has proved to be extremely slow. 
-This module provides decorators to try a different approach, stemming from a -conversation Brian and I (FP) had about this problem Sept/09. - -The goal is to be able to easily write simple functions that can be seen by -unittest as tests, and ultimately for these to support doctests with full -IPython syntax. Nose already offers this based on naming conventions and our -hackish plugins, but we are seeking to move away from nose dependencies if -possible. - -This module follows a different approach, based on decorators. - -- A decorator called @ipdoctest can mark any function as having a docstring - that should be viewed as a doctest, but after syntax conversion. - -Authors -------- - -- Fernando Perez <Fernando.Perez@berkeley.edu> -""" - - -#----------------------------------------------------------------------------- -# Copyright (C) 2009-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -# Stdlib -import re -import unittest -from doctest import DocTestFinder, DocTestRunner, TestResults -from IPython.terminal.interactiveshell import InteractiveShell - -#----------------------------------------------------------------------------- -# Classes and functions -#----------------------------------------------------------------------------- - -def count_failures(runner): - """Count number of failures in a doctest runner. - - Code modeled after the summarize() method in doctest. - """ - return [TestResults(f, t) for f, t in runner._name2ft.values() if f > 0 ] - - -class IPython2PythonConverter(object): - """Convert IPython 'syntax' to valid Python. - - Eventually this code may grow to be the full IPython syntax conversion - implementation, but for now it only does prompt conversion.""" - - def __init__(self): - self.rps1 = re.compile(r'In\ \[\d+\]: ') - self.rps2 = re.compile(r'\ \ \ \.\.\.+: ') - self.rout = re.compile(r'Out\[\d+\]: \s*?\n?') - self.pyps1 = '>>> ' - self.pyps2 = '... ' - self.rpyps1 = re.compile (r'(\s*%s)(.*)$' % self.pyps1) - self.rpyps2 = re.compile (r'(\s*%s)(.*)$' % self.pyps2) - - def __call__(self, ds): - """Convert IPython prompts to python ones in a string.""" - from . import globalipapp - - pyps1 = '>>> ' - pyps2 = '... ' - pyout = '' - - dnew = ds - dnew = self.rps1.sub(pyps1, dnew) - dnew = self.rps2.sub(pyps2, dnew) - dnew = self.rout.sub(pyout, dnew) - ip = InteractiveShell.instance() - - # Convert input IPython source into valid Python. - out = [] - newline = out.append - for line in dnew.splitlines(): - - mps1 = self.rpyps1.match(line) - if mps1 is not None: - prompt, text = mps1.groups() - newline(prompt+ip.prefilter(text, False)) - continue - - mps2 = self.rpyps2.match(line) - if mps2 is not None: - prompt, text = mps2.groups() - newline(prompt+ip.prefilter(text, True)) - continue - - newline(line) - newline('') # ensure a closing newline, needed by doctest - #print "PYSRC:", '\n'.join(out) # dbg - return '\n'.join(out) - - #return dnew - - -class Doc2UnitTester(object): - """Class whose instances act as a decorator for docstring testing. 
- - In practice we're only likely to need one instance ever, made below (though - no attempt is made at turning it into a singleton, there is no need for - that). - """ - def __init__(self, verbose=False): - """New decorator. - - Parameters - ---------- - - verbose : boolean, optional (False) - Passed to the doctest finder and runner to control verbosity. - """ - self.verbose = verbose - # We can reuse the same finder for all instances - self.finder = DocTestFinder(verbose=verbose, recurse=False) - - def __call__(self, func): - """Use as a decorator: doctest a function's docstring as a unittest. - - This version runs normal doctests, but the idea is to make it later run - ipython syntax instead.""" - - # Capture the enclosing instance with a different name, so the new - # class below can see it without confusion regarding its own 'self' - # that will point to the test instance at runtime - d2u = self - - # Rewrite the function's docstring to have python syntax - if func.__doc__ is not None: - func.__doc__ = ip2py(func.__doc__) - - # Now, create a tester object that is a real unittest instance, so - # normal unittest machinery (or Nose, or Trial) can find it. - class Tester(unittest.TestCase): - def test(self): - # Make a new runner per function to be tested - runner = DocTestRunner(verbose=d2u.verbose) - for the_test in d2u.finder.find(func, func.__name__): - runner.run(the_test) - failed = count_failures(runner) - if failed: - # Since we only looked at a single function's docstring, - # failed should contain at most one item. More than that - # is a case we can't handle and should error out on - if len(failed) > 1: - err = "Invalid number of test results: %s" % failed - raise ValueError(err) - # Report a normal failure. - self.fail('failed doctests: %s' % str(failed[0])) - - # Rename it so test reports have the original signature. - Tester.__name__ = func.__name__ - return Tester - - -def ipdocstring(func): - """Change the function docstring via ip2py. - """ - if func.__doc__ is not None: - func.__doc__ = ip2py(func.__doc__) - return func - - -# Make an instance of the classes for public use -ipdoctest = Doc2UnitTester() -ip2py = IPython2PythonConverter() +"""Experimental code for cleaner support of IPython syntax with unittest. + +In IPython up until 0.10, we've used very hacked up nose machinery for running +tests with IPython special syntax, and this has proved to be extremely slow. +This module provides decorators to try a different approach, stemming from a +conversation Brian and I (FP) had about this problem Sept/09. + +The goal is to be able to easily write simple functions that can be seen by +unittest as tests, and ultimately for these to support doctests with full +IPython syntax. Nose already offers this based on naming conventions and our +hackish plugins, but we are seeking to move away from nose dependencies if +possible. + +This module follows a different approach, based on decorators. + +- A decorator called @ipdoctest can mark any function as having a docstring + that should be viewed as a doctest, but after syntax conversion. + +Authors +------- + +- Fernando Perez <Fernando.Perez@berkeley.edu> +""" + + +#----------------------------------------------------------------------------- +# Copyright (C) 2009-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Stdlib +import re +import unittest +from doctest import DocTestFinder, DocTestRunner, TestResults +from IPython.terminal.interactiveshell import InteractiveShell + +#----------------------------------------------------------------------------- +# Classes and functions +#----------------------------------------------------------------------------- + +def count_failures(runner): + """Count number of failures in a doctest runner. + + Code modeled after the summarize() method in doctest. + """ + return [TestResults(f, t) for f, t in runner._name2ft.values() if f > 0 ] + + +class IPython2PythonConverter(object): + """Convert IPython 'syntax' to valid Python. + + Eventually this code may grow to be the full IPython syntax conversion + implementation, but for now it only does prompt conversion.""" + + def __init__(self): + self.rps1 = re.compile(r'In\ \[\d+\]: ') + self.rps2 = re.compile(r'\ \ \ \.\.\.+: ') + self.rout = re.compile(r'Out\[\d+\]: \s*?\n?') + self.pyps1 = '>>> ' + self.pyps2 = '... ' + self.rpyps1 = re.compile (r'(\s*%s)(.*)$' % self.pyps1) + self.rpyps2 = re.compile (r'(\s*%s)(.*)$' % self.pyps2) + + def __call__(self, ds): + """Convert IPython prompts to python ones in a string.""" + from . import globalipapp + + pyps1 = '>>> ' + pyps2 = '... ' + pyout = '' + + dnew = ds + dnew = self.rps1.sub(pyps1, dnew) + dnew = self.rps2.sub(pyps2, dnew) + dnew = self.rout.sub(pyout, dnew) + ip = InteractiveShell.instance() + + # Convert input IPython source into valid Python. + out = [] + newline = out.append + for line in dnew.splitlines(): + + mps1 = self.rpyps1.match(line) + if mps1 is not None: + prompt, text = mps1.groups() + newline(prompt+ip.prefilter(text, False)) + continue + + mps2 = self.rpyps2.match(line) + if mps2 is not None: + prompt, text = mps2.groups() + newline(prompt+ip.prefilter(text, True)) + continue + + newline(line) + newline('') # ensure a closing newline, needed by doctest + #print "PYSRC:", '\n'.join(out) # dbg + return '\n'.join(out) + + #return dnew + + +class Doc2UnitTester(object): + """Class whose instances act as a decorator for docstring testing. + + In practice we're only likely to need one instance ever, made below (though + no attempt is made at turning it into a singleton, there is no need for + that). + """ + def __init__(self, verbose=False): + """New decorator. + + Parameters + ---------- + + verbose : boolean, optional (False) + Passed to the doctest finder and runner to control verbosity. + """ + self.verbose = verbose + # We can reuse the same finder for all instances + self.finder = DocTestFinder(verbose=verbose, recurse=False) + + def __call__(self, func): + """Use as a decorator: doctest a function's docstring as a unittest. + + This version runs normal doctests, but the idea is to make it later run + ipython syntax instead.""" + + # Capture the enclosing instance with a different name, so the new + # class below can see it without confusion regarding its own 'self' + # that will point to the test instance at runtime + d2u = self + + # Rewrite the function's docstring to have python syntax + if func.__doc__ is not None: + func.__doc__ = ip2py(func.__doc__) + + # Now, create a tester object that is a real unittest instance, so + # normal unittest machinery (or Nose, or Trial) can find it. 
+ class Tester(unittest.TestCase): + def test(self): + # Make a new runner per function to be tested + runner = DocTestRunner(verbose=d2u.verbose) + for the_test in d2u.finder.find(func, func.__name__): + runner.run(the_test) + failed = count_failures(runner) + if failed: + # Since we only looked at a single function's docstring, + # failed should contain at most one item. More than that + # is a case we can't handle and should error out on + if len(failed) > 1: + err = "Invalid number of test results: %s" % failed + raise ValueError(err) + # Report a normal failure. + self.fail('failed doctests: %s' % str(failed[0])) + + # Rename it so test reports have the original signature. + Tester.__name__ = func.__name__ + return Tester + + +def ipdocstring(func): + """Change the function docstring via ip2py. + """ + if func.__doc__ is not None: + func.__doc__ = ip2py(func.__doc__) + return func + + +# Make an instance of the classes for public use +ipdoctest = Doc2UnitTester() +ip2py = IPython2PythonConverter() diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/README.txt b/contrib/python/ipython/py3/IPython/testing/plugin/README.txt index bc350d12fe9..a85e5a12a10 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/README.txt +++ b/contrib/python/ipython/py3/IPython/testing/plugin/README.txt @@ -1,34 +1,34 @@ -======================================================= - Nose plugin with IPython and extension module support -======================================================= - -This directory provides the key functionality for test support that IPython -needs as a nose plugin, which can be installed for use in projects other than -IPython. - -The presence of a Makefile here is mostly for development and debugging -purposes as it only provides a few shorthand commands. You can manually -install the plugin by using standard Python procedures (``setup.py install`` -with appropriate arguments). - -To install the plugin using the Makefile, edit its first line to reflect where -you'd like the installation. - -Once you've set the prefix, simply build/install the plugin with:: - - make - -and run the tests with:: - - make test - -You should see output similar to:: - - maqroll[plugin]> make test - nosetests -s --with-ipdoctest --doctest-tests dtexample.py - .. - ---------------------------------------------------------------------- - Ran 2 tests in 0.016s - - OK - +======================================================= + Nose plugin with IPython and extension module support +======================================================= + +This directory provides the key functionality for test support that IPython +needs as a nose plugin, which can be installed for use in projects other than +IPython. + +The presence of a Makefile here is mostly for development and debugging +purposes as it only provides a few shorthand commands. You can manually +install the plugin by using standard Python procedures (``setup.py install`` +with appropriate arguments). + +To install the plugin using the Makefile, edit its first line to reflect where +you'd like the installation. + +Once you've set the prefix, simply build/install the plugin with:: + + make + +and run the tests with:: + + make test + +You should see output similar to:: + + maqroll[plugin]> make test + nosetests -s --with-ipdoctest --doctest-tests dtexample.py + .. 
+ ---------------------------------------------------------------------- + Ran 2 tests in 0.016s + + OK + diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/dtexample.py b/contrib/python/ipython/py3/IPython/testing/plugin/dtexample.py index 4b94c74db29..d73cd246fd2 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/dtexample.py +++ b/contrib/python/ipython/py3/IPython/testing/plugin/dtexample.py @@ -1,157 +1,157 @@ -"""Simple example using doctests. - -This file just contains doctests both using plain python and IPython prompts. -All tests should be loaded by nose. -""" - -def pyfunc(): - """Some pure python tests... - - >>> pyfunc() - 'pyfunc' - - >>> import os - - >>> 2+3 - 5 - - >>> for i in range(3): - ... print(i, end=' ') - ... print(i+1, end=' ') - ... - 0 1 1 2 2 3 - """ - return 'pyfunc' - -def ipfunc(): - """Some ipython tests... - - In [1]: import os - - In [3]: 2+3 - Out[3]: 5 - - In [26]: for i in range(3): - ....: print(i, end=' ') - ....: print(i+1, end=' ') - ....: - 0 1 1 2 2 3 - - - Examples that access the operating system work: - - In [1]: !echo hello - hello - - In [2]: !echo hello > /tmp/foo_iptest - - In [3]: !cat /tmp/foo_iptest - hello - - In [4]: rm -f /tmp/foo_iptest - - It's OK to use '_' for the last result, but do NOT try to use IPython's - numbered history of _NN outputs, since those won't exist under the - doctest environment: - - In [7]: 'hi' - Out[7]: 'hi' - - In [8]: print(repr(_)) - 'hi' - - In [7]: 3+4 - Out[7]: 7 - - In [8]: _+3 - Out[8]: 10 - - In [9]: ipfunc() - Out[9]: 'ipfunc' - """ - return 'ipfunc' - - -def ranfunc(): - """A function with some random output. - - Normal examples are verified as usual: - >>> 1+3 - 4 - - But if you put '# random' in the output, it is ignored: - >>> 1+3 - junk goes here... # random - - >>> 1+2 - again, anything goes #random - if multiline, the random mark is only needed once. - - >>> 1+2 - You can also put the random marker at the end: - # random - - >>> 1+2 - # random - .. or at the beginning. - - More correct input is properly verified: - >>> ranfunc() - 'ranfunc' - """ - return 'ranfunc' - - -def random_all(): - """A function where we ignore the output of ALL examples. - - Examples: - - # all-random - - This mark tells the testing machinery that all subsequent examples should - be treated as random (ignoring their output). They are still executed, - so if a they raise an error, it will be detected as such, but their - output is completely ignored. - - >>> 1+3 - junk goes here... - - >>> 1+3 - klasdfj; - - >>> 1+2 - again, anything goes - blah... - """ - pass - -def iprand(): - """Some ipython tests with random output. - - In [7]: 3+4 - Out[7]: 7 - - In [8]: print('hello') - world # random - - In [9]: iprand() - Out[9]: 'iprand' - """ - return 'iprand' - -def iprand_all(): - """Some ipython tests with fully random output. - - # all-random - - In [7]: 1 - Out[7]: 99 - - In [8]: print('hello') - world - - In [9]: iprand_all() - Out[9]: 'junk' - """ - return 'iprand_all' +"""Simple example using doctests. + +This file just contains doctests both using plain python and IPython prompts. +All tests should be loaded by nose. +""" + +def pyfunc(): + """Some pure python tests... + + >>> pyfunc() + 'pyfunc' + + >>> import os + + >>> 2+3 + 5 + + >>> for i in range(3): + ... print(i, end=' ') + ... print(i+1, end=' ') + ... + 0 1 1 2 2 3 + """ + return 'pyfunc' + +def ipfunc(): + """Some ipython tests... 
+ + In [1]: import os + + In [3]: 2+3 + Out[3]: 5 + + In [26]: for i in range(3): + ....: print(i, end=' ') + ....: print(i+1, end=' ') + ....: + 0 1 1 2 2 3 + + + Examples that access the operating system work: + + In [1]: !echo hello + hello + + In [2]: !echo hello > /tmp/foo_iptest + + In [3]: !cat /tmp/foo_iptest + hello + + In [4]: rm -f /tmp/foo_iptest + + It's OK to use '_' for the last result, but do NOT try to use IPython's + numbered history of _NN outputs, since those won't exist under the + doctest environment: + + In [7]: 'hi' + Out[7]: 'hi' + + In [8]: print(repr(_)) + 'hi' + + In [7]: 3+4 + Out[7]: 7 + + In [8]: _+3 + Out[8]: 10 + + In [9]: ipfunc() + Out[9]: 'ipfunc' + """ + return 'ipfunc' + + +def ranfunc(): + """A function with some random output. + + Normal examples are verified as usual: + >>> 1+3 + 4 + + But if you put '# random' in the output, it is ignored: + >>> 1+3 + junk goes here... # random + + >>> 1+2 + again, anything goes #random + if multiline, the random mark is only needed once. + + >>> 1+2 + You can also put the random marker at the end: + # random + + >>> 1+2 + # random + .. or at the beginning. + + More correct input is properly verified: + >>> ranfunc() + 'ranfunc' + """ + return 'ranfunc' + + +def random_all(): + """A function where we ignore the output of ALL examples. + + Examples: + + # all-random + + This mark tells the testing machinery that all subsequent examples should + be treated as random (ignoring their output). They are still executed, + so if a they raise an error, it will be detected as such, but their + output is completely ignored. + + >>> 1+3 + junk goes here... + + >>> 1+3 + klasdfj; + + >>> 1+2 + again, anything goes + blah... + """ + pass + +def iprand(): + """Some ipython tests with random output. + + In [7]: 3+4 + Out[7]: 7 + + In [8]: print('hello') + world # random + + In [9]: iprand() + Out[9]: 'iprand' + """ + return 'iprand' + +def iprand_all(): + """Some ipython tests with fully random output. + + # all-random + + In [7]: 1 + Out[7]: 99 + + In [8]: print('hello') + world + + In [9]: iprand_all() + Out[9]: 'junk' + """ + return 'iprand_all' diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/ipdoctest.py b/contrib/python/ipython/py3/IPython/testing/plugin/ipdoctest.py index 1ab547201ab..3b8667e72ff 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/ipdoctest.py +++ b/contrib/python/ipython/py3/IPython/testing/plugin/ipdoctest.py @@ -1,761 +1,761 @@ -"""Nose Plugin that supports IPython doctests. - -Limitations: - -- When generating examples for use as doctests, make sure that you have - pretty-printing OFF. This can be done either by setting the - ``PlainTextFormatter.pprint`` option in your configuration file to False, or - by interactively disabling it with %Pprint. This is required so that IPython - output matches that of normal Python, which is used by doctest for internal - execution. - -- Do not rely on specific prompt numbers for results (such as using - '_34==True', for example). For IPython tests run via an external process the - prompt numbers may be different, and IPython tests run as normal python code - won't even have these special _NN variables set at all. 
-""" - -#----------------------------------------------------------------------------- -# Module imports - -# From the standard library -import builtins as builtin_mod -import doctest -import inspect -import logging -import os -import re -import sys -from importlib import import_module -from io import StringIO - -from testpath import modified_env - -from inspect import getmodule - -# We are overriding the default doctest runner, so we need to import a few -# things from doctest directly -from doctest import (REPORTING_FLAGS, REPORT_ONLY_FIRST_FAILURE, - _unittest_reportflags, DocTestRunner, - _extract_future_flags, pdb, _OutputRedirectingPdb, - _exception_traceback, - linecache) - -# Third-party modules - -from nose.plugins import doctests, Plugin -from nose.util import anyp, tolist - -#----------------------------------------------------------------------------- -# Module globals and other constants -#----------------------------------------------------------------------------- - -log = logging.getLogger(__name__) - - -#----------------------------------------------------------------------------- -# Classes and functions -#----------------------------------------------------------------------------- - -def is_extension_module(filename): - """Return whether the given filename is an extension module. - - This simply checks that the extension is either .so or .pyd. - """ - return os.path.splitext(filename)[1].lower() in ('.so','.pyd') - - -class DocTestSkip(object): - """Object wrapper for doctests to be skipped.""" - - ds_skip = """Doctest to skip. - >>> 1 #doctest: +SKIP - """ - - def __init__(self,obj): - self.obj = obj - - def __getattribute__(self,key): - if key == '__doc__': - return DocTestSkip.ds_skip - else: - return getattr(object.__getattribute__(self,'obj'),key) - -# Modified version of the one in the stdlib, that fixes a python bug (doctests -# not found in extension modules, http://bugs.python.org/issue3158) -class DocTestFinder(doctest.DocTestFinder): - - def _from_module(self, module, object): - """ - Return true if the given object is defined in the given - module. - """ - if module is None: - return True - elif inspect.isfunction(object): - return module.__dict__ is object.__globals__ - elif inspect.isbuiltin(object): - return module.__name__ == object.__module__ - elif inspect.isclass(object): - return module.__name__ == object.__module__ - elif inspect.ismethod(object): - # This one may be a bug in cython that fails to correctly set the - # __module__ attribute of methods, but since the same error is easy - # to make by extension code writers, having this safety in place - # isn't such a bad idea - return module.__name__ == object.__self__.__class__.__module__ - elif inspect.getmodule(object) is not None: - return module is inspect.getmodule(object) - elif hasattr(object, '__module__'): - return module.__name__ == object.__module__ - elif isinstance(object, property): - return True # [XX] no way not be sure. - elif inspect.ismethoddescriptor(object): - # Unbound PyQt signals reach this point in Python 3.4b3, and we want - # to avoid throwing an error. See also http://bugs.python.org/issue3158 - return False - else: - raise ValueError("object must be a class or function, got %r" % object) - - def _find(self, tests, obj, name, module, source_lines, globs, seen): - """ - Find tests for the given object and any contained objects, and - add them to `tests`. 
- """ - print('_find for:', obj, name, module) # dbg - if hasattr(obj,"skip_doctest"): - #print 'SKIPPING DOCTEST FOR:',obj # dbg - obj = DocTestSkip(obj) - - doctest.DocTestFinder._find(self,tests, obj, name, module, - source_lines, globs, seen) - - # Below we re-run pieces of the above method with manual modifications, - # because the original code is buggy and fails to correctly identify - # doctests in extension modules. - - # Local shorthands - from inspect import isroutine, isclass - - # Look for tests in a module's contained objects. - if inspect.ismodule(obj) and self._recurse: - for valname, val in obj.__dict__.items(): - valname1 = '%s.%s' % (name, valname) - if ( (isroutine(val) or isclass(val)) - and self._from_module(module, val) ): - - self._find(tests, val, valname1, module, source_lines, - globs, seen) - - # Look for tests in a class's contained objects. - if inspect.isclass(obj) and self._recurse: - #print 'RECURSE into class:',obj # dbg - for valname, val in obj.__dict__.items(): - # Special handling for staticmethod/classmethod. - if isinstance(val, staticmethod): - val = getattr(obj, valname) - if isinstance(val, classmethod): - val = getattr(obj, valname).__func__ - - # Recurse to methods, properties, and nested classes. - if ((inspect.isfunction(val) or inspect.isclass(val) or - inspect.ismethod(val) or - isinstance(val, property)) and - self._from_module(module, val)): - valname = '%s.%s' % (name, valname) - self._find(tests, val, valname, module, source_lines, - globs, seen) - - -class IPDoctestOutputChecker(doctest.OutputChecker): - """Second-chance checker with support for random tests. - - If the default comparison doesn't pass, this checker looks in the expected - output string for flags that tell us to ignore the output. - """ - - random_re = re.compile(r'#\s*random\s+') - - def check_output(self, want, got, optionflags): - """Check output, accepting special markers embedded in the output. - - If the output didn't pass the default validation but the special string - '#random' is included, we accept it.""" - - # Let the original tester verify first, in case people have valid tests - # that happen to have a comment saying '#random' embedded in. - ret = doctest.OutputChecker.check_output(self, want, got, - optionflags) - if not ret and self.random_re.search(want): - #print >> sys.stderr, 'RANDOM OK:',want # dbg - return True - - return ret - - -class DocTestCase(doctests.DocTestCase): - """Proxy for DocTestCase: provides an address() method that - returns the correct address for the doctest case. Otherwise - acts as a proxy to the test case. To provide hints for address(), - an obj may also be passed -- this will be used as the test object - for purposes of determining the test address, if it is provided. - """ - - # Note: this method was taken from numpy's nosetester module. - - # Subclass nose.plugins.doctests.DocTestCase to work around a bug in - # its constructor that blocks non-default arguments from being passed - # down into doctest.DocTestCase - - def __init__(self, test, optionflags=0, setUp=None, tearDown=None, - checker=None, obj=None, result_var='_'): - self._result_var = result_var - doctests.DocTestCase.__init__(self, test, - optionflags=optionflags, - setUp=setUp, tearDown=tearDown, - checker=checker) - # Now we must actually copy the original constructor from the stdlib - # doctest class, because we can't call it directly and a bug in nose - # means it never gets passed the right arguments. 
- - self._dt_optionflags = optionflags - self._dt_checker = checker - self._dt_test = test - self._dt_test_globs_ori = test.globs - self._dt_setUp = setUp - self._dt_tearDown = tearDown - - # XXX - store this runner once in the object! - runner = IPDocTestRunner(optionflags=optionflags, - checker=checker, verbose=False) - self._dt_runner = runner - - - # Each doctest should remember the directory it was loaded from, so - # things like %run work without too many contortions - self._ori_dir = os.path.dirname(test.filename) - - # Modified runTest from the default stdlib - def runTest(self): - test = self._dt_test - runner = self._dt_runner - - old = sys.stdout - new = StringIO() - optionflags = self._dt_optionflags - - if not (optionflags & REPORTING_FLAGS): - # The option flags don't include any reporting flags, - # so add the default reporting flags - optionflags |= _unittest_reportflags - - try: - # Save our current directory and switch out to the one where the - # test was originally created, in case another doctest did a - # directory change. We'll restore this in the finally clause. - curdir = os.getcwd() - #print 'runTest in dir:', self._ori_dir # dbg - os.chdir(self._ori_dir) - - runner.DIVIDER = "-"*70 - failures, tries = runner.run(test,out=new.write, - clear_globs=False) - finally: - sys.stdout = old - os.chdir(curdir) - - if failures: - raise self.failureException(self.format_failure(new.getvalue())) - - def setUp(self): - """Modified test setup that syncs with ipython namespace""" - #print "setUp test", self._dt_test.examples # dbg - if isinstance(self._dt_test.examples[0], IPExample): - # for IPython examples *only*, we swap the globals with the ipython - # namespace, after updating it with the globals (which doctest - # fills with the necessary info from the module being tested). - self.user_ns_orig = {} - self.user_ns_orig.update(_ip.user_ns) - _ip.user_ns.update(self._dt_test.globs) - # We must remove the _ key in the namespace, so that Python's - # doctest code sets it naturally - _ip.user_ns.pop('_', None) - _ip.user_ns['__builtins__'] = builtin_mod - self._dt_test.globs = _ip.user_ns - - super(DocTestCase, self).setUp() - - def tearDown(self): - - # Undo the test.globs reassignment we made, so that the parent class - # teardown doesn't destroy the ipython namespace - if isinstance(self._dt_test.examples[0], IPExample): - self._dt_test.globs = self._dt_test_globs_ori - _ip.user_ns.clear() - _ip.user_ns.update(self.user_ns_orig) - - # XXX - fperez: I am not sure if this is truly a bug in nose 0.11, but - # it does look like one to me: its tearDown method tries to run - # - # delattr(builtin_mod, self._result_var) - # - # without checking that the attribute really is there; it implicitly - # assumes it should have been set via displayhook. But if the - # displayhook was never called, this doesn't necessarily happen. I - # haven't been able to find a little self-contained example outside of - # ipython that would show the problem so I can report it to the nose - # team, but it does happen a lot in our code. - # - # So here, we just protect as narrowly as possible by trapping an - # attribute error whose message would be the name of self._result_var, - # and letting any other error propagate. - try: - super(DocTestCase, self).tearDown() - except AttributeError as exc: - if exc.args[0] != self._result_var: - raise - - -# A simple subclassing of the original with a different class name, so we can -# distinguish and treat differently IPython examples from pure python ones. 
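-# (For instance, IPDocTestParser.parse below returns IPExample instances for
-# docstrings written with 'In [N]:' prompts; DocTestCase.setUp above checks
-# for that class before swapping in the IPython user namespace.)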
-class IPExample(doctest.Example): pass - - -class IPExternalExample(doctest.Example): - """Doctest examples to be run in an external process.""" - - def __init__(self, source, want, exc_msg=None, lineno=0, indent=0, - options=None): - # Parent constructor - doctest.Example.__init__(self,source,want,exc_msg,lineno,indent,options) - - # An EXTRA newline is needed to prevent pexpect hangs - self.source += '\n' - - -class IPDocTestParser(doctest.DocTestParser): - """ - A class used to parse strings containing doctest examples. - - Note: This is a version modified to properly recognize IPython input and - convert any IPython examples into valid Python ones. - """ - # This regular expression is used to find doctest examples in a - # string. It defines three groups: `source` is the source code - # (including leading indentation and prompts); `indent` is the - # indentation of the first (PS1) line of the source code; and - # `want` is the expected output (including leading indentation). - - # Classic Python prompts or default IPython ones - _PS1_PY = r'>>>' - _PS2_PY = r'\.\.\.' - - _PS1_IP = r'In\ \[\d+\]:' - _PS2_IP = r'\ \ \ \.\.\.+:' - - _RE_TPL = r''' - # Source consists of a PS1 line followed by zero or more PS2 lines. - (?P<source> - (?:^(?P<indent> [ ]*) (?P<ps1> %s) .*) # PS1 line - (?:\n [ ]* (?P<ps2> %s) .*)*) # PS2 lines - \n? # a newline - # Want consists of any non-blank lines that do not start with PS1. - (?P<want> (?:(?![ ]*$) # Not a blank line - (?![ ]*%s) # Not a line starting with PS1 - (?![ ]*%s) # Not a line starting with PS2 - .*$\n? # But any other line - )*) - ''' - - _EXAMPLE_RE_PY = re.compile( _RE_TPL % (_PS1_PY,_PS2_PY,_PS1_PY,_PS2_PY), - re.MULTILINE | re.VERBOSE) - - _EXAMPLE_RE_IP = re.compile( _RE_TPL % (_PS1_IP,_PS2_IP,_PS1_IP,_PS2_IP), - re.MULTILINE | re.VERBOSE) - - # Mark a test as being fully random. In this case, we simply append the - # random marker ('#random') to each individual example's output. This way - # we don't need to modify any other code. - _RANDOM_TEST = re.compile(r'#\s*all-random\s+') - - # Mark tests to be executed in an external process - currently unsupported. - _EXTERNAL_IP = re.compile(r'#\s*ipdoctest:\s*EXTERNAL') - - def ip2py(self,source): - """Convert input IPython source into valid Python.""" - block = _ip.input_transformer_manager.transform_cell(source) - if len(block.splitlines()) == 1: - return _ip.prefilter(block) - else: - return block - - def parse(self, string, name='<string>'): - """ - Divide the given string into examples and intervening text, - and return them as a list of alternating Examples and strings. - Line numbers for the Examples are 0-based. The optional - argument `name` is a name identifying this string, and is only - used for error messages. - """ - - #print 'Parse string:\n',string # dbg - - string = string.expandtabs() - # If all lines begin with the same indentation, then strip it. - min_indent = self._min_indent(string) - if min_indent > 0: - string = '\n'.join([l[min_indent:] for l in string.split('\n')]) - - output = [] - charno, lineno = 0, 0 - - # We make 'all random' tests by adding the '# random' mark to every - # block of output in the test. - if self._RANDOM_TEST.search(string): - random_marker = '\n# random' - else: - random_marker = '' - - # Whether to convert the input from ipython to python syntax - ip2py = False - # Find all doctest examples in the string. 
First, try them as Python - # examples, then as IPython ones - terms = list(self._EXAMPLE_RE_PY.finditer(string)) - if terms: - # Normal Python example - #print '-'*70 # dbg - #print 'PyExample, Source:\n',string # dbg - #print '-'*70 # dbg - Example = doctest.Example - else: - # It's an ipython example. Note that IPExamples are run - # in-process, so their syntax must be turned into valid python. - # IPExternalExamples are run out-of-process (via pexpect) so they - # don't need any filtering (a real ipython will be executing them). - terms = list(self._EXAMPLE_RE_IP.finditer(string)) - if self._EXTERNAL_IP.search(string): - #print '-'*70 # dbg - #print 'IPExternalExample, Source:\n',string # dbg - #print '-'*70 # dbg - Example = IPExternalExample - else: - #print '-'*70 # dbg - #print 'IPExample, Source:\n',string # dbg - #print '-'*70 # dbg - Example = IPExample - ip2py = True - - for m in terms: - # Add the pre-example text to `output`. - output.append(string[charno:m.start()]) - # Update lineno (lines before this example) - lineno += string.count('\n', charno, m.start()) - # Extract info from the regexp match. - (source, options, want, exc_msg) = \ - self._parse_example(m, name, lineno,ip2py) - - # Append the random-output marker (it defaults to empty in most - # cases, it's only non-empty for 'all-random' tests): - want += random_marker - - if Example is IPExternalExample: - options[doctest.NORMALIZE_WHITESPACE] = True - want += '\n' - - # Create an Example, and add it to the list. - if not self._IS_BLANK_OR_COMMENT(source): - output.append(Example(source, want, exc_msg, - lineno=lineno, - indent=min_indent+len(m.group('indent')), - options=options)) - # Update lineno (lines inside this example) - lineno += string.count('\n', m.start(), m.end()) - # Update charno. - charno = m.end() - # Add any remaining post-example text to `output`. - output.append(string[charno:]) - return output - - def _parse_example(self, m, name, lineno,ip2py=False): - """ - Given a regular expression match from `_EXAMPLE_RE` (`m`), - return a pair `(source, want)`, where `source` is the matched - example's source code (with prompts and indentation stripped); - and `want` is the example's expected output (with indentation - stripped). - - `name` is the string's name, and `lineno` is the line number - where the example starts; both are used for error messages. - - Optional: - `ip2py`: if true, filter the input via IPython to convert the syntax - into valid python. - """ - - # Get the example's indentation level. - indent = len(m.group('indent')) - - # Divide source into lines; check that they're properly - # indented; and then strip their indentation & prompts. - source_lines = m.group('source').split('\n') - - # We're using variable-length input prompts - ps1 = m.group('ps1') - ps2 = m.group('ps2') - ps1_len = len(ps1) - - self._check_prompt_blank(source_lines, indent, name, lineno,ps1_len) - if ps2: - self._check_prefix(source_lines[1:], ' '*indent + ps2, name, lineno) - - source = '\n'.join([sl[indent+ps1_len+1:] for sl in source_lines]) - - if ip2py: - # Convert source input from IPython into valid Python syntax - source = self.ip2py(source) - - # Divide want into lines; check that it's properly indented; and - # then strip the indentation. Spaces before the last newline should - # be preserved, so plain rstrip() isn't good enough. 
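-        # (Illustration: if want were 'out  \n  ', a plain rstrip() would also
-        # drop the two spaces that belong to the expected output line; the code
-        # below only removes a final line consisting purely of spaces.)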
- want = m.group('want') - want_lines = want.split('\n') - if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]): - del want_lines[-1] # forget final newline & spaces after it - self._check_prefix(want_lines, ' '*indent, name, - lineno + len(source_lines)) - - # Remove ipython output prompt that might be present in the first line - want_lines[0] = re.sub(r'Out\[\d+\]: \s*?\n?','',want_lines[0]) - - want = '\n'.join([wl[indent:] for wl in want_lines]) - - # If `want` contains a traceback message, then extract it. - m = self._EXCEPTION_RE.match(want) - if m: - exc_msg = m.group('msg') - else: - exc_msg = None - - # Extract options from the source. - options = self._find_options(source, name, lineno) - - return source, options, want, exc_msg - - def _check_prompt_blank(self, lines, indent, name, lineno, ps1_len): - """ - Given the lines of a source string (including prompts and - leading indentation), check to make sure that every prompt is - followed by a space character. If any line is not followed by - a space character, then raise ValueError. - - Note: IPython-modified version which takes the input prompt length as a - parameter, so that prompts of variable length can be dealt with. - """ - space_idx = indent+ps1_len - min_len = space_idx+1 - for i, line in enumerate(lines): - if len(line) >= min_len and line[space_idx] != ' ': - raise ValueError('line %r of the docstring for %s ' - 'lacks blank after %s: %r' % - (lineno+i+1, name, - line[indent:space_idx], line)) - - -SKIP = doctest.register_optionflag('SKIP') - - -class IPDocTestRunner(doctest.DocTestRunner,object): - """Test runner that synchronizes the IPython namespace with test globals. - """ - - def run(self, test, compileflags=None, out=None, clear_globs=True): - - # Hack: ipython needs access to the execution context of the example, - # so that it can propagate user variables loaded by %run into - # test.globs. We put them here into our modified %run as a function - # attribute. Our new %run will then only make the namespace update - # when called (rather than unconditionally updating test.globs here - # for all examples, most of which won't be calling %run anyway). - #_ip._ipdoctest_test_globs = test.globs - #_ip._ipdoctest_test_filename = test.filename - - test.globs.update(_ip.user_ns) - - # Override terminal size to standardise traceback format - with modified_env({'COLUMNS': '80', 'LINES': '24'}): - return super(IPDocTestRunner,self).run(test, - compileflags,out,clear_globs) - - -class DocFileCase(doctest.DocFileCase): - """Overrides to provide filename - """ - def address(self): - return (self._dt_test.filename, None, None) - - -class ExtensionDoctest(doctests.Doctest): - """Nose Plugin that supports doctests in extension modules. - """ - name = 'extdoctest' # call nosetests with --with-extdoctest - enabled = True - - def options(self, parser, env=os.environ): - Plugin.options(self, parser, env) - parser.add_option('--doctest-tests', action='store_true', - dest='doctest_tests', - default=env.get('NOSE_DOCTEST_TESTS',True), - help="Also look for doctests in test modules. " - "Note that classes, methods and functions should " - "have either doctests or non-doctest tests, " - "not both. [NOSE_DOCTEST_TESTS]") - parser.add_option('--doctest-extension', action="append", - dest="doctestExtension", - help="Also look for doctests in files with " - "this extension [NOSE_DOCTEST_EXTENSION]") - # Set the default as a list, if given in env; otherwise - # an additional value set on the command line will cause - # an error. 
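-        # (For example, setting NOSE_DOCTEST_EXTENSION in the environment is
-        # expected to yield a list default such as ['txt'] via nose's tolist()
-        # helper below; the exact splitting rules are nose's.)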
- env_setting = env.get('NOSE_DOCTEST_EXTENSION') - if env_setting is not None: - parser.set_defaults(doctestExtension=tolist(env_setting)) - - - def configure(self, options, config): - Plugin.configure(self, options, config) - # Pull standard doctest plugin out of config; we will do doctesting - config.plugins.plugins = [p for p in config.plugins.plugins - if p.name != 'doctest'] - self.doctest_tests = options.doctest_tests - self.extension = tolist(options.doctestExtension) - - self.parser = doctest.DocTestParser() - self.finder = DocTestFinder() - self.checker = IPDoctestOutputChecker() - self.globs = None - self.extraglobs = None - - - def loadTestsFromExtensionModule(self,filename): - bpath,mod = os.path.split(filename) - modname = os.path.splitext(mod)[0] - try: - sys.path.append(bpath) - module = import_module(modname) - tests = list(self.loadTestsFromModule(module)) - finally: - sys.path.pop() - return tests - - # NOTE: the method below is almost a copy of the original one in nose, with - # a few modifications to control output checking. - - def loadTestsFromModule(self, module): - #print '*** ipdoctest - lTM',module # dbg - - if not self.matches(module.__name__): - log.debug("Doctest doesn't want module %s", module) - return - - tests = self.finder.find(module,globs=self.globs, - extraglobs=self.extraglobs) - if not tests: - return - - # always use whitespace and ellipsis options - optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS - - tests.sort() - module_file = module.__file__ - if module_file[-4:] in ('.pyc', '.pyo'): - module_file = module_file[:-1] - for test in tests: - if not test.examples: - continue - if not test.filename: - test.filename = module_file - - yield DocTestCase(test, - optionflags=optionflags, - checker=self.checker) - - - def loadTestsFromFile(self, filename): - #print "ipdoctest - from file", filename # dbg - if is_extension_module(filename): - for t in self.loadTestsFromExtensionModule(filename): - yield t - else: - if self.extension and anyp(filename.endswith, self.extension): - name = os.path.basename(filename) - with open(filename) as dh: - doc = dh.read() - test = self.parser.get_doctest( - doc, globs={'__file__': filename}, name=name, - filename=filename, lineno=0) - if test.examples: - #print 'FileCase:',test.examples # dbg - yield DocFileCase(test) - else: - yield False # no tests to load - - -class IPythonDoctest(ExtensionDoctest): - """Nose Plugin that supports doctests in extension modules. - """ - name = 'ipdoctest' # call nosetests with --with-ipdoctest - enabled = True - - def makeTest(self, obj, parent): - """Look for doctests in the given object, which will be a - function, method or class. - """ - #print 'Plugin analyzing:', obj, parent # dbg - # always use whitespace and ellipsis options - optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS - - doctests = self.finder.find(obj, module=getmodule(parent)) - if doctests: - for test in doctests: - if len(test.examples) == 0: - continue - - yield DocTestCase(test, obj=obj, - optionflags=optionflags, - checker=self.checker) - - def options(self, parser, env=os.environ): - #print "Options for nose plugin:", self.name # dbg - Plugin.options(self, parser, env) - parser.add_option('--ipdoctest-tests', action='store_true', - dest='ipdoctest_tests', - default=env.get('NOSE_IPDOCTEST_TESTS',True), - help="Also look for doctests in test modules. " - "Note that classes, methods and functions should " - "have either doctests or non-doctest tests, " - "not both. 
[NOSE_IPDOCTEST_TESTS]") - parser.add_option('--ipdoctest-extension', action="append", - dest="ipdoctest_extension", - help="Also look for doctests in files with " - "this extension [NOSE_IPDOCTEST_EXTENSION]") - # Set the default as a list, if given in env; otherwise - # an additional value set on the command line will cause - # an error. - env_setting = env.get('NOSE_IPDOCTEST_EXTENSION') - if env_setting is not None: - parser.set_defaults(ipdoctest_extension=tolist(env_setting)) - - def configure(self, options, config): - #print "Configuring nose plugin:", self.name # dbg - Plugin.configure(self, options, config) - # Pull standard doctest plugin out of config; we will do doctesting - config.plugins.plugins = [p for p in config.plugins.plugins - if p.name != 'doctest'] - self.doctest_tests = options.ipdoctest_tests - self.extension = tolist(options.ipdoctest_extension) - - self.parser = IPDocTestParser() - self.finder = DocTestFinder(parser=self.parser) - self.checker = IPDoctestOutputChecker() - self.globs = None - self.extraglobs = None +"""Nose Plugin that supports IPython doctests. + +Limitations: + +- When generating examples for use as doctests, make sure that you have + pretty-printing OFF. This can be done either by setting the + ``PlainTextFormatter.pprint`` option in your configuration file to False, or + by interactively disabling it with %Pprint. This is required so that IPython + output matches that of normal Python, which is used by doctest for internal + execution. + +- Do not rely on specific prompt numbers for results (such as using + '_34==True', for example). For IPython tests run via an external process the + prompt numbers may be different, and IPython tests run as normal python code + won't even have these special _NN variables set at all. +""" + +#----------------------------------------------------------------------------- +# Module imports + +# From the standard library +import builtins as builtin_mod +import doctest +import inspect +import logging +import os +import re +import sys +from importlib import import_module +from io import StringIO + +from testpath import modified_env + +from inspect import getmodule + +# We are overriding the default doctest runner, so we need to import a few +# things from doctest directly +from doctest import (REPORTING_FLAGS, REPORT_ONLY_FIRST_FAILURE, + _unittest_reportflags, DocTestRunner, + _extract_future_flags, pdb, _OutputRedirectingPdb, + _exception_traceback, + linecache) + +# Third-party modules + +from nose.plugins import doctests, Plugin +from nose.util import anyp, tolist + +#----------------------------------------------------------------------------- +# Module globals and other constants +#----------------------------------------------------------------------------- + +log = logging.getLogger(__name__) + + +#----------------------------------------------------------------------------- +# Classes and functions +#----------------------------------------------------------------------------- + +def is_extension_module(filename): + """Return whether the given filename is an extension module. + + This simply checks that the extension is either .so or .pyd. + """ + return os.path.splitext(filename)[1].lower() in ('.so','.pyd') + + +class DocTestSkip(object): + """Object wrapper for doctests to be skipped.""" + + ds_skip = """Doctest to skip. 
+ >>> 1 #doctest: +SKIP + """ + + def __init__(self,obj): + self.obj = obj + + def __getattribute__(self,key): + if key == '__doc__': + return DocTestSkip.ds_skip + else: + return getattr(object.__getattribute__(self,'obj'),key) + +# Modified version of the one in the stdlib, that fixes a python bug (doctests +# not found in extension modules, http://bugs.python.org/issue3158) +class DocTestFinder(doctest.DocTestFinder): + + def _from_module(self, module, object): + """ + Return true if the given object is defined in the given + module. + """ + if module is None: + return True + elif inspect.isfunction(object): + return module.__dict__ is object.__globals__ + elif inspect.isbuiltin(object): + return module.__name__ == object.__module__ + elif inspect.isclass(object): + return module.__name__ == object.__module__ + elif inspect.ismethod(object): + # This one may be a bug in cython that fails to correctly set the + # __module__ attribute of methods, but since the same error is easy + # to make by extension code writers, having this safety in place + # isn't such a bad idea + return module.__name__ == object.__self__.__class__.__module__ + elif inspect.getmodule(object) is not None: + return module is inspect.getmodule(object) + elif hasattr(object, '__module__'): + return module.__name__ == object.__module__ + elif isinstance(object, property): + return True # [XX] no way not be sure. + elif inspect.ismethoddescriptor(object): + # Unbound PyQt signals reach this point in Python 3.4b3, and we want + # to avoid throwing an error. See also http://bugs.python.org/issue3158 + return False + else: + raise ValueError("object must be a class or function, got %r" % object) + + def _find(self, tests, obj, name, module, source_lines, globs, seen): + """ + Find tests for the given object and any contained objects, and + add them to `tests`. + """ + print('_find for:', obj, name, module) # dbg + if hasattr(obj,"skip_doctest"): + #print 'SKIPPING DOCTEST FOR:',obj # dbg + obj = DocTestSkip(obj) + + doctest.DocTestFinder._find(self,tests, obj, name, module, + source_lines, globs, seen) + + # Below we re-run pieces of the above method with manual modifications, + # because the original code is buggy and fails to correctly identify + # doctests in extension modules. + + # Local shorthands + from inspect import isroutine, isclass + + # Look for tests in a module's contained objects. + if inspect.ismodule(obj) and self._recurse: + for valname, val in obj.__dict__.items(): + valname1 = '%s.%s' % (name, valname) + if ( (isroutine(val) or isclass(val)) + and self._from_module(module, val) ): + + self._find(tests, val, valname1, module, source_lines, + globs, seen) + + # Look for tests in a class's contained objects. + if inspect.isclass(obj) and self._recurse: + #print 'RECURSE into class:',obj # dbg + for valname, val in obj.__dict__.items(): + # Special handling for staticmethod/classmethod. + if isinstance(val, staticmethod): + val = getattr(obj, valname) + if isinstance(val, classmethod): + val = getattr(obj, valname).__func__ + + # Recurse to methods, properties, and nested classes. + if ((inspect.isfunction(val) or inspect.isclass(val) or + inspect.ismethod(val) or + isinstance(val, property)) and + self._from_module(module, val)): + valname = '%s.%s' % (name, valname) + self._find(tests, val, valname, module, source_lines, + globs, seen) + + +class IPDoctestOutputChecker(doctest.OutputChecker): + """Second-chance checker with support for random tests. 
+ + If the default comparison doesn't pass, this checker looks in the expected + output string for flags that tell us to ignore the output. + """ + + random_re = re.compile(r'#\s*random\s+') + + def check_output(self, want, got, optionflags): + """Check output, accepting special markers embedded in the output. + + If the output didn't pass the default validation but the special string + '#random' is included, we accept it.""" + + # Let the original tester verify first, in case people have valid tests + # that happen to have a comment saying '#random' embedded in. + ret = doctest.OutputChecker.check_output(self, want, got, + optionflags) + if not ret and self.random_re.search(want): + #print >> sys.stderr, 'RANDOM OK:',want # dbg + return True + + return ret + + +class DocTestCase(doctests.DocTestCase): + """Proxy for DocTestCase: provides an address() method that + returns the correct address for the doctest case. Otherwise + acts as a proxy to the test case. To provide hints for address(), + an obj may also be passed -- this will be used as the test object + for purposes of determining the test address, if it is provided. + """ + + # Note: this method was taken from numpy's nosetester module. + + # Subclass nose.plugins.doctests.DocTestCase to work around a bug in + # its constructor that blocks non-default arguments from being passed + # down into doctest.DocTestCase + + def __init__(self, test, optionflags=0, setUp=None, tearDown=None, + checker=None, obj=None, result_var='_'): + self._result_var = result_var + doctests.DocTestCase.__init__(self, test, + optionflags=optionflags, + setUp=setUp, tearDown=tearDown, + checker=checker) + # Now we must actually copy the original constructor from the stdlib + # doctest class, because we can't call it directly and a bug in nose + # means it never gets passed the right arguments. + + self._dt_optionflags = optionflags + self._dt_checker = checker + self._dt_test = test + self._dt_test_globs_ori = test.globs + self._dt_setUp = setUp + self._dt_tearDown = tearDown + + # XXX - store this runner once in the object! + runner = IPDocTestRunner(optionflags=optionflags, + checker=checker, verbose=False) + self._dt_runner = runner + + + # Each doctest should remember the directory it was loaded from, so + # things like %run work without too many contortions + self._ori_dir = os.path.dirname(test.filename) + + # Modified runTest from the default stdlib + def runTest(self): + test = self._dt_test + runner = self._dt_runner + + old = sys.stdout + new = StringIO() + optionflags = self._dt_optionflags + + if not (optionflags & REPORTING_FLAGS): + # The option flags don't include any reporting flags, + # so add the default reporting flags + optionflags |= _unittest_reportflags + + try: + # Save our current directory and switch out to the one where the + # test was originally created, in case another doctest did a + # directory change. We'll restore this in the finally clause. 
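+            # (For instance, an earlier example may have executed %cd, leaving
+            # the process in a directory other than self._ori_dir.)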
+ curdir = os.getcwd() + #print 'runTest in dir:', self._ori_dir # dbg + os.chdir(self._ori_dir) + + runner.DIVIDER = "-"*70 + failures, tries = runner.run(test,out=new.write, + clear_globs=False) + finally: + sys.stdout = old + os.chdir(curdir) + + if failures: + raise self.failureException(self.format_failure(new.getvalue())) + + def setUp(self): + """Modified test setup that syncs with ipython namespace""" + #print "setUp test", self._dt_test.examples # dbg + if isinstance(self._dt_test.examples[0], IPExample): + # for IPython examples *only*, we swap the globals with the ipython + # namespace, after updating it with the globals (which doctest + # fills with the necessary info from the module being tested). + self.user_ns_orig = {} + self.user_ns_orig.update(_ip.user_ns) + _ip.user_ns.update(self._dt_test.globs) + # We must remove the _ key in the namespace, so that Python's + # doctest code sets it naturally + _ip.user_ns.pop('_', None) + _ip.user_ns['__builtins__'] = builtin_mod + self._dt_test.globs = _ip.user_ns + + super(DocTestCase, self).setUp() + + def tearDown(self): + + # Undo the test.globs reassignment we made, so that the parent class + # teardown doesn't destroy the ipython namespace + if isinstance(self._dt_test.examples[0], IPExample): + self._dt_test.globs = self._dt_test_globs_ori + _ip.user_ns.clear() + _ip.user_ns.update(self.user_ns_orig) + + # XXX - fperez: I am not sure if this is truly a bug in nose 0.11, but + # it does look like one to me: its tearDown method tries to run + # + # delattr(builtin_mod, self._result_var) + # + # without checking that the attribute really is there; it implicitly + # assumes it should have been set via displayhook. But if the + # displayhook was never called, this doesn't necessarily happen. I + # haven't been able to find a little self-contained example outside of + # ipython that would show the problem so I can report it to the nose + # team, but it does happen a lot in our code. + # + # So here, we just protect as narrowly as possible by trapping an + # attribute error whose message would be the name of self._result_var, + # and letting any other error propagate. + try: + super(DocTestCase, self).tearDown() + except AttributeError as exc: + if exc.args[0] != self._result_var: + raise + + +# A simple subclassing of the original with a different class name, so we can +# distinguish and treat differently IPython examples from pure python ones. +class IPExample(doctest.Example): pass + + +class IPExternalExample(doctest.Example): + """Doctest examples to be run in an external process.""" + + def __init__(self, source, want, exc_msg=None, lineno=0, indent=0, + options=None): + # Parent constructor + doctest.Example.__init__(self,source,want,exc_msg,lineno,indent,options) + + # An EXTRA newline is needed to prevent pexpect hangs + self.source += '\n' + + +class IPDocTestParser(doctest.DocTestParser): + """ + A class used to parse strings containing doctest examples. + + Note: This is a version modified to properly recognize IPython input and + convert any IPython examples into valid Python ones. + """ + # This regular expression is used to find doctest examples in a + # string. It defines three groups: `source` is the source code + # (including leading indentation and prompts); `indent` is the + # indentation of the first (PS1) line of the source code; and + # `want` is the expected output (including leading indentation). + + # Classic Python prompts or default IPython ones + _PS1_PY = r'>>>' + _PS2_PY = r'\.\.\.' 
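+    # (Illustration: the *_PY prompts above match classic doctest lines such as
+    # '>>> x = 1' and '... x += 1', while the *_IP prompts below match
+    # 'In [1]: x = 1' and '   ...: x += 1'.)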
+ + _PS1_IP = r'In\ \[\d+\]:' + _PS2_IP = r'\ \ \ \.\.\.+:' + + _RE_TPL = r''' + # Source consists of a PS1 line followed by zero or more PS2 lines. + (?P<source> + (?:^(?P<indent> [ ]*) (?P<ps1> %s) .*) # PS1 line + (?:\n [ ]* (?P<ps2> %s) .*)*) # PS2 lines + \n? # a newline + # Want consists of any non-blank lines that do not start with PS1. + (?P<want> (?:(?![ ]*$) # Not a blank line + (?![ ]*%s) # Not a line starting with PS1 + (?![ ]*%s) # Not a line starting with PS2 + .*$\n? # But any other line + )*) + ''' + + _EXAMPLE_RE_PY = re.compile( _RE_TPL % (_PS1_PY,_PS2_PY,_PS1_PY,_PS2_PY), + re.MULTILINE | re.VERBOSE) + + _EXAMPLE_RE_IP = re.compile( _RE_TPL % (_PS1_IP,_PS2_IP,_PS1_IP,_PS2_IP), + re.MULTILINE | re.VERBOSE) + + # Mark a test as being fully random. In this case, we simply append the + # random marker ('#random') to each individual example's output. This way + # we don't need to modify any other code. + _RANDOM_TEST = re.compile(r'#\s*all-random\s+') + + # Mark tests to be executed in an external process - currently unsupported. + _EXTERNAL_IP = re.compile(r'#\s*ipdoctest:\s*EXTERNAL') + + def ip2py(self,source): + """Convert input IPython source into valid Python.""" + block = _ip.input_transformer_manager.transform_cell(source) + if len(block.splitlines()) == 1: + return _ip.prefilter(block) + else: + return block + + def parse(self, string, name='<string>'): + """ + Divide the given string into examples and intervening text, + and return them as a list of alternating Examples and strings. + Line numbers for the Examples are 0-based. The optional + argument `name` is a name identifying this string, and is only + used for error messages. + """ + + #print 'Parse string:\n',string # dbg + + string = string.expandtabs() + # If all lines begin with the same indentation, then strip it. + min_indent = self._min_indent(string) + if min_indent > 0: + string = '\n'.join([l[min_indent:] for l in string.split('\n')]) + + output = [] + charno, lineno = 0, 0 + + # We make 'all random' tests by adding the '# random' mark to every + # block of output in the test. + if self._RANDOM_TEST.search(string): + random_marker = '\n# random' + else: + random_marker = '' + + # Whether to convert the input from ipython to python syntax + ip2py = False + # Find all doctest examples in the string. First, try them as Python + # examples, then as IPython ones + terms = list(self._EXAMPLE_RE_PY.finditer(string)) + if terms: + # Normal Python example + #print '-'*70 # dbg + #print 'PyExample, Source:\n',string # dbg + #print '-'*70 # dbg + Example = doctest.Example + else: + # It's an ipython example. Note that IPExamples are run + # in-process, so their syntax must be turned into valid python. + # IPExternalExamples are run out-of-process (via pexpect) so they + # don't need any filtering (a real ipython will be executing them). + terms = list(self._EXAMPLE_RE_IP.finditer(string)) + if self._EXTERNAL_IP.search(string): + #print '-'*70 # dbg + #print 'IPExternalExample, Source:\n',string # dbg + #print '-'*70 # dbg + Example = IPExternalExample + else: + #print '-'*70 # dbg + #print 'IPExample, Source:\n',string # dbg + #print '-'*70 # dbg + Example = IPExample + ip2py = True + + for m in terms: + # Add the pre-example text to `output`. + output.append(string[charno:m.start()]) + # Update lineno (lines before this example) + lineno += string.count('\n', charno, m.start()) + # Extract info from the regexp match. 
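+            # (source is the prompt-stripped code, options the doctest flags,
+            # want the expected output and exc_msg any expected exception
+            # message; see _parse_example below.)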
+ (source, options, want, exc_msg) = \ + self._parse_example(m, name, lineno,ip2py) + + # Append the random-output marker (it defaults to empty in most + # cases, it's only non-empty for 'all-random' tests): + want += random_marker + + if Example is IPExternalExample: + options[doctest.NORMALIZE_WHITESPACE] = True + want += '\n' + + # Create an Example, and add it to the list. + if not self._IS_BLANK_OR_COMMENT(source): + output.append(Example(source, want, exc_msg, + lineno=lineno, + indent=min_indent+len(m.group('indent')), + options=options)) + # Update lineno (lines inside this example) + lineno += string.count('\n', m.start(), m.end()) + # Update charno. + charno = m.end() + # Add any remaining post-example text to `output`. + output.append(string[charno:]) + return output + + def _parse_example(self, m, name, lineno,ip2py=False): + """ + Given a regular expression match from `_EXAMPLE_RE` (`m`), + return a pair `(source, want)`, where `source` is the matched + example's source code (with prompts and indentation stripped); + and `want` is the example's expected output (with indentation + stripped). + + `name` is the string's name, and `lineno` is the line number + where the example starts; both are used for error messages. + + Optional: + `ip2py`: if true, filter the input via IPython to convert the syntax + into valid python. + """ + + # Get the example's indentation level. + indent = len(m.group('indent')) + + # Divide source into lines; check that they're properly + # indented; and then strip their indentation & prompts. + source_lines = m.group('source').split('\n') + + # We're using variable-length input prompts + ps1 = m.group('ps1') + ps2 = m.group('ps2') + ps1_len = len(ps1) + + self._check_prompt_blank(source_lines, indent, name, lineno,ps1_len) + if ps2: + self._check_prefix(source_lines[1:], ' '*indent + ps2, name, lineno) + + source = '\n'.join([sl[indent+ps1_len+1:] for sl in source_lines]) + + if ip2py: + # Convert source input from IPython into valid Python syntax + source = self.ip2py(source) + + # Divide want into lines; check that it's properly indented; and + # then strip the indentation. Spaces before the last newline should + # be preserved, so plain rstrip() isn't good enough. + want = m.group('want') + want_lines = want.split('\n') + if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]): + del want_lines[-1] # forget final newline & spaces after it + self._check_prefix(want_lines, ' '*indent, name, + lineno + len(source_lines)) + + # Remove ipython output prompt that might be present in the first line + want_lines[0] = re.sub(r'Out\[\d+\]: \s*?\n?','',want_lines[0]) + + want = '\n'.join([wl[indent:] for wl in want_lines]) + + # If `want` contains a traceback message, then extract it. + m = self._EXCEPTION_RE.match(want) + if m: + exc_msg = m.group('msg') + else: + exc_msg = None + + # Extract options from the source. + options = self._find_options(source, name, lineno) + + return source, options, want, exc_msg + + def _check_prompt_blank(self, lines, indent, name, lineno, ps1_len): + """ + Given the lines of a source string (including prompts and + leading indentation), check to make sure that every prompt is + followed by a space character. If any line is not followed by + a space character, then raise ValueError. + + Note: IPython-modified version which takes the input prompt length as a + parameter, so that prompts of variable length can be dealt with. 
+ """ + space_idx = indent+ps1_len + min_len = space_idx+1 + for i, line in enumerate(lines): + if len(line) >= min_len and line[space_idx] != ' ': + raise ValueError('line %r of the docstring for %s ' + 'lacks blank after %s: %r' % + (lineno+i+1, name, + line[indent:space_idx], line)) + + +SKIP = doctest.register_optionflag('SKIP') + + +class IPDocTestRunner(doctest.DocTestRunner,object): + """Test runner that synchronizes the IPython namespace with test globals. + """ + + def run(self, test, compileflags=None, out=None, clear_globs=True): + + # Hack: ipython needs access to the execution context of the example, + # so that it can propagate user variables loaded by %run into + # test.globs. We put them here into our modified %run as a function + # attribute. Our new %run will then only make the namespace update + # when called (rather than unconditionally updating test.globs here + # for all examples, most of which won't be calling %run anyway). + #_ip._ipdoctest_test_globs = test.globs + #_ip._ipdoctest_test_filename = test.filename + + test.globs.update(_ip.user_ns) + + # Override terminal size to standardise traceback format + with modified_env({'COLUMNS': '80', 'LINES': '24'}): + return super(IPDocTestRunner,self).run(test, + compileflags,out,clear_globs) + + +class DocFileCase(doctest.DocFileCase): + """Overrides to provide filename + """ + def address(self): + return (self._dt_test.filename, None, None) + + +class ExtensionDoctest(doctests.Doctest): + """Nose Plugin that supports doctests in extension modules. + """ + name = 'extdoctest' # call nosetests with --with-extdoctest + enabled = True + + def options(self, parser, env=os.environ): + Plugin.options(self, parser, env) + parser.add_option('--doctest-tests', action='store_true', + dest='doctest_tests', + default=env.get('NOSE_DOCTEST_TESTS',True), + help="Also look for doctests in test modules. " + "Note that classes, methods and functions should " + "have either doctests or non-doctest tests, " + "not both. [NOSE_DOCTEST_TESTS]") + parser.add_option('--doctest-extension', action="append", + dest="doctestExtension", + help="Also look for doctests in files with " + "this extension [NOSE_DOCTEST_EXTENSION]") + # Set the default as a list, if given in env; otherwise + # an additional value set on the command line will cause + # an error. + env_setting = env.get('NOSE_DOCTEST_EXTENSION') + if env_setting is not None: + parser.set_defaults(doctestExtension=tolist(env_setting)) + + + def configure(self, options, config): + Plugin.configure(self, options, config) + # Pull standard doctest plugin out of config; we will do doctesting + config.plugins.plugins = [p for p in config.plugins.plugins + if p.name != 'doctest'] + self.doctest_tests = options.doctest_tests + self.extension = tolist(options.doctestExtension) + + self.parser = doctest.DocTestParser() + self.finder = DocTestFinder() + self.checker = IPDoctestOutputChecker() + self.globs = None + self.extraglobs = None + + + def loadTestsFromExtensionModule(self,filename): + bpath,mod = os.path.split(filename) + modname = os.path.splitext(mod)[0] + try: + sys.path.append(bpath) + module = import_module(modname) + tests = list(self.loadTestsFromModule(module)) + finally: + sys.path.pop() + return tests + + # NOTE: the method below is almost a copy of the original one in nose, with + # a few modifications to control output checking. 
+ + def loadTestsFromModule(self, module): + #print '*** ipdoctest - lTM',module # dbg + + if not self.matches(module.__name__): + log.debug("Doctest doesn't want module %s", module) + return + + tests = self.finder.find(module,globs=self.globs, + extraglobs=self.extraglobs) + if not tests: + return + + # always use whitespace and ellipsis options + optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS + + tests.sort() + module_file = module.__file__ + if module_file[-4:] in ('.pyc', '.pyo'): + module_file = module_file[:-1] + for test in tests: + if not test.examples: + continue + if not test.filename: + test.filename = module_file + + yield DocTestCase(test, + optionflags=optionflags, + checker=self.checker) + + + def loadTestsFromFile(self, filename): + #print "ipdoctest - from file", filename # dbg + if is_extension_module(filename): + for t in self.loadTestsFromExtensionModule(filename): + yield t + else: + if self.extension and anyp(filename.endswith, self.extension): + name = os.path.basename(filename) + with open(filename) as dh: + doc = dh.read() + test = self.parser.get_doctest( + doc, globs={'__file__': filename}, name=name, + filename=filename, lineno=0) + if test.examples: + #print 'FileCase:',test.examples # dbg + yield DocFileCase(test) + else: + yield False # no tests to load + + +class IPythonDoctest(ExtensionDoctest): + """Nose Plugin that supports doctests in extension modules. + """ + name = 'ipdoctest' # call nosetests with --with-ipdoctest + enabled = True + + def makeTest(self, obj, parent): + """Look for doctests in the given object, which will be a + function, method or class. + """ + #print 'Plugin analyzing:', obj, parent # dbg + # always use whitespace and ellipsis options + optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS + + doctests = self.finder.find(obj, module=getmodule(parent)) + if doctests: + for test in doctests: + if len(test.examples) == 0: + continue + + yield DocTestCase(test, obj=obj, + optionflags=optionflags, + checker=self.checker) + + def options(self, parser, env=os.environ): + #print "Options for nose plugin:", self.name # dbg + Plugin.options(self, parser, env) + parser.add_option('--ipdoctest-tests', action='store_true', + dest='ipdoctest_tests', + default=env.get('NOSE_IPDOCTEST_TESTS',True), + help="Also look for doctests in test modules. " + "Note that classes, methods and functions should " + "have either doctests or non-doctest tests, " + "not both. [NOSE_IPDOCTEST_TESTS]") + parser.add_option('--ipdoctest-extension', action="append", + dest="ipdoctest_extension", + help="Also look for doctests in files with " + "this extension [NOSE_IPDOCTEST_EXTENSION]") + # Set the default as a list, if given in env; otherwise + # an additional value set on the command line will cause + # an error. 
+ env_setting = env.get('NOSE_IPDOCTEST_EXTENSION') + if env_setting is not None: + parser.set_defaults(ipdoctest_extension=tolist(env_setting)) + + def configure(self, options, config): + #print "Configuring nose plugin:", self.name # dbg + Plugin.configure(self, options, config) + # Pull standard doctest plugin out of config; we will do doctesting + config.plugins.plugins = [p for p in config.plugins.plugins + if p.name != 'doctest'] + self.doctest_tests = options.ipdoctest_tests + self.extension = tolist(options.ipdoctest_extension) + + self.parser = IPDocTestParser() + self.finder = DocTestFinder(parser=self.parser) + self.checker = IPDoctestOutputChecker() + self.globs = None + self.extraglobs = None diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/iptest.py b/contrib/python/ipython/py3/IPython/testing/plugin/iptest.py index 16625279ef4..e24e22a8309 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/iptest.py +++ b/contrib/python/ipython/py3/IPython/testing/plugin/iptest.py @@ -1,18 +1,18 @@ -#!/usr/bin/env python -"""Nose-based test runner. -""" - -from nose.core import main -from nose.plugins.builtin import plugins -from nose.plugins.doctests import Doctest - -from . import ipdoctest -from .ipdoctest import IPDocTestRunner - -if __name__ == '__main__': - print('WARNING: this code is incomplete!') - print() - - pp = [x() for x in plugins] # activate all builtin plugins first - main(testRunner=IPDocTestRunner(), - plugins=pp+[ipdoctest.IPythonDoctest(),Doctest()]) +#!/usr/bin/env python +"""Nose-based test runner. +""" + +from nose.core import main +from nose.plugins.builtin import plugins +from nose.plugins.doctests import Doctest + +from . import ipdoctest +from .ipdoctest import IPDocTestRunner + +if __name__ == '__main__': + print('WARNING: this code is incomplete!') + print() + + pp = [x() for x in plugins] # activate all builtin plugins first + main(testRunner=IPDocTestRunner(), + plugins=pp+[ipdoctest.IPythonDoctest(),Doctest()]) diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/setup.py b/contrib/python/ipython/py3/IPython/testing/plugin/setup.py index 785704337b0..a3281d30c8d 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/setup.py +++ b/contrib/python/ipython/py3/IPython/testing/plugin/setup.py @@ -1,18 +1,18 @@ -#!/usr/bin/env python -"""A Nose plugin to support IPython doctests. -""" - -from setuptools import setup - -setup(name='IPython doctest plugin', - version='0.1', - author='The IPython Team', - description = 'Nose plugin to load IPython-extended doctests', - license = 'LGPL', - py_modules = ['ipdoctest'], - entry_points = { - 'nose.plugins.0.10': ['ipdoctest = ipdoctest:IPythonDoctest', - 'extdoctest = ipdoctest:ExtensionDoctest', - ], - }, - ) +#!/usr/bin/env python +"""A Nose plugin to support IPython doctests. 
+""" + +from setuptools import setup + +setup(name='IPython doctest plugin', + version='0.1', + author='The IPython Team', + description = 'Nose plugin to load IPython-extended doctests', + license = 'LGPL', + py_modules = ['ipdoctest'], + entry_points = { + 'nose.plugins.0.10': ['ipdoctest = ipdoctest:IPythonDoctest', + 'extdoctest = ipdoctest:ExtensionDoctest', + ], + }, + ) diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/show_refs.py b/contrib/python/ipython/py3/IPython/testing/plugin/show_refs.py index 74436a822b5..b2c70adfc1e 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/show_refs.py +++ b/contrib/python/ipython/py3/IPython/testing/plugin/show_refs.py @@ -1,19 +1,19 @@ -"""Simple script to show reference holding behavior. - -This is used by a companion test case. -""" - -import gc - -class C(object): - def __del__(self): - pass - #print 'deleting object...' # dbg - -if __name__ == '__main__': - c = C() - - c_refs = gc.get_referrers(c) - ref_ids = list(map(id,c_refs)) - - print('c referrers:',list(map(type,c_refs))) +"""Simple script to show reference holding behavior. + +This is used by a companion test case. +""" + +import gc + +class C(object): + def __del__(self): + pass + #print 'deleting object...' # dbg + +if __name__ == '__main__': + c = C() + + c_refs = gc.get_referrers(c) + ref_ids = list(map(id,c_refs)) + + print('c referrers:',list(map(type,c_refs))) diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/simple.py b/contrib/python/ipython/py3/IPython/testing/plugin/simple.py index fe5aca1ecf0..3861977cab5 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/simple.py +++ b/contrib/python/ipython/py3/IPython/testing/plugin/simple.py @@ -1,33 +1,33 @@ -"""Simple example using doctests. - -This file just contains doctests both using plain python and IPython prompts. -All tests should be loaded by nose. -""" - -def pyfunc(): - """Some pure python tests... - - >>> pyfunc() - 'pyfunc' - - >>> import os - - >>> 2+3 - 5 - - >>> for i in range(3): - ... print(i, end=' ') - ... print(i+1, end=' ') - ... - 0 1 1 2 2 3 - """ - return 'pyfunc' - - -def ipyfunc2(): - """Some pure python tests... - - >>> 1+1 - 2 - """ - return 'pyfunc2' +"""Simple example using doctests. + +This file just contains doctests both using plain python and IPython prompts. +All tests should be loaded by nose. +""" + +def pyfunc(): + """Some pure python tests... + + >>> pyfunc() + 'pyfunc' + + >>> import os + + >>> 2+3 + 5 + + >>> for i in range(3): + ... print(i, end=' ') + ... print(i+1, end=' ') + ... + 0 1 1 2 2 3 + """ + return 'pyfunc' + + +def ipyfunc2(): + """Some pure python tests... 
+ + >>> 1+1 + 2 + """ + return 'pyfunc2' diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/simplevars.py b/contrib/python/ipython/py3/IPython/testing/plugin/simplevars.py index cdacdbb3758..cac0b753124 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/simplevars.py +++ b/contrib/python/ipython/py3/IPython/testing/plugin/simplevars.py @@ -1,2 +1,2 @@ -x = 1 -print('x is:',x) +x = 1 +print('x is:',x) diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/test_combo.txt b/contrib/python/ipython/py3/IPython/testing/plugin/test_combo.txt index 0de694fe7e5..6c8759f3e72 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/test_combo.txt +++ b/contrib/python/ipython/py3/IPython/testing/plugin/test_combo.txt @@ -1,36 +1,36 @@ -======================= - Combo testing example -======================= - -This is a simple example that mixes ipython doctests:: - - In [1]: import code - - In [2]: 2**12 - Out[2]: 4096 - -with command-line example information that does *not* get executed:: - - $ mpirun -n 4 ipengine --controller-port=10000 --controller-ip=host0 - -and with literal examples of Python source code:: - - controller = dict(host='myhost', - engine_port=None, # default is 10105 - control_port=None, - ) - - # keys are hostnames, values are the number of engine on that host - engines = dict(node1=2, - node2=2, - node3=2, - node3=2, - ) - - # Force failure to detect that this test is being run. - 1/0 - -These source code examples are executed but no output is compared at all. An -error or failure is reported only if an exception is raised. - -NOTE: the execution of pure python blocks is not yet working! +======================= + Combo testing example +======================= + +This is a simple example that mixes ipython doctests:: + + In [1]: import code + + In [2]: 2**12 + Out[2]: 4096 + +with command-line example information that does *not* get executed:: + + $ mpirun -n 4 ipengine --controller-port=10000 --controller-ip=host0 + +and with literal examples of Python source code:: + + controller = dict(host='myhost', + engine_port=None, # default is 10105 + control_port=None, + ) + + # keys are hostnames, values are the number of engine on that host + engines = dict(node1=2, + node2=2, + node3=2, + node3=2, + ) + + # Force failure to detect that this test is being run. + 1/0 + +These source code examples are executed but no output is compared at all. An +error or failure is reported only if an exception is raised. + +NOTE: the execution of pure python blocks is not yet working! diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/test_example.txt b/contrib/python/ipython/py3/IPython/testing/plugin/test_example.txt index f6258b0615d..f8b681eb4ff 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/test_example.txt +++ b/contrib/python/ipython/py3/IPython/testing/plugin/test_example.txt @@ -1,24 +1,24 @@ -===================================== - Tests in example form - pure python -===================================== - -This file contains doctest examples embedded as code blocks, using normal -Python prompts. See the accompanying file for similar examples using IPython -prompts (you can't mix both types within one file). The following will be run -as a test:: - - >>> 1+1 - 2 - >>> print ("hello") - hello - -More than one example works:: - - >>> s="Hello World" - - >>> s.upper() - 'HELLO WORLD' - -but you should note that the *entire* test file is considered to be a single -test. 
Individual code blocks that fail are printed separately as ``example -failures``, but the whole file is still counted and reported as one test. +===================================== + Tests in example form - pure python +===================================== + +This file contains doctest examples embedded as code blocks, using normal +Python prompts. See the accompanying file for similar examples using IPython +prompts (you can't mix both types within one file). The following will be run +as a test:: + + >>> 1+1 + 2 + >>> print ("hello") + hello + +More than one example works:: + + >>> s="Hello World" + + >>> s.upper() + 'HELLO WORLD' + +but you should note that the *entire* test file is considered to be a single +test. Individual code blocks that fail are printed separately as ``example +failures``, but the whole file is still counted and reported as one test. diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/test_exampleip.txt b/contrib/python/ipython/py3/IPython/testing/plugin/test_exampleip.txt index cbc00cc9761..8afcbfdf7d8 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/test_exampleip.txt +++ b/contrib/python/ipython/py3/IPython/testing/plugin/test_exampleip.txt @@ -1,30 +1,30 @@ -================================= - Tests in example form - IPython -================================= - -You can write text files with examples that use IPython prompts (as long as you -use the nose ipython doctest plugin), but you can not mix and match prompt -styles in a single file. That is, you either use all ``>>>`` prompts or all -IPython-style prompts. Your test suite *can* have both types, you just need to -put each type of example in a separate. Using IPython prompts, you can paste -directly from your session:: - - In [5]: s="Hello World" - - In [6]: s.upper() - Out[6]: 'HELLO WORLD' - -Another example:: - - In [8]: 1+3 - Out[8]: 4 - -Just like in IPython docstrings, you can use all IPython syntax and features:: - - In [9]: !echo "hello" - hello - - In [10]: a='hi' - - In [11]: !echo $a - hi +================================= + Tests in example form - IPython +================================= + +You can write text files with examples that use IPython prompts (as long as you +use the nose ipython doctest plugin), but you can not mix and match prompt +styles in a single file. That is, you either use all ``>>>`` prompts or all +IPython-style prompts. Your test suite *can* have both types, you just need to +put each type of example in a separate. Using IPython prompts, you can paste +directly from your session:: + + In [5]: s="Hello World" + + In [6]: s.upper() + Out[6]: 'HELLO WORLD' + +Another example:: + + In [8]: 1+3 + Out[8]: 4 + +Just like in IPython docstrings, you can use all IPython syntax and features:: + + In [9]: !echo "hello" + hello + + In [10]: a='hi' + + In [11]: !echo $a + hi diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/test_ipdoctest.py b/contrib/python/ipython/py3/IPython/testing/plugin/test_ipdoctest.py index 2e9b688ca6f..d8f59916369 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/test_ipdoctest.py +++ b/contrib/python/ipython/py3/IPython/testing/plugin/test_ipdoctest.py @@ -1,76 +1,76 @@ -"""Tests for the ipdoctest machinery itself. 
- -Note: in a file named test_X, functions whose only test is their docstring (as -a doctest) and which have no test functionality of their own, should be called -'doctest_foo' instead of 'test_foo', otherwise they get double-counted (the -empty function call is counted as a test, which just inflates tests numbers -artificially). -""" - -def doctest_simple(): - """ipdoctest must handle simple inputs - - In [1]: 1 - Out[1]: 1 - - In [2]: print(1) - 1 - """ - -def doctest_multiline1(): - """The ipdoctest machinery must handle multiline examples gracefully. - - In [2]: for i in range(4): - ...: print(i) - ...: - 0 - 1 - 2 - 3 - """ - -def doctest_multiline2(): - """Multiline examples that define functions and print output. - - In [7]: def f(x): - ...: return x+1 - ...: - - In [8]: f(1) - Out[8]: 2 - - In [9]: def g(x): - ...: print('x is:',x) - ...: - - In [10]: g(1) - x is: 1 - - In [11]: g('hello') - x is: hello - """ - - -def doctest_multiline3(): - """Multiline examples with blank lines. - - In [12]: def h(x): - ....: if x>1: - ....: return x**2 - ....: # To leave a blank line in the input, you must mark it - ....: # with a comment character: - ....: # - ....: # otherwise the doctest parser gets confused. - ....: else: - ....: return -1 - ....: - - In [13]: h(5) - Out[13]: 25 - - In [14]: h(1) - Out[14]: -1 - - In [15]: h(0) - Out[15]: -1 - """ +"""Tests for the ipdoctest machinery itself. + +Note: in a file named test_X, functions whose only test is their docstring (as +a doctest) and which have no test functionality of their own, should be called +'doctest_foo' instead of 'test_foo', otherwise they get double-counted (the +empty function call is counted as a test, which just inflates tests numbers +artificially). +""" + +def doctest_simple(): + """ipdoctest must handle simple inputs + + In [1]: 1 + Out[1]: 1 + + In [2]: print(1) + 1 + """ + +def doctest_multiline1(): + """The ipdoctest machinery must handle multiline examples gracefully. + + In [2]: for i in range(4): + ...: print(i) + ...: + 0 + 1 + 2 + 3 + """ + +def doctest_multiline2(): + """Multiline examples that define functions and print output. + + In [7]: def f(x): + ...: return x+1 + ...: + + In [8]: f(1) + Out[8]: 2 + + In [9]: def g(x): + ...: print('x is:',x) + ...: + + In [10]: g(1) + x is: 1 + + In [11]: g('hello') + x is: hello + """ + + +def doctest_multiline3(): + """Multiline examples with blank lines. + + In [12]: def h(x): + ....: if x>1: + ....: return x**2 + ....: # To leave a blank line in the input, you must mark it + ....: # with a comment character: + ....: # + ....: # otherwise the doctest parser gets confused. + ....: else: + ....: return -1 + ....: + + In [13]: h(5) + Out[13]: 25 + + In [14]: h(1) + Out[14]: -1 + + In [15]: h(0) + Out[15]: -1 + """ diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/test_refs.py b/contrib/python/ipython/py3/IPython/testing/plugin/test_refs.py index d8464fbf574..bd7ad8fb3e3 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/test_refs.py +++ b/contrib/python/ipython/py3/IPython/testing/plugin/test_refs.py @@ -1,46 +1,46 @@ -"""Some simple tests for the plugin while running scripts. -""" -# Module imports -# Std lib -import inspect - -# Our own - -#----------------------------------------------------------------------------- -# Testing functions - -def test_trivial(): - """A trivial passing test.""" - pass - -def doctest_run(): - """Test running a trivial script. 
- - In [13]: run simplevars.py - x is: 1 - """ - -def doctest_runvars(): - """Test that variables defined in scripts get loaded correctly via %run. - - In [13]: run simplevars.py - x is: 1 - - In [14]: x - Out[14]: 1 - """ - -def doctest_ivars(): - """Test that variables defined interactively are picked up. - In [5]: zz=1 - - In [6]: zz - Out[6]: 1 - """ - -def doctest_refs(): - """DocTest reference holding issues when running scripts. - - In [32]: run show_refs.py - c referrers: [<... 'dict'>] - """ +"""Some simple tests for the plugin while running scripts. +""" +# Module imports +# Std lib +import inspect + +# Our own + +#----------------------------------------------------------------------------- +# Testing functions + +def test_trivial(): + """A trivial passing test.""" + pass + +def doctest_run(): + """Test running a trivial script. + + In [13]: run simplevars.py + x is: 1 + """ + +def doctest_runvars(): + """Test that variables defined in scripts get loaded correctly via %run. + + In [13]: run simplevars.py + x is: 1 + + In [14]: x + Out[14]: 1 + """ + +def doctest_ivars(): + """Test that variables defined interactively are picked up. + In [5]: zz=1 + + In [6]: zz + Out[6]: 1 + """ + +def doctest_refs(): + """DocTest reference holding issues when running scripts. + + In [32]: run show_refs.py + c referrers: [<... 'dict'>] + """ diff --git a/contrib/python/ipython/py3/IPython/testing/skipdoctest.py b/contrib/python/ipython/py3/IPython/testing/skipdoctest.py index 64801b6781c..b0cf83c449e 100644 --- a/contrib/python/ipython/py3/IPython/testing/skipdoctest.py +++ b/contrib/python/ipython/py3/IPython/testing/skipdoctest.py @@ -1,19 +1,19 @@ -"""Decorators marks that a doctest should be skipped. - -The IPython.testing.decorators module triggers various extra imports, including -numpy and sympy if they're present. Since this decorator is used in core parts -of IPython, it's in a separate module so that running IPython doesn't trigger -those imports.""" - -# Copyright (C) IPython Development Team -# Distributed under the terms of the Modified BSD License. - - -def skip_doctest(f): - """Decorator - mark a function or method for skipping its doctest. - - This decorator allows you to mark a function whose docstring you wish to - omit from testing, while preserving the docstring for introspection, help, - etc.""" - f.skip_doctest = True - return f +"""Decorators marks that a doctest should be skipped. + +The IPython.testing.decorators module triggers various extra imports, including +numpy and sympy if they're present. Since this decorator is used in core parts +of IPython, it's in a separate module so that running IPython doesn't trigger +those imports.""" + +# Copyright (C) IPython Development Team +# Distributed under the terms of the Modified BSD License. + + +def skip_doctest(f): + """Decorator - mark a function or method for skipping its doctest. + + This decorator allows you to mark a function whose docstring you wish to + omit from testing, while preserving the docstring for introspection, help, + etc.""" + f.skip_doctest = True + return f diff --git a/contrib/python/ipython/py3/IPython/testing/tools.py b/contrib/python/ipython/py3/IPython/testing/tools.py index cea8d77ab48..e7e7285f49e 100644 --- a/contrib/python/ipython/py3/IPython/testing/tools.py +++ b/contrib/python/ipython/py3/IPython/testing/tools.py @@ -1,471 +1,471 @@ -"""Generic testing tools. - -Authors -------- -- Fernando Perez <Fernando.Perez@berkeley.edu> -""" - - -# Copyright (c) IPython Development Team. 
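# A minimal usage sketch of the skip_doctest decorator from skipdoctest.py
# above, assuming IPython is importable; the decorated function is hypothetical.
# The decorator only sets an attribute, so the docstring stays visible to
# help()/introspection while the doctest machinery can skip it.
from IPython.testing.skipdoctest import skip_doctest

@skip_doctest
def platform_dependent_example():
    """Kept for documentation, but its example output differs per platform.

    >>> print(__import__('os').sep)
    /
    """

assert platform_dependent_example.skip_doctest is True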
-# Distributed under the terms of the Modified BSD License. - -import os -import re -import sys -import tempfile -import unittest - -from contextlib import contextmanager -from io import StringIO -from subprocess import Popen, PIPE -from unittest.mock import patch - -try: - # These tools are used by parts of the runtime, so we make the nose - # dependency optional at this point. Nose is a hard dependency to run the - # test suite, but NOT to use ipython itself. - import nose.tools as nt - has_nose = True -except ImportError: - has_nose = False - -from traitlets.config.loader import Config -from IPython.utils.process import get_output_error_code -from IPython.utils.text import list_strings -from IPython.utils.io import temp_pyfile, Tee -from IPython.utils import py3compat - -from . import decorators as dec -from . import skipdoctest - - -# The docstring for full_path doctests differently on win32 (different path -# separator) so just skip the doctest there. The example remains informative. -doctest_deco = skipdoctest.skip_doctest if sys.platform == 'win32' else dec.null_deco - -@doctest_deco -def full_path(startPath,files): - """Make full paths for all the listed files, based on startPath. - - Only the base part of startPath is kept, since this routine is typically - used with a script's ``__file__`` variable as startPath. The base of startPath - is then prepended to all the listed files, forming the output list. - - Parameters - ---------- - startPath : string - Initial path to use as the base for the results. This path is split - using os.path.split() and only its first component is kept. - - files : string or list - One or more files. - - Examples - -------- - - >>> full_path('/foo/bar.py',['a.txt','b.txt']) - ['/foo/a.txt', '/foo/b.txt'] - - >>> full_path('/foo',['a.txt','b.txt']) - ['/a.txt', '/b.txt'] - - If a single file is given, the output is still a list:: - - >>> full_path('/foo','a.txt') - ['/a.txt'] - """ - - files = list_strings(files) - base = os.path.split(startPath)[0] - return [ os.path.join(base,f) for f in files ] - - -def parse_test_output(txt): - """Parse the output of a test run and return errors, failures. - - Parameters - ---------- - txt : str - Text output of a test run, assumed to contain a line of one of the - following forms:: - - 'FAILED (errors=1)' - 'FAILED (failures=1)' - 'FAILED (errors=1, failures=1)' - - Returns - ------- - nerr, nfail - number of errors and failures. 
- """ - - err_m = re.search(r'^FAILED \(errors=(\d+)\)', txt, re.MULTILINE) - if err_m: - nerr = int(err_m.group(1)) - nfail = 0 - return nerr, nfail - - fail_m = re.search(r'^FAILED \(failures=(\d+)\)', txt, re.MULTILINE) - if fail_m: - nerr = 0 - nfail = int(fail_m.group(1)) - return nerr, nfail - - both_m = re.search(r'^FAILED \(errors=(\d+), failures=(\d+)\)', txt, - re.MULTILINE) - if both_m: - nerr = int(both_m.group(1)) - nfail = int(both_m.group(2)) - return nerr, nfail - - # If the input didn't match any of these forms, assume no error/failures - return 0, 0 - - -# So nose doesn't think this is a test -parse_test_output.__test__ = False - - -def default_argv(): - """Return a valid default argv for creating testing instances of ipython""" - - return ['--quick', # so no config file is loaded - # Other defaults to minimize side effects on stdout - '--colors=NoColor', '--no-term-title','--no-banner', - '--autocall=0'] - - -def default_config(): - """Return a config object with good defaults for testing.""" - config = Config() - config.TerminalInteractiveShell.colors = 'NoColor' - config.TerminalTerminalInteractiveShell.term_title = False, - config.TerminalInteractiveShell.autocall = 0 - f = tempfile.NamedTemporaryFile(suffix=u'test_hist.sqlite', delete=False) - config.HistoryManager.hist_file = f.name - f.close() - config.HistoryManager.db_cache_size = 10000 - return config - - -def get_ipython_cmd(as_string=False): - """ - Return appropriate IPython command line name. By default, this will return - a list that can be used with subprocess.Popen, for example, but passing - `as_string=True` allows for returning the IPython command as a string. - - Parameters - ---------- - as_string: bool - Flag to allow to return the command as a string. - """ - ipython_cmd = [sys.executable, "-m", "IPython"] - - if as_string: - ipython_cmd = " ".join(ipython_cmd) - - return ipython_cmd - -def ipexec(fname, options=None, commands=()): - """Utility to call 'ipython filename'. - - Starts IPython with a minimal and safe configuration to make startup as fast - as possible. - - Note that this starts IPython in a subprocess! - - Parameters - ---------- - fname : str - Name of file to be executed (should have .py or .ipy extension). - - options : optional, list - Extra command-line flags to be passed to IPython. - - commands : optional, list - Commands to send in on stdin - - Returns - ------- - ``(stdout, stderr)`` of ipython subprocess. - """ - if options is None: options = [] - - cmdargs = default_argv() + options - - test_dir = os.path.dirname(__file__) - - ipython_cmd = get_ipython_cmd() - # Absolute path for filename - full_fname = os.path.join(test_dir, fname) - full_cmd = ipython_cmd + cmdargs + ['--', full_fname] - env = os.environ.copy() - # FIXME: ignore all warnings in ipexec while we have shims - # should we keep suppressing warnings here, even after removing shims? 
- env['PYTHONWARNINGS'] = 'ignore' - # env.pop('PYTHONWARNINGS', None) # Avoid extraneous warnings appearing on stderr - for k, v in env.items(): - # Debug a bizarre failure we've seen on Windows: - # TypeError: environment can only contain strings - if not isinstance(v, str): - print(k, v) - p = Popen(full_cmd, stdout=PIPE, stderr=PIPE, stdin=PIPE, env=env) - out, err = p.communicate(input=py3compat.encode('\n'.join(commands)) or None) - out, err = py3compat.decode(out), py3compat.decode(err) - # `import readline` causes 'ESC[?1034h' to be output sometimes, - # so strip that out before doing comparisons - if out: - out = re.sub(r'\x1b\[[^h]+h', '', out) - return out, err - - -def ipexec_validate(fname, expected_out, expected_err='', - options=None, commands=()): - """Utility to call 'ipython filename' and validate output/error. - - This function raises an AssertionError if the validation fails. - - Note that this starts IPython in a subprocess! - - Parameters - ---------- - fname : str - Name of the file to be executed (should have .py or .ipy extension). - - expected_out : str - Expected stdout of the process. - - expected_err : optional, str - Expected stderr of the process. - - options : optional, list - Extra command-line flags to be passed to IPython. - - Returns - ------- - None - """ - - import nose.tools as nt - - out, err = ipexec(fname, options, commands) - #print 'OUT', out # dbg - #print 'ERR', err # dbg - # If there are any errors, we must check those before stdout, as they may be - # more informative than simply having an empty stdout. - if err: - if expected_err: - nt.assert_equal("\n".join(err.strip().splitlines()), "\n".join(expected_err.strip().splitlines())) - else: - raise ValueError('Running file %r produced error: %r' % - (fname, err)) - # If no errors or output on stderr was expected, match stdout - nt.assert_equal("\n".join(out.strip().splitlines()), "\n".join(expected_out.strip().splitlines())) - - -class TempFileMixin(unittest.TestCase): - """Utility class to create temporary Python/IPython files. - - Meant as a mixin class for test cases.""" - - def mktmp(self, src, ext='.py'): - """Make a valid python temp file.""" - fname = temp_pyfile(src, ext) - if not hasattr(self, 'tmps'): - self.tmps=[] - self.tmps.append(fname) - self.fname = fname - - def tearDown(self): - # If the tmpfile wasn't made because of skipped tests, like in - # win32, there's nothing to cleanup. - if hasattr(self, 'tmps'): - for fname in self.tmps: - # If the tmpfile wasn't made because of skipped tests, like in - # win32, there's nothing to cleanup. - try: - os.unlink(fname) - except: - # On Windows, even though we close the file, we still can't - # delete it. I have no clue why - pass - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, traceback): - self.tearDown() - - -pair_fail_msg = ("Testing {0}\n\n" - "In:\n" - " {1!r}\n" - "Expected:\n" - " {2!r}\n" - "Got:\n" - " {3!r}\n") -def check_pairs(func, pairs): - """Utility function for the common case of checking a function with a - sequence of input/output pairs. - - Parameters - ---------- - func : callable - The function to be tested. Should accept a single argument. - pairs : iterable - A list of (input, expected_output) tuples. - - Returns - ------- - None. Raises an AssertionError if any output does not match the expected - value. 
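# A small usage sketch of check_pairs(), assuming IPython is importable.
# Each (input, expected) pair is pushed through the function; a mismatch
# raises AssertionError formatted with pair_fail_msg. str.upper is just an
# arbitrary one-argument function for illustration.
from IPython.testing.tools import check_pairs

check_pairs(str.upper, [("abc", "ABC"), ("Hello World", "HELLO WORLD")])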
- """ - name = getattr(func, "func_name", getattr(func, "__name__", "<unknown>")) - for inp, expected in pairs: - out = func(inp) - assert out == expected, pair_fail_msg.format(name, inp, expected, out) - - -MyStringIO = StringIO - -_re_type = type(re.compile(r'')) - -notprinted_msg = """Did not find {0!r} in printed output (on {1}): -------- -{2!s} -------- -""" - -class AssertPrints(object): - """Context manager for testing that code prints certain text. - - Examples - -------- - >>> with AssertPrints("abc", suppress=False): - ... print("abcd") - ... print("def") - ... - abcd - def - """ - def __init__(self, s, channel='stdout', suppress=True): - self.s = s - if isinstance(self.s, (str, _re_type)): - self.s = [self.s] - self.channel = channel - self.suppress = suppress - - def __enter__(self): - self.orig_stream = getattr(sys, self.channel) - self.buffer = MyStringIO() - self.tee = Tee(self.buffer, channel=self.channel) - setattr(sys, self.channel, self.buffer if self.suppress else self.tee) - - def __exit__(self, etype, value, traceback): - try: - if value is not None: - # If an error was raised, don't check anything else - return False - self.tee.flush() - setattr(sys, self.channel, self.orig_stream) - printed = self.buffer.getvalue() - for s in self.s: - if isinstance(s, _re_type): - assert s.search(printed), notprinted_msg.format(s.pattern, self.channel, printed) - else: - assert s in printed, notprinted_msg.format(s, self.channel, printed) - return False - finally: - self.tee.close() - -printed_msg = """Found {0!r} in printed output (on {1}): -------- -{2!s} -------- -""" - -class AssertNotPrints(AssertPrints): - """Context manager for checking that certain output *isn't* produced. - - Counterpart of AssertPrints""" - def __exit__(self, etype, value, traceback): - try: - if value is not None: - # If an error was raised, don't check anything else - self.tee.close() - return False - self.tee.flush() - setattr(sys, self.channel, self.orig_stream) - printed = self.buffer.getvalue() - for s in self.s: - if isinstance(s, _re_type): - assert not s.search(printed),printed_msg.format( - s.pattern, self.channel, printed) - else: - assert s not in printed, printed_msg.format( - s, self.channel, printed) - return False - finally: - self.tee.close() - -@contextmanager -def mute_warn(): - from IPython.utils import warn - save_warn = warn.warn - warn.warn = lambda *a, **kw: None - try: - yield - finally: - warn.warn = save_warn - -@contextmanager -def make_tempfile(name): - """ Create an empty, named, temporary file for the duration of the context. - """ - open(name, 'w').close() - try: - yield - finally: - os.unlink(name) - -def fake_input(inputs): - """Temporarily replace the input() function to return the given values - - Use as a context manager: - - with fake_input(['result1', 'result2']): - ... - - Values are returned in order. If input() is called again after the last value - was used, EOFError is raised. 
- """ - it = iter(inputs) - def mock_input(prompt=''): - try: - return next(it) - except StopIteration: - raise EOFError('No more inputs given') - - return patch('builtins.input', mock_input) - -def help_output_test(subcommand=''): - """test that `ipython [subcommand] -h` works""" - cmd = get_ipython_cmd() + [subcommand, '-h'] - out, err, rc = get_output_error_code(cmd) - nt.assert_equal(rc, 0, err) - nt.assert_not_in("Traceback", err) - nt.assert_in("Options", out) - nt.assert_in("--help-all", out) - return out, err - - -def help_all_output_test(subcommand=''): - """test that `ipython [subcommand] --help-all` works""" - cmd = get_ipython_cmd() + [subcommand, '--help-all'] - out, err, rc = get_output_error_code(cmd) - nt.assert_equal(rc, 0, err) - nt.assert_not_in("Traceback", err) - nt.assert_in("Options", out) - nt.assert_in("Class", out) - return out, err - +"""Generic testing tools. + +Authors +------- +- Fernando Perez <Fernando.Perez@berkeley.edu> +""" + + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import os +import re +import sys +import tempfile +import unittest + +from contextlib import contextmanager +from io import StringIO +from subprocess import Popen, PIPE +from unittest.mock import patch + +try: + # These tools are used by parts of the runtime, so we make the nose + # dependency optional at this point. Nose is a hard dependency to run the + # test suite, but NOT to use ipython itself. + import nose.tools as nt + has_nose = True +except ImportError: + has_nose = False + +from traitlets.config.loader import Config +from IPython.utils.process import get_output_error_code +from IPython.utils.text import list_strings +from IPython.utils.io import temp_pyfile, Tee +from IPython.utils import py3compat + +from . import decorators as dec +from . import skipdoctest + + +# The docstring for full_path doctests differently on win32 (different path +# separator) so just skip the doctest there. The example remains informative. +doctest_deco = skipdoctest.skip_doctest if sys.platform == 'win32' else dec.null_deco + +@doctest_deco +def full_path(startPath,files): + """Make full paths for all the listed files, based on startPath. + + Only the base part of startPath is kept, since this routine is typically + used with a script's ``__file__`` variable as startPath. The base of startPath + is then prepended to all the listed files, forming the output list. + + Parameters + ---------- + startPath : string + Initial path to use as the base for the results. This path is split + using os.path.split() and only its first component is kept. + + files : string or list + One or more files. + + Examples + -------- + + >>> full_path('/foo/bar.py',['a.txt','b.txt']) + ['/foo/a.txt', '/foo/b.txt'] + + >>> full_path('/foo',['a.txt','b.txt']) + ['/a.txt', '/b.txt'] + + If a single file is given, the output is still a list:: + + >>> full_path('/foo','a.txt') + ['/a.txt'] + """ + + files = list_strings(files) + base = os.path.split(startPath)[0] + return [ os.path.join(base,f) for f in files ] + + +def parse_test_output(txt): + """Parse the output of a test run and return errors, failures. + + Parameters + ---------- + txt : str + Text output of a test run, assumed to contain a line of one of the + following forms:: + + 'FAILED (errors=1)' + 'FAILED (failures=1)' + 'FAILED (errors=1, failures=1)' + + Returns + ------- + nerr, nfail + number of errors and failures. 
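# A minimal sketch of parse_test_output(), assuming IPython is importable.
# It scans the run's text for the unittest summary line and returns the
# (errors, failures) counts; output with no FAILED line counts as a clean run.
from IPython.testing.tools import parse_test_output

report = "Ran 12 tests in 0.034s\n\nFAILED (errors=1, failures=2)\n"
assert parse_test_output(report) == (1, 2)
assert parse_test_output("Ran 12 tests in 0.034s\n\nOK\n") == (0, 0)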
+ """ + + err_m = re.search(r'^FAILED \(errors=(\d+)\)', txt, re.MULTILINE) + if err_m: + nerr = int(err_m.group(1)) + nfail = 0 + return nerr, nfail + + fail_m = re.search(r'^FAILED \(failures=(\d+)\)', txt, re.MULTILINE) + if fail_m: + nerr = 0 + nfail = int(fail_m.group(1)) + return nerr, nfail + + both_m = re.search(r'^FAILED \(errors=(\d+), failures=(\d+)\)', txt, + re.MULTILINE) + if both_m: + nerr = int(both_m.group(1)) + nfail = int(both_m.group(2)) + return nerr, nfail + + # If the input didn't match any of these forms, assume no error/failures + return 0, 0 + + +# So nose doesn't think this is a test +parse_test_output.__test__ = False + + +def default_argv(): + """Return a valid default argv for creating testing instances of ipython""" + + return ['--quick', # so no config file is loaded + # Other defaults to minimize side effects on stdout + '--colors=NoColor', '--no-term-title','--no-banner', + '--autocall=0'] + + +def default_config(): + """Return a config object with good defaults for testing.""" + config = Config() + config.TerminalInteractiveShell.colors = 'NoColor' + config.TerminalTerminalInteractiveShell.term_title = False, + config.TerminalInteractiveShell.autocall = 0 + f = tempfile.NamedTemporaryFile(suffix=u'test_hist.sqlite', delete=False) + config.HistoryManager.hist_file = f.name + f.close() + config.HistoryManager.db_cache_size = 10000 + return config + + +def get_ipython_cmd(as_string=False): + """ + Return appropriate IPython command line name. By default, this will return + a list that can be used with subprocess.Popen, for example, but passing + `as_string=True` allows for returning the IPython command as a string. + + Parameters + ---------- + as_string: bool + Flag to allow to return the command as a string. + """ + ipython_cmd = [sys.executable, "-m", "IPython"] + + if as_string: + ipython_cmd = " ".join(ipython_cmd) + + return ipython_cmd + +def ipexec(fname, options=None, commands=()): + """Utility to call 'ipython filename'. + + Starts IPython with a minimal and safe configuration to make startup as fast + as possible. + + Note that this starts IPython in a subprocess! + + Parameters + ---------- + fname : str + Name of file to be executed (should have .py or .ipy extension). + + options : optional, list + Extra command-line flags to be passed to IPython. + + commands : optional, list + Commands to send in on stdin + + Returns + ------- + ``(stdout, stderr)`` of ipython subprocess. + """ + if options is None: options = [] + + cmdargs = default_argv() + options + + test_dir = os.path.dirname(__file__) + + ipython_cmd = get_ipython_cmd() + # Absolute path for filename + full_fname = os.path.join(test_dir, fname) + full_cmd = ipython_cmd + cmdargs + ['--', full_fname] + env = os.environ.copy() + # FIXME: ignore all warnings in ipexec while we have shims + # should we keep suppressing warnings here, even after removing shims? 
+ env['PYTHONWARNINGS'] = 'ignore' + # env.pop('PYTHONWARNINGS', None) # Avoid extraneous warnings appearing on stderr + for k, v in env.items(): + # Debug a bizarre failure we've seen on Windows: + # TypeError: environment can only contain strings + if not isinstance(v, str): + print(k, v) + p = Popen(full_cmd, stdout=PIPE, stderr=PIPE, stdin=PIPE, env=env) + out, err = p.communicate(input=py3compat.encode('\n'.join(commands)) or None) + out, err = py3compat.decode(out), py3compat.decode(err) + # `import readline` causes 'ESC[?1034h' to be output sometimes, + # so strip that out before doing comparisons + if out: + out = re.sub(r'\x1b\[[^h]+h', '', out) + return out, err + + +def ipexec_validate(fname, expected_out, expected_err='', + options=None, commands=()): + """Utility to call 'ipython filename' and validate output/error. + + This function raises an AssertionError if the validation fails. + + Note that this starts IPython in a subprocess! + + Parameters + ---------- + fname : str + Name of the file to be executed (should have .py or .ipy extension). + + expected_out : str + Expected stdout of the process. + + expected_err : optional, str + Expected stderr of the process. + + options : optional, list + Extra command-line flags to be passed to IPython. + + Returns + ------- + None + """ + + import nose.tools as nt + + out, err = ipexec(fname, options, commands) + #print 'OUT', out # dbg + #print 'ERR', err # dbg + # If there are any errors, we must check those before stdout, as they may be + # more informative than simply having an empty stdout. + if err: + if expected_err: + nt.assert_equal("\n".join(err.strip().splitlines()), "\n".join(expected_err.strip().splitlines())) + else: + raise ValueError('Running file %r produced error: %r' % + (fname, err)) + # If no errors or output on stderr was expected, match stdout + nt.assert_equal("\n".join(out.strip().splitlines()), "\n".join(expected_out.strip().splitlines())) + + +class TempFileMixin(unittest.TestCase): + """Utility class to create temporary Python/IPython files. + + Meant as a mixin class for test cases.""" + + def mktmp(self, src, ext='.py'): + """Make a valid python temp file.""" + fname = temp_pyfile(src, ext) + if not hasattr(self, 'tmps'): + self.tmps=[] + self.tmps.append(fname) + self.fname = fname + + def tearDown(self): + # If the tmpfile wasn't made because of skipped tests, like in + # win32, there's nothing to cleanup. + if hasattr(self, 'tmps'): + for fname in self.tmps: + # If the tmpfile wasn't made because of skipped tests, like in + # win32, there's nothing to cleanup. + try: + os.unlink(fname) + except: + # On Windows, even though we close the file, we still can't + # delete it. I have no clue why + pass + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.tearDown() + + +pair_fail_msg = ("Testing {0}\n\n" + "In:\n" + " {1!r}\n" + "Expected:\n" + " {2!r}\n" + "Got:\n" + " {3!r}\n") +def check_pairs(func, pairs): + """Utility function for the common case of checking a function with a + sequence of input/output pairs. + + Parameters + ---------- + func : callable + The function to be tested. Should accept a single argument. + pairs : iterable + A list of (input, expected_output) tuples. + + Returns + ------- + None. Raises an AssertionError if any output does not match the expected + value. 
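# A usage sketch of TempFileMixin, assuming IPython is importable; the test
# class and source string are hypothetical. mktmp() writes the source to a
# temporary .py file, records it in self.tmps/self.fname, and tearDown()
# removes whatever was created.
from IPython.testing.tools import TempFileMixin

class TestTempSource(TempFileMixin):
    def test_file_contains_source(self):
        self.mktmp("x = 1\n")
        with open(self.fname) as f:
            self.assertIn("x = 1", f.read())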
+ """ + name = getattr(func, "func_name", getattr(func, "__name__", "<unknown>")) + for inp, expected in pairs: + out = func(inp) + assert out == expected, pair_fail_msg.format(name, inp, expected, out) + + +MyStringIO = StringIO + +_re_type = type(re.compile(r'')) + +notprinted_msg = """Did not find {0!r} in printed output (on {1}): +------- +{2!s} +------- +""" + +class AssertPrints(object): + """Context manager for testing that code prints certain text. + + Examples + -------- + >>> with AssertPrints("abc", suppress=False): + ... print("abcd") + ... print("def") + ... + abcd + def + """ + def __init__(self, s, channel='stdout', suppress=True): + self.s = s + if isinstance(self.s, (str, _re_type)): + self.s = [self.s] + self.channel = channel + self.suppress = suppress + + def __enter__(self): + self.orig_stream = getattr(sys, self.channel) + self.buffer = MyStringIO() + self.tee = Tee(self.buffer, channel=self.channel) + setattr(sys, self.channel, self.buffer if self.suppress else self.tee) + + def __exit__(self, etype, value, traceback): + try: + if value is not None: + # If an error was raised, don't check anything else + return False + self.tee.flush() + setattr(sys, self.channel, self.orig_stream) + printed = self.buffer.getvalue() + for s in self.s: + if isinstance(s, _re_type): + assert s.search(printed), notprinted_msg.format(s.pattern, self.channel, printed) + else: + assert s in printed, notprinted_msg.format(s, self.channel, printed) + return False + finally: + self.tee.close() + +printed_msg = """Found {0!r} in printed output (on {1}): +------- +{2!s} +------- +""" + +class AssertNotPrints(AssertPrints): + """Context manager for checking that certain output *isn't* produced. + + Counterpart of AssertPrints""" + def __exit__(self, etype, value, traceback): + try: + if value is not None: + # If an error was raised, don't check anything else + self.tee.close() + return False + self.tee.flush() + setattr(sys, self.channel, self.orig_stream) + printed = self.buffer.getvalue() + for s in self.s: + if isinstance(s, _re_type): + assert not s.search(printed),printed_msg.format( + s.pattern, self.channel, printed) + else: + assert s not in printed, printed_msg.format( + s, self.channel, printed) + return False + finally: + self.tee.close() + +@contextmanager +def mute_warn(): + from IPython.utils import warn + save_warn = warn.warn + warn.warn = lambda *a, **kw: None + try: + yield + finally: + warn.warn = save_warn + +@contextmanager +def make_tempfile(name): + """ Create an empty, named, temporary file for the duration of the context. + """ + open(name, 'w').close() + try: + yield + finally: + os.unlink(name) + +def fake_input(inputs): + """Temporarily replace the input() function to return the given values + + Use as a context manager: + + with fake_input(['result1', 'result2']): + ... + + Values are returned in order. If input() is called again after the last value + was used, EOFError is raised. 
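# A minimal sketch of fake_input(), assuming IPython is importable.
# It patches builtins.input for the duration of the with-block; queued values
# come back in order, and EOFError is raised once they run out.
from IPython.testing.tools import fake_input

def ask_twice():
    return input("first? "), input("second? ")

with fake_input(["yes", "no"]):
    assert ask_twice() == ("yes", "no")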
+ """ + it = iter(inputs) + def mock_input(prompt=''): + try: + return next(it) + except StopIteration: + raise EOFError('No more inputs given') + + return patch('builtins.input', mock_input) + +def help_output_test(subcommand=''): + """test that `ipython [subcommand] -h` works""" + cmd = get_ipython_cmd() + [subcommand, '-h'] + out, err, rc = get_output_error_code(cmd) + nt.assert_equal(rc, 0, err) + nt.assert_not_in("Traceback", err) + nt.assert_in("Options", out) + nt.assert_in("--help-all", out) + return out, err + + +def help_all_output_test(subcommand=''): + """test that `ipython [subcommand] --help-all` works""" + cmd = get_ipython_cmd() + [subcommand, '--help-all'] + out, err, rc = get_output_error_code(cmd) + nt.assert_equal(rc, 0, err) + nt.assert_not_in("Traceback", err) + nt.assert_in("Options", out) + nt.assert_in("Class", out) + return out, err + diff --git a/contrib/python/ipython/py3/IPython/utils/PyColorize.py b/contrib/python/ipython/py3/IPython/utils/PyColorize.py index 3fd5e77fb2a..86bb9af4c16 100644 --- a/contrib/python/ipython/py3/IPython/utils/PyColorize.py +++ b/contrib/python/ipython/py3/IPython/utils/PyColorize.py @@ -1,331 +1,331 @@ -# -*- coding: utf-8 -*- -""" -Class and program to colorize python source code for ANSI terminals. - -Based on an HTML code highlighter by Jurgen Hermann found at: -http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52298 - -Modifications by Fernando Perez (fperez@colorado.edu). - -Information on the original HTML highlighter follows: - -MoinMoin - Python Source Parser - -Title: Colorize Python source using the built-in tokenizer - -Submitter: Jurgen Hermann -Last Updated:2001/04/06 - -Version no:1.2 - -Description: - -This code is part of MoinMoin (http://moin.sourceforge.net/) and converts -Python source code to HTML markup, rendering comments, keywords, -operators, numeric and string literals in different colors. - -It shows how to use the built-in keyword, token and tokenize modules to -scan Python source code and re-emit it with no changes to its original -formatting (which is the hard part). -""" - -__all__ = ['ANSICodeColors', 'Parser'] - -_scheme_default = 'Linux' - - -# Imports -import keyword -import os -import sys -import token -import tokenize - -generate_tokens = tokenize.generate_tokens - -from IPython.utils.coloransi import TermColors, InputTermColors,ColorScheme, ColorSchemeTable -from .colorable import Colorable -from io import StringIO - -############################################################################# -### Python Source Parser (does Highlighting) -############################################################################# - -_KEYWORD = token.NT_OFFSET + 1 -_TEXT = token.NT_OFFSET + 2 - -#**************************************************************************** -# Builtin color schemes - -Colors = TermColors # just a shorthand - -# Build a few color schemes -NoColor = ColorScheme( - 'NoColor',{ - 'header' : Colors.NoColor, - token.NUMBER : Colors.NoColor, - token.OP : Colors.NoColor, - token.STRING : Colors.NoColor, - tokenize.COMMENT : Colors.NoColor, - token.NAME : Colors.NoColor, - token.ERRORTOKEN : Colors.NoColor, - - _KEYWORD : Colors.NoColor, - _TEXT : Colors.NoColor, - - 'in_prompt' : InputTermColors.NoColor, # Input prompt - 'in_number' : InputTermColors.NoColor, # Input prompt number - 'in_prompt2' : InputTermColors.NoColor, # Continuation prompt - 'in_normal' : InputTermColors.NoColor, # color off (usu. 
Colors.Normal) - - 'out_prompt' : Colors.NoColor, # Output prompt - 'out_number' : Colors.NoColor, # Output prompt number - - 'normal' : Colors.NoColor # color off (usu. Colors.Normal) - } ) - -LinuxColors = ColorScheme( - 'Linux',{ - 'header' : Colors.LightRed, - token.NUMBER : Colors.LightCyan, - token.OP : Colors.Yellow, - token.STRING : Colors.LightBlue, - tokenize.COMMENT : Colors.LightRed, - token.NAME : Colors.Normal, - token.ERRORTOKEN : Colors.Red, - - _KEYWORD : Colors.LightGreen, - _TEXT : Colors.Yellow, - - 'in_prompt' : InputTermColors.Green, - 'in_number' : InputTermColors.LightGreen, - 'in_prompt2' : InputTermColors.Green, - 'in_normal' : InputTermColors.Normal, # color off (usu. Colors.Normal) - - 'out_prompt' : Colors.Red, - 'out_number' : Colors.LightRed, - - 'normal' : Colors.Normal # color off (usu. Colors.Normal) - } ) - -NeutralColors = ColorScheme( - 'Neutral',{ - 'header' : Colors.Red, - token.NUMBER : Colors.Cyan, - token.OP : Colors.Blue, - token.STRING : Colors.Blue, - tokenize.COMMENT : Colors.Red, - token.NAME : Colors.Normal, - token.ERRORTOKEN : Colors.Red, - - _KEYWORD : Colors.Green, - _TEXT : Colors.Blue, - - 'in_prompt' : InputTermColors.Blue, - 'in_number' : InputTermColors.LightBlue, - 'in_prompt2' : InputTermColors.Blue, - 'in_normal' : InputTermColors.Normal, # color off (usu. Colors.Normal) - - 'out_prompt' : Colors.Red, - 'out_number' : Colors.LightRed, - - 'normal' : Colors.Normal # color off (usu. Colors.Normal) - } ) - -# Hack: the 'neutral' colours are not very visible on a dark background on -# Windows. Since Windows command prompts have a dark background by default, and -# relatively few users are likely to alter that, we will use the 'Linux' colours, -# designed for a dark background, as the default on Windows. Changing it here -# avoids affecting the prompt colours rendered by prompt_toolkit, where the -# neutral defaults do work OK. - -if os.name == 'nt': - NeutralColors = LinuxColors.copy(name='Neutral') - -LightBGColors = ColorScheme( - 'LightBG',{ - 'header' : Colors.Red, - token.NUMBER : Colors.Cyan, - token.OP : Colors.Blue, - token.STRING : Colors.Blue, - tokenize.COMMENT : Colors.Red, - token.NAME : Colors.Normal, - token.ERRORTOKEN : Colors.Red, - - - _KEYWORD : Colors.Green, - _TEXT : Colors.Blue, - - 'in_prompt' : InputTermColors.Blue, - 'in_number' : InputTermColors.LightBlue, - 'in_prompt2' : InputTermColors.Blue, - 'in_normal' : InputTermColors.Normal, # color off (usu. Colors.Normal) - - 'out_prompt' : Colors.Red, - 'out_number' : Colors.LightRed, - - 'normal' : Colors.Normal # color off (usu. Colors.Normal) - } ) - -# Build table of color schemes (needed by the parser) -ANSICodeColors = ColorSchemeTable([NoColor,LinuxColors,LightBGColors, NeutralColors], - _scheme_default) - -Undefined = object() - -class Parser(Colorable): - """ Format colored Python source. - """ - - def __init__(self, color_table=None, out = sys.stdout, parent=None, style=None): - """ Create a parser with a specified color table and output channel. - - Call format() to process code. 
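# A small usage sketch of the Parser class shown here, assuming IPython is
# importable; the source snippet is arbitrary. format2(raw, 'str') returns the
# ANSI-colourised code as a string plus an error flag, using one of the
# schemes registered in ANSICodeColors ('Linux' in this sketch).
from IPython.utils.PyColorize import Parser

src = "def add(a, b):\n    return a + b  # sum\n"
colorized, had_error = Parser(style='Linux').format2(src, 'str')
assert not had_error
print(colorized)   # the same code, wrapped in terminal colour escapes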
- """ - - super(Parser, self).__init__(parent=parent) - - self.color_table = color_table if color_table else ANSICodeColors - self.out = out - self.pos = None - self.lines = None - self.raw = None - if not style: - self.style = self.default_style - else: - self.style = style - - - def format(self, raw, out=None, scheme=Undefined): - import warnings - if scheme is not Undefined: - warnings.warn('The `scheme` argument of IPython.utils.PyColorize:Parser.format is deprecated since IPython 6.0.' - 'It will have no effect. Set the parser `style` directly.', - stacklevel=2) - return self.format2(raw, out)[0] - - def format2(self, raw, out = None): - """ Parse and send the colored source. - - If out and scheme are not specified, the defaults (given to - constructor) are used. - - out should be a file-type object. Optionally, out can be given as the - string 'str' and the parser will automatically return the output in a - string.""" - - string_output = 0 - if out == 'str' or self.out == 'str' or \ - isinstance(self.out, StringIO): - # XXX - I don't really like this state handling logic, but at this - # point I don't want to make major changes, so adding the - # isinstance() check is the simplest I can do to ensure correct - # behavior. - out_old = self.out - self.out = StringIO() - string_output = 1 - elif out is not None: - self.out = out - else: - raise ValueError('`out` or `self.out` should be file-like or the value `"str"`') - - # Fast return of the unmodified input for NoColor scheme - if self.style == 'NoColor': - error = False - self.out.write(raw) - if string_output: - return raw, error - return None, error - - # local shorthands - colors = self.color_table[self.style].colors - self.colors = colors # put in object so __call__ sees it - - # Remove trailing whitespace and normalize tabs - self.raw = raw.expandtabs().rstrip() - - # store line offsets in self.lines - self.lines = [0, 0] - pos = 0 - raw_find = self.raw.find - lines_append = self.lines.append - while True: - pos = raw_find('\n', pos) + 1 - if not pos: - break - lines_append(pos) - lines_append(len(self.raw)) - - # parse the source and write it - self.pos = 0 - text = StringIO(self.raw) - - error = False - try: - for atoken in generate_tokens(text.readline): - self(*atoken) - except tokenize.TokenError as ex: - msg = ex.args[0] - line = ex.args[1][0] - self.out.write("%s\n\n*** ERROR: %s%s%s\n" % - (colors[token.ERRORTOKEN], - msg, self.raw[self.lines[line]:], - colors.normal) - ) - error = True - self.out.write(colors.normal+'\n') - if string_output: - output = self.out.getvalue() - self.out = out_old - return (output, error) - return (None, error) - - - def _inner_call_(self, toktype, toktext, start_pos): - """like call but write to a temporary buffer""" - buff = StringIO() - srow, scol = start_pos - colors = self.colors - owrite = buff.write - - # line separator, so this works across platforms - linesep = os.linesep - - # calculate new positions - oldpos = self.pos - newpos = self.lines[srow] + scol - self.pos = newpos + len(toktext) - - # send the original whitespace, if needed - if newpos > oldpos: - owrite(self.raw[oldpos:newpos]) - - # skip indenting tokens - if toktype in [token.INDENT, token.DEDENT]: - self.pos = newpos - buff.seek(0) - return buff.read() - - # map token type to a color group - if token.LPAR <= toktype <= token.OP: - toktype = token.OP - elif toktype == token.NAME and keyword.iskeyword(toktext): - toktype = _KEYWORD - color = colors.get(toktype, colors[_TEXT]) - - # Triple quoted strings must be handled 
carefully so that backtracking - # in pagers works correctly. We need color terminators on _each_ line. - if linesep in toktext: - toktext = toktext.replace(linesep, '%s%s%s' % - (colors.normal,linesep,color)) - - # send text - owrite('%s%s%s' % (color,toktext,colors.normal)) - buff.seek(0) - return buff.read() - - - def __call__(self, toktype, toktext, start_pos, end_pos, line): - """ Token handler, with syntax highlighting.""" - self.out.write( - self._inner_call_(toktype, toktext, start_pos)) +# -*- coding: utf-8 -*- +""" +Class and program to colorize python source code for ANSI terminals. + +Based on an HTML code highlighter by Jurgen Hermann found at: +http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52298 + +Modifications by Fernando Perez (fperez@colorado.edu). + +Information on the original HTML highlighter follows: + +MoinMoin - Python Source Parser + +Title: Colorize Python source using the built-in tokenizer + +Submitter: Jurgen Hermann +Last Updated:2001/04/06 + +Version no:1.2 + +Description: + +This code is part of MoinMoin (http://moin.sourceforge.net/) and converts +Python source code to HTML markup, rendering comments, keywords, +operators, numeric and string literals in different colors. + +It shows how to use the built-in keyword, token and tokenize modules to +scan Python source code and re-emit it with no changes to its original +formatting (which is the hard part). +""" + +__all__ = ['ANSICodeColors', 'Parser'] + +_scheme_default = 'Linux' + + +# Imports +import keyword +import os +import sys +import token +import tokenize + +generate_tokens = tokenize.generate_tokens + +from IPython.utils.coloransi import TermColors, InputTermColors,ColorScheme, ColorSchemeTable +from .colorable import Colorable +from io import StringIO + +############################################################################# +### Python Source Parser (does Highlighting) +############################################################################# + +_KEYWORD = token.NT_OFFSET + 1 +_TEXT = token.NT_OFFSET + 2 + +#**************************************************************************** +# Builtin color schemes + +Colors = TermColors # just a shorthand + +# Build a few color schemes +NoColor = ColorScheme( + 'NoColor',{ + 'header' : Colors.NoColor, + token.NUMBER : Colors.NoColor, + token.OP : Colors.NoColor, + token.STRING : Colors.NoColor, + tokenize.COMMENT : Colors.NoColor, + token.NAME : Colors.NoColor, + token.ERRORTOKEN : Colors.NoColor, + + _KEYWORD : Colors.NoColor, + _TEXT : Colors.NoColor, + + 'in_prompt' : InputTermColors.NoColor, # Input prompt + 'in_number' : InputTermColors.NoColor, # Input prompt number + 'in_prompt2' : InputTermColors.NoColor, # Continuation prompt + 'in_normal' : InputTermColors.NoColor, # color off (usu. Colors.Normal) + + 'out_prompt' : Colors.NoColor, # Output prompt + 'out_number' : Colors.NoColor, # Output prompt number + + 'normal' : Colors.NoColor # color off (usu. Colors.Normal) + } ) + +LinuxColors = ColorScheme( + 'Linux',{ + 'header' : Colors.LightRed, + token.NUMBER : Colors.LightCyan, + token.OP : Colors.Yellow, + token.STRING : Colors.LightBlue, + tokenize.COMMENT : Colors.LightRed, + token.NAME : Colors.Normal, + token.ERRORTOKEN : Colors.Red, + + _KEYWORD : Colors.LightGreen, + _TEXT : Colors.Yellow, + + 'in_prompt' : InputTermColors.Green, + 'in_number' : InputTermColors.LightGreen, + 'in_prompt2' : InputTermColors.Green, + 'in_normal' : InputTermColors.Normal, # color off (usu. 
Colors.Normal) + + 'out_prompt' : Colors.Red, + 'out_number' : Colors.LightRed, + + 'normal' : Colors.Normal # color off (usu. Colors.Normal) + } ) + +NeutralColors = ColorScheme( + 'Neutral',{ + 'header' : Colors.Red, + token.NUMBER : Colors.Cyan, + token.OP : Colors.Blue, + token.STRING : Colors.Blue, + tokenize.COMMENT : Colors.Red, + token.NAME : Colors.Normal, + token.ERRORTOKEN : Colors.Red, + + _KEYWORD : Colors.Green, + _TEXT : Colors.Blue, + + 'in_prompt' : InputTermColors.Blue, + 'in_number' : InputTermColors.LightBlue, + 'in_prompt2' : InputTermColors.Blue, + 'in_normal' : InputTermColors.Normal, # color off (usu. Colors.Normal) + + 'out_prompt' : Colors.Red, + 'out_number' : Colors.LightRed, + + 'normal' : Colors.Normal # color off (usu. Colors.Normal) + } ) + +# Hack: the 'neutral' colours are not very visible on a dark background on +# Windows. Since Windows command prompts have a dark background by default, and +# relatively few users are likely to alter that, we will use the 'Linux' colours, +# designed for a dark background, as the default on Windows. Changing it here +# avoids affecting the prompt colours rendered by prompt_toolkit, where the +# neutral defaults do work OK. + +if os.name == 'nt': + NeutralColors = LinuxColors.copy(name='Neutral') + +LightBGColors = ColorScheme( + 'LightBG',{ + 'header' : Colors.Red, + token.NUMBER : Colors.Cyan, + token.OP : Colors.Blue, + token.STRING : Colors.Blue, + tokenize.COMMENT : Colors.Red, + token.NAME : Colors.Normal, + token.ERRORTOKEN : Colors.Red, + + + _KEYWORD : Colors.Green, + _TEXT : Colors.Blue, + + 'in_prompt' : InputTermColors.Blue, + 'in_number' : InputTermColors.LightBlue, + 'in_prompt2' : InputTermColors.Blue, + 'in_normal' : InputTermColors.Normal, # color off (usu. Colors.Normal) + + 'out_prompt' : Colors.Red, + 'out_number' : Colors.LightRed, + + 'normal' : Colors.Normal # color off (usu. Colors.Normal) + } ) + +# Build table of color schemes (needed by the parser) +ANSICodeColors = ColorSchemeTable([NoColor,LinuxColors,LightBGColors, NeutralColors], + _scheme_default) + +Undefined = object() + +class Parser(Colorable): + """ Format colored Python source. + """ + + def __init__(self, color_table=None, out = sys.stdout, parent=None, style=None): + """ Create a parser with a specified color table and output channel. + + Call format() to process code. + """ + + super(Parser, self).__init__(parent=parent) + + self.color_table = color_table if color_table else ANSICodeColors + self.out = out + self.pos = None + self.lines = None + self.raw = None + if not style: + self.style = self.default_style + else: + self.style = style + + + def format(self, raw, out=None, scheme=Undefined): + import warnings + if scheme is not Undefined: + warnings.warn('The `scheme` argument of IPython.utils.PyColorize:Parser.format is deprecated since IPython 6.0.' + 'It will have no effect. Set the parser `style` directly.', + stacklevel=2) + return self.format2(raw, out)[0] + + def format2(self, raw, out = None): + """ Parse and send the colored source. + + If out and scheme are not specified, the defaults (given to + constructor) are used. + + out should be a file-type object. 
Optionally, out can be given as the + string 'str' and the parser will automatically return the output in a + string.""" + + string_output = 0 + if out == 'str' or self.out == 'str' or \ + isinstance(self.out, StringIO): + # XXX - I don't really like this state handling logic, but at this + # point I don't want to make major changes, so adding the + # isinstance() check is the simplest I can do to ensure correct + # behavior. + out_old = self.out + self.out = StringIO() + string_output = 1 + elif out is not None: + self.out = out + else: + raise ValueError('`out` or `self.out` should be file-like or the value `"str"`') + + # Fast return of the unmodified input for NoColor scheme + if self.style == 'NoColor': + error = False + self.out.write(raw) + if string_output: + return raw, error + return None, error + + # local shorthands + colors = self.color_table[self.style].colors + self.colors = colors # put in object so __call__ sees it + + # Remove trailing whitespace and normalize tabs + self.raw = raw.expandtabs().rstrip() + + # store line offsets in self.lines + self.lines = [0, 0] + pos = 0 + raw_find = self.raw.find + lines_append = self.lines.append + while True: + pos = raw_find('\n', pos) + 1 + if not pos: + break + lines_append(pos) + lines_append(len(self.raw)) + + # parse the source and write it + self.pos = 0 + text = StringIO(self.raw) + + error = False + try: + for atoken in generate_tokens(text.readline): + self(*atoken) + except tokenize.TokenError as ex: + msg = ex.args[0] + line = ex.args[1][0] + self.out.write("%s\n\n*** ERROR: %s%s%s\n" % + (colors[token.ERRORTOKEN], + msg, self.raw[self.lines[line]:], + colors.normal) + ) + error = True + self.out.write(colors.normal+'\n') + if string_output: + output = self.out.getvalue() + self.out = out_old + return (output, error) + return (None, error) + + + def _inner_call_(self, toktype, toktext, start_pos): + """like call but write to a temporary buffer""" + buff = StringIO() + srow, scol = start_pos + colors = self.colors + owrite = buff.write + + # line separator, so this works across platforms + linesep = os.linesep + + # calculate new positions + oldpos = self.pos + newpos = self.lines[srow] + scol + self.pos = newpos + len(toktext) + + # send the original whitespace, if needed + if newpos > oldpos: + owrite(self.raw[oldpos:newpos]) + + # skip indenting tokens + if toktype in [token.INDENT, token.DEDENT]: + self.pos = newpos + buff.seek(0) + return buff.read() + + # map token type to a color group + if token.LPAR <= toktype <= token.OP: + toktype = token.OP + elif toktype == token.NAME and keyword.iskeyword(toktext): + toktype = _KEYWORD + color = colors.get(toktype, colors[_TEXT]) + + # Triple quoted strings must be handled carefully so that backtracking + # in pagers works correctly. We need color terminators on _each_ line. 
+ if linesep in toktext: + toktext = toktext.replace(linesep, '%s%s%s' % + (colors.normal,linesep,color)) + + # send text + owrite('%s%s%s' % (color,toktext,colors.normal)) + buff.seek(0) + return buff.read() + + + def __call__(self, toktype, toktext, start_pos, end_pos, line): + """ Token handler, with syntax highlighting.""" + self.out.write( + self._inner_call_(toktype, toktext, start_pos)) diff --git a/contrib/python/ipython/py3/IPython/utils/_process_cli.py b/contrib/python/ipython/py3/IPython/utils/_process_cli.py index a8e2179cdb2..89a31c31643 100644 --- a/contrib/python/ipython/py3/IPython/utils/_process_cli.py +++ b/contrib/python/ipython/py3/IPython/utils/_process_cli.py @@ -1,78 +1,78 @@ -"""cli-specific implementation of process utilities. - -cli - Common Language Infrastructure for IronPython. Code - can run on any operating system. Check os.name for os- - specific settings. - -This file is only meant to be imported by process.py, not by end-users. - -This file is largely untested. To become a full drop-in process -interface for IronPython will probably require you to help fill -in the details. -""" - -# Import cli libraries: -import clr -import System - -# Import Python libraries: -import os - -# Import IPython libraries: -from IPython.utils import py3compat -from ._process_common import arg_split - -def _find_cmd(cmd): - """Find the full path to a command using which.""" - paths = System.Environment.GetEnvironmentVariable("PATH").Split(os.pathsep) - for path in paths: - filename = os.path.join(path, cmd) - if System.IO.File.Exists(filename): - return py3compat.decode(filename) - raise OSError("command %r not found" % cmd) - -def system(cmd): - """ - system(cmd) should work in a cli environment on Mac OSX, Linux, - and Windows - """ - psi = System.Diagnostics.ProcessStartInfo(cmd) - psi.RedirectStandardOutput = True - psi.RedirectStandardError = True - psi.WindowStyle = System.Diagnostics.ProcessWindowStyle.Normal - psi.UseShellExecute = False - # Start up process: - reg = System.Diagnostics.Process.Start(psi) - -def getoutput(cmd): - """ - getoutput(cmd) should work in a cli environment on Mac OSX, Linux, - and Windows - """ - psi = System.Diagnostics.ProcessStartInfo(cmd) - psi.RedirectStandardOutput = True - psi.RedirectStandardError = True - psi.WindowStyle = System.Diagnostics.ProcessWindowStyle.Normal - psi.UseShellExecute = False - # Start up process: - reg = System.Diagnostics.Process.Start(psi) - myOutput = reg.StandardOutput - output = myOutput.ReadToEnd() - myError = reg.StandardError - error = myError.ReadToEnd() - return output - -def check_pid(pid): - """ - Check if a process with the given PID (pid) exists - """ - try: - System.Diagnostics.Process.GetProcessById(pid) - # process with given pid is running - return True - except System.InvalidOperationException: - # process wasn't started by this object (but is running) - return True - except System.ArgumentException: - # process with given pid isn't running - return False +"""cli-specific implementation of process utilities. + +cli - Common Language Infrastructure for IronPython. Code + can run on any operating system. Check os.name for os- + specific settings. + +This file is only meant to be imported by process.py, not by end-users. + +This file is largely untested. To become a full drop-in process +interface for IronPython will probably require you to help fill +in the details. 
+""" + +# Import cli libraries: +import clr +import System + +# Import Python libraries: +import os + +# Import IPython libraries: +from IPython.utils import py3compat +from ._process_common import arg_split + +def _find_cmd(cmd): + """Find the full path to a command using which.""" + paths = System.Environment.GetEnvironmentVariable("PATH").Split(os.pathsep) + for path in paths: + filename = os.path.join(path, cmd) + if System.IO.File.Exists(filename): + return py3compat.decode(filename) + raise OSError("command %r not found" % cmd) + +def system(cmd): + """ + system(cmd) should work in a cli environment on Mac OSX, Linux, + and Windows + """ + psi = System.Diagnostics.ProcessStartInfo(cmd) + psi.RedirectStandardOutput = True + psi.RedirectStandardError = True + psi.WindowStyle = System.Diagnostics.ProcessWindowStyle.Normal + psi.UseShellExecute = False + # Start up process: + reg = System.Diagnostics.Process.Start(psi) + +def getoutput(cmd): + """ + getoutput(cmd) should work in a cli environment on Mac OSX, Linux, + and Windows + """ + psi = System.Diagnostics.ProcessStartInfo(cmd) + psi.RedirectStandardOutput = True + psi.RedirectStandardError = True + psi.WindowStyle = System.Diagnostics.ProcessWindowStyle.Normal + psi.UseShellExecute = False + # Start up process: + reg = System.Diagnostics.Process.Start(psi) + myOutput = reg.StandardOutput + output = myOutput.ReadToEnd() + myError = reg.StandardError + error = myError.ReadToEnd() + return output + +def check_pid(pid): + """ + Check if a process with the given PID (pid) exists + """ + try: + System.Diagnostics.Process.GetProcessById(pid) + # process with given pid is running + return True + except System.InvalidOperationException: + # process wasn't started by this object (but is running) + return True + except System.ArgumentException: + # process with given pid isn't running + return False diff --git a/contrib/python/ipython/py3/IPython/utils/_process_common.py b/contrib/python/ipython/py3/IPython/utils/_process_common.py index 1cb4b7cd088..2a647dc7fa3 100644 --- a/contrib/python/ipython/py3/IPython/utils/_process_common.py +++ b/contrib/python/ipython/py3/IPython/utils/_process_common.py @@ -1,212 +1,212 @@ -"""Common utilities for the various process_* implementations. - -This file is only meant to be imported by the platform-specific implementations -of subprocess utilities, and it contains tools that are common to all of them. -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2010-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- -import subprocess -import shlex -import sys -import os - -from IPython.utils import py3compat - -#----------------------------------------------------------------------------- -# Function definitions -#----------------------------------------------------------------------------- - -def read_no_interrupt(p): - """Read from a pipe ignoring EINTR errors. 
- - This is necessary because when reading from pipes with GUI event loops - running in the background, often interrupts are raised that stop the - command from completing.""" - import errno - - try: - return p.read() - except IOError as err: - if err.errno != errno.EINTR: - raise - - -def process_handler(cmd, callback, stderr=subprocess.PIPE): - """Open a command in a shell subprocess and execute a callback. - - This function provides common scaffolding for creating subprocess.Popen() - calls. It creates a Popen object and then calls the callback with it. - - Parameters - ---------- - cmd : str or list - A command to be executed by the system, using :class:`subprocess.Popen`. - If a string is passed, it will be run in the system shell. If a list is - passed, it will be used directly as arguments. - - callback : callable - A one-argument function that will be called with the Popen object. - - stderr : file descriptor number, optional - By default this is set to ``subprocess.PIPE``, but you can also pass the - value ``subprocess.STDOUT`` to force the subprocess' stderr to go into - the same file descriptor as its stdout. This is useful to read stdout - and stderr combined in the order they are generated. - - Returns - ------- - The return value of the provided callback is returned. - """ - sys.stdout.flush() - sys.stderr.flush() - # On win32, close_fds can't be true when using pipes for stdin/out/err - close_fds = sys.platform != 'win32' - # Determine if cmd should be run with system shell. - shell = isinstance(cmd, str) - # On POSIX systems run shell commands with user-preferred shell. - executable = None - if shell and os.name == 'posix' and 'SHELL' in os.environ: - executable = os.environ['SHELL'] - p = subprocess.Popen(cmd, shell=shell, - executable=executable, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=stderr, - close_fds=close_fds) - - try: - out = callback(p) - except KeyboardInterrupt: - print('^C') - sys.stdout.flush() - sys.stderr.flush() - out = None - finally: - # Make really sure that we don't leave processes behind, in case the - # call above raises an exception - # We start by assuming the subprocess finished (to avoid NameErrors - # later depending on the path taken) - if p.returncode is None: - try: - p.terminate() - p.poll() - except OSError: - pass - # One last try on our way out - if p.returncode is None: - try: - p.kill() - except OSError: - pass - - return out - - -def getoutput(cmd): - """Run a command and return its stdout/stderr as a string. - - Parameters - ---------- - cmd : str or list - A command to be executed in the system shell. - - Returns - ------- - output : str - A string containing the combination of stdout and stderr from the - subprocess, in whatever order the subprocess originally wrote to its - file descriptors (so the order of the information in this string is the - correct order as would be seen if running the command in a terminal). - """ - out = process_handler(cmd, lambda p: p.communicate()[0], subprocess.STDOUT) - if out is None: - return '' - return py3compat.decode(out) - - -def getoutputerror(cmd): - """Return (standard output, standard error) of executing cmd in a shell. - - Accepts the same arguments as os.system(). - - Parameters - ---------- - cmd : str or list - A command to be executed in the system shell. 
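# An illustrative sketch of process_handler(), assuming IPython is importable
# and a POSIX-style shell (the echo command is only an example). The helper
# does the Popen bookkeeping; the callback decides what to read from the
# process, and its return value is handed back to the caller.
from IPython.utils._process_common import process_handler

captured = process_handler("echo hello", lambda p: p.communicate()[0])
print(captured)   # b'hello\n' -- still bytes at this layer, not yet decoded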
- - Returns - ------- - stdout : str - stderr : str - """ - return get_output_error_code(cmd)[:2] - -def get_output_error_code(cmd): - """Return (standard output, standard error, return code) of executing cmd - in a shell. - - Accepts the same arguments as os.system(). - - Parameters - ---------- - cmd : str or list - A command to be executed in the system shell. - - Returns - ------- - stdout : str - stderr : str - returncode: int - """ - - out_err, p = process_handler(cmd, lambda p: (p.communicate(), p)) - if out_err is None: - return '', '', p.returncode - out, err = out_err - return py3compat.decode(out), py3compat.decode(err), p.returncode - -def arg_split(s, posix=False, strict=True): - """Split a command line's arguments in a shell-like manner. - - This is a modified version of the standard library's shlex.split() - function, but with a default of posix=False for splitting, so that quotes - in inputs are respected. - - if strict=False, then any errors shlex.split would raise will result in the - unparsed remainder being the last element of the list, rather than raising. - This is because we sometimes use arg_split to parse things other than - command-line args. - """ - - lex = shlex.shlex(s, posix=posix) - lex.whitespace_split = True - # Extract tokens, ensuring that things like leaving open quotes - # does not cause this to raise. This is important, because we - # sometimes pass Python source through this (e.g. %timeit f(" ")), - # and it shouldn't raise an exception. - # It may be a bad idea to parse things that are not command-line args - # through this function, but we do, so let's be safe about it. - lex.commenters='' #fix for GH-1269 - tokens = [] - while True: - try: - tokens.append(next(lex)) - except StopIteration: - break - except ValueError: - if strict: - raise - # couldn't parse, get remaining blob as last token - tokens.append(lex.token) - break - - return tokens +"""Common utilities for the various process_* implementations. + +This file is only meant to be imported by the platform-specific implementations +of subprocess utilities, and it contains tools that are common to all of them. +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2010-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- +import subprocess +import shlex +import sys +import os + +from IPython.utils import py3compat + +#----------------------------------------------------------------------------- +# Function definitions +#----------------------------------------------------------------------------- + +def read_no_interrupt(p): + """Read from a pipe ignoring EINTR errors. + + This is necessary because when reading from pipes with GUI event loops + running in the background, often interrupts are raised that stop the + command from completing.""" + import errno + + try: + return p.read() + except IOError as err: + if err.errno != errno.EINTR: + raise + + +def process_handler(cmd, callback, stderr=subprocess.PIPE): + """Open a command in a shell subprocess and execute a callback. + + This function provides common scaffolding for creating subprocess.Popen() + calls. 
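# Hedged sketch of the arg_split() behaviour described above: the default
# posix=False keeps quotes intact, and strict=False returns the unparsed
# remainder as the last token instead of raising. Illustrative only.
from IPython.utils._process_common import arg_split

arg_split('a "b c" d')              # ['a', '"b c"', 'd'] -- quotes preserved
arg_split('a "b c', strict=False)   # no ValueError; the open-quoted tail is kept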
It creates a Popen object and then calls the callback with it. + + Parameters + ---------- + cmd : str or list + A command to be executed by the system, using :class:`subprocess.Popen`. + If a string is passed, it will be run in the system shell. If a list is + passed, it will be used directly as arguments. + + callback : callable + A one-argument function that will be called with the Popen object. + + stderr : file descriptor number, optional + By default this is set to ``subprocess.PIPE``, but you can also pass the + value ``subprocess.STDOUT`` to force the subprocess' stderr to go into + the same file descriptor as its stdout. This is useful to read stdout + and stderr combined in the order they are generated. + + Returns + ------- + The return value of the provided callback is returned. + """ + sys.stdout.flush() + sys.stderr.flush() + # On win32, close_fds can't be true when using pipes for stdin/out/err + close_fds = sys.platform != 'win32' + # Determine if cmd should be run with system shell. + shell = isinstance(cmd, str) + # On POSIX systems run shell commands with user-preferred shell. + executable = None + if shell and os.name == 'posix' and 'SHELL' in os.environ: + executable = os.environ['SHELL'] + p = subprocess.Popen(cmd, shell=shell, + executable=executable, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=stderr, + close_fds=close_fds) + + try: + out = callback(p) + except KeyboardInterrupt: + print('^C') + sys.stdout.flush() + sys.stderr.flush() + out = None + finally: + # Make really sure that we don't leave processes behind, in case the + # call above raises an exception + # We start by assuming the subprocess finished (to avoid NameErrors + # later depending on the path taken) + if p.returncode is None: + try: + p.terminate() + p.poll() + except OSError: + pass + # One last try on our way out + if p.returncode is None: + try: + p.kill() + except OSError: + pass + + return out + + +def getoutput(cmd): + """Run a command and return its stdout/stderr as a string. + + Parameters + ---------- + cmd : str or list + A command to be executed in the system shell. + + Returns + ------- + output : str + A string containing the combination of stdout and stderr from the + subprocess, in whatever order the subprocess originally wrote to its + file descriptors (so the order of the information in this string is the + correct order as would be seen if running the command in a terminal). + """ + out = process_handler(cmd, lambda p: p.communicate()[0], subprocess.STDOUT) + if out is None: + return '' + return py3compat.decode(out) + + +def getoutputerror(cmd): + """Return (standard output, standard error) of executing cmd in a shell. + + Accepts the same arguments as os.system(). + + Parameters + ---------- + cmd : str or list + A command to be executed in the system shell. + + Returns + ------- + stdout : str + stderr : str + """ + return get_output_error_code(cmd)[:2] + +def get_output_error_code(cmd): + """Return (standard output, standard error, return code) of executing cmd + in a shell. + + Accepts the same arguments as os.system(). + + Parameters + ---------- + cmd : str or list + A command to be executed in the system shell. 
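# Hedged usage sketch for the three-tuple helper whose docstring appears just
# above: branch on the return code while keeping whatever the child printed.
from IPython.utils._process_common import get_output_error_code

out, err, rc = get_output_error_code("ls no-such-path")   # hypothetical command
if rc != 0:
    print("command failed with", rc, err.strip())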
+ + Returns + ------- + stdout : str + stderr : str + returncode: int + """ + + out_err, p = process_handler(cmd, lambda p: (p.communicate(), p)) + if out_err is None: + return '', '', p.returncode + out, err = out_err + return py3compat.decode(out), py3compat.decode(err), p.returncode + +def arg_split(s, posix=False, strict=True): + """Split a command line's arguments in a shell-like manner. + + This is a modified version of the standard library's shlex.split() + function, but with a default of posix=False for splitting, so that quotes + in inputs are respected. + + if strict=False, then any errors shlex.split would raise will result in the + unparsed remainder being the last element of the list, rather than raising. + This is because we sometimes use arg_split to parse things other than + command-line args. + """ + + lex = shlex.shlex(s, posix=posix) + lex.whitespace_split = True + # Extract tokens, ensuring that things like leaving open quotes + # does not cause this to raise. This is important, because we + # sometimes pass Python source through this (e.g. %timeit f(" ")), + # and it shouldn't raise an exception. + # It may be a bad idea to parse things that are not command-line args + # through this function, but we do, so let's be safe about it. + lex.commenters='' #fix for GH-1269 + tokens = [] + while True: + try: + tokens.append(next(lex)) + except StopIteration: + break + except ValueError: + if strict: + raise + # couldn't parse, get remaining blob as last token + tokens.append(lex.token) + break + + return tokens diff --git a/contrib/python/ipython/py3/IPython/utils/_process_posix.py b/contrib/python/ipython/py3/IPython/utils/_process_posix.py index 8a4bf737c36..a11cad7697c 100644 --- a/contrib/python/ipython/py3/IPython/utils/_process_posix.py +++ b/contrib/python/ipython/py3/IPython/utils/_process_posix.py @@ -1,225 +1,225 @@ -"""Posix-specific implementation of process utilities. - -This file is only meant to be imported by process.py, not by end-users. -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2010-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -# Stdlib -import errno -import os -import subprocess as sp -import sys - -import pexpect - -# Our own -from ._process_common import getoutput, arg_split -from IPython.utils import py3compat -from IPython.utils.encoding import DEFAULT_ENCODING - -#----------------------------------------------------------------------------- -# Function definitions -#----------------------------------------------------------------------------- - -def _find_cmd(cmd): - """Find the full path to a command using which.""" - - path = sp.Popen(['/usr/bin/env', 'which', cmd], - stdout=sp.PIPE, stderr=sp.PIPE).communicate()[0] - return py3compat.decode(path) - - -class ProcessHandler(object): - """Execute subprocesses under the control of pexpect. - """ - # Timeout in seconds to wait on each reading of the subprocess' output. - # This should not be set too low to avoid cpu overusage from our side, - # since we read in a loop whose period is controlled by this timeout. 
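# Hedged sketch of the POSIX _find_cmd() shown above: delegate to `which`
# through /usr/bin/env and decode whatever path it prints (an empty string
# when the command is not on PATH). Illustrative only, not part of the diff.
import subprocess as sp

def find_cmd_posix(cmd):
    out = sp.Popen(["/usr/bin/env", "which", cmd],
                   stdout=sp.PIPE, stderr=sp.PIPE).communicate()[0]
    return out.decode().strip()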
- read_timeout = 0.05 - - # Timeout to give a process if we receive SIGINT, between sending the - # SIGINT to the process and forcefully terminating it. - terminate_timeout = 0.2 - - # File object where stdout and stderr of the subprocess will be written - logfile = None - - # Shell to call for subprocesses to execute - _sh = None - - @property - def sh(self): - if self._sh is None: - shell_name = os.environ.get("SHELL", "sh") - self._sh = pexpect.which(shell_name) - if self._sh is None: - raise OSError('"{}" shell not found'.format(shell_name)) - - return self._sh - - def __init__(self, logfile=None, read_timeout=None, terminate_timeout=None): - """Arguments are used for pexpect calls.""" - self.read_timeout = (ProcessHandler.read_timeout if read_timeout is - None else read_timeout) - self.terminate_timeout = (ProcessHandler.terminate_timeout if - terminate_timeout is None else - terminate_timeout) - self.logfile = sys.stdout if logfile is None else logfile - - def getoutput(self, cmd): - """Run a command and return its stdout/stderr as a string. - - Parameters - ---------- - cmd : str - A command to be executed in the system shell. - - Returns - ------- - output : str - A string containing the combination of stdout and stderr from the - subprocess, in whatever order the subprocess originally wrote to its - file descriptors (so the order of the information in this string is the - correct order as would be seen if running the command in a terminal). - """ - try: - return pexpect.run(self.sh, args=['-c', cmd]).replace('\r\n', '\n') - except KeyboardInterrupt: - print('^C', file=sys.stderr, end='') - - def getoutput_pexpect(self, cmd): - """Run a command and return its stdout/stderr as a string. - - Parameters - ---------- - cmd : str - A command to be executed in the system shell. - - Returns - ------- - output : str - A string containing the combination of stdout and stderr from the - subprocess, in whatever order the subprocess originally wrote to its - file descriptors (so the order of the information in this string is the - correct order as would be seen if running the command in a terminal). - """ - try: - return pexpect.run(self.sh, args=['-c', cmd]).replace('\r\n', '\n') - except KeyboardInterrupt: - print('^C', file=sys.stderr, end='') - - def system(self, cmd): - """Execute a command in a subshell. - - Parameters - ---------- - cmd : str - A command to be executed in the system shell. - - Returns - ------- - int : child's exitstatus - """ - # Get likely encoding for the output. - enc = DEFAULT_ENCODING - - # Patterns to match on the output, for pexpect. We read input and - # allow either a short timeout or EOF - patterns = [pexpect.TIMEOUT, pexpect.EOF] - # the index of the EOF pattern in the list. - # even though we know it's 1, this call means we don't have to worry if - # we change the above list, and forget to change this value: - EOF_index = patterns.index(pexpect.EOF) - # The size of the output stored so far in the process output buffer. - # Since pexpect only appends to this buffer, each time we print we - # record how far we've printed, so that next time we only print *new* - # content from the buffer. - out_size = 0 - try: - # Since we're not really searching the buffer for text patterns, we - # can set pexpect's search window to be tiny and it won't matter. - # We only search for the 'patterns' timeout or EOF, which aren't in - # the text itself. 
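# Hedged sketch of the pexpect-based getoutput()/getoutput_pexpect() above: run
# the resolved shell with -c and normalise CRLF endings, as those methods do.
# pexpect.run() returns bytes here, so byte strings are used. Illustrative only.
import pexpect

def getoutput_via_pexpect(sh, cmd):
    return pexpect.run(sh, args=["-c", cmd]).replace(b"\r\n", b"\n")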
- #child = pexpect.spawn(pcmd, searchwindowsize=1) - if hasattr(pexpect, 'spawnb'): - child = pexpect.spawnb(self.sh, args=['-c', cmd]) # Pexpect-U - else: - child = pexpect.spawn(self.sh, args=['-c', cmd]) # Vanilla Pexpect - flush = sys.stdout.flush - while True: - # res is the index of the pattern that caused the match, so we - # know whether we've finished (if we matched EOF) or not - res_idx = child.expect_list(patterns, self.read_timeout) - print(child.before[out_size:].decode(enc, 'replace'), end='') - flush() - if res_idx==EOF_index: - break - # Update the pointer to what we've already printed - out_size = len(child.before) - except KeyboardInterrupt: - # We need to send ^C to the process. The ascii code for '^C' is 3 - # (the character is known as ETX for 'End of Text', see - # curses.ascii.ETX). - child.sendline(chr(3)) - # Read and print any more output the program might produce on its - # way out. - try: - out_size = len(child.before) - child.expect_list(patterns, self.terminate_timeout) - print(child.before[out_size:].decode(enc, 'replace'), end='') - sys.stdout.flush() - except KeyboardInterrupt: - # Impatient users tend to type it multiple times - pass - finally: - # Ensure the subprocess really is terminated - child.terminate(force=True) - # add isalive check, to ensure exitstatus is set: - child.isalive() - - # We follow the subprocess pattern, returning either the exit status - # as a positive number, or the terminating signal as a negative - # number. - # on Linux, sh returns 128+n for signals terminating child processes on Linux - # on BSD (OS X), the signal code is set instead - if child.exitstatus is None: - # on WIFSIGNALED, pexpect sets signalstatus, leaving exitstatus=None - if child.signalstatus is None: - # this condition may never occur, - # but let's be certain we always return an integer. - return 0 - return -child.signalstatus - if child.exitstatus > 128: - return -(child.exitstatus - 128) - return child.exitstatus - - -# Make system() with a functional interface for outside use. Note that we use -# getoutput() from the _common utils, which is built on top of popen(). Using -# pexpect to get subprocess output produces difficult to parse output, since -# programs think they are talking to a tty and produce highly formatted output -# (ls is a good example) that makes them hard. -system = ProcessHandler().system - -def check_pid(pid): - try: - os.kill(pid, 0) - except OSError as err: - if err.errno == errno.ESRCH: - return False - elif err.errno == errno.EPERM: - # Don't have permission to signal the process - probably means it exists - return True - raise - else: - return True +"""Posix-specific implementation of process utilities. + +This file is only meant to be imported by process.py, not by end-users. +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2010-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
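# Hedged sketch of the exit-status convention implemented at the end of
# ProcessHandler.system() above: follow the subprocess module and report
# terminating signals as negative numbers, undoing the shell's 128+n encoding.
def map_exitstatus(exitstatus, signalstatus):
    if exitstatus is None:
        # pexpect leaves exitstatus=None and sets signalstatus on WIFSIGNALED
        return -signalstatus if signalstatus is not None else 0
    if exitstatus > 128:
        return -(exitstatus - 128)
    return exitstatus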
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Stdlib +import errno +import os +import subprocess as sp +import sys + +import pexpect + +# Our own +from ._process_common import getoutput, arg_split +from IPython.utils import py3compat +from IPython.utils.encoding import DEFAULT_ENCODING + +#----------------------------------------------------------------------------- +# Function definitions +#----------------------------------------------------------------------------- + +def _find_cmd(cmd): + """Find the full path to a command using which.""" + + path = sp.Popen(['/usr/bin/env', 'which', cmd], + stdout=sp.PIPE, stderr=sp.PIPE).communicate()[0] + return py3compat.decode(path) + + +class ProcessHandler(object): + """Execute subprocesses under the control of pexpect. + """ + # Timeout in seconds to wait on each reading of the subprocess' output. + # This should not be set too low to avoid cpu overusage from our side, + # since we read in a loop whose period is controlled by this timeout. + read_timeout = 0.05 + + # Timeout to give a process if we receive SIGINT, between sending the + # SIGINT to the process and forcefully terminating it. + terminate_timeout = 0.2 + + # File object where stdout and stderr of the subprocess will be written + logfile = None + + # Shell to call for subprocesses to execute + _sh = None + + @property + def sh(self): + if self._sh is None: + shell_name = os.environ.get("SHELL", "sh") + self._sh = pexpect.which(shell_name) + if self._sh is None: + raise OSError('"{}" shell not found'.format(shell_name)) + + return self._sh + + def __init__(self, logfile=None, read_timeout=None, terminate_timeout=None): + """Arguments are used for pexpect calls.""" + self.read_timeout = (ProcessHandler.read_timeout if read_timeout is + None else read_timeout) + self.terminate_timeout = (ProcessHandler.terminate_timeout if + terminate_timeout is None else + terminate_timeout) + self.logfile = sys.stdout if logfile is None else logfile + + def getoutput(self, cmd): + """Run a command and return its stdout/stderr as a string. + + Parameters + ---------- + cmd : str + A command to be executed in the system shell. + + Returns + ------- + output : str + A string containing the combination of stdout and stderr from the + subprocess, in whatever order the subprocess originally wrote to its + file descriptors (so the order of the information in this string is the + correct order as would be seen if running the command in a terminal). + """ + try: + return pexpect.run(self.sh, args=['-c', cmd]).replace('\r\n', '\n') + except KeyboardInterrupt: + print('^C', file=sys.stderr, end='') + + def getoutput_pexpect(self, cmd): + """Run a command and return its stdout/stderr as a string. + + Parameters + ---------- + cmd : str + A command to be executed in the system shell. + + Returns + ------- + output : str + A string containing the combination of stdout and stderr from the + subprocess, in whatever order the subprocess originally wrote to its + file descriptors (so the order of the information in this string is the + correct order as would be seen if running the command in a terminal). 
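# Hedged sketch of the sh property above: resolve $SHELL (falling back to "sh")
# with pexpect.which() and fail loudly if no usable shell is found.
import os
import pexpect

shell_name = os.environ.get("SHELL", "sh")
shell_path = pexpect.which(shell_name)
if shell_path is None:
    raise OSError('"{}" shell not found'.format(shell_name))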
+ """ + try: + return pexpect.run(self.sh, args=['-c', cmd]).replace('\r\n', '\n') + except KeyboardInterrupt: + print('^C', file=sys.stderr, end='') + + def system(self, cmd): + """Execute a command in a subshell. + + Parameters + ---------- + cmd : str + A command to be executed in the system shell. + + Returns + ------- + int : child's exitstatus + """ + # Get likely encoding for the output. + enc = DEFAULT_ENCODING + + # Patterns to match on the output, for pexpect. We read input and + # allow either a short timeout or EOF + patterns = [pexpect.TIMEOUT, pexpect.EOF] + # the index of the EOF pattern in the list. + # even though we know it's 1, this call means we don't have to worry if + # we change the above list, and forget to change this value: + EOF_index = patterns.index(pexpect.EOF) + # The size of the output stored so far in the process output buffer. + # Since pexpect only appends to this buffer, each time we print we + # record how far we've printed, so that next time we only print *new* + # content from the buffer. + out_size = 0 + try: + # Since we're not really searching the buffer for text patterns, we + # can set pexpect's search window to be tiny and it won't matter. + # We only search for the 'patterns' timeout or EOF, which aren't in + # the text itself. + #child = pexpect.spawn(pcmd, searchwindowsize=1) + if hasattr(pexpect, 'spawnb'): + child = pexpect.spawnb(self.sh, args=['-c', cmd]) # Pexpect-U + else: + child = pexpect.spawn(self.sh, args=['-c', cmd]) # Vanilla Pexpect + flush = sys.stdout.flush + while True: + # res is the index of the pattern that caused the match, so we + # know whether we've finished (if we matched EOF) or not + res_idx = child.expect_list(patterns, self.read_timeout) + print(child.before[out_size:].decode(enc, 'replace'), end='') + flush() + if res_idx==EOF_index: + break + # Update the pointer to what we've already printed + out_size = len(child.before) + except KeyboardInterrupt: + # We need to send ^C to the process. The ascii code for '^C' is 3 + # (the character is known as ETX for 'End of Text', see + # curses.ascii.ETX). + child.sendline(chr(3)) + # Read and print any more output the program might produce on its + # way out. + try: + out_size = len(child.before) + child.expect_list(patterns, self.terminate_timeout) + print(child.before[out_size:].decode(enc, 'replace'), end='') + sys.stdout.flush() + except KeyboardInterrupt: + # Impatient users tend to type it multiple times + pass + finally: + # Ensure the subprocess really is terminated + child.terminate(force=True) + # add isalive check, to ensure exitstatus is set: + child.isalive() + + # We follow the subprocess pattern, returning either the exit status + # as a positive number, or the terminating signal as a negative + # number. + # on Linux, sh returns 128+n for signals terminating child processes on Linux + # on BSD (OS X), the signal code is set instead + if child.exitstatus is None: + # on WIFSIGNALED, pexpect sets signalstatus, leaving exitstatus=None + if child.signalstatus is None: + # this condition may never occur, + # but let's be certain we always return an integer. + return 0 + return -child.signalstatus + if child.exitstatus > 128: + return -(child.exitstatus - 128) + return child.exitstatus + + +# Make system() with a functional interface for outside use. Note that we use +# getoutput() from the _common utils, which is built on top of popen(). 
Using +# pexpect to get subprocess output produces difficult to parse output, since +# programs think they are talking to a tty and produce highly formatted output +# (ls is a good example) that makes them hard. +system = ProcessHandler().system + +def check_pid(pid): + try: + os.kill(pid, 0) + except OSError as err: + if err.errno == errno.ESRCH: + return False + elif err.errno == errno.EPERM: + # Don't have permission to signal the process - probably means it exists + return True + raise + else: + return True diff --git a/contrib/python/ipython/py3/IPython/utils/_process_win32.py b/contrib/python/ipython/py3/IPython/utils/_process_win32.py index 275694db304..6d05bdaa12e 100644 --- a/contrib/python/ipython/py3/IPython/utils/_process_win32.py +++ b/contrib/python/ipython/py3/IPython/utils/_process_win32.py @@ -1,205 +1,205 @@ -"""Windows-specific implementation of process utilities. - -This file is only meant to be imported by process.py, not by end-users. -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2010-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -# stdlib -import os -import sys -import ctypes -import time - -from ctypes import c_int, POINTER -from ctypes.wintypes import LPCWSTR, HLOCAL -from subprocess import STDOUT, TimeoutExpired -from threading import Thread - -# our own imports -from ._process_common import read_no_interrupt, process_handler, arg_split as py_arg_split -from . import py3compat -from .encoding import DEFAULT_ENCODING - -#----------------------------------------------------------------------------- -# Function definitions -#----------------------------------------------------------------------------- - -class AvoidUNCPath(object): - """A context manager to protect command execution from UNC paths. - - In the Win32 API, commands can't be invoked with the cwd being a UNC path. - This context manager temporarily changes directory to the 'C:' drive on - entering, and restores the original working directory on exit. - - The context manager returns the starting working directory *if* it made a - change and None otherwise, so that users can apply the necessary adjustment - to their system calls in the event of a change. 
- - Examples - -------- - :: - cmd = 'dir' - with AvoidUNCPath() as path: - if path is not None: - cmd = '"pushd %s &&"%s' % (path, cmd) - os.system(cmd) - """ - def __enter__(self): - self.path = os.getcwd() - self.is_unc_path = self.path.startswith(r"\\") - if self.is_unc_path: - # change to c drive (as cmd.exe cannot handle UNC addresses) - os.chdir("C:") - return self.path - else: - # We return None to signal that there was no change in the working - # directory - return None - - def __exit__(self, exc_type, exc_value, traceback): - if self.is_unc_path: - os.chdir(self.path) - - -def _find_cmd(cmd): - """Find the full path to a .bat or .exe using the win32api module.""" - try: - from win32api import SearchPath - except ImportError: - raise ImportError('you need to have pywin32 installed for this to work') - else: - PATH = os.environ['PATH'] - extensions = ['.exe', '.com', '.bat', '.py'] - path = None - for ext in extensions: - try: - path = SearchPath(PATH, cmd, ext)[0] - except: - pass - if path is None: - raise OSError("command %r not found" % cmd) - else: - return path - - -def _system_body(p): - """Callback for _system.""" - enc = DEFAULT_ENCODING - - def stdout_read(): - for line in read_no_interrupt(p.stdout).splitlines(): - line = line.decode(enc, 'replace') - print(line, file=sys.stdout) - - def stderr_read(): - for line in read_no_interrupt(p.stderr).splitlines(): - line = line.decode(enc, 'replace') - print(line, file=sys.stderr) - - Thread(target=stdout_read).start() - Thread(target=stderr_read).start() - - # Wait to finish for returncode. Unfortunately, Python has a bug where - # wait() isn't interruptible (https://bugs.python.org/issue28168) so poll in - # a loop instead of just doing `return p.wait()`. - while True: - result = p.poll() - if result is None: - time.sleep(0.01) - else: - return result - - -def system(cmd): - """Win32 version of os.system() that works with network shares. - - Note that this implementation returns None, as meant for use in IPython. - - Parameters - ---------- - cmd : str or list - A command to be executed in the system shell. - - Returns - ------- - int : child process' exit code. - """ - # The controller provides interactivity with both - # stdin and stdout - #import _process_win32_controller - #_process_win32_controller.system(cmd) - - with AvoidUNCPath() as path: - if path is not None: - cmd = '"pushd %s &&"%s' % (path, cmd) - return process_handler(cmd, _system_body) - -def getoutput(cmd): - """Return standard output of executing cmd in a shell. - - Accepts the same arguments as os.system(). - - Parameters - ---------- - cmd : str or list - A command to be executed in the system shell. - - Returns - ------- - stdout : str - """ - - with AvoidUNCPath() as path: - if path is not None: - cmd = '"pushd %s &&"%s' % (path, cmd) - out = process_handler(cmd, lambda p: p.communicate()[0], STDOUT) - - if out is None: - out = b'' - return py3compat.decode(out) - -try: - CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW - CommandLineToArgvW.arg_types = [LPCWSTR, POINTER(c_int)] - CommandLineToArgvW.restype = POINTER(LPCWSTR) - LocalFree = ctypes.windll.kernel32.LocalFree - LocalFree.res_type = HLOCAL - LocalFree.arg_types = [HLOCAL] - - def arg_split(commandline, posix=False, strict=True): - """Split a command line's arguments in a shell-like manner. - - This is a special version for windows that use a ctypes call to CommandLineToArgvW - to do the argv splitting. The posix parameter is ignored. 
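# Hedged sketch of the pushd wrapping used by system()/getoutput() above: when
# the cwd is a UNC share, AvoidUNCPath switches to C: and the command gets a
# pushd prefix so cmd.exe still runs it in the original directory.
import os

def wrap_for_unc(cmd):
    cwd = os.getcwd()
    if cwd.startswith(r"\\"):                   # only UNC paths need the workaround
        return '"pushd %s &&"%s' % (cwd, cmd)   # same formatting as the code above
    return cmd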
- - If strict=False, process_common.arg_split(...strict=False) is used instead. - """ - #CommandLineToArgvW returns path to executable if called with empty string. - if commandline.strip() == "": - return [] - if not strict: - # not really a cl-arg, fallback on _process_common - return py_arg_split(commandline, posix=posix, strict=strict) - argvn = c_int() - result_pointer = CommandLineToArgvW(py3compat.cast_unicode(commandline.lstrip()), ctypes.byref(argvn)) - result_array_type = LPCWSTR * argvn.value - result = [arg for arg in result_array_type.from_address(ctypes.addressof(result_pointer.contents))] - retval = LocalFree(result_pointer) - return result -except AttributeError: - arg_split = py_arg_split - -def check_pid(pid): - # OpenProcess returns 0 if no such process (of ours) exists - # positive int otherwise - return bool(ctypes.windll.kernel32.OpenProcess(1,0,pid)) +"""Windows-specific implementation of process utilities. + +This file is only meant to be imported by process.py, not by end-users. +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2010-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# stdlib +import os +import sys +import ctypes +import time + +from ctypes import c_int, POINTER +from ctypes.wintypes import LPCWSTR, HLOCAL +from subprocess import STDOUT, TimeoutExpired +from threading import Thread + +# our own imports +from ._process_common import read_no_interrupt, process_handler, arg_split as py_arg_split +from . import py3compat +from .encoding import DEFAULT_ENCODING + +#----------------------------------------------------------------------------- +# Function definitions +#----------------------------------------------------------------------------- + +class AvoidUNCPath(object): + """A context manager to protect command execution from UNC paths. + + In the Win32 API, commands can't be invoked with the cwd being a UNC path. + This context manager temporarily changes directory to the 'C:' drive on + entering, and restores the original working directory on exit. + + The context manager returns the starting working directory *if* it made a + change and None otherwise, so that users can apply the necessary adjustment + to their system calls in the event of a change. 
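# Hedged sketch of the Windows check_pid() shown above: OpenProcess is asked
# for PROCESS_TERMINATE (0x0001) access and returns 0 when no such process of
# ours exists. The sketch also closes the handle, which the one-liner above
# does not. Windows-only, illustrative.
import ctypes

PROCESS_TERMINATE = 0x0001

def pid_exists_win32(pid):
    handle = ctypes.windll.kernel32.OpenProcess(PROCESS_TERMINATE, 0, pid)
    if handle:
        ctypes.windll.kernel32.CloseHandle(handle)
        return True
    return False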
+ + Examples + -------- + :: + cmd = 'dir' + with AvoidUNCPath() as path: + if path is not None: + cmd = '"pushd %s &&"%s' % (path, cmd) + os.system(cmd) + """ + def __enter__(self): + self.path = os.getcwd() + self.is_unc_path = self.path.startswith(r"\\") + if self.is_unc_path: + # change to c drive (as cmd.exe cannot handle UNC addresses) + os.chdir("C:") + return self.path + else: + # We return None to signal that there was no change in the working + # directory + return None + + def __exit__(self, exc_type, exc_value, traceback): + if self.is_unc_path: + os.chdir(self.path) + + +def _find_cmd(cmd): + """Find the full path to a .bat or .exe using the win32api module.""" + try: + from win32api import SearchPath + except ImportError: + raise ImportError('you need to have pywin32 installed for this to work') + else: + PATH = os.environ['PATH'] + extensions = ['.exe', '.com', '.bat', '.py'] + path = None + for ext in extensions: + try: + path = SearchPath(PATH, cmd, ext)[0] + except: + pass + if path is None: + raise OSError("command %r not found" % cmd) + else: + return path + + +def _system_body(p): + """Callback for _system.""" + enc = DEFAULT_ENCODING + + def stdout_read(): + for line in read_no_interrupt(p.stdout).splitlines(): + line = line.decode(enc, 'replace') + print(line, file=sys.stdout) + + def stderr_read(): + for line in read_no_interrupt(p.stderr).splitlines(): + line = line.decode(enc, 'replace') + print(line, file=sys.stderr) + + Thread(target=stdout_read).start() + Thread(target=stderr_read).start() + + # Wait to finish for returncode. Unfortunately, Python has a bug where + # wait() isn't interruptible (https://bugs.python.org/issue28168) so poll in + # a loop instead of just doing `return p.wait()`. + while True: + result = p.poll() + if result is None: + time.sleep(0.01) + else: + return result + + +def system(cmd): + """Win32 version of os.system() that works with network shares. + + Note that this implementation returns None, as meant for use in IPython. + + Parameters + ---------- + cmd : str or list + A command to be executed in the system shell. + + Returns + ------- + int : child process' exit code. + """ + # The controller provides interactivity with both + # stdin and stdout + #import _process_win32_controller + #_process_win32_controller.system(cmd) + + with AvoidUNCPath() as path: + if path is not None: + cmd = '"pushd %s &&"%s' % (path, cmd) + return process_handler(cmd, _system_body) + +def getoutput(cmd): + """Return standard output of executing cmd in a shell. + + Accepts the same arguments as os.system(). + + Parameters + ---------- + cmd : str or list + A command to be executed in the system shell. + + Returns + ------- + stdout : str + """ + + with AvoidUNCPath() as path: + if path is not None: + cmd = '"pushd %s &&"%s' % (path, cmd) + out = process_handler(cmd, lambda p: p.communicate()[0], STDOUT) + + if out is None: + out = b'' + return py3compat.decode(out) + +try: + CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW + CommandLineToArgvW.arg_types = [LPCWSTR, POINTER(c_int)] + CommandLineToArgvW.restype = POINTER(LPCWSTR) + LocalFree = ctypes.windll.kernel32.LocalFree + LocalFree.res_type = HLOCAL + LocalFree.arg_types = [HLOCAL] + + def arg_split(commandline, posix=False, strict=True): + """Split a command line's arguments in a shell-like manner. + + This is a special version for windows that use a ctypes call to CommandLineToArgvW + to do the argv splitting. The posix parameter is ignored. 
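# Hedged sketch of the polling wait in _system_body() above: Popen.wait() is
# not reliably interruptible on Windows (see the CPython issue linked in the
# comment), so the code polls with a short sleep until a return code appears.
import time

def wait_interruptible(p):
    while True:
        rc = p.poll()
        if rc is not None:
            return rc
        time.sleep(0.01)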
+ + If strict=False, process_common.arg_split(...strict=False) is used instead. + """ + #CommandLineToArgvW returns path to executable if called with empty string. + if commandline.strip() == "": + return [] + if not strict: + # not really a cl-arg, fallback on _process_common + return py_arg_split(commandline, posix=posix, strict=strict) + argvn = c_int() + result_pointer = CommandLineToArgvW(py3compat.cast_unicode(commandline.lstrip()), ctypes.byref(argvn)) + result_array_type = LPCWSTR * argvn.value + result = [arg for arg in result_array_type.from_address(ctypes.addressof(result_pointer.contents))] + retval = LocalFree(result_pointer) + return result +except AttributeError: + arg_split = py_arg_split + +def check_pid(pid): + # OpenProcess returns 0 if no such process (of ours) exists + # positive int otherwise + return bool(ctypes.windll.kernel32.OpenProcess(1,0,pid)) diff --git a/contrib/python/ipython/py3/IPython/utils/_process_win32_controller.py b/contrib/python/ipython/py3/IPython/utils/_process_win32_controller.py index e2889800deb..c2e2329c45b 100644 --- a/contrib/python/ipython/py3/IPython/utils/_process_win32_controller.py +++ b/contrib/python/ipython/py3/IPython/utils/_process_win32_controller.py @@ -1,573 +1,573 @@ -"""Windows-specific implementation of process utilities with direct WinAPI. - -This file is meant to be used by process.py -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2010-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - - -# stdlib -import os, sys, threading -import ctypes, msvcrt - -# Win32 API types needed for the API calls -from ctypes import POINTER -from ctypes.wintypes import HANDLE, HLOCAL, LPVOID, WORD, DWORD, BOOL, \ - ULONG, LPCWSTR -LPDWORD = POINTER(DWORD) -LPHANDLE = POINTER(HANDLE) -ULONG_PTR = POINTER(ULONG) -class SECURITY_ATTRIBUTES(ctypes.Structure): - _fields_ = [("nLength", DWORD), - ("lpSecurityDescriptor", LPVOID), - ("bInheritHandle", BOOL)] -LPSECURITY_ATTRIBUTES = POINTER(SECURITY_ATTRIBUTES) -class STARTUPINFO(ctypes.Structure): - _fields_ = [("cb", DWORD), - ("lpReserved", LPCWSTR), - ("lpDesktop", LPCWSTR), - ("lpTitle", LPCWSTR), - ("dwX", DWORD), - ("dwY", DWORD), - ("dwXSize", DWORD), - ("dwYSize", DWORD), - ("dwXCountChars", DWORD), - ("dwYCountChars", DWORD), - ("dwFillAttribute", DWORD), - ("dwFlags", DWORD), - ("wShowWindow", WORD), - ("cbReserved2", WORD), - ("lpReserved2", LPVOID), - ("hStdInput", HANDLE), - ("hStdOutput", HANDLE), - ("hStdError", HANDLE)] -LPSTARTUPINFO = POINTER(STARTUPINFO) -class PROCESS_INFORMATION(ctypes.Structure): - _fields_ = [("hProcess", HANDLE), - ("hThread", HANDLE), - ("dwProcessId", DWORD), - ("dwThreadId", DWORD)] -LPPROCESS_INFORMATION = POINTER(PROCESS_INFORMATION) - -# Win32 API constants needed -ERROR_HANDLE_EOF = 38 -ERROR_BROKEN_PIPE = 109 -ERROR_NO_DATA = 232 -HANDLE_FLAG_INHERIT = 0x0001 -STARTF_USESTDHANDLES = 0x0100 -CREATE_SUSPENDED = 0x0004 -CREATE_NEW_CONSOLE = 0x0010 -CREATE_NO_WINDOW = 0x08000000 -STILL_ACTIVE = 259 -WAIT_TIMEOUT = 0x0102 -WAIT_FAILED = 0xFFFFFFFF -INFINITE = 0xFFFFFFFF -DUPLICATE_SAME_ACCESS = 0x00000002 -ENABLE_ECHO_INPUT = 0x0004 -ENABLE_LINE_INPUT = 0x0002 -ENABLE_PROCESSED_INPUT = 0x0001 - -# Win32 API functions needed -GetLastError = ctypes.windll.kernel32.GetLastError -GetLastError.argtypes = [] 
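# Hedged sketch of the Windows arg_split() shown above: CommandLineToArgvW does
# the splitting (consuming the quotes), and an all-whitespace command line is
# short-circuited to an empty list. Windows-only, illustrative.
from IPython.utils._process_win32 import arg_split

arg_split('foo "bar baz" qux')   # ['foo', 'bar baz', 'qux']
arg_split("   ")                 # []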
-GetLastError.restype = DWORD - -CreateFile = ctypes.windll.kernel32.CreateFileW -CreateFile.argtypes = [LPCWSTR, DWORD, DWORD, LPVOID, DWORD, DWORD, HANDLE] -CreateFile.restype = HANDLE - -CreatePipe = ctypes.windll.kernel32.CreatePipe -CreatePipe.argtypes = [POINTER(HANDLE), POINTER(HANDLE), - LPSECURITY_ATTRIBUTES, DWORD] -CreatePipe.restype = BOOL - -CreateProcess = ctypes.windll.kernel32.CreateProcessW -CreateProcess.argtypes = [LPCWSTR, LPCWSTR, LPSECURITY_ATTRIBUTES, - LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPCWSTR, LPSTARTUPINFO, - LPPROCESS_INFORMATION] -CreateProcess.restype = BOOL - -GetExitCodeProcess = ctypes.windll.kernel32.GetExitCodeProcess -GetExitCodeProcess.argtypes = [HANDLE, LPDWORD] -GetExitCodeProcess.restype = BOOL - -GetCurrentProcess = ctypes.windll.kernel32.GetCurrentProcess -GetCurrentProcess.argtypes = [] -GetCurrentProcess.restype = HANDLE - -ResumeThread = ctypes.windll.kernel32.ResumeThread -ResumeThread.argtypes = [HANDLE] -ResumeThread.restype = DWORD - -ReadFile = ctypes.windll.kernel32.ReadFile -ReadFile.argtypes = [HANDLE, LPVOID, DWORD, LPDWORD, LPVOID] -ReadFile.restype = BOOL - -WriteFile = ctypes.windll.kernel32.WriteFile -WriteFile.argtypes = [HANDLE, LPVOID, DWORD, LPDWORD, LPVOID] -WriteFile.restype = BOOL - -GetConsoleMode = ctypes.windll.kernel32.GetConsoleMode -GetConsoleMode.argtypes = [HANDLE, LPDWORD] -GetConsoleMode.restype = BOOL - -SetConsoleMode = ctypes.windll.kernel32.SetConsoleMode -SetConsoleMode.argtypes = [HANDLE, DWORD] -SetConsoleMode.restype = BOOL - -FlushConsoleInputBuffer = ctypes.windll.kernel32.FlushConsoleInputBuffer -FlushConsoleInputBuffer.argtypes = [HANDLE] -FlushConsoleInputBuffer.restype = BOOL - -WaitForSingleObject = ctypes.windll.kernel32.WaitForSingleObject -WaitForSingleObject.argtypes = [HANDLE, DWORD] -WaitForSingleObject.restype = DWORD - -DuplicateHandle = ctypes.windll.kernel32.DuplicateHandle -DuplicateHandle.argtypes = [HANDLE, HANDLE, HANDLE, LPHANDLE, - DWORD, BOOL, DWORD] -DuplicateHandle.restype = BOOL - -SetHandleInformation = ctypes.windll.kernel32.SetHandleInformation -SetHandleInformation.argtypes = [HANDLE, DWORD, DWORD] -SetHandleInformation.restype = BOOL - -CloseHandle = ctypes.windll.kernel32.CloseHandle -CloseHandle.argtypes = [HANDLE] -CloseHandle.restype = BOOL - -CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW -CommandLineToArgvW.argtypes = [LPCWSTR, POINTER(ctypes.c_int)] -CommandLineToArgvW.restype = POINTER(LPCWSTR) - -LocalFree = ctypes.windll.kernel32.LocalFree -LocalFree.argtypes = [HLOCAL] -LocalFree.restype = HLOCAL - -class AvoidUNCPath(object): - """A context manager to protect command execution from UNC paths. - - In the Win32 API, commands can't be invoked with the cwd being a UNC path. - This context manager temporarily changes directory to the 'C:' drive on - entering, and restores the original working directory on exit. - - The context manager returns the starting working directory *if* it made a - change and None otherwise, so that users can apply the necessary adjustment - to their system calls in the event of a change. 
- - Examples - -------- - :: - cmd = 'dir' - with AvoidUNCPath() as path: - if path is not None: - cmd = '"pushd %s &&"%s' % (path, cmd) - os.system(cmd) - """ - def __enter__(self): - self.path = os.getcwd() - self.is_unc_path = self.path.startswith(r"\\") - if self.is_unc_path: - # change to c drive (as cmd.exe cannot handle UNC addresses) - os.chdir("C:") - return self.path - else: - # We return None to signal that there was no change in the working - # directory - return None - - def __exit__(self, exc_type, exc_value, traceback): - if self.is_unc_path: - os.chdir(self.path) - - -class Win32ShellCommandController(object): - """Runs a shell command in a 'with' context. - - This implementation is Win32-specific. - - Example: - # Runs the command interactively with default console stdin/stdout - with ShellCommandController('python -i') as scc: - scc.run() - - # Runs the command using the provided functions for stdin/stdout - def my_stdout_func(s): - # print or save the string 's' - write_to_stdout(s) - def my_stdin_func(): - # If input is available, return it as a string. - if input_available(): - return get_input() - # If no input available, return None after a short delay to - # keep from blocking. - else: - time.sleep(0.01) - return None - - with ShellCommandController('python -i') as scc: - scc.run(my_stdout_func, my_stdin_func) - """ - - def __init__(self, cmd, mergeout = True): - """Initializes the shell command controller. - - The cmd is the program to execute, and mergeout is - whether to blend stdout and stderr into one output - in stdout. Merging them together in this fashion more - reliably keeps stdout and stderr in the correct order - especially for interactive shell usage. - """ - self.cmd = cmd - self.mergeout = mergeout - - def __enter__(self): - cmd = self.cmd - mergeout = self.mergeout - - self.hstdout, self.hstdin, self.hstderr = None, None, None - self.piProcInfo = None - try: - p_hstdout, c_hstdout, p_hstderr, \ - c_hstderr, p_hstdin, c_hstdin = [None]*6 - - # SECURITY_ATTRIBUTES with inherit handle set to True - saAttr = SECURITY_ATTRIBUTES() - saAttr.nLength = ctypes.sizeof(saAttr) - saAttr.bInheritHandle = True - saAttr.lpSecurityDescriptor = None - - def create_pipe(uninherit): - """Creates a Windows pipe, which consists of two handles. - - The 'uninherit' parameter controls which handle is not - inherited by the child process. - """ - handles = HANDLE(), HANDLE() - if not CreatePipe(ctypes.byref(handles[0]), - ctypes.byref(handles[1]), ctypes.byref(saAttr), 0): - raise ctypes.WinError() - if not SetHandleInformation(handles[uninherit], - HANDLE_FLAG_INHERIT, 0): - raise ctypes.WinError() - return handles[0].value, handles[1].value - - p_hstdout, c_hstdout = create_pipe(uninherit=0) - # 'mergeout' signals that stdout and stderr should be merged. - # We do that by using one pipe for both of them. 
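# Hedged usage sketch for the class above. Note that its docstring example
# calls it ShellCommandController, while the class actually defined here is
# Win32ShellCommandController. Windows-only, illustrative, not part of the diff.
from IPython.utils._process_win32_controller import Win32ShellCommandController

with Win32ShellCommandController("python -i") as scc:
    scc.run()   # interactive run on the default console, stderr merged into stdout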
- if mergeout: - c_hstderr = HANDLE() - if not DuplicateHandle(GetCurrentProcess(), c_hstdout, - GetCurrentProcess(), ctypes.byref(c_hstderr), - 0, True, DUPLICATE_SAME_ACCESS): - raise ctypes.WinError() - else: - p_hstderr, c_hstderr = create_pipe(uninherit=0) - c_hstdin, p_hstdin = create_pipe(uninherit=1) - - # Create the process object - piProcInfo = PROCESS_INFORMATION() - siStartInfo = STARTUPINFO() - siStartInfo.cb = ctypes.sizeof(siStartInfo) - siStartInfo.hStdInput = c_hstdin - siStartInfo.hStdOutput = c_hstdout - siStartInfo.hStdError = c_hstderr - siStartInfo.dwFlags = STARTF_USESTDHANDLES - dwCreationFlags = CREATE_SUSPENDED | CREATE_NO_WINDOW # | CREATE_NEW_CONSOLE - - if not CreateProcess(None, - u"cmd.exe /c " + cmd, - None, None, True, dwCreationFlags, - None, None, ctypes.byref(siStartInfo), - ctypes.byref(piProcInfo)): - raise ctypes.WinError() - - # Close this process's versions of the child handles - CloseHandle(c_hstdin) - c_hstdin = None - CloseHandle(c_hstdout) - c_hstdout = None - if c_hstderr is not None: - CloseHandle(c_hstderr) - c_hstderr = None - - # Transfer ownership of the parent handles to the object - self.hstdin = p_hstdin - p_hstdin = None - self.hstdout = p_hstdout - p_hstdout = None - if not mergeout: - self.hstderr = p_hstderr - p_hstderr = None - self.piProcInfo = piProcInfo - - finally: - if p_hstdin: - CloseHandle(p_hstdin) - if c_hstdin: - CloseHandle(c_hstdin) - if p_hstdout: - CloseHandle(p_hstdout) - if c_hstdout: - CloseHandle(c_hstdout) - if p_hstderr: - CloseHandle(p_hstderr) - if c_hstderr: - CloseHandle(c_hstderr) - - return self - - def _stdin_thread(self, handle, hprocess, func, stdout_func): - exitCode = DWORD() - bytesWritten = DWORD(0) - while True: - #print("stdin thread loop start") - # Get the input string (may be bytes or unicode) - data = func() - - # None signals to poll whether the process has exited - if data is None: - #print("checking for process completion") - if not GetExitCodeProcess(hprocess, ctypes.byref(exitCode)): - raise ctypes.WinError() - if exitCode.value != STILL_ACTIVE: - return - # TESTING: Does zero-sized writefile help? - if not WriteFile(handle, "", 0, - ctypes.byref(bytesWritten), None): - raise ctypes.WinError() - continue - #print("\nGot str %s\n" % repr(data), file=sys.stderr) - - # Encode the string to the console encoding - if isinstance(data, unicode): #FIXME: Python3 - data = data.encode('utf_8') - - # What we have now must be a string of bytes - if not isinstance(data, str): #FIXME: Python3 - raise RuntimeError("internal stdin function string error") - - # An empty string signals EOF - if len(data) == 0: - return - - # In a windows console, sometimes the input is echoed, - # but sometimes not. How do we determine when to do this? - stdout_func(data) - # WriteFile may not accept all the data at once. 
- # Loop until everything is processed - while len(data) != 0: - #print("Calling writefile") - if not WriteFile(handle, data, len(data), - ctypes.byref(bytesWritten), None): - # This occurs at exit - if GetLastError() == ERROR_NO_DATA: - return - raise ctypes.WinError() - #print("Called writefile") - data = data[bytesWritten.value:] - - def _stdout_thread(self, handle, func): - # Allocate the output buffer - data = ctypes.create_string_buffer(4096) - while True: - bytesRead = DWORD(0) - if not ReadFile(handle, data, 4096, - ctypes.byref(bytesRead), None): - le = GetLastError() - if le == ERROR_BROKEN_PIPE: - return - else: - raise ctypes.WinError() - # FIXME: Python3 - s = data.value[0:bytesRead.value] - #print("\nv: %s" % repr(s), file=sys.stderr) - func(s.decode('utf_8', 'replace')) - - def run(self, stdout_func = None, stdin_func = None, stderr_func = None): - """Runs the process, using the provided functions for I/O. - - The function stdin_func should return strings whenever a - character or characters become available. - The functions stdout_func and stderr_func are called whenever - something is printed to stdout or stderr, respectively. - These functions are called from different threads (but not - concurrently, because of the GIL). - """ - if stdout_func is None and stdin_func is None and stderr_func is None: - return self._run_stdio() - - if stderr_func is not None and self.mergeout: - raise RuntimeError("Shell command was initiated with " - "merged stdin/stdout, but a separate stderr_func " - "was provided to the run() method") - - # Create a thread for each input/output handle - stdin_thread = None - threads = [] - if stdin_func: - stdin_thread = threading.Thread(target=self._stdin_thread, - args=(self.hstdin, self.piProcInfo.hProcess, - stdin_func, stdout_func)) - threads.append(threading.Thread(target=self._stdout_thread, - args=(self.hstdout, stdout_func))) - if not self.mergeout: - if stderr_func is None: - stderr_func = stdout_func - threads.append(threading.Thread(target=self._stdout_thread, - args=(self.hstderr, stderr_func))) - # Start the I/O threads and the process - if ResumeThread(self.piProcInfo.hThread) == 0xFFFFFFFF: - raise ctypes.WinError() - if stdin_thread is not None: - stdin_thread.start() - for thread in threads: - thread.start() - # Wait for the process to complete - if WaitForSingleObject(self.piProcInfo.hProcess, INFINITE) == \ - WAIT_FAILED: - raise ctypes.WinError() - # Wait for the I/O threads to complete - for thread in threads: - thread.join() - - # Wait for the stdin thread to complete - if stdin_thread is not None: - stdin_thread.join() - - def _stdin_raw_nonblock(self): - """Use the raw Win32 handle of sys.stdin to do non-blocking reads""" - # WARNING: This is experimental, and produces inconsistent results. - # It's possible for the handle not to be appropriate for use - # with WaitForSingleObject, among other things. 
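# Hedged sketch of run() with a custom stdout callback, following the contract
# in its docstring: without a stdin_func the stdin thread is simply not
# started, and mergeout (the default) folds stderr into the same stream.
from IPython.utils._process_win32_controller import Win32ShellCommandController

chunks = []
with Win32ShellCommandController("dir") as scc:
    scc.run(stdout_func=chunks.append)   # collect decoded output chunks
print("".join(chunks))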
- handle = msvcrt.get_osfhandle(sys.stdin.fileno()) - result = WaitForSingleObject(handle, 100) - if result == WAIT_FAILED: - raise ctypes.WinError() - elif result == WAIT_TIMEOUT: - print(".", end='') - return None - else: - data = ctypes.create_string_buffer(256) - bytesRead = DWORD(0) - print('?', end='') - - if not ReadFile(handle, data, 256, - ctypes.byref(bytesRead), None): - raise ctypes.WinError() - # This ensures the non-blocking works with an actual console - # Not checking the error, so the processing will still work with - # other handle types - FlushConsoleInputBuffer(handle) - - data = data.value - data = data.replace('\r\n', '\n') - data = data.replace('\r', '\n') - print(repr(data) + " ", end='') - return data - - def _stdin_raw_block(self): - """Use a blocking stdin read""" - # The big problem with the blocking read is that it doesn't - # exit when it's supposed to in all contexts. An extra - # key-press may be required to trigger the exit. - try: - data = sys.stdin.read(1) - data = data.replace('\r', '\n') - return data - except WindowsError as we: - if we.winerror == ERROR_NO_DATA: - # This error occurs when the pipe is closed - return None - else: - # Otherwise let the error propagate - raise we - - def _stdout_raw(self, s): - """Writes the string to stdout""" - print(s, end='', file=sys.stdout) - sys.stdout.flush() - - def _stderr_raw(self, s): - """Writes the string to stdout""" - print(s, end='', file=sys.stderr) - sys.stderr.flush() - - def _run_stdio(self): - """Runs the process using the system standard I/O. - - IMPORTANT: stdin needs to be asynchronous, so the Python - sys.stdin object is not used. Instead, - msvcrt.kbhit/getwch are used asynchronously. - """ - # Disable Line and Echo mode - #lpMode = DWORD() - #handle = msvcrt.get_osfhandle(sys.stdin.fileno()) - #if GetConsoleMode(handle, ctypes.byref(lpMode)): - # set_console_mode = True - # if not SetConsoleMode(handle, lpMode.value & - # ~(ENABLE_ECHO_INPUT | ENABLE_LINE_INPUT | ENABLE_PROCESSED_INPUT)): - # raise ctypes.WinError() - - if self.mergeout: - return self.run(stdout_func = self._stdout_raw, - stdin_func = self._stdin_raw_block) - else: - return self.run(stdout_func = self._stdout_raw, - stdin_func = self._stdin_raw_block, - stderr_func = self._stderr_raw) - - # Restore the previous console mode - #if set_console_mode: - # if not SetConsoleMode(handle, lpMode.value): - # raise ctypes.WinError() - - def __exit__(self, exc_type, exc_value, traceback): - if self.hstdin: - CloseHandle(self.hstdin) - self.hstdin = None - if self.hstdout: - CloseHandle(self.hstdout) - self.hstdout = None - if self.hstderr: - CloseHandle(self.hstderr) - self.hstderr = None - if self.piProcInfo is not None: - CloseHandle(self.piProcInfo.hProcess) - CloseHandle(self.piProcInfo.hThread) - self.piProcInfo = None - - -def system(cmd): - """Win32 version of os.system() that works with network shares. - - Note that this implementation returns None, as meant for use in IPython. - - Parameters - ---------- - cmd : str - A command to be executed in the system shell. - - Returns - ------- - None : we explicitly do NOT return the subprocess status code, as this - utility is meant to be used extensively in IPython, where any return value - would trigger :func:`sys.displayhook` calls. 
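# Hedged note on the module-level system() above: it deliberately returns None
# so that a bare call inside IPython does not trigger sys.displayhook.
from IPython.utils._process_win32_controller import system

assert system("echo controller test") is None   # output goes to the console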
- """ - with AvoidUNCPath() as path: - if path is not None: - cmd = '"pushd %s &&"%s' % (path, cmd) - with Win32ShellCommandController(cmd) as scc: - scc.run() - - -if __name__ == "__main__": - print("Test starting!") - #system("cmd") - system("python -i") - print("Test finished!") +"""Windows-specific implementation of process utilities with direct WinAPI. + +This file is meant to be used by process.py +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2010-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + + +# stdlib +import os, sys, threading +import ctypes, msvcrt + +# Win32 API types needed for the API calls +from ctypes import POINTER +from ctypes.wintypes import HANDLE, HLOCAL, LPVOID, WORD, DWORD, BOOL, \ + ULONG, LPCWSTR +LPDWORD = POINTER(DWORD) +LPHANDLE = POINTER(HANDLE) +ULONG_PTR = POINTER(ULONG) +class SECURITY_ATTRIBUTES(ctypes.Structure): + _fields_ = [("nLength", DWORD), + ("lpSecurityDescriptor", LPVOID), + ("bInheritHandle", BOOL)] +LPSECURITY_ATTRIBUTES = POINTER(SECURITY_ATTRIBUTES) +class STARTUPINFO(ctypes.Structure): + _fields_ = [("cb", DWORD), + ("lpReserved", LPCWSTR), + ("lpDesktop", LPCWSTR), + ("lpTitle", LPCWSTR), + ("dwX", DWORD), + ("dwY", DWORD), + ("dwXSize", DWORD), + ("dwYSize", DWORD), + ("dwXCountChars", DWORD), + ("dwYCountChars", DWORD), + ("dwFillAttribute", DWORD), + ("dwFlags", DWORD), + ("wShowWindow", WORD), + ("cbReserved2", WORD), + ("lpReserved2", LPVOID), + ("hStdInput", HANDLE), + ("hStdOutput", HANDLE), + ("hStdError", HANDLE)] +LPSTARTUPINFO = POINTER(STARTUPINFO) +class PROCESS_INFORMATION(ctypes.Structure): + _fields_ = [("hProcess", HANDLE), + ("hThread", HANDLE), + ("dwProcessId", DWORD), + ("dwThreadId", DWORD)] +LPPROCESS_INFORMATION = POINTER(PROCESS_INFORMATION) + +# Win32 API constants needed +ERROR_HANDLE_EOF = 38 +ERROR_BROKEN_PIPE = 109 +ERROR_NO_DATA = 232 +HANDLE_FLAG_INHERIT = 0x0001 +STARTF_USESTDHANDLES = 0x0100 +CREATE_SUSPENDED = 0x0004 +CREATE_NEW_CONSOLE = 0x0010 +CREATE_NO_WINDOW = 0x08000000 +STILL_ACTIVE = 259 +WAIT_TIMEOUT = 0x0102 +WAIT_FAILED = 0xFFFFFFFF +INFINITE = 0xFFFFFFFF +DUPLICATE_SAME_ACCESS = 0x00000002 +ENABLE_ECHO_INPUT = 0x0004 +ENABLE_LINE_INPUT = 0x0002 +ENABLE_PROCESSED_INPUT = 0x0001 + +# Win32 API functions needed +GetLastError = ctypes.windll.kernel32.GetLastError +GetLastError.argtypes = [] +GetLastError.restype = DWORD + +CreateFile = ctypes.windll.kernel32.CreateFileW +CreateFile.argtypes = [LPCWSTR, DWORD, DWORD, LPVOID, DWORD, DWORD, HANDLE] +CreateFile.restype = HANDLE + +CreatePipe = ctypes.windll.kernel32.CreatePipe +CreatePipe.argtypes = [POINTER(HANDLE), POINTER(HANDLE), + LPSECURITY_ATTRIBUTES, DWORD] +CreatePipe.restype = BOOL + +CreateProcess = ctypes.windll.kernel32.CreateProcessW +CreateProcess.argtypes = [LPCWSTR, LPCWSTR, LPSECURITY_ATTRIBUTES, + LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPCWSTR, LPSTARTUPINFO, + LPPROCESS_INFORMATION] +CreateProcess.restype = BOOL + +GetExitCodeProcess = ctypes.windll.kernel32.GetExitCodeProcess +GetExitCodeProcess.argtypes = [HANDLE, LPDWORD] +GetExitCodeProcess.restype = BOOL + +GetCurrentProcess = ctypes.windll.kernel32.GetCurrentProcess +GetCurrentProcess.argtypes = [] +GetCurrentProcess.restype = HANDLE + +ResumeThread = ctypes.windll.kernel32.ResumeThread +ResumeThread.argtypes = 
[HANDLE] +ResumeThread.restype = DWORD + +ReadFile = ctypes.windll.kernel32.ReadFile +ReadFile.argtypes = [HANDLE, LPVOID, DWORD, LPDWORD, LPVOID] +ReadFile.restype = BOOL + +WriteFile = ctypes.windll.kernel32.WriteFile +WriteFile.argtypes = [HANDLE, LPVOID, DWORD, LPDWORD, LPVOID] +WriteFile.restype = BOOL + +GetConsoleMode = ctypes.windll.kernel32.GetConsoleMode +GetConsoleMode.argtypes = [HANDLE, LPDWORD] +GetConsoleMode.restype = BOOL + +SetConsoleMode = ctypes.windll.kernel32.SetConsoleMode +SetConsoleMode.argtypes = [HANDLE, DWORD] +SetConsoleMode.restype = BOOL + +FlushConsoleInputBuffer = ctypes.windll.kernel32.FlushConsoleInputBuffer +FlushConsoleInputBuffer.argtypes = [HANDLE] +FlushConsoleInputBuffer.restype = BOOL + +WaitForSingleObject = ctypes.windll.kernel32.WaitForSingleObject +WaitForSingleObject.argtypes = [HANDLE, DWORD] +WaitForSingleObject.restype = DWORD + +DuplicateHandle = ctypes.windll.kernel32.DuplicateHandle +DuplicateHandle.argtypes = [HANDLE, HANDLE, HANDLE, LPHANDLE, + DWORD, BOOL, DWORD] +DuplicateHandle.restype = BOOL + +SetHandleInformation = ctypes.windll.kernel32.SetHandleInformation +SetHandleInformation.argtypes = [HANDLE, DWORD, DWORD] +SetHandleInformation.restype = BOOL + +CloseHandle = ctypes.windll.kernel32.CloseHandle +CloseHandle.argtypes = [HANDLE] +CloseHandle.restype = BOOL + +CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW +CommandLineToArgvW.argtypes = [LPCWSTR, POINTER(ctypes.c_int)] +CommandLineToArgvW.restype = POINTER(LPCWSTR) + +LocalFree = ctypes.windll.kernel32.LocalFree +LocalFree.argtypes = [HLOCAL] +LocalFree.restype = HLOCAL + +class AvoidUNCPath(object): + """A context manager to protect command execution from UNC paths. + + In the Win32 API, commands can't be invoked with the cwd being a UNC path. + This context manager temporarily changes directory to the 'C:' drive on + entering, and restores the original working directory on exit. + + The context manager returns the starting working directory *if* it made a + change and None otherwise, so that users can apply the necessary adjustment + to their system calls in the event of a change. + + Examples + -------- + :: + cmd = 'dir' + with AvoidUNCPath() as path: + if path is not None: + cmd = '"pushd %s &&"%s' % (path, cmd) + os.system(cmd) + """ + def __enter__(self): + self.path = os.getcwd() + self.is_unc_path = self.path.startswith(r"\\") + if self.is_unc_path: + # change to c drive (as cmd.exe cannot handle UNC addresses) + os.chdir("C:") + return self.path + else: + # We return None to signal that there was no change in the working + # directory + return None + + def __exit__(self, exc_type, exc_value, traceback): + if self.is_unc_path: + os.chdir(self.path) + + +class Win32ShellCommandController(object): + """Runs a shell command in a 'with' context. + + This implementation is Win32-specific. + + Example: + # Runs the command interactively with default console stdin/stdout + with ShellCommandController('python -i') as scc: + scc.run() + + # Runs the command using the provided functions for stdin/stdout + def my_stdout_func(s): + # print or save the string 's' + write_to_stdout(s) + def my_stdin_func(): + # If input is available, return it as a string. + if input_available(): + return get_input() + # If no input available, return None after a short delay to + # keep from blocking. 
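Every kernel32/shell32 function above is imported the same way: look it up on ctypes.windll, then pin down argtypes and restype so ctypes marshals handles and out-parameters correctly. A tiny hedged sketch of that pattern in isolation (Windows-only; the two prototypes mirror the declarations above but the snippet is illustrative):

import ctypes
from ctypes import POINTER, byref
from ctypes.wintypes import HANDLE, DWORD, BOOL

kernel32 = ctypes.WinDLL("kernel32", use_last_error=True)

GetCurrentProcess = kernel32.GetCurrentProcess
GetCurrentProcess.argtypes = []
GetCurrentProcess.restype = HANDLE

GetExitCodeProcess = kernel32.GetExitCodeProcess
GetExitCodeProcess.argtypes = [HANDLE, POINTER(DWORD)]
GetExitCodeProcess.restype = BOOL

code = DWORD(0)
if not GetExitCodeProcess(GetCurrentProcess(), byref(code)):
    raise ctypes.WinError(ctypes.get_last_error())
print(code.value)        # 259 (STILL_ACTIVE) for the interpreter itself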
+ else: + time.sleep(0.01) + return None + + with ShellCommandController('python -i') as scc: + scc.run(my_stdout_func, my_stdin_func) + """ + + def __init__(self, cmd, mergeout = True): + """Initializes the shell command controller. + + The cmd is the program to execute, and mergeout is + whether to blend stdout and stderr into one output + in stdout. Merging them together in this fashion more + reliably keeps stdout and stderr in the correct order + especially for interactive shell usage. + """ + self.cmd = cmd + self.mergeout = mergeout + + def __enter__(self): + cmd = self.cmd + mergeout = self.mergeout + + self.hstdout, self.hstdin, self.hstderr = None, None, None + self.piProcInfo = None + try: + p_hstdout, c_hstdout, p_hstderr, \ + c_hstderr, p_hstdin, c_hstdin = [None]*6 + + # SECURITY_ATTRIBUTES with inherit handle set to True + saAttr = SECURITY_ATTRIBUTES() + saAttr.nLength = ctypes.sizeof(saAttr) + saAttr.bInheritHandle = True + saAttr.lpSecurityDescriptor = None + + def create_pipe(uninherit): + """Creates a Windows pipe, which consists of two handles. + + The 'uninherit' parameter controls which handle is not + inherited by the child process. + """ + handles = HANDLE(), HANDLE() + if not CreatePipe(ctypes.byref(handles[0]), + ctypes.byref(handles[1]), ctypes.byref(saAttr), 0): + raise ctypes.WinError() + if not SetHandleInformation(handles[uninherit], + HANDLE_FLAG_INHERIT, 0): + raise ctypes.WinError() + return handles[0].value, handles[1].value + + p_hstdout, c_hstdout = create_pipe(uninherit=0) + # 'mergeout' signals that stdout and stderr should be merged. + # We do that by using one pipe for both of them. + if mergeout: + c_hstderr = HANDLE() + if not DuplicateHandle(GetCurrentProcess(), c_hstdout, + GetCurrentProcess(), ctypes.byref(c_hstderr), + 0, True, DUPLICATE_SAME_ACCESS): + raise ctypes.WinError() + else: + p_hstderr, c_hstderr = create_pipe(uninherit=0) + c_hstdin, p_hstdin = create_pipe(uninherit=1) + + # Create the process object + piProcInfo = PROCESS_INFORMATION() + siStartInfo = STARTUPINFO() + siStartInfo.cb = ctypes.sizeof(siStartInfo) + siStartInfo.hStdInput = c_hstdin + siStartInfo.hStdOutput = c_hstdout + siStartInfo.hStdError = c_hstderr + siStartInfo.dwFlags = STARTF_USESTDHANDLES + dwCreationFlags = CREATE_SUSPENDED | CREATE_NO_WINDOW # | CREATE_NEW_CONSOLE + + if not CreateProcess(None, + u"cmd.exe /c " + cmd, + None, None, True, dwCreationFlags, + None, None, ctypes.byref(siStartInfo), + ctypes.byref(piProcInfo)): + raise ctypes.WinError() + + # Close this process's versions of the child handles + CloseHandle(c_hstdin) + c_hstdin = None + CloseHandle(c_hstdout) + c_hstdout = None + if c_hstderr is not None: + CloseHandle(c_hstderr) + c_hstderr = None + + # Transfer ownership of the parent handles to the object + self.hstdin = p_hstdin + p_hstdin = None + self.hstdout = p_hstdout + p_hstdout = None + if not mergeout: + self.hstderr = p_hstderr + p_hstderr = None + self.piProcInfo = piProcInfo + + finally: + if p_hstdin: + CloseHandle(p_hstdin) + if c_hstdin: + CloseHandle(c_hstdin) + if p_hstdout: + CloseHandle(p_hstdout) + if c_hstdout: + CloseHandle(c_hstdout) + if p_hstderr: + CloseHandle(p_hstderr) + if c_hstderr: + CloseHandle(c_hstderr) + + return self + + def _stdin_thread(self, handle, hprocess, func, stdout_func): + exitCode = DWORD() + bytesWritten = DWORD(0) + while True: + #print("stdin thread loop start") + # Get the input string (may be bytes or unicode) + data = func() + + # None signals to poll whether the process has exited + if 
data is None: + #print("checking for process completion") + if not GetExitCodeProcess(hprocess, ctypes.byref(exitCode)): + raise ctypes.WinError() + if exitCode.value != STILL_ACTIVE: + return + # TESTING: Does zero-sized writefile help? + if not WriteFile(handle, "", 0, + ctypes.byref(bytesWritten), None): + raise ctypes.WinError() + continue + #print("\nGot str %s\n" % repr(data), file=sys.stderr) + + # Encode the string to the console encoding + if isinstance(data, unicode): #FIXME: Python3 + data = data.encode('utf_8') + + # What we have now must be a string of bytes + if not isinstance(data, str): #FIXME: Python3 + raise RuntimeError("internal stdin function string error") + + # An empty string signals EOF + if len(data) == 0: + return + + # In a windows console, sometimes the input is echoed, + # but sometimes not. How do we determine when to do this? + stdout_func(data) + # WriteFile may not accept all the data at once. + # Loop until everything is processed + while len(data) != 0: + #print("Calling writefile") + if not WriteFile(handle, data, len(data), + ctypes.byref(bytesWritten), None): + # This occurs at exit + if GetLastError() == ERROR_NO_DATA: + return + raise ctypes.WinError() + #print("Called writefile") + data = data[bytesWritten.value:] + + def _stdout_thread(self, handle, func): + # Allocate the output buffer + data = ctypes.create_string_buffer(4096) + while True: + bytesRead = DWORD(0) + if not ReadFile(handle, data, 4096, + ctypes.byref(bytesRead), None): + le = GetLastError() + if le == ERROR_BROKEN_PIPE: + return + else: + raise ctypes.WinError() + # FIXME: Python3 + s = data.value[0:bytesRead.value] + #print("\nv: %s" % repr(s), file=sys.stderr) + func(s.decode('utf_8', 'replace')) + + def run(self, stdout_func = None, stdin_func = None, stderr_func = None): + """Runs the process, using the provided functions for I/O. + + The function stdin_func should return strings whenever a + character or characters become available. + The functions stdout_func and stderr_func are called whenever + something is printed to stdout or stderr, respectively. + These functions are called from different threads (but not + concurrently, because of the GIL). 
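The __enter__/_stdin_thread/_stdout_thread trio above implements a classic pump: one thread shovels the child's stdout into a callback while another feeds stdin, looping on WriteFile until every byte is accepted. A rough, portable analogue of that shape using subprocess instead of CreatePipe/CreateProcess (names here are hypothetical and not part of the module):

import subprocess, sys, threading

def stdout_pump(pipe, func):
    # like _stdout_thread: read fixed-size chunks until the pipe closes
    for chunk in iter(lambda: pipe.read(4096), b""):
        func(chunk.decode("utf_8", "replace"))

proc = subprocess.Popen([sys.executable, "-c", "print(input()[::-1])"],
                        stdin=subprocess.PIPE, stdout=subprocess.PIPE)
pump = threading.Thread(target=stdout_pump,
                        args=(proc.stdout, lambda s: print(s, end="")))
pump.start()
proc.stdin.write(b"hello\n")     # the WinAPI version repeats WriteFile until all bytes are taken
proc.stdin.close()               # closing stdin plays the role of the empty-string EOF signal
proc.wait()
pump.join()                      # prints: olleh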
+ """ + if stdout_func is None and stdin_func is None and stderr_func is None: + return self._run_stdio() + + if stderr_func is not None and self.mergeout: + raise RuntimeError("Shell command was initiated with " + "merged stdin/stdout, but a separate stderr_func " + "was provided to the run() method") + + # Create a thread for each input/output handle + stdin_thread = None + threads = [] + if stdin_func: + stdin_thread = threading.Thread(target=self._stdin_thread, + args=(self.hstdin, self.piProcInfo.hProcess, + stdin_func, stdout_func)) + threads.append(threading.Thread(target=self._stdout_thread, + args=(self.hstdout, stdout_func))) + if not self.mergeout: + if stderr_func is None: + stderr_func = stdout_func + threads.append(threading.Thread(target=self._stdout_thread, + args=(self.hstderr, stderr_func))) + # Start the I/O threads and the process + if ResumeThread(self.piProcInfo.hThread) == 0xFFFFFFFF: + raise ctypes.WinError() + if stdin_thread is not None: + stdin_thread.start() + for thread in threads: + thread.start() + # Wait for the process to complete + if WaitForSingleObject(self.piProcInfo.hProcess, INFINITE) == \ + WAIT_FAILED: + raise ctypes.WinError() + # Wait for the I/O threads to complete + for thread in threads: + thread.join() + + # Wait for the stdin thread to complete + if stdin_thread is not None: + stdin_thread.join() + + def _stdin_raw_nonblock(self): + """Use the raw Win32 handle of sys.stdin to do non-blocking reads""" + # WARNING: This is experimental, and produces inconsistent results. + # It's possible for the handle not to be appropriate for use + # with WaitForSingleObject, among other things. + handle = msvcrt.get_osfhandle(sys.stdin.fileno()) + result = WaitForSingleObject(handle, 100) + if result == WAIT_FAILED: + raise ctypes.WinError() + elif result == WAIT_TIMEOUT: + print(".", end='') + return None + else: + data = ctypes.create_string_buffer(256) + bytesRead = DWORD(0) + print('?', end='') + + if not ReadFile(handle, data, 256, + ctypes.byref(bytesRead), None): + raise ctypes.WinError() + # This ensures the non-blocking works with an actual console + # Not checking the error, so the processing will still work with + # other handle types + FlushConsoleInputBuffer(handle) + + data = data.value + data = data.replace('\r\n', '\n') + data = data.replace('\r', '\n') + print(repr(data) + " ", end='') + return data + + def _stdin_raw_block(self): + """Use a blocking stdin read""" + # The big problem with the blocking read is that it doesn't + # exit when it's supposed to in all contexts. An extra + # key-press may be required to trigger the exit. + try: + data = sys.stdin.read(1) + data = data.replace('\r', '\n') + return data + except WindowsError as we: + if we.winerror == ERROR_NO_DATA: + # This error occurs when the pipe is closed + return None + else: + # Otherwise let the error propagate + raise we + + def _stdout_raw(self, s): + """Writes the string to stdout""" + print(s, end='', file=sys.stdout) + sys.stdout.flush() + + def _stderr_raw(self, s): + """Writes the string to stdout""" + print(s, end='', file=sys.stderr) + sys.stderr.flush() + + def _run_stdio(self): + """Runs the process using the system standard I/O. + + IMPORTANT: stdin needs to be asynchronous, so the Python + sys.stdin object is not used. Instead, + msvcrt.kbhit/getwch are used asynchronously. 
+ """ + # Disable Line and Echo mode + #lpMode = DWORD() + #handle = msvcrt.get_osfhandle(sys.stdin.fileno()) + #if GetConsoleMode(handle, ctypes.byref(lpMode)): + # set_console_mode = True + # if not SetConsoleMode(handle, lpMode.value & + # ~(ENABLE_ECHO_INPUT | ENABLE_LINE_INPUT | ENABLE_PROCESSED_INPUT)): + # raise ctypes.WinError() + + if self.mergeout: + return self.run(stdout_func = self._stdout_raw, + stdin_func = self._stdin_raw_block) + else: + return self.run(stdout_func = self._stdout_raw, + stdin_func = self._stdin_raw_block, + stderr_func = self._stderr_raw) + + # Restore the previous console mode + #if set_console_mode: + # if not SetConsoleMode(handle, lpMode.value): + # raise ctypes.WinError() + + def __exit__(self, exc_type, exc_value, traceback): + if self.hstdin: + CloseHandle(self.hstdin) + self.hstdin = None + if self.hstdout: + CloseHandle(self.hstdout) + self.hstdout = None + if self.hstderr: + CloseHandle(self.hstderr) + self.hstderr = None + if self.piProcInfo is not None: + CloseHandle(self.piProcInfo.hProcess) + CloseHandle(self.piProcInfo.hThread) + self.piProcInfo = None + + +def system(cmd): + """Win32 version of os.system() that works with network shares. + + Note that this implementation returns None, as meant for use in IPython. + + Parameters + ---------- + cmd : str + A command to be executed in the system shell. + + Returns + ------- + None : we explicitly do NOT return the subprocess status code, as this + utility is meant to be used extensively in IPython, where any return value + would trigger :func:`sys.displayhook` calls. + """ + with AvoidUNCPath() as path: + if path is not None: + cmd = '"pushd %s &&"%s' % (path, cmd) + with Win32ShellCommandController(cmd) as scc: + scc.run() + + +if __name__ == "__main__": + print("Test starting!") + #system("cmd") + system("python -i") + print("Test finished!") diff --git a/contrib/python/ipython/py3/IPython/utils/_sysinfo.py b/contrib/python/ipython/py3/IPython/utils/_sysinfo.py index 13aad2ca5b8..084b3b16a18 100644 --- a/contrib/python/ipython/py3/IPython/utils/_sysinfo.py +++ b/contrib/python/ipython/py3/IPython/utils/_sysinfo.py @@ -1,2 +1,2 @@ -# GENERATED BY setup.py -commit = u"e321e760a" +# GENERATED BY setup.py +commit = u"e321e760a" diff --git a/contrib/python/ipython/py3/IPython/utils/capture.py b/contrib/python/ipython/py3/IPython/utils/capture.py index 34f79a76db6..97b6336688b 100644 --- a/contrib/python/ipython/py3/IPython/utils/capture.py +++ b/contrib/python/ipython/py3/IPython/utils/capture.py @@ -1,170 +1,170 @@ -# encoding: utf-8 -"""IO capturing utilities.""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. 
- - -import sys -from io import StringIO - -#----------------------------------------------------------------------------- -# Classes and functions -#----------------------------------------------------------------------------- - - -class RichOutput(object): - def __init__(self, data=None, metadata=None, transient=None, update=False): - self.data = data or {} - self.metadata = metadata or {} - self.transient = transient or {} - self.update = update - - def display(self): - from IPython.display import publish_display_data - publish_display_data(data=self.data, metadata=self.metadata, - transient=self.transient, update=self.update) - - def _repr_mime_(self, mime): - if mime not in self.data: - return - data = self.data[mime] - if mime in self.metadata: - return data, self.metadata[mime] - else: - return data - - def _repr_mimebundle_(self, include=None, exclude=None): - return self.data, self.metadata - - def _repr_html_(self): - return self._repr_mime_("text/html") - - def _repr_latex_(self): - return self._repr_mime_("text/latex") - - def _repr_json_(self): - return self._repr_mime_("application/json") - - def _repr_javascript_(self): - return self._repr_mime_("application/javascript") - - def _repr_png_(self): - return self._repr_mime_("image/png") - - def _repr_jpeg_(self): - return self._repr_mime_("image/jpeg") - - def _repr_svg_(self): - return self._repr_mime_("image/svg+xml") - - -class CapturedIO(object): - """Simple object for containing captured stdout/err and rich display StringIO objects - - Each instance `c` has three attributes: - - - ``c.stdout`` : standard output as a string - - ``c.stderr`` : standard error as a string - - ``c.outputs``: a list of rich display outputs - - Additionally, there's a ``c.show()`` method which will print all of the - above in the same order, and can be invoked simply via ``c()``. - """ - - def __init__(self, stdout, stderr, outputs=None): - self._stdout = stdout - self._stderr = stderr - if outputs is None: - outputs = [] - self._outputs = outputs - - def __str__(self): - return self.stdout - - @property - def stdout(self): - "Captured standard output" - if not self._stdout: - return '' - return self._stdout.getvalue() - - @property - def stderr(self): - "Captured standard error" - if not self._stderr: - return '' - return self._stderr.getvalue() - - @property - def outputs(self): - """A list of the captured rich display outputs, if any. 
- - If you have a CapturedIO object ``c``, these can be displayed in IPython - using:: - - from IPython.display import display - for o in c.outputs: - display(o) - """ - return [ RichOutput(**kargs) for kargs in self._outputs ] - - def show(self): - """write my output to sys.stdout/err as appropriate""" - sys.stdout.write(self.stdout) - sys.stderr.write(self.stderr) - sys.stdout.flush() - sys.stderr.flush() - for kargs in self._outputs: - RichOutput(**kargs).display() - - __call__ = show - - -class capture_output(object): - """context manager for capturing stdout/err""" - stdout = True - stderr = True - display = True - - def __init__(self, stdout=True, stderr=True, display=True): - self.stdout = stdout - self.stderr = stderr - self.display = display - self.shell = None - - def __enter__(self): - from IPython.core.getipython import get_ipython - from IPython.core.displaypub import CapturingDisplayPublisher - from IPython.core.displayhook import CapturingDisplayHook - - self.sys_stdout = sys.stdout - self.sys_stderr = sys.stderr - - if self.display: - self.shell = get_ipython() - if self.shell is None: - self.save_display_pub = None - self.display = False - - stdout = stderr = outputs = None - if self.stdout: - stdout = sys.stdout = StringIO() - if self.stderr: - stderr = sys.stderr = StringIO() - if self.display: - self.save_display_pub = self.shell.display_pub - self.shell.display_pub = CapturingDisplayPublisher() - outputs = self.shell.display_pub.outputs - self.save_display_hook = sys.displayhook - sys.displayhook = CapturingDisplayHook(shell=self.shell, - outputs=outputs) - - return CapturedIO(stdout, stderr, outputs) - - def __exit__(self, exc_type, exc_value, traceback): - sys.stdout = self.sys_stdout - sys.stderr = self.sys_stderr - if self.display and self.shell: - self.shell.display_pub = self.save_display_pub - sys.displayhook = self.save_display_hook +# encoding: utf-8 +"""IO capturing utilities.""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
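capture_output swaps sys.stdout/sys.stderr for StringIO buffers (and, when running inside IPython, the display publisher) and hands back a CapturedIO with the results. A small hedged usage sketch of the plain stream-capture path:

import sys
from IPython.utils.capture import capture_output

with capture_output(display=False) as cap:
    print('to stdout')
    print('to stderr', file=sys.stderr)

print(repr(cap.stdout))   # 'to stdout\n'
print(repr(cap.stderr))   # 'to stderr\n'
cap.show()                # re-emits the captured text on the real streams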
+ + +import sys +from io import StringIO + +#----------------------------------------------------------------------------- +# Classes and functions +#----------------------------------------------------------------------------- + + +class RichOutput(object): + def __init__(self, data=None, metadata=None, transient=None, update=False): + self.data = data or {} + self.metadata = metadata or {} + self.transient = transient or {} + self.update = update + + def display(self): + from IPython.display import publish_display_data + publish_display_data(data=self.data, metadata=self.metadata, + transient=self.transient, update=self.update) + + def _repr_mime_(self, mime): + if mime not in self.data: + return + data = self.data[mime] + if mime in self.metadata: + return data, self.metadata[mime] + else: + return data + + def _repr_mimebundle_(self, include=None, exclude=None): + return self.data, self.metadata + + def _repr_html_(self): + return self._repr_mime_("text/html") + + def _repr_latex_(self): + return self._repr_mime_("text/latex") + + def _repr_json_(self): + return self._repr_mime_("application/json") + + def _repr_javascript_(self): + return self._repr_mime_("application/javascript") + + def _repr_png_(self): + return self._repr_mime_("image/png") + + def _repr_jpeg_(self): + return self._repr_mime_("image/jpeg") + + def _repr_svg_(self): + return self._repr_mime_("image/svg+xml") + + +class CapturedIO(object): + """Simple object for containing captured stdout/err and rich display StringIO objects + + Each instance `c` has three attributes: + + - ``c.stdout`` : standard output as a string + - ``c.stderr`` : standard error as a string + - ``c.outputs``: a list of rich display outputs + + Additionally, there's a ``c.show()`` method which will print all of the + above in the same order, and can be invoked simply via ``c()``. + """ + + def __init__(self, stdout, stderr, outputs=None): + self._stdout = stdout + self._stderr = stderr + if outputs is None: + outputs = [] + self._outputs = outputs + + def __str__(self): + return self.stdout + + @property + def stdout(self): + "Captured standard output" + if not self._stdout: + return '' + return self._stdout.getvalue() + + @property + def stderr(self): + "Captured standard error" + if not self._stderr: + return '' + return self._stderr.getvalue() + + @property + def outputs(self): + """A list of the captured rich display outputs, if any. 
+ + If you have a CapturedIO object ``c``, these can be displayed in IPython + using:: + + from IPython.display import display + for o in c.outputs: + display(o) + """ + return [ RichOutput(**kargs) for kargs in self._outputs ] + + def show(self): + """write my output to sys.stdout/err as appropriate""" + sys.stdout.write(self.stdout) + sys.stderr.write(self.stderr) + sys.stdout.flush() + sys.stderr.flush() + for kargs in self._outputs: + RichOutput(**kargs).display() + + __call__ = show + + +class capture_output(object): + """context manager for capturing stdout/err""" + stdout = True + stderr = True + display = True + + def __init__(self, stdout=True, stderr=True, display=True): + self.stdout = stdout + self.stderr = stderr + self.display = display + self.shell = None + + def __enter__(self): + from IPython.core.getipython import get_ipython + from IPython.core.displaypub import CapturingDisplayPublisher + from IPython.core.displayhook import CapturingDisplayHook + + self.sys_stdout = sys.stdout + self.sys_stderr = sys.stderr + + if self.display: + self.shell = get_ipython() + if self.shell is None: + self.save_display_pub = None + self.display = False + + stdout = stderr = outputs = None + if self.stdout: + stdout = sys.stdout = StringIO() + if self.stderr: + stderr = sys.stderr = StringIO() + if self.display: + self.save_display_pub = self.shell.display_pub + self.shell.display_pub = CapturingDisplayPublisher() + outputs = self.shell.display_pub.outputs + self.save_display_hook = sys.displayhook + sys.displayhook = CapturingDisplayHook(shell=self.shell, + outputs=outputs) + + return CapturedIO(stdout, stderr, outputs) + + def __exit__(self, exc_type, exc_value, traceback): + sys.stdout = self.sys_stdout + sys.stderr = self.sys_stderr + if self.display and self.shell: + self.shell.display_pub = self.save_display_pub + sys.displayhook = self.save_display_hook diff --git a/contrib/python/ipython/py3/IPython/utils/colorable.py b/contrib/python/ipython/py3/IPython/utils/colorable.py index 5ff5bc77aaf..1e3caef62b4 100644 --- a/contrib/python/ipython/py3/IPython/utils/colorable.py +++ b/contrib/python/ipython/py3/IPython/utils/colorable.py @@ -1,25 +1,25 @@ -#***************************************************************************** -# Copyright (C) 2016 The IPython Team <ipython-dev@scipy.org> -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#***************************************************************************** - -""" -Color managing related utilities -""" - -import pygments - -from traitlets.config import Configurable -from traitlets import Unicode - - -available_themes = lambda : [s for s in pygments.styles.get_all_styles()]+['NoColor','LightBG','Linux', 'Neutral'] - -class Colorable(Configurable): - """ - A subclass of configurable for all the classes that have a `default_scheme` - """ - default_style=Unicode('LightBG').tag(config=True) - +#***************************************************************************** +# Copyright (C) 2016 The IPython Team <ipython-dev@scipy.org> +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
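available_themes() in colorable.py simply concatenates every pygments style name with IPython's own scheme names. A quick hedged check of what that yields (requires pygments to be installed):

import pygments.styles

themes = list(pygments.styles.get_all_styles()) + ['NoColor', 'LightBG', 'Linux', 'Neutral']
print('monokai' in themes, 'Neutral' in themes)   # True True ('monokai' ships with pygments)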
+#***************************************************************************** + +""" +Color managing related utilities +""" + +import pygments + +from traitlets.config import Configurable +from traitlets import Unicode + + +available_themes = lambda : [s for s in pygments.styles.get_all_styles()]+['NoColor','LightBG','Linux', 'Neutral'] + +class Colorable(Configurable): + """ + A subclass of configurable for all the classes that have a `default_scheme` + """ + default_style=Unicode('LightBG').tag(config=True) + diff --git a/contrib/python/ipython/py3/IPython/utils/coloransi.py b/contrib/python/ipython/py3/IPython/utils/coloransi.py index 597c69fe11f..bc8e8377f79 100644 --- a/contrib/python/ipython/py3/IPython/utils/coloransi.py +++ b/contrib/python/ipython/py3/IPython/utils/coloransi.py @@ -1,187 +1,187 @@ -# -*- coding: utf-8 -*- -"""Tools for coloring text in ANSI terminals. -""" - -#***************************************************************************** -# Copyright (C) 2002-2006 Fernando Perez. <fperez@colorado.edu> -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#***************************************************************************** - -__all__ = ['TermColors','InputTermColors','ColorScheme','ColorSchemeTable'] - -import os - -from IPython.utils.ipstruct import Struct - -color_templates = ( - # Dark colors - ("Black" , "0;30"), - ("Red" , "0;31"), - ("Green" , "0;32"), - ("Brown" , "0;33"), - ("Blue" , "0;34"), - ("Purple" , "0;35"), - ("Cyan" , "0;36"), - ("LightGray" , "0;37"), - # Light colors - ("DarkGray" , "1;30"), - ("LightRed" , "1;31"), - ("LightGreen" , "1;32"), - ("Yellow" , "1;33"), - ("LightBlue" , "1;34"), - ("LightPurple" , "1;35"), - ("LightCyan" , "1;36"), - ("White" , "1;37"), - # Blinking colors. Probably should not be used in anything serious. - ("BlinkBlack" , "5;30"), - ("BlinkRed" , "5;31"), - ("BlinkGreen" , "5;32"), - ("BlinkYellow" , "5;33"), - ("BlinkBlue" , "5;34"), - ("BlinkPurple" , "5;35"), - ("BlinkCyan" , "5;36"), - ("BlinkLightGray", "5;37"), - ) - -def make_color_table(in_class): - """Build a set of color attributes in a class. - - Helper function for building the :class:`TermColors` and - :class`InputTermColors`. - """ - for name,value in color_templates: - setattr(in_class,name,in_class._base % value) - -class TermColors: - """Color escape sequences. - - This class defines the escape sequences for all the standard (ANSI?) - colors in terminals. Also defines a NoColor escape which is just the null - string, suitable for defining 'dummy' color schemes in terminals which get - confused by color escapes. - - This class should be used as a mixin for building color schemes.""" - - NoColor = '' # for color schemes in color-less terminals. - Normal = '\033[0m' # Reset normal coloring - _base = '\033[%sm' # Template for all other colors - -# Build the actual color table as a set of class attributes: -make_color_table(TermColors) - -class InputTermColors: - """Color escape sequences for input prompts. - - This class is similar to TermColors, but the escapes are wrapped in \001 - and \002 so that readline can properly know the length of each line and - can wrap lines accordingly. Use this class for any colored text which - needs to be used in input prompts, such as in calls to raw_input(). - - This class defines the escape sequences for all the standard (ANSI?) - colors in terminals. 
Also defines a NoColor escape which is just the null - string, suitable for defining 'dummy' color schemes in terminals which get - confused by color escapes. - - This class should be used as a mixin for building color schemes.""" - - NoColor = '' # for color schemes in color-less terminals. - - if os.name == 'nt' and os.environ.get('TERM','dumb') == 'emacs': - # (X)emacs on W32 gets confused with \001 and \002 so we remove them - Normal = '\033[0m' # Reset normal coloring - _base = '\033[%sm' # Template for all other colors - else: - Normal = '\001\033[0m\002' # Reset normal coloring - _base = '\001\033[%sm\002' # Template for all other colors - -# Build the actual color table as a set of class attributes: -make_color_table(InputTermColors) - -class NoColors: - """This defines all the same names as the colour classes, but maps them to - empty strings, so it can easily be substituted to turn off colours.""" - NoColor = '' - Normal = '' - -for name, value in color_templates: - setattr(NoColors, name, '') - -class ColorScheme: - """Generic color scheme class. Just a name and a Struct.""" - def __init__(self,__scheme_name_,colordict=None,**colormap): - self.name = __scheme_name_ - if colordict is None: - self.colors = Struct(**colormap) - else: - self.colors = Struct(colordict) - - def copy(self,name=None): - """Return a full copy of the object, optionally renaming it.""" - if name is None: - name = self.name - return ColorScheme(name, self.colors.dict()) - -class ColorSchemeTable(dict): - """General class to handle tables of color schemes. - - It's basically a dict of color schemes with a couple of shorthand - attributes and some convenient methods. - - active_scheme_name -> obvious - active_colors -> actual color table of the active scheme""" - - def __init__(self, scheme_list=None, default_scheme=''): - """Create a table of color schemes. - - The table can be created empty and manually filled or it can be - created with a list of valid color schemes AND the specification for - the default active scheme. - """ - - # create object attributes to be set later - self.active_scheme_name = '' - self.active_colors = None - - if scheme_list: - if default_scheme == '': - raise ValueError('you must specify the default color scheme') - for scheme in scheme_list: - self.add_scheme(scheme) - self.set_active_scheme(default_scheme) - - def copy(self): - """Return full copy of object""" - return ColorSchemeTable(self.values(),self.active_scheme_name) - - def add_scheme(self,new_scheme): - """Add a new color scheme to the table.""" - if not isinstance(new_scheme,ColorScheme): - raise ValueError('ColorSchemeTable only accepts ColorScheme instances') - self[new_scheme.name] = new_scheme - - def set_active_scheme(self,scheme,case_sensitive=0): - """Set the currently active scheme. 
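make_color_table() stamps one attribute per entry of color_templates onto the class, so TermColors.Green is just the ANSI escape '\033[0;32m'. A hedged usage sketch (assumes an ANSI-capable terminal):

from IPython.utils.coloransi import TermColors

print(repr(TermColors.Green))                               # '\x1b[0;32m'
print(TermColors.LightRed + 'warning' + TermColors.Normal)  # 'warning' in bright red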
- - Names are by default compared in a case-insensitive way, but this can - be changed by setting the parameter case_sensitive to true.""" - - scheme_names = list(self.keys()) - if case_sensitive: - valid_schemes = scheme_names - scheme_test = scheme - else: - valid_schemes = [s.lower() for s in scheme_names] - scheme_test = scheme.lower() - try: - scheme_idx = valid_schemes.index(scheme_test) - except ValueError: - raise ValueError('Unrecognized color scheme: ' + scheme + \ - '\nValid schemes: '+str(scheme_names).replace("'', ",'')) - else: - active = scheme_names[scheme_idx] - self.active_scheme_name = active - self.active_colors = self[active].colors - # Now allow using '' as an index for the current active scheme - self[''] = self[active] +# -*- coding: utf-8 -*- +"""Tools for coloring text in ANSI terminals. +""" + +#***************************************************************************** +# Copyright (C) 2002-2006 Fernando Perez. <fperez@colorado.edu> +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#***************************************************************************** + +__all__ = ['TermColors','InputTermColors','ColorScheme','ColorSchemeTable'] + +import os + +from IPython.utils.ipstruct import Struct + +color_templates = ( + # Dark colors + ("Black" , "0;30"), + ("Red" , "0;31"), + ("Green" , "0;32"), + ("Brown" , "0;33"), + ("Blue" , "0;34"), + ("Purple" , "0;35"), + ("Cyan" , "0;36"), + ("LightGray" , "0;37"), + # Light colors + ("DarkGray" , "1;30"), + ("LightRed" , "1;31"), + ("LightGreen" , "1;32"), + ("Yellow" , "1;33"), + ("LightBlue" , "1;34"), + ("LightPurple" , "1;35"), + ("LightCyan" , "1;36"), + ("White" , "1;37"), + # Blinking colors. Probably should not be used in anything serious. + ("BlinkBlack" , "5;30"), + ("BlinkRed" , "5;31"), + ("BlinkGreen" , "5;32"), + ("BlinkYellow" , "5;33"), + ("BlinkBlue" , "5;34"), + ("BlinkPurple" , "5;35"), + ("BlinkCyan" , "5;36"), + ("BlinkLightGray", "5;37"), + ) + +def make_color_table(in_class): + """Build a set of color attributes in a class. + + Helper function for building the :class:`TermColors` and + :class`InputTermColors`. + """ + for name,value in color_templates: + setattr(in_class,name,in_class._base % value) + +class TermColors: + """Color escape sequences. + + This class defines the escape sequences for all the standard (ANSI?) + colors in terminals. Also defines a NoColor escape which is just the null + string, suitable for defining 'dummy' color schemes in terminals which get + confused by color escapes. + + This class should be used as a mixin for building color schemes.""" + + NoColor = '' # for color schemes in color-less terminals. + Normal = '\033[0m' # Reset normal coloring + _base = '\033[%sm' # Template for all other colors + +# Build the actual color table as a set of class attributes: +make_color_table(TermColors) + +class InputTermColors: + """Color escape sequences for input prompts. + + This class is similar to TermColors, but the escapes are wrapped in \001 + and \002 so that readline can properly know the length of each line and + can wrap lines accordingly. Use this class for any colored text which + needs to be used in input prompts, such as in calls to raw_input(). + + This class defines the escape sequences for all the standard (ANSI?) + colors in terminals. 
Also defines a NoColor escape which is just the null + string, suitable for defining 'dummy' color schemes in terminals which get + confused by color escapes. + + This class should be used as a mixin for building color schemes.""" + + NoColor = '' # for color schemes in color-less terminals. + + if os.name == 'nt' and os.environ.get('TERM','dumb') == 'emacs': + # (X)emacs on W32 gets confused with \001 and \002 so we remove them + Normal = '\033[0m' # Reset normal coloring + _base = '\033[%sm' # Template for all other colors + else: + Normal = '\001\033[0m\002' # Reset normal coloring + _base = '\001\033[%sm\002' # Template for all other colors + +# Build the actual color table as a set of class attributes: +make_color_table(InputTermColors) + +class NoColors: + """This defines all the same names as the colour classes, but maps them to + empty strings, so it can easily be substituted to turn off colours.""" + NoColor = '' + Normal = '' + +for name, value in color_templates: + setattr(NoColors, name, '') + +class ColorScheme: + """Generic color scheme class. Just a name and a Struct.""" + def __init__(self,__scheme_name_,colordict=None,**colormap): + self.name = __scheme_name_ + if colordict is None: + self.colors = Struct(**colormap) + else: + self.colors = Struct(colordict) + + def copy(self,name=None): + """Return a full copy of the object, optionally renaming it.""" + if name is None: + name = self.name + return ColorScheme(name, self.colors.dict()) + +class ColorSchemeTable(dict): + """General class to handle tables of color schemes. + + It's basically a dict of color schemes with a couple of shorthand + attributes and some convenient methods. + + active_scheme_name -> obvious + active_colors -> actual color table of the active scheme""" + + def __init__(self, scheme_list=None, default_scheme=''): + """Create a table of color schemes. + + The table can be created empty and manually filled or it can be + created with a list of valid color schemes AND the specification for + the default active scheme. + """ + + # create object attributes to be set later + self.active_scheme_name = '' + self.active_colors = None + + if scheme_list: + if default_scheme == '': + raise ValueError('you must specify the default color scheme') + for scheme in scheme_list: + self.add_scheme(scheme) + self.set_active_scheme(default_scheme) + + def copy(self): + """Return full copy of object""" + return ColorSchemeTable(self.values(),self.active_scheme_name) + + def add_scheme(self,new_scheme): + """Add a new color scheme to the table.""" + if not isinstance(new_scheme,ColorScheme): + raise ValueError('ColorSchemeTable only accepts ColorScheme instances') + self[new_scheme.name] = new_scheme + + def set_active_scheme(self,scheme,case_sensitive=0): + """Set the currently active scheme. 
+ + Names are by default compared in a case-insensitive way, but this can + be changed by setting the parameter case_sensitive to true.""" + + scheme_names = list(self.keys()) + if case_sensitive: + valid_schemes = scheme_names + scheme_test = scheme + else: + valid_schemes = [s.lower() for s in scheme_names] + scheme_test = scheme.lower() + try: + scheme_idx = valid_schemes.index(scheme_test) + except ValueError: + raise ValueError('Unrecognized color scheme: ' + scheme + \ + '\nValid schemes: '+str(scheme_names).replace("'', ",'')) + else: + active = scheme_names[scheme_idx] + self.active_scheme_name = active + self.active_colors = self[active].colors + # Now allow using '' as an index for the current active scheme + self[''] = self[active] diff --git a/contrib/python/ipython/py3/IPython/utils/contexts.py b/contrib/python/ipython/py3/IPython/utils/contexts.py index 214abace2c8..4d379b0eda1 100644 --- a/contrib/python/ipython/py3/IPython/utils/contexts.py +++ b/contrib/python/ipython/py3/IPython/utils/contexts.py @@ -1,74 +1,74 @@ -# encoding: utf-8 -"""Miscellaneous context managers. -""" - -import warnings - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -class preserve_keys(object): - """Preserve a set of keys in a dictionary. - - Upon entering the context manager the current values of the keys - will be saved. Upon exiting, the dictionary will be updated to - restore the original value of the preserved keys. Preserved keys - which did not exist when entering the context manager will be - deleted. - - Examples - -------- - - >>> d = {'a': 1, 'b': 2, 'c': 3} - >>> with preserve_keys(d, 'b', 'c', 'd'): - ... del d['a'] - ... del d['b'] # will be reset to 2 - ... d['c'] = None # will be reset to 3 - ... d['d'] = 4 # will be deleted - ... d['e'] = 5 - ... print(sorted(d.items())) - ... - [('c', None), ('d', 4), ('e', 5)] - >>> print(sorted(d.items())) - [('b', 2), ('c', 3), ('e', 5)] - """ - - def __init__(self, dictionary, *keys): - self.dictionary = dictionary - self.keys = keys - - def __enter__(self): - # Actions to perform upon exiting. - to_delete = [] - to_update = {} - - d = self.dictionary - for k in self.keys: - if k in d: - to_update[k] = d[k] - else: - to_delete.append(k) - - self.to_delete = to_delete - self.to_update = to_update - - def __exit__(self, *exc_info): - d = self.dictionary - - for k in self.to_delete: - d.pop(k, None) - d.update(self.to_update) - - -class NoOpContext(object): - """ - Deprecated - - Context manager that does nothing.""" - - def __init__(self): - warnings.warn("""NoOpContext is deprecated since IPython 5.0 """, - DeprecationWarning, stacklevel=2) - - def __enter__(self): pass - def __exit__(self, type, value, traceback): pass +# encoding: utf-8 +"""Miscellaneous context managers. +""" + +import warnings + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +class preserve_keys(object): + """Preserve a set of keys in a dictionary. + + Upon entering the context manager the current values of the keys + will be saved. Upon exiting, the dictionary will be updated to + restore the original value of the preserved keys. Preserved keys + which did not exist when entering the context manager will be + deleted. + + Examples + -------- + + >>> d = {'a': 1, 'b': 2, 'c': 3} + >>> with preserve_keys(d, 'b', 'c', 'd'): + ... del d['a'] + ... del d['b'] # will be reset to 2 + ... d['c'] = None # will be reset to 3 + ... d['d'] = 4 # will be deleted + ... 
d['e'] = 5 + ... print(sorted(d.items())) + ... + [('c', None), ('d', 4), ('e', 5)] + >>> print(sorted(d.items())) + [('b', 2), ('c', 3), ('e', 5)] + """ + + def __init__(self, dictionary, *keys): + self.dictionary = dictionary + self.keys = keys + + def __enter__(self): + # Actions to perform upon exiting. + to_delete = [] + to_update = {} + + d = self.dictionary + for k in self.keys: + if k in d: + to_update[k] = d[k] + else: + to_delete.append(k) + + self.to_delete = to_delete + self.to_update = to_update + + def __exit__(self, *exc_info): + d = self.dictionary + + for k in self.to_delete: + d.pop(k, None) + d.update(self.to_update) + + +class NoOpContext(object): + """ + Deprecated + + Context manager that does nothing.""" + + def __init__(self): + warnings.warn("""NoOpContext is deprecated since IPython 5.0 """, + DeprecationWarning, stacklevel=2) + + def __enter__(self): pass + def __exit__(self, type, value, traceback): pass diff --git a/contrib/python/ipython/py3/IPython/utils/daemonize.py b/contrib/python/ipython/py3/IPython/utils/daemonize.py index 9a4a013f0a7..44b4a2832e0 100644 --- a/contrib/python/ipython/py3/IPython/utils/daemonize.py +++ b/contrib/python/ipython/py3/IPython/utils/daemonize.py @@ -1,4 +1,4 @@ -from warnings import warn - -warn("IPython.utils.daemonize has moved to ipyparallel.apps.daemonize since IPython 4.0", DeprecationWarning, stacklevel=2) -from ipyparallel.apps.daemonize import daemonize +from warnings import warn + +warn("IPython.utils.daemonize has moved to ipyparallel.apps.daemonize since IPython 4.0", DeprecationWarning, stacklevel=2) +from ipyparallel.apps.daemonize import daemonize diff --git a/contrib/python/ipython/py3/IPython/utils/data.py b/contrib/python/ipython/py3/IPython/utils/data.py index 7c449e9b830..433c90916c2 100644 --- a/contrib/python/ipython/py3/IPython/utils/data.py +++ b/contrib/python/ipython/py3/IPython/utils/data.py @@ -1,30 +1,30 @@ -# encoding: utf-8 -"""Utilities for working with data structures like lists, dicts and tuples. -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - - -def uniq_stable(elems): - """uniq_stable(elems) -> list - - Return from an iterable, a list of all the unique elements in the input, - but maintaining the order in which they first appear. - - Note: All elements in the input must be hashable for this routine - to work, as it internally uses a set for efficiency reasons. - """ - seen = set() - return [x for x in elems if x not in seen and not seen.add(x)] - - -def chop(seq, size): - """Chop a sequence into chunks of the given size.""" - return [seq[i:i+size] for i in range(0,len(seq),size)] - - +# encoding: utf-8 +"""Utilities for working with data structures like lists, dicts and tuples. +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
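ColorScheme and ColorSchemeTable, shown earlier, pair a scheme name with a Struct of colors and let callers switch the active scheme case-insensitively. A hedged sketch with two made-up schemes:

from IPython.utils.coloransi import TermColors, ColorScheme, ColorSchemeTable

plain = ColorScheme('plain', prompt=TermColors.NoColor)
loud = ColorScheme('loud', prompt=TermColors.LightRed)
table = ColorSchemeTable([plain, loud], default_scheme='plain')

table.set_active_scheme('LOUD')       # matched case-insensitively
print(table.active_scheme_name)       # loud
print(table.active_colors.prompt == TermColors.LightRed)   # True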
+#----------------------------------------------------------------------------- + + +def uniq_stable(elems): + """uniq_stable(elems) -> list + + Return from an iterable, a list of all the unique elements in the input, + but maintaining the order in which they first appear. + + Note: All elements in the input must be hashable for this routine + to work, as it internally uses a set for efficiency reasons. + """ + seen = set() + return [x for x in elems if x not in seen and not seen.add(x)] + + +def chop(seq, size): + """Chop a sequence into chunks of the given size.""" + return [seq[i:i+size] for i in range(0,len(seq),size)] + + diff --git a/contrib/python/ipython/py3/IPython/utils/decorators.py b/contrib/python/ipython/py3/IPython/utils/decorators.py index 79be8ca1e60..c26485553c2 100644 --- a/contrib/python/ipython/py3/IPython/utils/decorators.py +++ b/contrib/python/ipython/py3/IPython/utils/decorators.py @@ -1,58 +1,58 @@ -# encoding: utf-8 -"""Decorators that don't go anywhere else. - -This module contains misc. decorators that don't really go with another module -in :mod:`IPython.utils`. Beore putting something here please see if it should -go into another topical module in :mod:`IPython.utils`. -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - -def flag_calls(func): - """Wrap a function to detect and flag when it gets called. - - This is a decorator which takes a function and wraps it in a function with - a 'called' attribute. wrapper.called is initialized to False. - - The wrapper.called attribute is set to False right before each call to the - wrapped function, so if the call fails it remains False. After the call - completes, wrapper.called is set to True and the output is returned. - - Testing for truth in wrapper.called allows you to determine if a call to - func() was attempted and succeeded.""" - - # don't wrap twice - if hasattr(func, 'called'): - return func - - def wrapper(*args,**kw): - wrapper.called = False - out = func(*args,**kw) - wrapper.called = True - return out - - wrapper.called = False - wrapper.__doc__ = func.__doc__ - return wrapper - -def undoc(func): - """Mark a function or class as undocumented. - - This is found by inspecting the AST, so for now it must be used directly - as @undoc, not as e.g. @decorators.undoc - """ - return func - +# encoding: utf-8 +"""Decorators that don't go anywhere else. + +This module contains misc. decorators that don't really go with another module +in :mod:`IPython.utils`. Beore putting something here please see if it should +go into another topical module in :mod:`IPython.utils`. +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
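uniq_stable() and chop() above are small list helpers: the first de-duplicates while preserving first-seen order, the second slices a sequence into fixed-size chunks. A hedged doctest-style sketch:

from IPython.utils.data import uniq_stable, chop

print(uniq_stable([3, 1, 3, 2, 1]))   # [3, 1, 2]
print(chop(list(range(7)), 3))        # [[0, 1, 2], [3, 4, 5], [6]]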
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def flag_calls(func): + """Wrap a function to detect and flag when it gets called. + + This is a decorator which takes a function and wraps it in a function with + a 'called' attribute. wrapper.called is initialized to False. + + The wrapper.called attribute is set to False right before each call to the + wrapped function, so if the call fails it remains False. After the call + completes, wrapper.called is set to True and the output is returned. + + Testing for truth in wrapper.called allows you to determine if a call to + func() was attempted and succeeded.""" + + # don't wrap twice + if hasattr(func, 'called'): + return func + + def wrapper(*args,**kw): + wrapper.called = False + out = func(*args,**kw) + wrapper.called = True + return out + + wrapper.called = False + wrapper.__doc__ = func.__doc__ + return wrapper + +def undoc(func): + """Mark a function or class as undocumented. + + This is found by inspecting the AST, so for now it must be used directly + as @undoc, not as e.g. @decorators.undoc + """ + return func + diff --git a/contrib/python/ipython/py3/IPython/utils/dir2.py b/contrib/python/ipython/py3/IPython/utils/dir2.py index 4521baecf4e..9f19b2dd84f 100644 --- a/contrib/python/ipython/py3/IPython/utils/dir2.py +++ b/contrib/python/ipython/py3/IPython/utils/dir2.py @@ -1,84 +1,84 @@ -# encoding: utf-8 -"""A fancy version of Python's builtin :func:`dir` function. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import inspect -import types - - -def safe_hasattr(obj, attr): - """In recent versions of Python, hasattr() only catches AttributeError. - This catches all errors. - """ - try: - getattr(obj, attr) - return True - except: - return False - - -def dir2(obj): - """dir2(obj) -> list of strings - - Extended version of the Python builtin dir(), which does a few extra - checks. - - This version is guaranteed to return only a list of true strings, whereas - dir() returns anything that objects inject into themselves, even if they - are later not really valid for attribute access (many extension libraries - have such bugs). - """ - - # Start building the attribute list via dir(), and then complete it - # with a few extra special-purpose calls. - - try: - words = set(dir(obj)) - except Exception: - # TypeError: dir(obj) does not return a list - words = set() - - if safe_hasattr(obj, '__class__'): - words |= set(dir(obj.__class__)) - - # filter out non-string attributes which may be stuffed by dir() calls - # and poor coding in third-party modules - - words = [w for w in words if isinstance(w, str)] - return sorted(words) - - -def get_real_method(obj, name): - """Like getattr, but with a few extra sanity checks: - - - If obj is a class, ignore everything except class methods - - Check if obj is a proxy that claims to have all attributes - - Catch attribute access failing with any exception - - Check that the attribute is a callable object - - Returns the method or None. 
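flag_calls() wraps a function so callers can tell whether it has been invoked successfully: wrapper.called flips to True only after the wrapped call returns. A hedged usage sketch:

from IPython.utils.decorators import flag_calls

@flag_calls
def configure():
    return 42

print(configure.called)   # False - never attempted
configure()
print(configure.called)   # True - the call completed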
- """ - try: - canary = getattr(obj, '_ipython_canary_method_should_not_exist_', None) - except Exception: - return None - - if canary is not None: - # It claimed to have an attribute it should never have - return None - - try: - m = getattr(obj, name, None) - except Exception: - return None - - if inspect.isclass(obj) and not isinstance(m, types.MethodType): - return None - - if callable(m): - return m - - return None +# encoding: utf-8 +"""A fancy version of Python's builtin :func:`dir` function. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import inspect +import types + + +def safe_hasattr(obj, attr): + """In recent versions of Python, hasattr() only catches AttributeError. + This catches all errors. + """ + try: + getattr(obj, attr) + return True + except: + return False + + +def dir2(obj): + """dir2(obj) -> list of strings + + Extended version of the Python builtin dir(), which does a few extra + checks. + + This version is guaranteed to return only a list of true strings, whereas + dir() returns anything that objects inject into themselves, even if they + are later not really valid for attribute access (many extension libraries + have such bugs). + """ + + # Start building the attribute list via dir(), and then complete it + # with a few extra special-purpose calls. + + try: + words = set(dir(obj)) + except Exception: + # TypeError: dir(obj) does not return a list + words = set() + + if safe_hasattr(obj, '__class__'): + words |= set(dir(obj.__class__)) + + # filter out non-string attributes which may be stuffed by dir() calls + # and poor coding in third-party modules + + words = [w for w in words if isinstance(w, str)] + return sorted(words) + + +def get_real_method(obj, name): + """Like getattr, but with a few extra sanity checks: + + - If obj is a class, ignore everything except class methods + - Check if obj is a proxy that claims to have all attributes + - Catch attribute access failing with any exception + - Check that the attribute is a callable object + + Returns the method or None. + """ + try: + canary = getattr(obj, '_ipython_canary_method_should_not_exist_', None) + except Exception: + return None + + if canary is not None: + # It claimed to have an attribute it should never have + return None + + try: + m = getattr(obj, name, None) + except Exception: + return None + + if inspect.isclass(obj) and not isinstance(m, types.MethodType): + return None + + if callable(m): + return m + + return None diff --git a/contrib/python/ipython/py3/IPython/utils/encoding.py b/contrib/python/ipython/py3/IPython/utils/encoding.py index 84bc80c617b..69a319ef0ef 100644 --- a/contrib/python/ipython/py3/IPython/utils/encoding.py +++ b/contrib/python/ipython/py3/IPython/utils/encoding.py @@ -1,71 +1,71 @@ -# coding: utf-8 -""" -Utilities for dealing with text encodings -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2012 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. 
-#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- -import sys -import locale -import warnings - -# to deal with the possibility of sys.std* not being a stream at all -def get_stream_enc(stream, default=None): - """Return the given stream's encoding or a default. - - There are cases where ``sys.std*`` might not actually be a stream, so - check for the encoding attribute prior to returning it, and return - a default if it doesn't exist or evaluates as False. ``default`` - is None if not provided. - """ - if not hasattr(stream, 'encoding') or not stream.encoding: - return default - else: - return stream.encoding - -# Less conservative replacement for sys.getdefaultencoding, that will try -# to match the environment. -# Defined here as central function, so if we find better choices, we -# won't need to make changes all over IPython. -def getdefaultencoding(prefer_stream=True): - """Return IPython's guess for the default encoding for bytes as text. - - If prefer_stream is True (default), asks for stdin.encoding first, - to match the calling Terminal, but that is often None for subprocesses. - - Then fall back on locale.getpreferredencoding(), - which should be a sensible platform default (that respects LANG environment), - and finally to sys.getdefaultencoding() which is the most conservative option, - and usually UTF8 as of Python 3. - """ - enc = None - if prefer_stream: - enc = get_stream_enc(sys.stdin) - if not enc or enc=='ascii': - try: - # There are reports of getpreferredencoding raising errors - # in some cases, which may well be fixed, but let's be conservative here. - enc = locale.getpreferredencoding() - except Exception: - pass - enc = enc or sys.getdefaultencoding() - # On windows `cp0` can be returned to indicate that there is no code page. - # Since cp0 is an invalid encoding return instead cp1252 which is the - # Western European default. - if enc == 'cp0': - warnings.warn( - "Invalid code page cp0 detected - using cp1252 instead." - "If cp1252 is incorrect please ensure a valid code page " - "is defined for the process.", RuntimeWarning) - return 'cp1252' - return enc - -DEFAULT_ENCODING = getdefaultencoding() +# coding: utf-8 +""" +Utilities for dealing with text encodings +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2012 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- +import sys +import locale +import warnings + +# to deal with the possibility of sys.std* not being a stream at all +def get_stream_enc(stream, default=None): + """Return the given stream's encoding or a default. + + There are cases where ``sys.std*`` might not actually be a stream, so + check for the encoding attribute prior to returning it, and return + a default if it doesn't exist or evaluates as False. ``default`` + is None if not provided. 
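get_stream_enc() and getdefaultencoding() above pick a text encoding by asking the stream first, then the locale, then Python's default. A hedged sketch (the printed values depend on the platform and on how stdin is attached):

import sys
from IPython.utils.encoding import get_stream_enc, getdefaultencoding

print(get_stream_enc(sys.stdin, default='utf-8'))   # the stream's encoding, or the fallback
print(get_stream_enc(object(), default='utf-8'))    # 'utf-8' - no .encoding attribute at all
print(getdefaultencoding())                         # typically 'utf-8' on current systems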
+ """ + if not hasattr(stream, 'encoding') or not stream.encoding: + return default + else: + return stream.encoding + +# Less conservative replacement for sys.getdefaultencoding, that will try +# to match the environment. +# Defined here as central function, so if we find better choices, we +# won't need to make changes all over IPython. +def getdefaultencoding(prefer_stream=True): + """Return IPython's guess for the default encoding for bytes as text. + + If prefer_stream is True (default), asks for stdin.encoding first, + to match the calling Terminal, but that is often None for subprocesses. + + Then fall back on locale.getpreferredencoding(), + which should be a sensible platform default (that respects LANG environment), + and finally to sys.getdefaultencoding() which is the most conservative option, + and usually UTF8 as of Python 3. + """ + enc = None + if prefer_stream: + enc = get_stream_enc(sys.stdin) + if not enc or enc=='ascii': + try: + # There are reports of getpreferredencoding raising errors + # in some cases, which may well be fixed, but let's be conservative here. + enc = locale.getpreferredencoding() + except Exception: + pass + enc = enc or sys.getdefaultencoding() + # On windows `cp0` can be returned to indicate that there is no code page. + # Since cp0 is an invalid encoding return instead cp1252 which is the + # Western European default. + if enc == 'cp0': + warnings.warn( + "Invalid code page cp0 detected - using cp1252 instead." + "If cp1252 is incorrect please ensure a valid code page " + "is defined for the process.", RuntimeWarning) + return 'cp1252' + return enc + +DEFAULT_ENCODING = getdefaultencoding() diff --git a/contrib/python/ipython/py3/IPython/utils/eventful.py b/contrib/python/ipython/py3/IPython/utils/eventful.py index a5b5841232b..661851ed37c 100644 --- a/contrib/python/ipython/py3/IPython/utils/eventful.py +++ b/contrib/python/ipython/py3/IPython/utils/eventful.py @@ -1,6 +1,6 @@ - -from warnings import warn - -warn("IPython.utils.eventful has moved to traitlets.eventful", stacklevel=2) - -from traitlets.eventful import * + +from warnings import warn + +warn("IPython.utils.eventful has moved to traitlets.eventful", stacklevel=2) + +from traitlets.eventful import * diff --git a/contrib/python/ipython/py3/IPython/utils/frame.py b/contrib/python/ipython/py3/IPython/utils/frame.py index 04c67527b68..74c6d4197f4 100644 --- a/contrib/python/ipython/py3/IPython/utils/frame.py +++ b/contrib/python/ipython/py3/IPython/utils/frame.py @@ -1,94 +1,94 @@ -# encoding: utf-8 -""" -Utilities for working with stack frames. -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import sys - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - -def extract_vars(*names,**kw): - """Extract a set of variables by name from another frame. - - Parameters - ---------- - *names : str - One or more variable names which will be extracted from the caller's - frame. 
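# Illustrative sketch for the encoding helpers above; assumes the module is
# importable as IPython.utils.encoding.
import sys
from IPython.utils.encoding import DEFAULT_ENCODING, get_stream_enc, getdefaultencoding

print(get_stream_enc(sys.stdin, default='utf-8'))  # the stream's encoding, or the fallback
print(getdefaultencoding())                        # stream -> locale -> sys default
print(DEFAULT_ENCODING)                            # same guess, cached once at import time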
- - depth : integer, optional - How many frames in the stack to walk when looking for your variables. - The default is 0, which will use the frame where the call was made. - - - Examples - -------- - :: - - In [2]: def func(x): - ...: y = 1 - ...: print(sorted(extract_vars('x','y').items())) - ...: - - In [3]: func('hello') - [('x', 'hello'), ('y', 1)] - """ - - depth = kw.get('depth',0) - - callerNS = sys._getframe(depth+1).f_locals - return dict((k,callerNS[k]) for k in names) - - -def extract_vars_above(*names): - """Extract a set of variables by name from another frame. - - Similar to extractVars(), but with a specified depth of 1, so that names - are extracted exactly from above the caller. - - This is simply a convenience function so that the very common case (for us) - of skipping exactly 1 frame doesn't have to construct a special dict for - keyword passing.""" - - callerNS = sys._getframe(2).f_locals - return dict((k,callerNS[k]) for k in names) - - -def debugx(expr,pre_msg=''): - """Print the value of an expression from the caller's frame. - - Takes an expression, evaluates it in the caller's frame and prints both - the given expression and the resulting value (as well as a debug mark - indicating the name of the calling function. The input must be of a form - suitable for eval(). - - An optional message can be passed, which will be prepended to the printed - expr->value pair.""" - - cf = sys._getframe(1) - print('[DBG:%s] %s%s -> %r' % (cf.f_code.co_name,pre_msg,expr, - eval(expr,cf.f_globals,cf.f_locals))) - - -# deactivate it by uncommenting the following line, which makes it a no-op -#def debugx(expr,pre_msg=''): pass - -def extract_module_locals(depth=0): - """Returns (module, locals) of the function `depth` frames away from the caller""" - f = sys._getframe(depth + 1) - global_ns = f.f_globals - module = sys.modules[global_ns['__name__']] - return (module, f.f_locals) +# encoding: utf-8 +""" +Utilities for working with stack frames. +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import sys + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def extract_vars(*names,**kw): + """Extract a set of variables by name from another frame. + + Parameters + ---------- + *names : str + One or more variable names which will be extracted from the caller's + frame. + + depth : integer, optional + How many frames in the stack to walk when looking for your variables. + The default is 0, which will use the frame where the call was made. + + + Examples + -------- + :: + + In [2]: def func(x): + ...: y = 1 + ...: print(sorted(extract_vars('x','y').items())) + ...: + + In [3]: func('hello') + [('x', 'hello'), ('y', 1)] + """ + + depth = kw.get('depth',0) + + callerNS = sys._getframe(depth+1).f_locals + return dict((k,callerNS[k]) for k in names) + + +def extract_vars_above(*names): + """Extract a set of variables by name from another frame. 
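# Illustrative sketch of debugx and extract_vars from the frame utilities
# above; debugx evaluates the expression string in the caller's frame and tags
# the output with the calling function's name.
from IPython.utils.frame import debugx, extract_vars

def area(r):
    pi = 3.14159
    debugx('pi * r ** 2', 'computed ')   # -> [DBG:area] computed pi * r ** 2 -> 12.56636
    return dict(extract_vars('pi', 'r'))

print(area(2))                           # {'pi': 3.14159, 'r': 2}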
+ + Similar to extractVars(), but with a specified depth of 1, so that names + are extracted exactly from above the caller. + + This is simply a convenience function so that the very common case (for us) + of skipping exactly 1 frame doesn't have to construct a special dict for + keyword passing.""" + + callerNS = sys._getframe(2).f_locals + return dict((k,callerNS[k]) for k in names) + + +def debugx(expr,pre_msg=''): + """Print the value of an expression from the caller's frame. + + Takes an expression, evaluates it in the caller's frame and prints both + the given expression and the resulting value (as well as a debug mark + indicating the name of the calling function. The input must be of a form + suitable for eval(). + + An optional message can be passed, which will be prepended to the printed + expr->value pair.""" + + cf = sys._getframe(1) + print('[DBG:%s] %s%s -> %r' % (cf.f_code.co_name,pre_msg,expr, + eval(expr,cf.f_globals,cf.f_locals))) + + +# deactivate it by uncommenting the following line, which makes it a no-op +#def debugx(expr,pre_msg=''): pass + +def extract_module_locals(depth=0): + """Returns (module, locals) of the function `depth` frames away from the caller""" + f = sys._getframe(depth + 1) + global_ns = f.f_globals + module = sys.modules[global_ns['__name__']] + return (module, f.f_locals) diff --git a/contrib/python/ipython/py3/IPython/utils/generics.py b/contrib/python/ipython/py3/IPython/utils/generics.py index 7bff95e7b32..fcada6f44df 100644 --- a/contrib/python/ipython/py3/IPython/utils/generics.py +++ b/contrib/python/ipython/py3/IPython/utils/generics.py @@ -1,30 +1,30 @@ -# encoding: utf-8 -"""Generic functions for extending IPython. -""" - -from IPython.core.error import TryNext -from functools import singledispatch - - -@singledispatch -def inspect_object(obj): - """Called when you do obj?""" - raise TryNext - - -@singledispatch -def complete_object(obj, prev_completions): - """Custom completer dispatching for python objects. - - Parameters - ---------- - obj : object - The object to complete. - prev_completions : list - List of attributes discovered so far. - - This should return the list of attributes in obj. If you only wish to - add to the attributes already discovered normally, return - own_attrs + prev_completions. - """ - raise TryNext +# encoding: utf-8 +"""Generic functions for extending IPython. +""" + +from IPython.core.error import TryNext +from functools import singledispatch + + +@singledispatch +def inspect_object(obj): + """Called when you do obj?""" + raise TryNext + + +@singledispatch +def complete_object(obj, prev_completions): + """Custom completer dispatching for python objects. + + Parameters + ---------- + obj : object + The object to complete. + prev_completions : list + List of attributes discovered so far. + + This should return the list of attributes in obj. If you only wish to + add to the attributes already discovered normally, return + own_attrs + prev_completions. + """ + raise TryNext diff --git a/contrib/python/ipython/py3/IPython/utils/importstring.py b/contrib/python/ipython/py3/IPython/utils/importstring.py index 2c7a2a167e1..c8e1840eb37 100644 --- a/contrib/python/ipython/py3/IPython/utils/importstring.py +++ b/contrib/python/ipython/py3/IPython/utils/importstring.py @@ -1,39 +1,39 @@ -# encoding: utf-8 -""" -A simple utility to import something by its string name. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. 
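# Illustrative sketch of the complete_object hook defined above: registering a
# handler for a custom class via functools.singledispatch. The Grid class is
# hypothetical.
from IPython.utils.generics import complete_object

class Grid:
    pass

@complete_object.register(Grid)
def _complete_grid(obj, prev_completions):
    # Surface two extra pseudo-attributes on top of whatever was already found.
    return ['rows', 'cols'] + prev_completions

print(_complete_grid(Grid(), ['shape']))   # ['rows', 'cols', 'shape']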
- - -def import_item(name): - """Import and return ``bar`` given the string ``foo.bar``. - - Calling ``bar = import_item("foo.bar")`` is the functional equivalent of - executing the code ``from foo import bar``. - - Parameters - ---------- - name : string - The fully qualified name of the module/package being imported. - - Returns - ------- - mod : module object - The module that was imported. - """ - - parts = name.rsplit('.', 1) - if len(parts) == 2: - # called with 'foo.bar....' - package, obj = parts - module = __import__(package, fromlist=[obj]) - try: - pak = getattr(module, obj) - except AttributeError: - raise ImportError('No module named %s' % obj) - return pak - else: - # called with un-dotted string - return __import__(parts[0]) +# encoding: utf-8 +""" +A simple utility to import something by its string name. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + + +def import_item(name): + """Import and return ``bar`` given the string ``foo.bar``. + + Calling ``bar = import_item("foo.bar")`` is the functional equivalent of + executing the code ``from foo import bar``. + + Parameters + ---------- + name : string + The fully qualified name of the module/package being imported. + + Returns + ------- + mod : module object + The module that was imported. + """ + + parts = name.rsplit('.', 1) + if len(parts) == 2: + # called with 'foo.bar....' + package, obj = parts + module = __import__(package, fromlist=[obj]) + try: + pak = getattr(module, obj) + except AttributeError: + raise ImportError('No module named %s' % obj) + return pak + else: + # called with un-dotted string + return __import__(parts[0]) diff --git a/contrib/python/ipython/py3/IPython/utils/io.py b/contrib/python/ipython/py3/IPython/utils/io.py index e69abbebf20..fab9bae7971 100644 --- a/contrib/python/ipython/py3/IPython/utils/io.py +++ b/contrib/python/ipython/py3/IPython/utils/io.py @@ -1,248 +1,248 @@ -# encoding: utf-8 -""" -IO related utilities. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - - - -import atexit -import os -import sys -import tempfile -import warnings -from warnings import warn - -from IPython.utils.decorators import undoc -from .capture import CapturedIO, capture_output - -@undoc -class IOStream: - - def __init__(self, stream, fallback=None): - warn('IOStream is deprecated since IPython 5.0, use sys.{stdin,stdout,stderr} instead', - DeprecationWarning, stacklevel=2) - if not hasattr(stream,'write') or not hasattr(stream,'flush'): - if fallback is not None: - stream = fallback - else: - raise ValueError("fallback required, but not specified") - self.stream = stream - self._swrite = stream.write - - # clone all methods not overridden: - def clone(meth): - return not hasattr(self, meth) and not meth.startswith('_') - for meth in filter(clone, dir(stream)): - try: - val = getattr(stream, meth) - except AttributeError: - pass - else: - setattr(self, meth, val) - - def __repr__(self): - cls = self.__class__ - tpl = '{mod}.{cls}({args})' - return tpl.format(mod=cls.__module__, cls=cls.__name__, args=self.stream) - - def write(self,data): - warn('IOStream is deprecated since IPython 5.0, use sys.{stdin,stdout,stderr} instead', - DeprecationWarning, stacklevel=2) - try: - self._swrite(data) - except: - try: - # print handles some unicode issues which may trip a plain - # write() call. Emulate write() by using an empty end - # argument. 
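# Illustrative sketch of import_item from the importstring module above.
from IPython.utils.importstring import import_item

join = import_item('os.path.join')   # behaves like: from os.path import join
os_mod = import_item('os')           # an un-dotted name imports the module itself
print(join('tmp', 'demo.py'), os_mod.sep)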
- print(data, end='', file=self.stream) - except: - # if we get here, something is seriously broken. - print('ERROR - failed to write data to stream:', self.stream, - file=sys.stderr) - - def writelines(self, lines): - warn('IOStream is deprecated since IPython 5.0, use sys.{stdin,stdout,stderr} instead', - DeprecationWarning, stacklevel=2) - if isinstance(lines, str): - lines = [lines] - for line in lines: - self.write(line) - - # This class used to have a writeln method, but regular files and streams - # in Python don't have this method. We need to keep this completely - # compatible so we removed it. - - @property - def closed(self): - return self.stream.closed - - def close(self): - pass - -# setup stdin/stdout/stderr to sys.stdin/sys.stdout/sys.stderr -devnull = open(os.devnull, 'w') -atexit.register(devnull.close) - -# io.std* are deprecated, but don't show our own deprecation warnings -# during initialization of the deprecated API. -with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - stdin = IOStream(sys.stdin, fallback=devnull) - stdout = IOStream(sys.stdout, fallback=devnull) - stderr = IOStream(sys.stderr, fallback=devnull) - -class Tee(object): - """A class to duplicate an output stream to stdout/err. - - This works in a manner very similar to the Unix 'tee' command. - - When the object is closed or deleted, it closes the original file given to - it for duplication. - """ - # Inspired by: - # http://mail.python.org/pipermail/python-list/2007-May/442737.html - - def __init__(self, file_or_name, mode="w", channel='stdout'): - """Construct a new Tee object. - - Parameters - ---------- - file_or_name : filename or open filehandle (writable) - File that will be duplicated - - mode : optional, valid mode for open(). - If a filename was give, open with this mode. - - channel : str, one of ['stdout', 'stderr'] - """ - if channel not in ['stdout', 'stderr']: - raise ValueError('Invalid channel spec %s' % channel) - - if hasattr(file_or_name, 'write') and hasattr(file_or_name, 'seek'): - self.file = file_or_name - else: - self.file = open(file_or_name, mode) - self.channel = channel - self.ostream = getattr(sys, channel) - setattr(sys, channel, self) - self._closed = False - - def close(self): - """Close the file and restore the channel.""" - self.flush() - setattr(sys, self.channel, self.ostream) - self.file.close() - self._closed = True - - def write(self, data): - """Write data to both channels.""" - self.file.write(data) - self.ostream.write(data) - self.ostream.flush() - - def flush(self): - """Flush both channels.""" - self.file.flush() - self.ostream.flush() - - def __del__(self): - if not self._closed: - self.close() - - -def ask_yes_no(prompt, default=None, interrupt=None): - """Asks a question and returns a boolean (y/n) answer. - - If default is given (one of 'y','n'), it is used if the user input is - empty. If interrupt is given (one of 'y','n'), it is used if the user - presses Ctrl-C. Otherwise the question is repeated until an answer is - given. - - An EOF is treated as the default answer. If there is no default, an - exception is raised to prevent infinite loops. 
- - Valid answers are: y/yes/n/no (match is not case sensitive).""" - - answers = {'y':True,'n':False,'yes':True,'no':False} - ans = None - while ans not in answers.keys(): - try: - ans = input(prompt+' ').lower() - if not ans: # response was an empty string - ans = default - except KeyboardInterrupt: - if interrupt: - ans = interrupt - print("\r") - except EOFError: - if default in answers.keys(): - ans = default - print() - else: - raise - - return answers[ans] - - -def temp_pyfile(src, ext='.py'): - """Make a temporary python file, return filename and filehandle. - - Parameters - ---------- - src : string or list of strings (no need for ending newlines if list) - Source code to be written to the file. - - ext : optional, string - Extension for the generated file. - - Returns - ------- - (filename, open filehandle) - It is the caller's responsibility to close the open file and unlink it. - """ - fname = tempfile.mkstemp(ext)[1] - with open(fname,'w') as f: - f.write(src) - f.flush() - return fname - -@undoc -def atomic_writing(*args, **kwargs): - """DEPRECATED: moved to notebook.services.contents.fileio""" - warn("IPython.utils.io.atomic_writing has moved to notebook.services.contents.fileio since IPython 4.0", DeprecationWarning, stacklevel=2) - from notebook.services.contents.fileio import atomic_writing - return atomic_writing(*args, **kwargs) - -@undoc -def raw_print(*args, **kw): - """DEPRECATED: Raw print to sys.__stdout__, otherwise identical interface to print().""" - warn("IPython.utils.io.raw_print has been deprecated since IPython 7.0", DeprecationWarning, stacklevel=2) - - print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'), - file=sys.__stdout__) - sys.__stdout__.flush() - -@undoc -def raw_print_err(*args, **kw): - """DEPRECATED: Raw print to sys.__stderr__, otherwise identical interface to print().""" - warn("IPython.utils.io.raw_print_err has been deprecated since IPython 7.0", DeprecationWarning, stacklevel=2) - - print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'), - file=sys.__stderr__) - sys.__stderr__.flush() - -# used by IPykernel <- 4.9. Removed during IPython 7-dev period and re-added -# Keep for a version or two then should remove -rprint = raw_print -rprinte = raw_print_err - -@undoc -def unicode_std_stream(stream='stdout'): - """DEPRECATED, moved to nbconvert.utils.io""" - warn("IPython.utils.io.unicode_std_stream has moved to nbconvert.utils.io since IPython 4.0", DeprecationWarning, stacklevel=2) - from nbconvert.utils.io import unicode_std_stream - return unicode_std_stream(stream) +# encoding: utf-8 +""" +IO related utilities. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
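# Illustrative sketch of the Tee class above: duplicate stdout into a file,
# then close to restore the original stream. 'session.log' is only an example
# filename.
from IPython.utils.io import Tee

t = Tee('session.log', mode='w', channel='stdout')
print('recorded on the terminal and in session.log')
t.close()                            # restores sys.stdout and closes the file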
+ + + +import atexit +import os +import sys +import tempfile +import warnings +from warnings import warn + +from IPython.utils.decorators import undoc +from .capture import CapturedIO, capture_output + +@undoc +class IOStream: + + def __init__(self, stream, fallback=None): + warn('IOStream is deprecated since IPython 5.0, use sys.{stdin,stdout,stderr} instead', + DeprecationWarning, stacklevel=2) + if not hasattr(stream,'write') or not hasattr(stream,'flush'): + if fallback is not None: + stream = fallback + else: + raise ValueError("fallback required, but not specified") + self.stream = stream + self._swrite = stream.write + + # clone all methods not overridden: + def clone(meth): + return not hasattr(self, meth) and not meth.startswith('_') + for meth in filter(clone, dir(stream)): + try: + val = getattr(stream, meth) + except AttributeError: + pass + else: + setattr(self, meth, val) + + def __repr__(self): + cls = self.__class__ + tpl = '{mod}.{cls}({args})' + return tpl.format(mod=cls.__module__, cls=cls.__name__, args=self.stream) + + def write(self,data): + warn('IOStream is deprecated since IPython 5.0, use sys.{stdin,stdout,stderr} instead', + DeprecationWarning, stacklevel=2) + try: + self._swrite(data) + except: + try: + # print handles some unicode issues which may trip a plain + # write() call. Emulate write() by using an empty end + # argument. + print(data, end='', file=self.stream) + except: + # if we get here, something is seriously broken. + print('ERROR - failed to write data to stream:', self.stream, + file=sys.stderr) + + def writelines(self, lines): + warn('IOStream is deprecated since IPython 5.0, use sys.{stdin,stdout,stderr} instead', + DeprecationWarning, stacklevel=2) + if isinstance(lines, str): + lines = [lines] + for line in lines: + self.write(line) + + # This class used to have a writeln method, but regular files and streams + # in Python don't have this method. We need to keep this completely + # compatible so we removed it. + + @property + def closed(self): + return self.stream.closed + + def close(self): + pass + +# setup stdin/stdout/stderr to sys.stdin/sys.stdout/sys.stderr +devnull = open(os.devnull, 'w') +atexit.register(devnull.close) + +# io.std* are deprecated, but don't show our own deprecation warnings +# during initialization of the deprecated API. +with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + stdin = IOStream(sys.stdin, fallback=devnull) + stdout = IOStream(sys.stdout, fallback=devnull) + stderr = IOStream(sys.stderr, fallback=devnull) + +class Tee(object): + """A class to duplicate an output stream to stdout/err. + + This works in a manner very similar to the Unix 'tee' command. + + When the object is closed or deleted, it closes the original file given to + it for duplication. + """ + # Inspired by: + # http://mail.python.org/pipermail/python-list/2007-May/442737.html + + def __init__(self, file_or_name, mode="w", channel='stdout'): + """Construct a new Tee object. + + Parameters + ---------- + file_or_name : filename or open filehandle (writable) + File that will be duplicated + + mode : optional, valid mode for open(). + If a filename was give, open with this mode. 
+ + channel : str, one of ['stdout', 'stderr'] + """ + if channel not in ['stdout', 'stderr']: + raise ValueError('Invalid channel spec %s' % channel) + + if hasattr(file_or_name, 'write') and hasattr(file_or_name, 'seek'): + self.file = file_or_name + else: + self.file = open(file_or_name, mode) + self.channel = channel + self.ostream = getattr(sys, channel) + setattr(sys, channel, self) + self._closed = False + + def close(self): + """Close the file and restore the channel.""" + self.flush() + setattr(sys, self.channel, self.ostream) + self.file.close() + self._closed = True + + def write(self, data): + """Write data to both channels.""" + self.file.write(data) + self.ostream.write(data) + self.ostream.flush() + + def flush(self): + """Flush both channels.""" + self.file.flush() + self.ostream.flush() + + def __del__(self): + if not self._closed: + self.close() + + +def ask_yes_no(prompt, default=None, interrupt=None): + """Asks a question and returns a boolean (y/n) answer. + + If default is given (one of 'y','n'), it is used if the user input is + empty. If interrupt is given (one of 'y','n'), it is used if the user + presses Ctrl-C. Otherwise the question is repeated until an answer is + given. + + An EOF is treated as the default answer. If there is no default, an + exception is raised to prevent infinite loops. + + Valid answers are: y/yes/n/no (match is not case sensitive).""" + + answers = {'y':True,'n':False,'yes':True,'no':False} + ans = None + while ans not in answers.keys(): + try: + ans = input(prompt+' ').lower() + if not ans: # response was an empty string + ans = default + except KeyboardInterrupt: + if interrupt: + ans = interrupt + print("\r") + except EOFError: + if default in answers.keys(): + ans = default + print() + else: + raise + + return answers[ans] + + +def temp_pyfile(src, ext='.py'): + """Make a temporary python file, return filename and filehandle. + + Parameters + ---------- + src : string or list of strings (no need for ending newlines if list) + Source code to be written to the file. + + ext : optional, string + Extension for the generated file. + + Returns + ------- + (filename, open filehandle) + It is the caller's responsibility to close the open file and unlink it. + """ + fname = tempfile.mkstemp(ext)[1] + with open(fname,'w') as f: + f.write(src) + f.flush() + return fname + +@undoc +def atomic_writing(*args, **kwargs): + """DEPRECATED: moved to notebook.services.contents.fileio""" + warn("IPython.utils.io.atomic_writing has moved to notebook.services.contents.fileio since IPython 4.0", DeprecationWarning, stacklevel=2) + from notebook.services.contents.fileio import atomic_writing + return atomic_writing(*args, **kwargs) + +@undoc +def raw_print(*args, **kw): + """DEPRECATED: Raw print to sys.__stdout__, otherwise identical interface to print().""" + warn("IPython.utils.io.raw_print has been deprecated since IPython 7.0", DeprecationWarning, stacklevel=2) + + print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'), + file=sys.__stdout__) + sys.__stdout__.flush() + +@undoc +def raw_print_err(*args, **kw): + """DEPRECATED: Raw print to sys.__stderr__, otherwise identical interface to print().""" + warn("IPython.utils.io.raw_print_err has been deprecated since IPython 7.0", DeprecationWarning, stacklevel=2) + + print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'), + file=sys.__stderr__) + sys.__stderr__.flush() + +# used by IPykernel <- 4.9. 
Removed during IPython 7-dev period and re-added +# Keep for a version or two then should remove +rprint = raw_print +rprinte = raw_print_err + +@undoc +def unicode_std_stream(stream='stdout'): + """DEPRECATED, moved to nbconvert.utils.io""" + warn("IPython.utils.io.unicode_std_stream has moved to nbconvert.utils.io since IPython 4.0", DeprecationWarning, stacklevel=2) + from nbconvert.utils.io import unicode_std_stream + return unicode_std_stream(stream) diff --git a/contrib/python/ipython/py3/IPython/utils/ipstruct.py b/contrib/python/ipython/py3/IPython/utils/ipstruct.py index e17760b4f9c..e2b3e8fa4c5 100644 --- a/contrib/python/ipython/py3/IPython/utils/ipstruct.py +++ b/contrib/python/ipython/py3/IPython/utils/ipstruct.py @@ -1,391 +1,391 @@ -# encoding: utf-8 -"""A dict subclass that supports attribute style access. - -Authors: - -* Fernando Perez (original) -* Brian Granger (refactoring to a dict subclass) -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -__all__ = ['Struct'] - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - - -class Struct(dict): - """A dict subclass with attribute style access. - - This dict subclass has a a few extra features: - - * Attribute style access. - * Protection of class members (like keys, items) when using attribute - style access. - * The ability to restrict assignment to only existing keys. - * Intelligent merging. - * Overloaded operators. - """ - _allownew = True - def __init__(self, *args, **kw): - """Initialize with a dictionary, another Struct, or data. - - Parameters - ---------- - args : dict, Struct - Initialize with one dict or Struct - kw : dict - Initialize with key, value pairs. - - Examples - -------- - - >>> s = Struct(a=10,b=30) - >>> s.a - 10 - >>> s.b - 30 - >>> s2 = Struct(s,c=30) - >>> sorted(s2.keys()) - ['a', 'b', 'c'] - """ - object.__setattr__(self, '_allownew', True) - dict.__init__(self, *args, **kw) - - def __setitem__(self, key, value): - """Set an item with check for allownew. - - Examples - -------- - - >>> s = Struct() - >>> s['a'] = 10 - >>> s.allow_new_attr(False) - >>> s['a'] = 10 - >>> s['a'] - 10 - >>> try: - ... s['b'] = 20 - ... except KeyError: - ... print('this is not allowed') - ... - this is not allowed - """ - if not self._allownew and key not in self: - raise KeyError( - "can't create new attribute %s when allow_new_attr(False)" % key) - dict.__setitem__(self, key, value) - - def __setattr__(self, key, value): - """Set an attr with protection of class members. - - This calls :meth:`self.__setitem__` but convert :exc:`KeyError` to - :exc:`AttributeError`. - - Examples - -------- - - >>> s = Struct() - >>> s.a = 10 - >>> s.a - 10 - >>> try: - ... s.get = 10 - ... except AttributeError: - ... print("you can't set a class member") - ... 
- you can't set a class member - """ - # If key is an str it might be a class member or instance var - if isinstance(key, str): - # I can't simply call hasattr here because it calls getattr, which - # calls self.__getattr__, which returns True for keys in - # self._data. But I only want keys in the class and in - # self.__dict__ - if key in self.__dict__ or hasattr(Struct, key): - raise AttributeError( - 'attr %s is a protected member of class Struct.' % key - ) - try: - self.__setitem__(key, value) - except KeyError as e: - raise AttributeError(e) - - def __getattr__(self, key): - """Get an attr by calling :meth:`dict.__getitem__`. - - Like :meth:`__setattr__`, this method converts :exc:`KeyError` to - :exc:`AttributeError`. - - Examples - -------- - - >>> s = Struct(a=10) - >>> s.a - 10 - >>> type(s.get) - <... 'builtin_function_or_method'> - >>> try: - ... s.b - ... except AttributeError: - ... print("I don't have that key") - ... - I don't have that key - """ - try: - result = self[key] - except KeyError: - raise AttributeError(key) - else: - return result - - def __iadd__(self, other): - """s += s2 is a shorthand for s.merge(s2). - - Examples - -------- - - >>> s = Struct(a=10,b=30) - >>> s2 = Struct(a=20,c=40) - >>> s += s2 - >>> sorted(s.keys()) - ['a', 'b', 'c'] - """ - self.merge(other) - return self - - def __add__(self,other): - """s + s2 -> New Struct made from s.merge(s2). - - Examples - -------- - - >>> s1 = Struct(a=10,b=30) - >>> s2 = Struct(a=20,c=40) - >>> s = s1 + s2 - >>> sorted(s.keys()) - ['a', 'b', 'c'] - """ - sout = self.copy() - sout.merge(other) - return sout - - def __sub__(self,other): - """s1 - s2 -> remove keys in s2 from s1. - - Examples - -------- - - >>> s1 = Struct(a=10,b=30) - >>> s2 = Struct(a=40) - >>> s = s1 - s2 - >>> s - {'b': 30} - """ - sout = self.copy() - sout -= other - return sout - - def __isub__(self,other): - """Inplace remove keys from self that are in other. - - Examples - -------- - - >>> s1 = Struct(a=10,b=30) - >>> s2 = Struct(a=40) - >>> s1 -= s2 - >>> s1 - {'b': 30} - """ - for k in other.keys(): - if k in self: - del self[k] - return self - - def __dict_invert(self, data): - """Helper function for merge. - - Takes a dictionary whose values are lists and returns a dict with - the elements of each list as keys and the original keys as values. - """ - outdict = {} - for k,lst in data.items(): - if isinstance(lst, str): - lst = lst.split() - for entry in lst: - outdict[entry] = k - return outdict - - def dict(self): - return self - - def copy(self): - """Return a copy as a Struct. - - Examples - -------- - - >>> s = Struct(a=10,b=30) - >>> s2 = s.copy() - >>> type(s2) is Struct - True - """ - return Struct(dict.copy(self)) - - def hasattr(self, key): - """hasattr function available as a method. - - Implemented like has_key. - - Examples - -------- - - >>> s = Struct(a=10) - >>> s.hasattr('a') - True - >>> s.hasattr('b') - False - >>> s.hasattr('get') - False - """ - return key in self - - def allow_new_attr(self, allow = True): - """Set whether new attributes can be created in this Struct. - - This can be used to catch typos by verifying that the attribute user - tries to change already exists in this Struct. - """ - object.__setattr__(self, '_allownew', allow) - - def merge(self, __loc_data__=None, __conflict_solve=None, **kw): - """Merge two Structs with customizable conflict resolution. - - This is similar to :meth:`update`, but much more flexible. First, a - dict is made from data+key=value pairs. 
When merging this dict with - the Struct S, the optional dictionary 'conflict' is used to decide - what to do. - - If conflict is not given, the default behavior is to preserve any keys - with their current value (the opposite of the :meth:`update` method's - behavior). - - Parameters - ---------- - __loc_data : dict, Struct - The data to merge into self - __conflict_solve : dict - The conflict policy dict. The keys are binary functions used to - resolve the conflict and the values are lists of strings naming - the keys the conflict resolution function applies to. Instead of - a list of strings a space separated string can be used, like - 'a b c'. - kw : dict - Additional key, value pairs to merge in - - Notes - ----- - - The `__conflict_solve` dict is a dictionary of binary functions which will be used to - solve key conflicts. Here is an example:: - - __conflict_solve = dict( - func1=['a','b','c'], - func2=['d','e'] - ) - - In this case, the function :func:`func1` will be used to resolve - keys 'a', 'b' and 'c' and the function :func:`func2` will be used for - keys 'd' and 'e'. This could also be written as:: - - __conflict_solve = dict(func1='a b c',func2='d e') - - These functions will be called for each key they apply to with the - form:: - - func1(self['a'], other['a']) - - The return value is used as the final merged value. - - As a convenience, merge() provides five (the most commonly needed) - pre-defined policies: preserve, update, add, add_flip and add_s. The - easiest explanation is their implementation:: - - preserve = lambda old,new: old - update = lambda old,new: new - add = lambda old,new: old + new - add_flip = lambda old,new: new + old # note change of order! - add_s = lambda old,new: old + ' ' + new # only for str! - - You can use those four words (as strings) as keys instead - of defining them as functions, and the merge method will substitute - the appropriate functions for you. - - For more complicated conflict resolution policies, you still need to - construct your own functions. - - Examples - -------- - - This show the default policy: - - >>> s = Struct(a=10,b=30) - >>> s2 = Struct(a=20,c=40) - >>> s.merge(s2) - >>> sorted(s.items()) - [('a', 10), ('b', 30), ('c', 40)] - - Now, show how to specify a conflict dict: - - >>> s = Struct(a=10,b=30) - >>> s2 = Struct(a=20,b=40) - >>> conflict = {'update':'a','add':'b'} - >>> s.merge(s2,conflict) - >>> sorted(s.items()) - [('a', 20), ('b', 70)] - """ - - data_dict = dict(__loc_data__,**kw) - - # policies for conflict resolution: two argument functions which return - # the value that will go in the new struct - preserve = lambda old,new: old - update = lambda old,new: new - add = lambda old,new: old + new - add_flip = lambda old,new: new + old # note change of order! - add_s = lambda old,new: old + ' ' + new - - # default policy is to keep current keys when there's a conflict - conflict_solve = dict.fromkeys(self, preserve) - - # the conflict_solve dictionary is given by the user 'inverted': we - # need a name-function mapping, it comes as a function -> names - # dict. Make a local copy (b/c we'll make changes), replace user - # strings for the three builtin policies and invert it. 
- if __conflict_solve: - inv_conflict_solve_user = __conflict_solve.copy() - for name, func in [('preserve',preserve), ('update',update), - ('add',add), ('add_flip',add_flip), - ('add_s',add_s)]: - if name in inv_conflict_solve_user.keys(): - inv_conflict_solve_user[func] = inv_conflict_solve_user[name] - del inv_conflict_solve_user[name] - conflict_solve.update(self.__dict_invert(inv_conflict_solve_user)) - for key in data_dict: - if key not in self: - self[key] = data_dict[key] - else: - self[key] = conflict_solve[key](self[key],data_dict[key]) - +# encoding: utf-8 +"""A dict subclass that supports attribute style access. + +Authors: + +* Fernando Perez (original) +* Brian Granger (refactoring to a dict subclass) +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +__all__ = ['Struct'] + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + + +class Struct(dict): + """A dict subclass with attribute style access. + + This dict subclass has a a few extra features: + + * Attribute style access. + * Protection of class members (like keys, items) when using attribute + style access. + * The ability to restrict assignment to only existing keys. + * Intelligent merging. + * Overloaded operators. + """ + _allownew = True + def __init__(self, *args, **kw): + """Initialize with a dictionary, another Struct, or data. + + Parameters + ---------- + args : dict, Struct + Initialize with one dict or Struct + kw : dict + Initialize with key, value pairs. + + Examples + -------- + + >>> s = Struct(a=10,b=30) + >>> s.a + 10 + >>> s.b + 30 + >>> s2 = Struct(s,c=30) + >>> sorted(s2.keys()) + ['a', 'b', 'c'] + """ + object.__setattr__(self, '_allownew', True) + dict.__init__(self, *args, **kw) + + def __setitem__(self, key, value): + """Set an item with check for allownew. + + Examples + -------- + + >>> s = Struct() + >>> s['a'] = 10 + >>> s.allow_new_attr(False) + >>> s['a'] = 10 + >>> s['a'] + 10 + >>> try: + ... s['b'] = 20 + ... except KeyError: + ... print('this is not allowed') + ... + this is not allowed + """ + if not self._allownew and key not in self: + raise KeyError( + "can't create new attribute %s when allow_new_attr(False)" % key) + dict.__setitem__(self, key, value) + + def __setattr__(self, key, value): + """Set an attr with protection of class members. + + This calls :meth:`self.__setitem__` but convert :exc:`KeyError` to + :exc:`AttributeError`. + + Examples + -------- + + >>> s = Struct() + >>> s.a = 10 + >>> s.a + 10 + >>> try: + ... s.get = 10 + ... except AttributeError: + ... print("you can't set a class member") + ... + you can't set a class member + """ + # If key is an str it might be a class member or instance var + if isinstance(key, str): + # I can't simply call hasattr here because it calls getattr, which + # calls self.__getattr__, which returns True for keys in + # self._data. 
But I only want keys in the class and in + # self.__dict__ + if key in self.__dict__ or hasattr(Struct, key): + raise AttributeError( + 'attr %s is a protected member of class Struct.' % key + ) + try: + self.__setitem__(key, value) + except KeyError as e: + raise AttributeError(e) + + def __getattr__(self, key): + """Get an attr by calling :meth:`dict.__getitem__`. + + Like :meth:`__setattr__`, this method converts :exc:`KeyError` to + :exc:`AttributeError`. + + Examples + -------- + + >>> s = Struct(a=10) + >>> s.a + 10 + >>> type(s.get) + <... 'builtin_function_or_method'> + >>> try: + ... s.b + ... except AttributeError: + ... print("I don't have that key") + ... + I don't have that key + """ + try: + result = self[key] + except KeyError: + raise AttributeError(key) + else: + return result + + def __iadd__(self, other): + """s += s2 is a shorthand for s.merge(s2). + + Examples + -------- + + >>> s = Struct(a=10,b=30) + >>> s2 = Struct(a=20,c=40) + >>> s += s2 + >>> sorted(s.keys()) + ['a', 'b', 'c'] + """ + self.merge(other) + return self + + def __add__(self,other): + """s + s2 -> New Struct made from s.merge(s2). + + Examples + -------- + + >>> s1 = Struct(a=10,b=30) + >>> s2 = Struct(a=20,c=40) + >>> s = s1 + s2 + >>> sorted(s.keys()) + ['a', 'b', 'c'] + """ + sout = self.copy() + sout.merge(other) + return sout + + def __sub__(self,other): + """s1 - s2 -> remove keys in s2 from s1. + + Examples + -------- + + >>> s1 = Struct(a=10,b=30) + >>> s2 = Struct(a=40) + >>> s = s1 - s2 + >>> s + {'b': 30} + """ + sout = self.copy() + sout -= other + return sout + + def __isub__(self,other): + """Inplace remove keys from self that are in other. + + Examples + -------- + + >>> s1 = Struct(a=10,b=30) + >>> s2 = Struct(a=40) + >>> s1 -= s2 + >>> s1 + {'b': 30} + """ + for k in other.keys(): + if k in self: + del self[k] + return self + + def __dict_invert(self, data): + """Helper function for merge. + + Takes a dictionary whose values are lists and returns a dict with + the elements of each list as keys and the original keys as values. + """ + outdict = {} + for k,lst in data.items(): + if isinstance(lst, str): + lst = lst.split() + for entry in lst: + outdict[entry] = k + return outdict + + def dict(self): + return self + + def copy(self): + """Return a copy as a Struct. + + Examples + -------- + + >>> s = Struct(a=10,b=30) + >>> s2 = s.copy() + >>> type(s2) is Struct + True + """ + return Struct(dict.copy(self)) + + def hasattr(self, key): + """hasattr function available as a method. + + Implemented like has_key. + + Examples + -------- + + >>> s = Struct(a=10) + >>> s.hasattr('a') + True + >>> s.hasattr('b') + False + >>> s.hasattr('get') + False + """ + return key in self + + def allow_new_attr(self, allow = True): + """Set whether new attributes can be created in this Struct. + + This can be used to catch typos by verifying that the attribute user + tries to change already exists in this Struct. + """ + object.__setattr__(self, '_allownew', allow) + + def merge(self, __loc_data__=None, __conflict_solve=None, **kw): + """Merge two Structs with customizable conflict resolution. + + This is similar to :meth:`update`, but much more flexible. First, a + dict is made from data+key=value pairs. When merging this dict with + the Struct S, the optional dictionary 'conflict' is used to decide + what to do. + + If conflict is not given, the default behavior is to preserve any keys + with their current value (the opposite of the :meth:`update` method's + behavior). 
+ + Parameters + ---------- + __loc_data : dict, Struct + The data to merge into self + __conflict_solve : dict + The conflict policy dict. The keys are binary functions used to + resolve the conflict and the values are lists of strings naming + the keys the conflict resolution function applies to. Instead of + a list of strings a space separated string can be used, like + 'a b c'. + kw : dict + Additional key, value pairs to merge in + + Notes + ----- + + The `__conflict_solve` dict is a dictionary of binary functions which will be used to + solve key conflicts. Here is an example:: + + __conflict_solve = dict( + func1=['a','b','c'], + func2=['d','e'] + ) + + In this case, the function :func:`func1` will be used to resolve + keys 'a', 'b' and 'c' and the function :func:`func2` will be used for + keys 'd' and 'e'. This could also be written as:: + + __conflict_solve = dict(func1='a b c',func2='d e') + + These functions will be called for each key they apply to with the + form:: + + func1(self['a'], other['a']) + + The return value is used as the final merged value. + + As a convenience, merge() provides five (the most commonly needed) + pre-defined policies: preserve, update, add, add_flip and add_s. The + easiest explanation is their implementation:: + + preserve = lambda old,new: old + update = lambda old,new: new + add = lambda old,new: old + new + add_flip = lambda old,new: new + old # note change of order! + add_s = lambda old,new: old + ' ' + new # only for str! + + You can use those four words (as strings) as keys instead + of defining them as functions, and the merge method will substitute + the appropriate functions for you. + + For more complicated conflict resolution policies, you still need to + construct your own functions. + + Examples + -------- + + This show the default policy: + + >>> s = Struct(a=10,b=30) + >>> s2 = Struct(a=20,c=40) + >>> s.merge(s2) + >>> sorted(s.items()) + [('a', 10), ('b', 30), ('c', 40)] + + Now, show how to specify a conflict dict: + + >>> s = Struct(a=10,b=30) + >>> s2 = Struct(a=20,b=40) + >>> conflict = {'update':'a','add':'b'} + >>> s.merge(s2,conflict) + >>> sorted(s.items()) + [('a', 20), ('b', 70)] + """ + + data_dict = dict(__loc_data__,**kw) + + # policies for conflict resolution: two argument functions which return + # the value that will go in the new struct + preserve = lambda old,new: old + update = lambda old,new: new + add = lambda old,new: old + new + add_flip = lambda old,new: new + old # note change of order! + add_s = lambda old,new: old + ' ' + new + + # default policy is to keep current keys when there's a conflict + conflict_solve = dict.fromkeys(self, preserve) + + # the conflict_solve dictionary is given by the user 'inverted': we + # need a name-function mapping, it comes as a function -> names + # dict. Make a local copy (b/c we'll make changes), replace user + # strings for the three builtin policies and invert it. 
+ if __conflict_solve: + inv_conflict_solve_user = __conflict_solve.copy() + for name, func in [('preserve',preserve), ('update',update), + ('add',add), ('add_flip',add_flip), + ('add_s',add_s)]: + if name in inv_conflict_solve_user.keys(): + inv_conflict_solve_user[func] = inv_conflict_solve_user[name] + del inv_conflict_solve_user[name] + conflict_solve.update(self.__dict_invert(inv_conflict_solve_user)) + for key in data_dict: + if key not in self: + self[key] = data_dict[key] + else: + self[key] = conflict_solve[key](self[key],data_dict[key]) + diff --git a/contrib/python/ipython/py3/IPython/utils/jsonutil.py b/contrib/python/ipython/py3/IPython/utils/jsonutil.py index 359633ed6d4..2672e09e169 100644 --- a/contrib/python/ipython/py3/IPython/utils/jsonutil.py +++ b/contrib/python/ipython/py3/IPython/utils/jsonutil.py @@ -1,5 +1,5 @@ -from warnings import warn - -warn("IPython.utils.jsonutil has moved to jupyter_client.jsonutil", stacklevel=2) - -from jupyter_client.jsonutil import * +from warnings import warn + +warn("IPython.utils.jsonutil has moved to jupyter_client.jsonutil", stacklevel=2) + +from jupyter_client.jsonutil import * diff --git a/contrib/python/ipython/py3/IPython/utils/localinterfaces.py b/contrib/python/ipython/py3/IPython/utils/localinterfaces.py index 716d0918ed7..2f911222d8d 100644 --- a/contrib/python/ipython/py3/IPython/utils/localinterfaces.py +++ b/contrib/python/ipython/py3/IPython/utils/localinterfaces.py @@ -1,5 +1,5 @@ -from warnings import warn - -warn("IPython.utils.localinterfaces has moved to jupyter_client.localinterfaces", stacklevel=2) - -from jupyter_client.localinterfaces import * +from warnings import warn + +warn("IPython.utils.localinterfaces has moved to jupyter_client.localinterfaces", stacklevel=2) + +from jupyter_client.localinterfaces import * diff --git a/contrib/python/ipython/py3/IPython/utils/log.py b/contrib/python/ipython/py3/IPython/utils/log.py index f9e12281b0a..bb262eda936 100644 --- a/contrib/python/ipython/py3/IPython/utils/log.py +++ b/contrib/python/ipython/py3/IPython/utils/log.py @@ -1,6 +1,6 @@ - -from warnings import warn - -warn("IPython.utils.log has moved to traitlets.log", stacklevel=2) - -from traitlets.log import * + +from warnings import warn + +warn("IPython.utils.log has moved to traitlets.log", stacklevel=2) + +from traitlets.log import * diff --git a/contrib/python/ipython/py3/IPython/utils/module_paths.py b/contrib/python/ipython/py3/IPython/utils/module_paths.py index 21848843351..0570c322e6a 100644 --- a/contrib/python/ipython/py3/IPython/utils/module_paths.py +++ b/contrib/python/ipython/py3/IPython/utils/module_paths.py @@ -1,70 +1,70 @@ -"""Utility functions for finding modules - -Utility functions for finding modules on sys.path. - -`find_module` returns a path to module or None, given certain conditions. - -""" -#----------------------------------------------------------------------------- -# Copyright (c) 2011, the IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. 
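# Illustrative sketch of Struct merging with a named conflict policy, matching
# the merge() docstring above.
from IPython.utils.ipstruct import Struct

s = Struct(a=10, b=30)
s.allow_new_attr(False)                      # typo guard: only existing keys assignable
s.merge(Struct(a=20, b=40), {'add': 'b'})    # default policy preserves 'a'; 'add' sums 'b'
print(sorted(s.items()))                     # [('a', 10), ('b', 70)]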
-#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -# Stdlib imports -import importlib -import os - -# Third-party imports - -# Our own imports - - -#----------------------------------------------------------------------------- -# Globals and constants -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Local utilities -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Classes and functions -#----------------------------------------------------------------------------- - -def find_mod(module_name): - """ - Find module `module_name` on sys.path, and return the path to module `module_name`. - - - If `module_name` refers to a module directory, then return path to __init__ file. - - If `module_name` is a directory without an __init__file, return None. - - If module is missing or does not have a `.py` or `.pyw` extension, return None. - - Note that we are not interested in running bytecode. - - Otherwise, return the fill path of the module. - - Parameters - ---------- - module_name : str - - Returns - ------- - module_path : str - Path to module `module_name`, its __init__.py, or None, - depending on above conditions. - """ - loader = importlib.util.find_spec(module_name) - module_path = loader.origin - if module_path is None: - return None - else: - split_path = module_path.split(".") - if split_path[-1] in ["py", "pyw"]: - return module_path - else: - return None +"""Utility functions for finding modules + +Utility functions for finding modules on sys.path. + +`find_module` returns a path to module or None, given certain conditions. + +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2011, the IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Stdlib imports +import importlib +import os + +# Third-party imports + +# Our own imports + + +#----------------------------------------------------------------------------- +# Globals and constants +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Local utilities +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Classes and functions +#----------------------------------------------------------------------------- + +def find_mod(module_name): + """ + Find module `module_name` on sys.path, and return the path to module `module_name`. + + - If `module_name` refers to a module directory, then return path to __init__ file. + - If `module_name` is a directory without an __init__file, return None. + - If module is missing or does not have a `.py` or `.pyw` extension, return None. 
+ - Note that we are not interested in running bytecode. + - Otherwise, return the fill path of the module. + + Parameters + ---------- + module_name : str + + Returns + ------- + module_path : str + Path to module `module_name`, its __init__.py, or None, + depending on above conditions. + """ + loader = importlib.util.find_spec(module_name) + module_path = loader.origin + if module_path is None: + return None + else: + split_path = module_path.split(".") + if split_path[-1] in ["py", "pyw"]: + return module_path + else: + return None diff --git a/contrib/python/ipython/py3/IPython/utils/openpy.py b/contrib/python/ipython/py3/IPython/utils/openpy.py index 75f4601a1f0..c90d2b53a30 100644 --- a/contrib/python/ipython/py3/IPython/utils/openpy.py +++ b/contrib/python/ipython/py3/IPython/utils/openpy.py @@ -1,103 +1,103 @@ -""" -Tools to open .py files as Unicode, using the encoding specified within the file, -as per PEP 263. - -Much of the code is taken from the tokenize module in Python 3.2. -""" - -import io -from io import TextIOWrapper, BytesIO -import re -from tokenize import open, detect_encoding - -cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)", re.UNICODE) -cookie_comment_re = re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)", re.UNICODE) - -def source_to_unicode(txt, errors='replace', skip_encoding_cookie=True): - """Converts a bytes string with python source code to unicode. - - Unicode strings are passed through unchanged. Byte strings are checked - for the python source file encoding cookie to determine encoding. - txt can be either a bytes buffer or a string containing the source - code. - """ - if isinstance(txt, str): - return txt - if isinstance(txt, bytes): - buffer = BytesIO(txt) - else: - buffer = txt - try: - encoding, _ = detect_encoding(buffer.readline) - except SyntaxError: - encoding = "ascii" - buffer.seek(0) - with TextIOWrapper(buffer, encoding, errors=errors, line_buffering=True) as text: - text.mode = 'r' - if skip_encoding_cookie: - return u"".join(strip_encoding_cookie(text)) - else: - return text.read() - -def strip_encoding_cookie(filelike): - """Generator to pull lines from a text-mode file, skipping the encoding - cookie if it is found in the first two lines. - """ - it = iter(filelike) - try: - first = next(it) - if not cookie_comment_re.match(first): - yield first - second = next(it) - if not cookie_comment_re.match(second): - yield second - except StopIteration: - return - - for line in it: - yield line - -def read_py_file(filename, skip_encoding_cookie=True): - """Read a Python file, using the encoding declared inside the file. - - Parameters - ---------- - filename : str - The path to the file to read. - skip_encoding_cookie : bool - If True (the default), and the encoding declaration is found in the first - two lines, that line will be excluded from the output. - - Returns - ------- - A unicode string containing the contents of the file. - """ - with open(filename) as f: # the open function defined in this module. - if skip_encoding_cookie: - return "".join(strip_encoding_cookie(f)) - else: - return f.read() - -def read_py_url(url, errors='replace', skip_encoding_cookie=True): - """Read a Python file from a URL, using the encoding declared inside the file. - - Parameters - ---------- - url : str - The URL from which to fetch the file. - errors : str - How to handle decoding errors in the file. Options are the same as for - bytes.decode(), but here 'replace' is the default. 
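# Illustrative sketch of find_mod from the module_paths utilities above.
from IPython.utils.module_paths import find_mod

print(find_mod('os'))     # path to os.py on sys.path
print(find_mod('math'))   # None: no pure-Python .py/.pyw source for this module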
- skip_encoding_cookie : bool - If True (the default), and the encoding declaration is found in the first - two lines, that line will be excluded from the output. - - Returns - ------- - A unicode string containing the contents of the file. - """ - # Deferred import for faster start - from urllib.request import urlopen - response = urlopen(url) - buffer = io.BytesIO(response.read()) - return source_to_unicode(buffer, errors, skip_encoding_cookie) +""" +Tools to open .py files as Unicode, using the encoding specified within the file, +as per PEP 263. + +Much of the code is taken from the tokenize module in Python 3.2. +""" + +import io +from io import TextIOWrapper, BytesIO +import re +from tokenize import open, detect_encoding + +cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)", re.UNICODE) +cookie_comment_re = re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)", re.UNICODE) + +def source_to_unicode(txt, errors='replace', skip_encoding_cookie=True): + """Converts a bytes string with python source code to unicode. + + Unicode strings are passed through unchanged. Byte strings are checked + for the python source file encoding cookie to determine encoding. + txt can be either a bytes buffer or a string containing the source + code. + """ + if isinstance(txt, str): + return txt + if isinstance(txt, bytes): + buffer = BytesIO(txt) + else: + buffer = txt + try: + encoding, _ = detect_encoding(buffer.readline) + except SyntaxError: + encoding = "ascii" + buffer.seek(0) + with TextIOWrapper(buffer, encoding, errors=errors, line_buffering=True) as text: + text.mode = 'r' + if skip_encoding_cookie: + return u"".join(strip_encoding_cookie(text)) + else: + return text.read() + +def strip_encoding_cookie(filelike): + """Generator to pull lines from a text-mode file, skipping the encoding + cookie if it is found in the first two lines. + """ + it = iter(filelike) + try: + first = next(it) + if not cookie_comment_re.match(first): + yield first + second = next(it) + if not cookie_comment_re.match(second): + yield second + except StopIteration: + return + + for line in it: + yield line + +def read_py_file(filename, skip_encoding_cookie=True): + """Read a Python file, using the encoding declared inside the file. + + Parameters + ---------- + filename : str + The path to the file to read. + skip_encoding_cookie : bool + If True (the default), and the encoding declaration is found in the first + two lines, that line will be excluded from the output. + + Returns + ------- + A unicode string containing the contents of the file. + """ + with open(filename) as f: # the open function defined in this module. + if skip_encoding_cookie: + return "".join(strip_encoding_cookie(f)) + else: + return f.read() + +def read_py_url(url, errors='replace', skip_encoding_cookie=True): + """Read a Python file from a URL, using the encoding declared inside the file. + + Parameters + ---------- + url : str + The URL from which to fetch the file. + errors : str + How to handle decoding errors in the file. Options are the same as for + bytes.decode(), but here 'replace' is the default. + skip_encoding_cookie : bool + If True (the default), and the encoding declaration is found in the first + two lines, that line will be excluded from the output. + + Returns + ------- + A unicode string containing the contents of the file. 
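# Illustrative sketch of source_to_unicode handling a PEP 263 cookie, per the
# openpy module above.
from IPython.utils.openpy import source_to_unicode

src = b"# -*- coding: latin-1 -*-\nname = '\xe9'\n"
print(source_to_unicode(src))   # decoded text; the cookie line is stripped by default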
+ """ + # Deferred import for faster start + from urllib.request import urlopen + response = urlopen(url) + buffer = io.BytesIO(response.read()) + return source_to_unicode(buffer, errors, skip_encoding_cookie) diff --git a/contrib/python/ipython/py3/IPython/utils/path.py b/contrib/python/ipython/py3/IPython/utils/path.py index de88872f858..0fb6144e19f 100644 --- a/contrib/python/ipython/py3/IPython/utils/path.py +++ b/contrib/python/ipython/py3/IPython/utils/path.py @@ -1,436 +1,436 @@ -# encoding: utf-8 -""" -Utilities for path handling. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import os -import sys -import errno -import shutil -import random -import glob -from warnings import warn - -from IPython.utils.process import system -from IPython.utils.decorators import undoc - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- -fs_encoding = sys.getfilesystemencoding() - -def _writable_dir(path): - """Whether `path` is a directory, to which the user has write access.""" - return os.path.isdir(path) and os.access(path, os.W_OK) - -if sys.platform == 'win32': - def _get_long_path_name(path): - """Get a long path name (expand ~) on Windows using ctypes. - - Examples - -------- - - >>> get_long_path_name('c:\\docume~1') - 'c:\\\\Documents and Settings' - - """ - try: - import ctypes - except ImportError: - raise ImportError('you need to have ctypes installed for this to work') - _GetLongPathName = ctypes.windll.kernel32.GetLongPathNameW - _GetLongPathName.argtypes = [ctypes.c_wchar_p, ctypes.c_wchar_p, - ctypes.c_uint ] - - buf = ctypes.create_unicode_buffer(260) - rv = _GetLongPathName(path, buf, 260) - if rv == 0 or rv > 260: - return path - else: - return buf.value -else: - def _get_long_path_name(path): - """Dummy no-op.""" - return path - - - -def get_long_path_name(path): - """Expand a path into its long form. - - On Windows this expands any ~ in the paths. On other platforms, it is - a null operation. - """ - return _get_long_path_name(path) - - -def unquote_filename(name, win32=(sys.platform=='win32')): - """ On Windows, remove leading and trailing quotes from filenames. - - This function has been deprecated and should not be used any more: - unquoting is now taken care of by :func:`IPython.utils.process.arg_split`. - """ - warn("'unquote_filename' is deprecated since IPython 5.0 and should not " - "be used anymore", DeprecationWarning, stacklevel=2) - if win32: - if name.startswith(("'", '"')) and name.endswith(("'", '"')): - name = name[1:-1] - return name - - -def compress_user(path): - """Reverse of :func:`os.path.expanduser` - """ - home = os.path.expanduser('~') - if path.startswith(home): - path = "~" + path[len(home):] - return path - -def get_py_filename(name, force_win32=None): - """Return a valid python filename in the current directory. - - If the given name is not a file, it adds '.py' and searches again. - Raises IOError with an informative message if the file isn't found. - """ - - name = os.path.expanduser(name) - if force_win32 is not None: - warn("The 'force_win32' argument to 'get_py_filename' is deprecated " - "since IPython 5.0 and should not be used anymore", - DeprecationWarning, stacklevel=2) - if not os.path.isfile(name) and not name.endswith('.py'): - name += '.py' - if os.path.isfile(name): - return name - else: - raise IOError('File `%r` not found.' 
% name) - - -def filefind(filename, path_dirs=None): - """Find a file by looking through a sequence of paths. - - This iterates through a sequence of paths looking for a file and returns - the full, absolute path of the first occurrence of the file. If no set of - path dirs is given, the filename is tested as is, after running through - :func:`expandvars` and :func:`expanduser`. Thus a simple call:: - - filefind('myfile.txt') - - will find the file in the current working dir, but:: - - filefind('~/myfile.txt') - - Will find the file in the users home directory. This function does not - automatically try any paths, such as the cwd or the user's home directory. - - Parameters - ---------- - filename : str - The filename to look for. - path_dirs : str, None or sequence of str - The sequence of paths to look for the file in. If None, the filename - need to be absolute or be in the cwd. If a string, the string is - put into a sequence and the searched. If a sequence, walk through - each element and join with ``filename``, calling :func:`expandvars` - and :func:`expanduser` before testing for existence. - - Returns - ------- - Raises :exc:`IOError` or returns absolute path to file. - """ - - # If paths are quoted, abspath gets confused, strip them... - filename = filename.strip('"').strip("'") - # If the input is an absolute path, just check it exists - if os.path.isabs(filename) and os.path.isfile(filename): - return filename - - if path_dirs is None: - path_dirs = ("",) - elif isinstance(path_dirs, str): - path_dirs = (path_dirs,) - - for path in path_dirs: - if path == '.': path = os.getcwd() - testname = expand_path(os.path.join(path, filename)) - if os.path.isfile(testname): - return os.path.abspath(testname) - - raise IOError("File %r does not exist in any of the search paths: %r" % - (filename, path_dirs) ) - - -class HomeDirError(Exception): - pass - - -def get_home_dir(require_writable=False) -> str: - """Return the 'home' directory, as a unicode string. - - Uses os.path.expanduser('~'), and checks for writability. - - See stdlib docs for how this is determined. - For Python <3.8, $HOME is first priority on *ALL* platforms. - For Python >=3.8 on Windows, %HOME% is no longer considered. - - Parameters - ---------- - - require_writable : bool [default: False] - if True: - guarantees the return value is a writable directory, otherwise - raises HomeDirError - if False: - The path is resolved, but it is not guaranteed to exist or be writable. - """ - - homedir = os.path.expanduser('~') - # Next line will make things work even when /home/ is a symlink to - # /usr/home as it is on FreeBSD, for example - homedir = os.path.realpath(homedir) - - if not _writable_dir(homedir) and os.name == 'nt': - # expanduser failed, use the registry to get the 'My Documents' folder. - try: - import winreg as wreg - with wreg.OpenKey( - wreg.HKEY_CURRENT_USER, - r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" - ) as key: - homedir = wreg.QueryValueEx(key,'Personal')[0] - except: - pass - - if (not require_writable) or _writable_dir(homedir): - assert isinstance(homedir, str), "Homedir shoudl be unicode not bytes" - return homedir - else: - raise HomeDirError('%s is not a writable dir, ' - 'set $HOME environment variable to override' % homedir) - -def get_xdg_dir(): - """Return the XDG_CONFIG_HOME, if it is defined and exists, else None. - - This is only for non-OS X posix (Linux,Unix,etc.) systems. 
- """ - - env = os.environ - - if os.name == 'posix' and sys.platform != 'darwin': - # Linux, Unix, AIX, etc. - # use ~/.config if empty OR not set - xdg = env.get("XDG_CONFIG_HOME", None) or os.path.join(get_home_dir(), '.config') - if xdg and _writable_dir(xdg): - assert isinstance(xdg, str) - return xdg - - return None - - -def get_xdg_cache_dir(): - """Return the XDG_CACHE_HOME, if it is defined and exists, else None. - - This is only for non-OS X posix (Linux,Unix,etc.) systems. - """ - - env = os.environ - - if os.name == 'posix' and sys.platform != 'darwin': - # Linux, Unix, AIX, etc. - # use ~/.cache if empty OR not set - xdg = env.get("XDG_CACHE_HOME", None) or os.path.join(get_home_dir(), '.cache') - if xdg and _writable_dir(xdg): - assert isinstance(xdg, str) - return xdg - - return None - - -@undoc -def get_ipython_dir(): - warn("get_ipython_dir has moved to the IPython.paths module since IPython 4.0.", DeprecationWarning, stacklevel=2) - from IPython.paths import get_ipython_dir - return get_ipython_dir() - -@undoc -def get_ipython_cache_dir(): - warn("get_ipython_cache_dir has moved to the IPython.paths module since IPython 4.0.", DeprecationWarning, stacklevel=2) - from IPython.paths import get_ipython_cache_dir - return get_ipython_cache_dir() - -@undoc -def get_ipython_package_dir(): - warn("get_ipython_package_dir has moved to the IPython.paths module since IPython 4.0.", DeprecationWarning, stacklevel=2) - from IPython.paths import get_ipython_package_dir - return get_ipython_package_dir() - -@undoc -def get_ipython_module_path(module_str): - warn("get_ipython_module_path has moved to the IPython.paths module since IPython 4.0.", DeprecationWarning, stacklevel=2) - from IPython.paths import get_ipython_module_path - return get_ipython_module_path(module_str) - -@undoc -def locate_profile(profile='default'): - warn("locate_profile has moved to the IPython.paths module since IPython 4.0.", DeprecationWarning, stacklevel=2) - from IPython.paths import locate_profile - return locate_profile(profile=profile) - -def expand_path(s): - """Expand $VARS and ~names in a string, like a shell - - :Examples: - - In [2]: os.environ['FOO']='test' - - In [3]: expand_path('variable FOO is $FOO') - Out[3]: 'variable FOO is test' - """ - # This is a pretty subtle hack. When expand user is given a UNC path - # on Windows (\\server\share$\%username%), os.path.expandvars, removes - # the $ to get (\\server\share\%username%). I think it considered $ - # alone an empty var. But, we need the $ to remains there (it indicates - # a hidden share). - if os.name=='nt': - s = s.replace('$\\', 'IPYTHON_TEMP') - s = os.path.expandvars(os.path.expanduser(s)) - if os.name=='nt': - s = s.replace('IPYTHON_TEMP', '$\\') - return s - - -def unescape_glob(string): - """Unescape glob pattern in `string`.""" - def unescape(s): - for pattern in '*[]!?': - s = s.replace(r'\{0}'.format(pattern), pattern) - return s - return '\\'.join(map(unescape, string.split('\\\\'))) - - -def shellglob(args): - """ - Do glob expansion for each element in `args` and return a flattened list. - - Unmatched glob pattern will remain as-is in the returned list. - - """ - expanded = [] - # Do not unescape backslash in Windows as it is interpreted as - # path separator: - unescape = unescape_glob if sys.platform != 'win32' else lambda x: x - for a in args: - expanded.extend(glob.glob(a) or [unescape(a)]) - return expanded - - -def target_outdated(target,deps): - """Determine whether a target is out of date. 
- - target_outdated(target,deps) -> 1/0 - - deps: list of filenames which MUST exist. - target: single filename which may or may not exist. - - If target doesn't exist or is older than any file listed in deps, return - true, otherwise return false. - """ - try: - target_time = os.path.getmtime(target) - except os.error: - return 1 - for dep in deps: - dep_time = os.path.getmtime(dep) - if dep_time > target_time: - #print "For target",target,"Dep failed:",dep # dbg - #print "times (dep,tar):",dep_time,target_time # dbg - return 1 - return 0 - - -def target_update(target,deps,cmd): - """Update a target with a given command given a list of dependencies. - - target_update(target,deps,cmd) -> runs cmd if target is outdated. - - This is just a wrapper around target_outdated() which calls the given - command if target is outdated.""" - - if target_outdated(target,deps): - system(cmd) - - -ENOLINK = 1998 - -def link(src, dst): - """Hard links ``src`` to ``dst``, returning 0 or errno. - - Note that the special errno ``ENOLINK`` will be returned if ``os.link`` isn't - supported by the operating system. - """ - - if not hasattr(os, "link"): - return ENOLINK - link_errno = 0 - try: - os.link(src, dst) - except OSError as e: - link_errno = e.errno - return link_errno - - -def link_or_copy(src, dst): - """Attempts to hardlink ``src`` to ``dst``, copying if the link fails. - - Attempts to maintain the semantics of ``shutil.copy``. - - Because ``os.link`` does not overwrite files, a unique temporary file - will be used if the target already exists, then that file will be moved - into place. - """ - - if os.path.isdir(dst): - dst = os.path.join(dst, os.path.basename(src)) - - link_errno = link(src, dst) - if link_errno == errno.EEXIST: - if os.stat(src).st_ino == os.stat(dst).st_ino: - # dst is already a hard link to the correct file, so we don't need - # to do anything else. If we try to link and rename the file - # anyway, we get duplicate files - see http://bugs.python.org/issue21876 - return - - new_dst = dst + "-temp-%04X" %(random.randint(1, 16**4), ) - try: - link_or_copy(src, new_dst) - except: - try: - os.remove(new_dst) - except OSError: - pass - raise - os.rename(new_dst, dst) - elif link_errno != 0: - # Either link isn't supported, or the filesystem doesn't support - # linking, or 'src' and 'dst' are on different filesystems. - shutil.copy(src, dst) - -def ensure_dir_exists(path, mode=0o755): - """ensure that a directory exists - - If it doesn't exist, try to create it and protect against a race condition - if another process is doing the same. - - The default permissions are 755, which differ from os.makedirs default of 777. - """ - if not os.path.exists(path): - try: - os.makedirs(path, mode=mode) - except OSError as e: - if e.errno != errno.EEXIST: - raise - elif not os.path.isdir(path): - raise IOError("%r exists but is not a directory" % path) +# encoding: utf-8 +""" +Utilities for path handling. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
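
A minimal usage sketch of the path helpers shown above (illustrative only; the file and directory names are hypothetical, and ensure_dir_exists does not expand "~" itself):

    import os
    from IPython.utils.path import ensure_dir_exists, filefind, compress_user

    # Create a scratch directory if it is missing (mode 0o755 by default).
    ensure_dir_exists(os.path.expanduser("~/.cache/demo"))

    # Look for a file in a couple of search paths; filefind raises IOError if absent.
    try:
        cfg = filefind("ipython_config.py", ["~/.ipython/profile_default", "."])
        print(compress_user(cfg))
    except IOError:
        print("ipython_config.py not found in the search paths")
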
+ +import os +import sys +import errno +import shutil +import random +import glob +from warnings import warn + +from IPython.utils.process import system +from IPython.utils.decorators import undoc + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- +fs_encoding = sys.getfilesystemencoding() + +def _writable_dir(path): + """Whether `path` is a directory, to which the user has write access.""" + return os.path.isdir(path) and os.access(path, os.W_OK) + +if sys.platform == 'win32': + def _get_long_path_name(path): + """Get a long path name (expand ~) on Windows using ctypes. + + Examples + -------- + + >>> get_long_path_name('c:\\docume~1') + 'c:\\\\Documents and Settings' + + """ + try: + import ctypes + except ImportError: + raise ImportError('you need to have ctypes installed for this to work') + _GetLongPathName = ctypes.windll.kernel32.GetLongPathNameW + _GetLongPathName.argtypes = [ctypes.c_wchar_p, ctypes.c_wchar_p, + ctypes.c_uint ] + + buf = ctypes.create_unicode_buffer(260) + rv = _GetLongPathName(path, buf, 260) + if rv == 0 or rv > 260: + return path + else: + return buf.value +else: + def _get_long_path_name(path): + """Dummy no-op.""" + return path + + + +def get_long_path_name(path): + """Expand a path into its long form. + + On Windows this expands any ~ in the paths. On other platforms, it is + a null operation. + """ + return _get_long_path_name(path) + + +def unquote_filename(name, win32=(sys.platform=='win32')): + """ On Windows, remove leading and trailing quotes from filenames. + + This function has been deprecated and should not be used any more: + unquoting is now taken care of by :func:`IPython.utils.process.arg_split`. + """ + warn("'unquote_filename' is deprecated since IPython 5.0 and should not " + "be used anymore", DeprecationWarning, stacklevel=2) + if win32: + if name.startswith(("'", '"')) and name.endswith(("'", '"')): + name = name[1:-1] + return name + + +def compress_user(path): + """Reverse of :func:`os.path.expanduser` + """ + home = os.path.expanduser('~') + if path.startswith(home): + path = "~" + path[len(home):] + return path + +def get_py_filename(name, force_win32=None): + """Return a valid python filename in the current directory. + + If the given name is not a file, it adds '.py' and searches again. + Raises IOError with an informative message if the file isn't found. + """ + + name = os.path.expanduser(name) + if force_win32 is not None: + warn("The 'force_win32' argument to 'get_py_filename' is deprecated " + "since IPython 5.0 and should not be used anymore", + DeprecationWarning, stacklevel=2) + if not os.path.isfile(name) and not name.endswith('.py'): + name += '.py' + if os.path.isfile(name): + return name + else: + raise IOError('File `%r` not found.' % name) + + +def filefind(filename, path_dirs=None): + """Find a file by looking through a sequence of paths. + + This iterates through a sequence of paths looking for a file and returns + the full, absolute path of the first occurrence of the file. If no set of + path dirs is given, the filename is tested as is, after running through + :func:`expandvars` and :func:`expanduser`. Thus a simple call:: + + filefind('myfile.txt') + + will find the file in the current working dir, but:: + + filefind('~/myfile.txt') + + Will find the file in the users home directory. This function does not + automatically try any paths, such as the cwd or the user's home directory. 
+ + Parameters + ---------- + filename : str + The filename to look for. + path_dirs : str, None or sequence of str + The sequence of paths to look for the file in. If None, the filename + need to be absolute or be in the cwd. If a string, the string is + put into a sequence and the searched. If a sequence, walk through + each element and join with ``filename``, calling :func:`expandvars` + and :func:`expanduser` before testing for existence. + + Returns + ------- + Raises :exc:`IOError` or returns absolute path to file. + """ + + # If paths are quoted, abspath gets confused, strip them... + filename = filename.strip('"').strip("'") + # If the input is an absolute path, just check it exists + if os.path.isabs(filename) and os.path.isfile(filename): + return filename + + if path_dirs is None: + path_dirs = ("",) + elif isinstance(path_dirs, str): + path_dirs = (path_dirs,) + + for path in path_dirs: + if path == '.': path = os.getcwd() + testname = expand_path(os.path.join(path, filename)) + if os.path.isfile(testname): + return os.path.abspath(testname) + + raise IOError("File %r does not exist in any of the search paths: %r" % + (filename, path_dirs) ) + + +class HomeDirError(Exception): + pass + + +def get_home_dir(require_writable=False) -> str: + """Return the 'home' directory, as a unicode string. + + Uses os.path.expanduser('~'), and checks for writability. + + See stdlib docs for how this is determined. + For Python <3.8, $HOME is first priority on *ALL* platforms. + For Python >=3.8 on Windows, %HOME% is no longer considered. + + Parameters + ---------- + + require_writable : bool [default: False] + if True: + guarantees the return value is a writable directory, otherwise + raises HomeDirError + if False: + The path is resolved, but it is not guaranteed to exist or be writable. + """ + + homedir = os.path.expanduser('~') + # Next line will make things work even when /home/ is a symlink to + # /usr/home as it is on FreeBSD, for example + homedir = os.path.realpath(homedir) + + if not _writable_dir(homedir) and os.name == 'nt': + # expanduser failed, use the registry to get the 'My Documents' folder. + try: + import winreg as wreg + with wreg.OpenKey( + wreg.HKEY_CURRENT_USER, + r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" + ) as key: + homedir = wreg.QueryValueEx(key,'Personal')[0] + except: + pass + + if (not require_writable) or _writable_dir(homedir): + assert isinstance(homedir, str), "Homedir shoudl be unicode not bytes" + return homedir + else: + raise HomeDirError('%s is not a writable dir, ' + 'set $HOME environment variable to override' % homedir) + +def get_xdg_dir(): + """Return the XDG_CONFIG_HOME, if it is defined and exists, else None. + + This is only for non-OS X posix (Linux,Unix,etc.) systems. + """ + + env = os.environ + + if os.name == 'posix' and sys.platform != 'darwin': + # Linux, Unix, AIX, etc. + # use ~/.config if empty OR not set + xdg = env.get("XDG_CONFIG_HOME", None) or os.path.join(get_home_dir(), '.config') + if xdg and _writable_dir(xdg): + assert isinstance(xdg, str) + return xdg + + return None + + +def get_xdg_cache_dir(): + """Return the XDG_CACHE_HOME, if it is defined and exists, else None. + + This is only for non-OS X posix (Linux,Unix,etc.) systems. + """ + + env = os.environ + + if os.name == 'posix' and sys.platform != 'darwin': + # Linux, Unix, AIX, etc. 
+ # use ~/.cache if empty OR not set + xdg = env.get("XDG_CACHE_HOME", None) or os.path.join(get_home_dir(), '.cache') + if xdg and _writable_dir(xdg): + assert isinstance(xdg, str) + return xdg + + return None + + +@undoc +def get_ipython_dir(): + warn("get_ipython_dir has moved to the IPython.paths module since IPython 4.0.", DeprecationWarning, stacklevel=2) + from IPython.paths import get_ipython_dir + return get_ipython_dir() + +@undoc +def get_ipython_cache_dir(): + warn("get_ipython_cache_dir has moved to the IPython.paths module since IPython 4.0.", DeprecationWarning, stacklevel=2) + from IPython.paths import get_ipython_cache_dir + return get_ipython_cache_dir() + +@undoc +def get_ipython_package_dir(): + warn("get_ipython_package_dir has moved to the IPython.paths module since IPython 4.0.", DeprecationWarning, stacklevel=2) + from IPython.paths import get_ipython_package_dir + return get_ipython_package_dir() + +@undoc +def get_ipython_module_path(module_str): + warn("get_ipython_module_path has moved to the IPython.paths module since IPython 4.0.", DeprecationWarning, stacklevel=2) + from IPython.paths import get_ipython_module_path + return get_ipython_module_path(module_str) + +@undoc +def locate_profile(profile='default'): + warn("locate_profile has moved to the IPython.paths module since IPython 4.0.", DeprecationWarning, stacklevel=2) + from IPython.paths import locate_profile + return locate_profile(profile=profile) + +def expand_path(s): + """Expand $VARS and ~names in a string, like a shell + + :Examples: + + In [2]: os.environ['FOO']='test' + + In [3]: expand_path('variable FOO is $FOO') + Out[3]: 'variable FOO is test' + """ + # This is a pretty subtle hack. When expand user is given a UNC path + # on Windows (\\server\share$\%username%), os.path.expandvars, removes + # the $ to get (\\server\share\%username%). I think it considered $ + # alone an empty var. But, we need the $ to remains there (it indicates + # a hidden share). + if os.name=='nt': + s = s.replace('$\\', 'IPYTHON_TEMP') + s = os.path.expandvars(os.path.expanduser(s)) + if os.name=='nt': + s = s.replace('IPYTHON_TEMP', '$\\') + return s + + +def unescape_glob(string): + """Unescape glob pattern in `string`.""" + def unescape(s): + for pattern in '*[]!?': + s = s.replace(r'\{0}'.format(pattern), pattern) + return s + return '\\'.join(map(unescape, string.split('\\\\'))) + + +def shellglob(args): + """ + Do glob expansion for each element in `args` and return a flattened list. + + Unmatched glob pattern will remain as-is in the returned list. + + """ + expanded = [] + # Do not unescape backslash in Windows as it is interpreted as + # path separator: + unescape = unescape_glob if sys.platform != 'win32' else lambda x: x + for a in args: + expanded.extend(glob.glob(a) or [unescape(a)]) + return expanded + + +def target_outdated(target,deps): + """Determine whether a target is out of date. + + target_outdated(target,deps) -> 1/0 + + deps: list of filenames which MUST exist. + target: single filename which may or may not exist. + + If target doesn't exist or is older than any file listed in deps, return + true, otherwise return false. 
+ """ + try: + target_time = os.path.getmtime(target) + except os.error: + return 1 + for dep in deps: + dep_time = os.path.getmtime(dep) + if dep_time > target_time: + #print "For target",target,"Dep failed:",dep # dbg + #print "times (dep,tar):",dep_time,target_time # dbg + return 1 + return 0 + + +def target_update(target,deps,cmd): + """Update a target with a given command given a list of dependencies. + + target_update(target,deps,cmd) -> runs cmd if target is outdated. + + This is just a wrapper around target_outdated() which calls the given + command if target is outdated.""" + + if target_outdated(target,deps): + system(cmd) + + +ENOLINK = 1998 + +def link(src, dst): + """Hard links ``src`` to ``dst``, returning 0 or errno. + + Note that the special errno ``ENOLINK`` will be returned if ``os.link`` isn't + supported by the operating system. + """ + + if not hasattr(os, "link"): + return ENOLINK + link_errno = 0 + try: + os.link(src, dst) + except OSError as e: + link_errno = e.errno + return link_errno + + +def link_or_copy(src, dst): + """Attempts to hardlink ``src`` to ``dst``, copying if the link fails. + + Attempts to maintain the semantics of ``shutil.copy``. + + Because ``os.link`` does not overwrite files, a unique temporary file + will be used if the target already exists, then that file will be moved + into place. + """ + + if os.path.isdir(dst): + dst = os.path.join(dst, os.path.basename(src)) + + link_errno = link(src, dst) + if link_errno == errno.EEXIST: + if os.stat(src).st_ino == os.stat(dst).st_ino: + # dst is already a hard link to the correct file, so we don't need + # to do anything else. If we try to link and rename the file + # anyway, we get duplicate files - see http://bugs.python.org/issue21876 + return + + new_dst = dst + "-temp-%04X" %(random.randint(1, 16**4), ) + try: + link_or_copy(src, new_dst) + except: + try: + os.remove(new_dst) + except OSError: + pass + raise + os.rename(new_dst, dst) + elif link_errno != 0: + # Either link isn't supported, or the filesystem doesn't support + # linking, or 'src' and 'dst' are on different filesystems. + shutil.copy(src, dst) + +def ensure_dir_exists(path, mode=0o755): + """ensure that a directory exists + + If it doesn't exist, try to create it and protect against a race condition + if another process is doing the same. + + The default permissions are 755, which differ from os.makedirs default of 777. 
+ """ + if not os.path.exists(path): + try: + os.makedirs(path, mode=mode) + except OSError as e: + if e.errno != errno.EEXIST: + raise + elif not os.path.isdir(path): + raise IOError("%r exists but is not a directory" % path) diff --git a/contrib/python/ipython/py3/IPython/utils/pickleutil.py b/contrib/python/ipython/py3/IPython/utils/pickleutil.py index 0a4802d595f..785e6f6c80a 100644 --- a/contrib/python/ipython/py3/IPython/utils/pickleutil.py +++ b/contrib/python/ipython/py3/IPython/utils/pickleutil.py @@ -1,5 +1,5 @@ -from warnings import warn - -warn("IPython.utils.pickleutil has moved to ipykernel.pickleutil", stacklevel=2) - -from ipykernel.pickleutil import * +from warnings import warn + +warn("IPython.utils.pickleutil has moved to ipykernel.pickleutil", stacklevel=2) + +from ipykernel.pickleutil import * diff --git a/contrib/python/ipython/py3/IPython/utils/process.py b/contrib/python/ipython/py3/IPython/utils/process.py index 67a16a7863d..489b7c13d0c 100644 --- a/contrib/python/ipython/py3/IPython/utils/process.py +++ b/contrib/python/ipython/py3/IPython/utils/process.py @@ -1,69 +1,69 @@ -# encoding: utf-8 -""" -Utilities for working with external processes. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - - -import os -import shutil -import sys - -if sys.platform == 'win32': - from ._process_win32 import system, getoutput, arg_split, check_pid -elif sys.platform == 'cli': - from ._process_cli import system, getoutput, arg_split, check_pid -else: - from ._process_posix import system, getoutput, arg_split, check_pid - -from ._process_common import getoutputerror, get_output_error_code, process_handler - - -class FindCmdError(Exception): - pass - - -def find_cmd(cmd): - """Find absolute path to executable cmd in a cross platform manner. - - This function tries to determine the full path to a command line program - using `which` on Unix/Linux/OS X and `win32api` on Windows. Most of the - time it will use the version that is first on the users `PATH`. - - Warning, don't use this to find IPython command line programs as there - is a risk you will find the wrong one. Instead find those using the - following code and looking for the application itself:: - - import sys - argv = [sys.executable, '-m', 'IPython'] - - Parameters - ---------- - cmd : str - The command line program to look for. - """ - path = shutil.which(cmd) - if path is None: - raise FindCmdError('command could not be found: %s' % cmd) - return path - - -def abbrev_cwd(): - """ Return abbreviated version of cwd, e.g. d:mydir """ - cwd = os.getcwd().replace('\\','/') - drivepart = '' - tail = cwd - if sys.platform == 'win32': - if len(cwd) < 4: - return cwd - drivepart,tail = os.path.splitdrive(cwd) - - - parts = tail.split('/') - if len(parts) > 2: - tail = '/'.join(parts[-2:]) - - return (drivepart + ( - cwd == '/' and '/' or tail)) +# encoding: utf-8 +""" +Utilities for working with external processes. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
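
A small, hedged sketch of how the process helpers above might be used (it assumes git happens to be installed; any external command would do):

    from IPython.utils.process import find_cmd, getoutput, FindCmdError

    try:
        git_path = find_cmd("git")         # absolute path, resolved via shutil.which
        print(git_path)
        print(getoutput("git --version"))  # captured output of the command
    except FindCmdError:
        print("git is not on PATH")
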
+ + +import os +import shutil +import sys + +if sys.platform == 'win32': + from ._process_win32 import system, getoutput, arg_split, check_pid +elif sys.platform == 'cli': + from ._process_cli import system, getoutput, arg_split, check_pid +else: + from ._process_posix import system, getoutput, arg_split, check_pid + +from ._process_common import getoutputerror, get_output_error_code, process_handler + + +class FindCmdError(Exception): + pass + + +def find_cmd(cmd): + """Find absolute path to executable cmd in a cross platform manner. + + This function tries to determine the full path to a command line program + using `which` on Unix/Linux/OS X and `win32api` on Windows. Most of the + time it will use the version that is first on the users `PATH`. + + Warning, don't use this to find IPython command line programs as there + is a risk you will find the wrong one. Instead find those using the + following code and looking for the application itself:: + + import sys + argv = [sys.executable, '-m', 'IPython'] + + Parameters + ---------- + cmd : str + The command line program to look for. + """ + path = shutil.which(cmd) + if path is None: + raise FindCmdError('command could not be found: %s' % cmd) + return path + + +def abbrev_cwd(): + """ Return abbreviated version of cwd, e.g. d:mydir """ + cwd = os.getcwd().replace('\\','/') + drivepart = '' + tail = cwd + if sys.platform == 'win32': + if len(cwd) < 4: + return cwd + drivepart,tail = os.path.splitdrive(cwd) + + + parts = tail.split('/') + if len(parts) > 2: + tail = '/'.join(parts[-2:]) + + return (drivepart + ( + cwd == '/' and '/' or tail)) diff --git a/contrib/python/ipython/py3/IPython/utils/py3compat.py b/contrib/python/ipython/py3/IPython/utils/py3compat.py index 7aee6afd47b..c7587873005 100644 --- a/contrib/python/ipython/py3/IPython/utils/py3compat.py +++ b/contrib/python/ipython/py3/IPython/utils/py3compat.py @@ -1,191 +1,191 @@ -# coding: utf-8 -"""Compatibility tricks for Python 3. Mainly to do with unicode. - -This file is deprecated and will be removed in a future version. -""" -import functools -import os -import sys -import re -import shutil -import types -import platform - -from .encoding import DEFAULT_ENCODING - - -def decode(s, encoding=None): - encoding = encoding or DEFAULT_ENCODING - return s.decode(encoding, "replace") - -def encode(u, encoding=None): - encoding = encoding or DEFAULT_ENCODING - return u.encode(encoding, "replace") - - -def cast_unicode(s, encoding=None): - if isinstance(s, bytes): - return decode(s, encoding) - return s - -def cast_bytes(s, encoding=None): - if not isinstance(s, bytes): - return encode(s, encoding) - return s - -def buffer_to_bytes(buf): - """Cast a buffer object to bytes""" - if not isinstance(buf, bytes): - buf = bytes(buf) - return buf - -def _modify_str_or_docstring(str_change_func): - @functools.wraps(str_change_func) - def wrapper(func_or_str): - if isinstance(func_or_str, (str,)): - func = None - doc = func_or_str - else: - func = func_or_str - doc = func.__doc__ - - # PYTHONOPTIMIZE=2 strips docstrings, so they can disappear unexpectedly - if doc is not None: - doc = str_change_func(doc) - - if func: - func.__doc__ = doc - return func - return doc - return wrapper - -def safe_unicode(e): - """unicode(e) with various fallbacks. Used for exceptions, which may not be - safe to call unicode() on. 
- """ - try: - return str(e) - except UnicodeError: - pass - - try: - return repr(e) - except UnicodeError: - pass - - return u'Unrecoverably corrupt evalue' - -# shutil.which from Python 3.4 -def _shutil_which(cmd, mode=os.F_OK | os.X_OK, path=None): - """Given a command, mode, and a PATH string, return the path which - conforms to the given mode on the PATH, or None if there is no such - file. - - `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result - of os.environ.get("PATH"), or can be overridden with a custom search - path. - - This is a backport of shutil.which from Python 3.4 - """ - # Check that a given file can be accessed with the correct mode. - # Additionally check that `file` is not a directory, as on Windows - # directories pass the os.access check. - def _access_check(fn, mode): - return (os.path.exists(fn) and os.access(fn, mode) - and not os.path.isdir(fn)) - - # If we're given a path with a directory part, look it up directly rather - # than referring to PATH directories. This includes checking relative to the - # current directory, e.g. ./script - if os.path.dirname(cmd): - if _access_check(cmd, mode): - return cmd - return None - - if path is None: - path = os.environ.get("PATH", os.defpath) - if not path: - return None - path = path.split(os.pathsep) - - if sys.platform == "win32": - # The current directory takes precedence on Windows. - if not os.curdir in path: - path.insert(0, os.curdir) - - # PATHEXT is necessary to check on Windows. - pathext = os.environ.get("PATHEXT", "").split(os.pathsep) - # See if the given file matches any of the expected path extensions. - # This will allow us to short circuit when given "python.exe". - # If it does match, only test that one, otherwise we have to try - # others. - if any(cmd.lower().endswith(ext.lower()) for ext in pathext): - files = [cmd] - else: - files = [cmd + ext for ext in pathext] - else: - # On other platforms you don't have things like PATHEXT to tell you - # what file suffixes are executable, so just pass on cmd as-is. - files = [cmd] - - seen = set() - for dir in path: - normdir = os.path.normcase(dir) - if not normdir in seen: - seen.add(normdir) - for thefile in files: - name = os.path.join(dir, thefile) - if _access_check(name, mode): - return name - return None - -PY3 = True - -# keep reference to builtin_mod because the kernel overrides that value -# to forward requests to a frontend. -def input(prompt=''): - return builtin_mod.input(prompt) - -builtin_mod_name = "builtins" -import builtins as builtin_mod - - -which = shutil.which - -def isidentifier(s, dotted=False): - if dotted: - return all(isidentifier(a) for a in s.split(".")) - return s.isidentifier() - -getcwd = os.getcwd - -MethodType = types.MethodType - -def execfile(fname, glob, loc=None, compiler=None): - loc = loc if (loc is not None) else glob - with open(fname, 'rb') as f: - compiler = compiler or compile - exec(compiler(f.read(), fname, 'exec'), glob, loc) - -# Refactor print statements in doctests. 
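
For orientation, a brief sketch of the (deprecated) py3compat helpers shown above; the exact output of the first line depends on DEFAULT_ENCODING:

    from IPython.utils.py3compat import cast_unicode, cast_bytes, isidentifier, safe_unicode

    print(cast_unicode(b"caf\xc3\xa9"))        # bytes decoded with DEFAULT_ENCODING, errors replaced
    print(cast_bytes("abc"))                   # str encoded back to bytes
    print(isidentifier("a.b.c", dotted=True))  # True: every dotted component is an identifier
    print(safe_unicode(ValueError("boom")))    # str() of the exception, with repr() as fallback
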
-_print_statement_re = re.compile(r"\bprint (?P<expr>.*)$", re.MULTILINE) - -# Abstract u'abc' syntax: -@_modify_str_or_docstring -def u_format(s): - """"{u}'abc'" --> "'abc'" (Python 3) - - Accepts a string or a function, so it can be used as a decorator.""" - return s.format(u='') - - -PY2 = not PY3 -PYPY = platform.python_implementation() == "PyPy" - -# Cython still rely on that as a Dec 28 2019 -# See https://github.com/cython/cython/pull/3291 and -# https://github.com/ipython/ipython/issues/12068 -def no_code(x, encoding=None): - return x -unicode_to_str = cast_bytes_py2 = no_code - +# coding: utf-8 +"""Compatibility tricks for Python 3. Mainly to do with unicode. + +This file is deprecated and will be removed in a future version. +""" +import functools +import os +import sys +import re +import shutil +import types +import platform + +from .encoding import DEFAULT_ENCODING + + +def decode(s, encoding=None): + encoding = encoding or DEFAULT_ENCODING + return s.decode(encoding, "replace") + +def encode(u, encoding=None): + encoding = encoding or DEFAULT_ENCODING + return u.encode(encoding, "replace") + + +def cast_unicode(s, encoding=None): + if isinstance(s, bytes): + return decode(s, encoding) + return s + +def cast_bytes(s, encoding=None): + if not isinstance(s, bytes): + return encode(s, encoding) + return s + +def buffer_to_bytes(buf): + """Cast a buffer object to bytes""" + if not isinstance(buf, bytes): + buf = bytes(buf) + return buf + +def _modify_str_or_docstring(str_change_func): + @functools.wraps(str_change_func) + def wrapper(func_or_str): + if isinstance(func_or_str, (str,)): + func = None + doc = func_or_str + else: + func = func_or_str + doc = func.__doc__ + + # PYTHONOPTIMIZE=2 strips docstrings, so they can disappear unexpectedly + if doc is not None: + doc = str_change_func(doc) + + if func: + func.__doc__ = doc + return func + return doc + return wrapper + +def safe_unicode(e): + """unicode(e) with various fallbacks. Used for exceptions, which may not be + safe to call unicode() on. + """ + try: + return str(e) + except UnicodeError: + pass + + try: + return repr(e) + except UnicodeError: + pass + + return u'Unrecoverably corrupt evalue' + +# shutil.which from Python 3.4 +def _shutil_which(cmd, mode=os.F_OK | os.X_OK, path=None): + """Given a command, mode, and a PATH string, return the path which + conforms to the given mode on the PATH, or None if there is no such + file. + + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + + This is a backport of shutil.which from Python 3.4 + """ + # Check that a given file can be accessed with the correct mode. + # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + def _access_check(fn, mode): + return (os.path.exists(fn) and os.access(fn, mode) + and not os.path.isdir(fn)) + + # If we're given a path with a directory part, look it up directly rather + # than referring to PATH directories. This includes checking relative to the + # current directory, e.g. ./script + if os.path.dirname(cmd): + if _access_check(cmd, mode): + return cmd + return None + + if path is None: + path = os.environ.get("PATH", os.defpath) + if not path: + return None + path = path.split(os.pathsep) + + if sys.platform == "win32": + # The current directory takes precedence on Windows. + if not os.curdir in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. 
+ pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path extensions. + # This will allow us to short circuit when given "python.exe". + # If it does match, only test that one, otherwise we have to try + # others. + if any(cmd.lower().endswith(ext.lower()) for ext in pathext): + files = [cmd] + else: + files = [cmd + ext for ext in pathext] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. + files = [cmd] + + seen = set() + for dir in path: + normdir = os.path.normcase(dir) + if not normdir in seen: + seen.add(normdir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + return name + return None + +PY3 = True + +# keep reference to builtin_mod because the kernel overrides that value +# to forward requests to a frontend. +def input(prompt=''): + return builtin_mod.input(prompt) + +builtin_mod_name = "builtins" +import builtins as builtin_mod + + +which = shutil.which + +def isidentifier(s, dotted=False): + if dotted: + return all(isidentifier(a) for a in s.split(".")) + return s.isidentifier() + +getcwd = os.getcwd + +MethodType = types.MethodType + +def execfile(fname, glob, loc=None, compiler=None): + loc = loc if (loc is not None) else glob + with open(fname, 'rb') as f: + compiler = compiler or compile + exec(compiler(f.read(), fname, 'exec'), glob, loc) + +# Refactor print statements in doctests. +_print_statement_re = re.compile(r"\bprint (?P<expr>.*)$", re.MULTILINE) + +# Abstract u'abc' syntax: +@_modify_str_or_docstring +def u_format(s): + """"{u}'abc'" --> "'abc'" (Python 3) + + Accepts a string or a function, so it can be used as a decorator.""" + return s.format(u='') + + +PY2 = not PY3 +PYPY = platform.python_implementation() == "PyPy" + +# Cython still rely on that as a Dec 28 2019 +# See https://github.com/cython/cython/pull/3291 and +# https://github.com/ipython/ipython/issues/12068 +def no_code(x, encoding=None): + return x +unicode_to_str = cast_bytes_py2 = no_code + diff --git a/contrib/python/ipython/py3/IPython/utils/sentinel.py b/contrib/python/ipython/py3/IPython/utils/sentinel.py index 7af2558c1a8..dc57a2591ca 100644 --- a/contrib/python/ipython/py3/IPython/utils/sentinel.py +++ b/contrib/python/ipython/py3/IPython/utils/sentinel.py @@ -1,17 +1,17 @@ -"""Sentinel class for constants with useful reprs""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -class Sentinel(object): - - def __init__(self, name, module, docstring=None): - self.name = name - self.module = module - if docstring: - self.__doc__ = docstring - - - def __repr__(self): - return str(self.module)+'.'+self.name - +"""Sentinel class for constants with useful reprs""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
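
A small sketch of the Sentinel pattern above (the module and attribute names here are invented for illustration):

    from IPython.utils.sentinel import Sentinel

    MISSING = Sentinel("MISSING", "mymodule", "Marker for 'no value supplied'")

    def lookup(mapping, key, default=MISSING):
        # Identity comparison is the point of a sentinel: None stays a legal value.
        if key not in mapping and default is MISSING:
            raise KeyError(key)
        return mapping.get(key, default)

    print(repr(MISSING))   # -> mymodule.MISSING
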
+ +class Sentinel(object): + + def __init__(self, name, module, docstring=None): + self.name = name + self.module = module + if docstring: + self.__doc__ = docstring + + + def __repr__(self): + return str(self.module)+'.'+self.name + diff --git a/contrib/python/ipython/py3/IPython/utils/shimmodule.py b/contrib/python/ipython/py3/IPython/utils/shimmodule.py index 097e82149b4..b70ac135bf8 100644 --- a/contrib/python/ipython/py3/IPython/utils/shimmodule.py +++ b/contrib/python/ipython/py3/IPython/utils/shimmodule.py @@ -1,94 +1,94 @@ -"""A shim module for deprecated imports -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -import types -from importlib import import_module - -from .importstring import import_item - - -class ShimWarning(Warning): - """A warning to show when a module has moved, and a shim is in its place.""" - -class ShimImporter(object): - """Import hook for a shim. - - This ensures that submodule imports return the real target module, - not a clone that will confuse `is` and `isinstance` checks. - """ - def __init__(self, src, mirror): - self.src = src - self.mirror = mirror - - def _mirror_name(self, fullname): - """get the name of the mirrored module""" - - return self.mirror + fullname[len(self.src):] - - def find_module(self, fullname, path=None): - """Return self if we should be used to import the module.""" - if fullname.startswith(self.src + '.'): - mirror_name = self._mirror_name(fullname) - try: - mod = import_item(mirror_name) - except ImportError: - return - else: - if not isinstance(mod, types.ModuleType): - # not a module - return None - return self - - def load_module(self, fullname): - """Import the mirrored module, and insert it into sys.modules""" - mirror_name = self._mirror_name(fullname) - mod = import_item(mirror_name) - sys.modules[fullname] = mod - return mod - - -class ShimModule(types.ModuleType): - - def __init__(self, *args, **kwargs): - self._mirror = kwargs.pop("mirror") - src = kwargs.pop("src", None) - if src: - kwargs['name'] = src.rsplit('.', 1)[-1] - super(ShimModule, self).__init__(*args, **kwargs) - # add import hook for descendent modules - if src: - sys.meta_path.append( - ShimImporter(src=src, mirror=self._mirror) - ) - - @property - def __path__(self): - return [] - - @property - def __spec__(self): - """Don't produce __spec__ until requested""" - return import_module(self._mirror).__spec__ - - def __dir__(self): - return dir(import_module(self._mirror)) - - @property - def __all__(self): - """Ensure __all__ is always defined""" - mod = import_module(self._mirror) - try: - return mod.__all__ - except AttributeError: - return [name for name in dir(mod) if not name.startswith('_')] - - def __getattr__(self, key): - # Use the equivalent of import_item(name), see below - name = "%s.%s" % (self._mirror, key) - try: - return import_item(name) - except ImportError: - raise AttributeError(key) +"""A shim module for deprecated imports +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import sys +import types +from importlib import import_module + +from .importstring import import_item + + +class ShimWarning(Warning): + """A warning to show when a module has moved, and a shim is in its place.""" + +class ShimImporter(object): + """Import hook for a shim. + + This ensures that submodule imports return the real target module, + not a clone that will confuse `is` and `isinstance` checks. 
+ """ + def __init__(self, src, mirror): + self.src = src + self.mirror = mirror + + def _mirror_name(self, fullname): + """get the name of the mirrored module""" + + return self.mirror + fullname[len(self.src):] + + def find_module(self, fullname, path=None): + """Return self if we should be used to import the module.""" + if fullname.startswith(self.src + '.'): + mirror_name = self._mirror_name(fullname) + try: + mod = import_item(mirror_name) + except ImportError: + return + else: + if not isinstance(mod, types.ModuleType): + # not a module + return None + return self + + def load_module(self, fullname): + """Import the mirrored module, and insert it into sys.modules""" + mirror_name = self._mirror_name(fullname) + mod = import_item(mirror_name) + sys.modules[fullname] = mod + return mod + + +class ShimModule(types.ModuleType): + + def __init__(self, *args, **kwargs): + self._mirror = kwargs.pop("mirror") + src = kwargs.pop("src", None) + if src: + kwargs['name'] = src.rsplit('.', 1)[-1] + super(ShimModule, self).__init__(*args, **kwargs) + # add import hook for descendent modules + if src: + sys.meta_path.append( + ShimImporter(src=src, mirror=self._mirror) + ) + + @property + def __path__(self): + return [] + + @property + def __spec__(self): + """Don't produce __spec__ until requested""" + return import_module(self._mirror).__spec__ + + def __dir__(self): + return dir(import_module(self._mirror)) + + @property + def __all__(self): + """Ensure __all__ is always defined""" + mod = import_module(self._mirror) + try: + return mod.__all__ + except AttributeError: + return [name for name in dir(mod) if not name.startswith('_')] + + def __getattr__(self, key): + # Use the equivalent of import_item(name), see below + name = "%s.%s" % (self._mirror, key) + try: + return import_item(name) + except ImportError: + raise AttributeError(key) diff --git a/contrib/python/ipython/py3/IPython/utils/signatures.py b/contrib/python/ipython/py3/IPython/utils/signatures.py index ba35b9858d4..88d72b185eb 100644 --- a/contrib/python/ipython/py3/IPython/utils/signatures.py +++ b/contrib/python/ipython/py3/IPython/utils/signatures.py @@ -1,12 +1,12 @@ -"""DEPRECATED: Function signature objects for callables. - -Use the standard library version if available, as it is more up to date. -Fallback on backport otherwise. -""" - -import warnings -warnings.warn("{} backport for Python 2 is deprecated in IPython 6, which only supports " - "Python 3. Import directly from standard library `inspect`".format(__name__), - DeprecationWarning, stacklevel=2) - -from inspect import BoundArguments, Parameter, Signature, signature +"""DEPRECATED: Function signature objects for callables. + +Use the standard library version if available, as it is more up to date. +Fallback on backport otherwise. +""" + +import warnings +warnings.warn("{} backport for Python 2 is deprecated in IPython 6, which only supports " + "Python 3. Import directly from standard library `inspect`".format(__name__), + DeprecationWarning, stacklevel=2) + +from inspect import BoundArguments, Parameter, Signature, signature diff --git a/contrib/python/ipython/py3/IPython/utils/strdispatch.py b/contrib/python/ipython/py3/IPython/utils/strdispatch.py index a6183404e75..d6bf510535e 100644 --- a/contrib/python/ipython/py3/IPython/utils/strdispatch.py +++ b/contrib/python/ipython/py3/IPython/utils/strdispatch.py @@ -1,68 +1,68 @@ -"""String dispatch class to match regexps and dispatch commands. 
-""" - -# Stdlib imports -import re - -# Our own modules -from IPython.core.hooks import CommandChainDispatcher - -# Code begins -class StrDispatch(object): - """Dispatch (lookup) a set of strings / regexps for match. - - Example: - - >>> dis = StrDispatch() - >>> dis.add_s('hei',34, priority = 4) - >>> dis.add_s('hei',123, priority = 2) - >>> dis.add_re('h.i', 686) - >>> print(list(dis.flat_matches('hei'))) - [123, 34, 686] - """ - - def __init__(self): - self.strs = {} - self.regexs = {} - - def add_s(self, s, obj, priority= 0 ): - """ Adds a target 'string' for dispatching """ - - chain = self.strs.get(s, CommandChainDispatcher()) - chain.add(obj,priority) - self.strs[s] = chain - - def add_re(self, regex, obj, priority= 0 ): - """ Adds a target regexp for dispatching """ - - chain = self.regexs.get(regex, CommandChainDispatcher()) - chain.add(obj,priority) - self.regexs[regex] = chain - - def dispatch(self, key): - """ Get a seq of Commandchain objects that match key """ - if key in self.strs: - yield self.strs[key] - - for r, obj in self.regexs.items(): - if re.match(r, key): - yield obj - else: - #print "nomatch",key # dbg - pass - - def __repr__(self): - return "<Strdispatch %s, %s>" % (self.strs, self.regexs) - - def s_matches(self, key): - if key not in self.strs: - return - for el in self.strs[key]: - yield el[1] - - def flat_matches(self, key): - """ Yield all 'value' targets, without priority """ - for val in self.dispatch(key): - for el in val: - yield el[1] # only value, no priority - return +"""String dispatch class to match regexps and dispatch commands. +""" + +# Stdlib imports +import re + +# Our own modules +from IPython.core.hooks import CommandChainDispatcher + +# Code begins +class StrDispatch(object): + """Dispatch (lookup) a set of strings / regexps for match. + + Example: + + >>> dis = StrDispatch() + >>> dis.add_s('hei',34, priority = 4) + >>> dis.add_s('hei',123, priority = 2) + >>> dis.add_re('h.i', 686) + >>> print(list(dis.flat_matches('hei'))) + [123, 34, 686] + """ + + def __init__(self): + self.strs = {} + self.regexs = {} + + def add_s(self, s, obj, priority= 0 ): + """ Adds a target 'string' for dispatching """ + + chain = self.strs.get(s, CommandChainDispatcher()) + chain.add(obj,priority) + self.strs[s] = chain + + def add_re(self, regex, obj, priority= 0 ): + """ Adds a target regexp for dispatching """ + + chain = self.regexs.get(regex, CommandChainDispatcher()) + chain.add(obj,priority) + self.regexs[regex] = chain + + def dispatch(self, key): + """ Get a seq of Commandchain objects that match key """ + if key in self.strs: + yield self.strs[key] + + for r, obj in self.regexs.items(): + if re.match(r, key): + yield obj + else: + #print "nomatch",key # dbg + pass + + def __repr__(self): + return "<Strdispatch %s, %s>" % (self.strs, self.regexs) + + def s_matches(self, key): + if key not in self.strs: + return + for el in self.strs[key]: + yield el[1] + + def flat_matches(self, key): + """ Yield all 'value' targets, without priority """ + for val in self.dispatch(key): + for el in val: + yield el[1] # only value, no priority + return diff --git a/contrib/python/ipython/py3/IPython/utils/sysinfo.py b/contrib/python/ipython/py3/IPython/utils/sysinfo.py index f082921c546..07d14fd8a48 100644 --- a/contrib/python/ipython/py3/IPython/utils/sysinfo.py +++ b/contrib/python/ipython/py3/IPython/utils/sysinfo.py @@ -1,166 +1,166 @@ -# encoding: utf-8 -""" -Utilities for getting information about IPython and the system it's running in. 
-""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import os -import platform -import pprint -import sys -import subprocess - -from IPython.core import release -from IPython.utils import _sysinfo, encoding - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - -def pkg_commit_hash(pkg_path): - """Get short form of commit hash given directory `pkg_path` - - We get the commit hash from (in order of preference): - - * IPython.utils._sysinfo.commit - * git output, if we are in a git repository - - If these fail, we return a not-found placeholder tuple - - Parameters - ---------- - pkg_path : str - directory containing package - only used for getting commit from active repo - - Returns - ------- - hash_from : str - Where we got the hash from - description - hash_str : str - short form of hash - """ - # Try and get commit from written commit text file - if _sysinfo.commit: - return "installation", _sysinfo.commit - - # maybe we are in a repository - proc = subprocess.Popen('git rev-parse --short HEAD'.split(' '), - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - cwd=pkg_path) - repo_commit, _ = proc.communicate() - if repo_commit: - return 'repository', repo_commit.strip().decode('ascii') - return '(none found)', '<not found>' - - -def pkg_info(pkg_path): - """Return dict describing the context of this package - - Parameters - ---------- - pkg_path : str - path containing __init__.py for package - - Returns - ------- - context : dict - with named parameters of interest - """ - src, hsh = pkg_commit_hash(pkg_path) - return dict( - ipython_version=release.version, - ipython_path=pkg_path, - commit_source=src, - commit_hash=hsh, - sys_version=sys.version, - sys_executable=sys.executable, - sys_platform=sys.platform, - platform=platform.platform(), - os_name=os.name, - default_encoding=encoding.DEFAULT_ENCODING, - ) - -def get_sys_info(): - """Return useful information about IPython and the system, as a dict.""" - p = os.path - path = p.realpath(p.dirname(p.abspath(p.join(__file__, '..')))) - return pkg_info(path) - -def sys_info(): - """Return useful information about IPython and the system, as a string. 
- - Examples - -------- - :: - - In [2]: print(sys_info()) - {'commit_hash': '144fdae', # random - 'commit_source': 'repository', - 'ipython_path': '/home/fperez/usr/lib/python2.6/site-packages/IPython', - 'ipython_version': '0.11.dev', - 'os_name': 'posix', - 'platform': 'Linux-2.6.35-22-generic-i686-with-Ubuntu-10.10-maverick', - 'sys_executable': '/usr/bin/python', - 'sys_platform': 'linux2', - 'sys_version': '2.6.6 (r266:84292, Sep 15 2010, 15:52:39) \\n[GCC 4.4.5]'} - """ - return pprint.pformat(get_sys_info()) - -def _num_cpus_unix(): - """Return the number of active CPUs on a Unix system.""" - return os.sysconf("SC_NPROCESSORS_ONLN") - - -def _num_cpus_darwin(): - """Return the number of active CPUs on a Darwin system.""" - p = subprocess.Popen(['sysctl','-n','hw.ncpu'],stdout=subprocess.PIPE) - return p.stdout.read() - - -def _num_cpus_windows(): - """Return the number of active CPUs on a Windows system.""" - return os.environ.get("NUMBER_OF_PROCESSORS") - - -def num_cpus(): - """Return the effective number of CPUs in the system as an integer. - - This cross-platform function makes an attempt at finding the total number of - available CPUs in the system, as returned by various underlying system and - python calls. - - If it can't find a sensible answer, it returns 1 (though an error *may* make - it return a large positive number that's actually incorrect). - """ - - # Many thanks to the Parallel Python project (http://www.parallelpython.com) - # for the names of the keys we needed to look up for this function. This - # code was inspired by their equivalent function. - - ncpufuncs = {'Linux':_num_cpus_unix, - 'Darwin':_num_cpus_darwin, - 'Windows':_num_cpus_windows - } - - ncpufunc = ncpufuncs.get(platform.system(), - # default to unix version (Solaris, AIX, etc) - _num_cpus_unix) - - try: - ncpus = max(1,int(ncpufunc())) - except: - ncpus = 1 - return ncpus - +# encoding: utf-8 +""" +Utilities for getting information about IPython and the system it's running in. +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
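
As an illustrative sketch, the two public entry points of sysinfo above can be exercised directly:

    from IPython.utils.sysinfo import sys_info, num_cpus

    print(sys_info())   # pretty-printed dict: versions, commit hash, platform, encoding, ...
    print(num_cpus())   # best-effort CPU count; falls back to 1 if detection fails
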
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import os +import platform +import pprint +import sys +import subprocess + +from IPython.core import release +from IPython.utils import _sysinfo, encoding + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def pkg_commit_hash(pkg_path): + """Get short form of commit hash given directory `pkg_path` + + We get the commit hash from (in order of preference): + + * IPython.utils._sysinfo.commit + * git output, if we are in a git repository + + If these fail, we return a not-found placeholder tuple + + Parameters + ---------- + pkg_path : str + directory containing package + only used for getting commit from active repo + + Returns + ------- + hash_from : str + Where we got the hash from - description + hash_str : str + short form of hash + """ + # Try and get commit from written commit text file + if _sysinfo.commit: + return "installation", _sysinfo.commit + + # maybe we are in a repository + proc = subprocess.Popen('git rev-parse --short HEAD'.split(' '), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=pkg_path) + repo_commit, _ = proc.communicate() + if repo_commit: + return 'repository', repo_commit.strip().decode('ascii') + return '(none found)', '<not found>' + + +def pkg_info(pkg_path): + """Return dict describing the context of this package + + Parameters + ---------- + pkg_path : str + path containing __init__.py for package + + Returns + ------- + context : dict + with named parameters of interest + """ + src, hsh = pkg_commit_hash(pkg_path) + return dict( + ipython_version=release.version, + ipython_path=pkg_path, + commit_source=src, + commit_hash=hsh, + sys_version=sys.version, + sys_executable=sys.executable, + sys_platform=sys.platform, + platform=platform.platform(), + os_name=os.name, + default_encoding=encoding.DEFAULT_ENCODING, + ) + +def get_sys_info(): + """Return useful information about IPython and the system, as a dict.""" + p = os.path + path = p.realpath(p.dirname(p.abspath(p.join(__file__, '..')))) + return pkg_info(path) + +def sys_info(): + """Return useful information about IPython and the system, as a string. + + Examples + -------- + :: + + In [2]: print(sys_info()) + {'commit_hash': '144fdae', # random + 'commit_source': 'repository', + 'ipython_path': '/home/fperez/usr/lib/python2.6/site-packages/IPython', + 'ipython_version': '0.11.dev', + 'os_name': 'posix', + 'platform': 'Linux-2.6.35-22-generic-i686-with-Ubuntu-10.10-maverick', + 'sys_executable': '/usr/bin/python', + 'sys_platform': 'linux2', + 'sys_version': '2.6.6 (r266:84292, Sep 15 2010, 15:52:39) \\n[GCC 4.4.5]'} + """ + return pprint.pformat(get_sys_info()) + +def _num_cpus_unix(): + """Return the number of active CPUs on a Unix system.""" + return os.sysconf("SC_NPROCESSORS_ONLN") + + +def _num_cpus_darwin(): + """Return the number of active CPUs on a Darwin system.""" + p = subprocess.Popen(['sysctl','-n','hw.ncpu'],stdout=subprocess.PIPE) + return p.stdout.read() + + +def _num_cpus_windows(): + """Return the number of active CPUs on a Windows system.""" + return os.environ.get("NUMBER_OF_PROCESSORS") + + +def num_cpus(): + """Return the effective number of CPUs in the system as an integer. 
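# Rough equivalent of the git fallback in pkg_commit_hash(), rewritten with
# subprocess.run purely for illustration; the command mirrors the function
# body shown above.
import subprocess

result = subprocess.run(
    ["git", "rev-parse", "--short", "HEAD"],
    capture_output=True,
    cwd=".",                   # any directory inside the repository of interest
)
print(result.stdout.strip().decode("ascii") or "<not found>")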
+ + This cross-platform function makes an attempt at finding the total number of + available CPUs in the system, as returned by various underlying system and + python calls. + + If it can't find a sensible answer, it returns 1 (though an error *may* make + it return a large positive number that's actually incorrect). + """ + + # Many thanks to the Parallel Python project (http://www.parallelpython.com) + # for the names of the keys we needed to look up for this function. This + # code was inspired by their equivalent function. + + ncpufuncs = {'Linux':_num_cpus_unix, + 'Darwin':_num_cpus_darwin, + 'Windows':_num_cpus_windows + } + + ncpufunc = ncpufuncs.get(platform.system(), + # default to unix version (Solaris, AIX, etc) + _num_cpus_unix) + + try: + ncpus = max(1,int(ncpufunc())) + except: + ncpus = 1 + return ncpus + diff --git a/contrib/python/ipython/py3/IPython/utils/syspathcontext.py b/contrib/python/ipython/py3/IPython/utils/syspathcontext.py index 1da30064b26..bd1c51500d6 100644 --- a/contrib/python/ipython/py3/IPython/utils/syspathcontext.py +++ b/contrib/python/ipython/py3/IPython/utils/syspathcontext.py @@ -1,62 +1,62 @@ -# encoding: utf-8 -""" -Context managers for adding things to sys.path temporarily. - -Authors: - -* Brian Granger -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -import sys - - -class appended_to_syspath(object): - """A context for appending a directory to sys.path for a second.""" - - def __init__(self, dir): - self.dir = dir - - def __enter__(self): - if self.dir not in sys.path: - sys.path.append(self.dir) - self.added = True - else: - self.added = False - - def __exit__(self, type, value, traceback): - if self.added: - try: - sys.path.remove(self.dir) - except ValueError: - pass - # Returning False causes any exceptions to be re-raised. - return False - -class prepended_to_syspath(object): - """A context for prepending a directory to sys.path for a second.""" - - def __init__(self, dir): - self.dir = dir - - def __enter__(self): - if self.dir not in sys.path: - sys.path.insert(0,self.dir) - self.added = True - else: - self.added = False - - def __exit__(self, type, value, traceback): - if self.added: - try: - sys.path.remove(self.dir) - except ValueError: - pass - # Returning False causes any exceptions to be re-raised. - return False +# encoding: utf-8 +""" +Context managers for adding things to sys.path temporarily. + +Authors: + +* Brian Granger +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
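# Usage sketch for the context managers above; the directory name is invented
# for illustration.
import sys
from IPython.utils.syspathcontext import prepended_to_syspath

with prepended_to_syspath("/tmp/my_plugins"):
    assert "/tmp/my_plugins" in sys.path      # importable only inside the block
assert "/tmp/my_plugins" not in sys.path      # removed again on exit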
+#----------------------------------------------------------------------------- + +import sys + + +class appended_to_syspath(object): + """A context for appending a directory to sys.path for a second.""" + + def __init__(self, dir): + self.dir = dir + + def __enter__(self): + if self.dir not in sys.path: + sys.path.append(self.dir) + self.added = True + else: + self.added = False + + def __exit__(self, type, value, traceback): + if self.added: + try: + sys.path.remove(self.dir) + except ValueError: + pass + # Returning False causes any exceptions to be re-raised. + return False + +class prepended_to_syspath(object): + """A context for prepending a directory to sys.path for a second.""" + + def __init__(self, dir): + self.dir = dir + + def __enter__(self): + if self.dir not in sys.path: + sys.path.insert(0,self.dir) + self.added = True + else: + self.added = False + + def __exit__(self, type, value, traceback): + if self.added: + try: + sys.path.remove(self.dir) + except ValueError: + pass + # Returning False causes any exceptions to be re-raised. + return False diff --git a/contrib/python/ipython/py3/IPython/utils/tempdir.py b/contrib/python/ipython/py3/IPython/utils/tempdir.py index 2446ee2cccf..98f6aeb3c6b 100644 --- a/contrib/python/ipython/py3/IPython/utils/tempdir.py +++ b/contrib/python/ipython/py3/IPython/utils/tempdir.py @@ -1,57 +1,57 @@ -""" This module contains classes - NamedFileInTemporaryDirectory, TemporaryWorkingDirectory. - -These classes add extra features such as creating a named file in temporary directory and -creating a context manager for the working directory which is also temporary. -""" - -import os as _os -from tempfile import TemporaryDirectory - - -class NamedFileInTemporaryDirectory(object): - - def __init__(self, filename, mode='w+b', bufsize=-1, **kwds): - """ - Open a file named `filename` in a temporary directory. - - This context manager is preferred over `NamedTemporaryFile` in - stdlib `tempfile` when one needs to reopen the file. - - Arguments `mode` and `bufsize` are passed to `open`. - Rest of the arguments are passed to `TemporaryDirectory`. - - """ - self._tmpdir = TemporaryDirectory(**kwds) - path = _os.path.join(self._tmpdir.name, filename) - self.file = open(path, mode, bufsize) - - def cleanup(self): - self.file.close() - self._tmpdir.cleanup() - - __del__ = cleanup - - def __enter__(self): - return self.file - - def __exit__(self, type, value, traceback): - self.cleanup() - - -class TemporaryWorkingDirectory(TemporaryDirectory): - """ - Creates a temporary directory and sets the cwd to that directory. - Automatically reverts to previous cwd upon cleanup. - Usage example: - - with TemporaryWorkingDirectory() as tmpdir: - ... - """ - def __enter__(self): - self.old_wd = _os.getcwd() - _os.chdir(self.name) - return super(TemporaryWorkingDirectory, self).__enter__() - - def __exit__(self, exc, value, tb): - _os.chdir(self.old_wd) - return super(TemporaryWorkingDirectory, self).__exit__(exc, value, tb) +""" This module contains classes - NamedFileInTemporaryDirectory, TemporaryWorkingDirectory. + +These classes add extra features such as creating a named file in temporary directory and +creating a context manager for the working directory which is also temporary. +""" + +import os as _os +from tempfile import TemporaryDirectory + + +class NamedFileInTemporaryDirectory(object): + + def __init__(self, filename, mode='w+b', bufsize=-1, **kwds): + """ + Open a file named `filename` in a temporary directory. 
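# Usage sketch for the two classes above; the file name and contents are
# placeholders.
import os
from IPython.utils.tempdir import (NamedFileInTemporaryDirectory,
                                   TemporaryWorkingDirectory)

with TemporaryWorkingDirectory() as tmpdir:
    assert os.path.samefile(os.getcwd(), tmpdir)   # cwd switched for the block

with NamedFileInTemporaryDirectory("notes.txt", mode="w") as f:
    f.write("scratch data")                        # f is an ordinary file object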
+ + This context manager is preferred over `NamedTemporaryFile` in + stdlib `tempfile` when one needs to reopen the file. + + Arguments `mode` and `bufsize` are passed to `open`. + Rest of the arguments are passed to `TemporaryDirectory`. + + """ + self._tmpdir = TemporaryDirectory(**kwds) + path = _os.path.join(self._tmpdir.name, filename) + self.file = open(path, mode, bufsize) + + def cleanup(self): + self.file.close() + self._tmpdir.cleanup() + + __del__ = cleanup + + def __enter__(self): + return self.file + + def __exit__(self, type, value, traceback): + self.cleanup() + + +class TemporaryWorkingDirectory(TemporaryDirectory): + """ + Creates a temporary directory and sets the cwd to that directory. + Automatically reverts to previous cwd upon cleanup. + Usage example: + + with TemporaryWorkingDirectory() as tmpdir: + ... + """ + def __enter__(self): + self.old_wd = _os.getcwd() + _os.chdir(self.name) + return super(TemporaryWorkingDirectory, self).__enter__() + + def __exit__(self, exc, value, tb): + _os.chdir(self.old_wd) + return super(TemporaryWorkingDirectory, self).__exit__(exc, value, tb) diff --git a/contrib/python/ipython/py3/IPython/utils/terminal.py b/contrib/python/ipython/py3/IPython/utils/terminal.py index b2f8e165132..4e1800208c0 100644 --- a/contrib/python/ipython/py3/IPython/utils/terminal.py +++ b/contrib/python/ipython/py3/IPython/utils/terminal.py @@ -1,129 +1,129 @@ -# encoding: utf-8 -""" -Utilities for working with terminals. - -Authors: - -* Brian E. Granger -* Fernando Perez -* Alexander Belchenko (e-mail: bialix AT ukr.net) -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import os -import sys -import warnings -from shutil import get_terminal_size as _get_terminal_size - -# This variable is part of the expected API of the module: -ignore_termtitle = True - - - -if os.name == 'posix': - def _term_clear(): - os.system('clear') -elif sys.platform == 'win32': - def _term_clear(): - os.system('cls') -else: - def _term_clear(): - pass - - - -def toggle_set_term_title(val): - """Control whether set_term_title is active or not. - - set_term_title() allows writing to the console titlebar. In embedded - widgets this can cause problems, so this call can be used to toggle it on - or off as needed. - - The default state of the module is for the function to be disabled. - - Parameters - ---------- - val : bool - If True, set_term_title() actually writes to the terminal (using the - appropriate platform-specific module). If False, it is a no-op. 
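# Usage sketch for the title helpers above: writes are ignored until
# toggle_set_term_title(True) is called, since ignore_termtitle defaults to
# True.
from IPython.utils import terminal

terminal.toggle_set_term_title(True)     # enable titlebar writes
terminal.set_term_title("ipython demo")
terminal.restore_term_title()            # effective only on xterm-like terminals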
- """ - global ignore_termtitle - ignore_termtitle = not(val) - - -def _set_term_title(*args,**kw): - """Dummy no-op.""" - pass - - -def _restore_term_title(): - pass - - -def _set_term_title_xterm(title): - """ Change virtual terminal title in xterm-workalikes """ - # save the current title to the xterm "stack" - sys.stdout.write('\033[22;0t') - sys.stdout.write('\033]0;%s\007' % title) - - -def _restore_term_title_xterm(): - sys.stdout.write('\033[23;0t') - - -if os.name == 'posix': - TERM = os.environ.get('TERM','') - if TERM.startswith('xterm'): - _set_term_title = _set_term_title_xterm - _restore_term_title = _restore_term_title_xterm -elif sys.platform == 'win32': - try: - import ctypes - - SetConsoleTitleW = ctypes.windll.kernel32.SetConsoleTitleW - SetConsoleTitleW.argtypes = [ctypes.c_wchar_p] - - def _set_term_title(title): - """Set terminal title using ctypes to access the Win32 APIs.""" - SetConsoleTitleW(title) - except ImportError: - def _set_term_title(title): - """Set terminal title using the 'title' command.""" - global ignore_termtitle - - try: - # Cannot be on network share when issuing system commands - curr = os.getcwd() - os.chdir("C:") - ret = os.system("title " + title) - finally: - os.chdir(curr) - if ret: - # non-zero return code signals error, don't try again - ignore_termtitle = True - - -def set_term_title(title): - """Set terminal title using the necessary platform-dependent calls.""" - if ignore_termtitle: - return - _set_term_title(title) - - -def restore_term_title(): - """Restore, if possible, terminal title to the original state""" - if ignore_termtitle: - return - _restore_term_title() - - -def freeze_term_title(): - warnings.warn("This function is deprecated, use toggle_set_term_title()") - global ignore_termtitle - ignore_termtitle = True - - -def get_terminal_size(defaultx=80, defaulty=25): - return _get_terminal_size((defaultx, defaulty)) +# encoding: utf-8 +""" +Utilities for working with terminals. + +Authors: + +* Brian E. Granger +* Fernando Perez +* Alexander Belchenko (e-mail: bialix AT ukr.net) +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import os +import sys +import warnings +from shutil import get_terminal_size as _get_terminal_size + +# This variable is part of the expected API of the module: +ignore_termtitle = True + + + +if os.name == 'posix': + def _term_clear(): + os.system('clear') +elif sys.platform == 'win32': + def _term_clear(): + os.system('cls') +else: + def _term_clear(): + pass + + + +def toggle_set_term_title(val): + """Control whether set_term_title is active or not. + + set_term_title() allows writing to the console titlebar. In embedded + widgets this can cause problems, so this call can be used to toggle it on + or off as needed. + + The default state of the module is for the function to be disabled. + + Parameters + ---------- + val : bool + If True, set_term_title() actually writes to the terminal (using the + appropriate platform-specific module). If False, it is a no-op. 
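# get_terminal_size() above is a thin wrapper around shutil.get_terminal_size;
# the defaults are used only when the size cannot be determined (e.g. output
# is redirected).
from IPython.utils.terminal import get_terminal_size

cols, rows = get_terminal_size()         # an os.terminal_size named tuple
print(cols, rows)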
+ """ + global ignore_termtitle + ignore_termtitle = not(val) + + +def _set_term_title(*args,**kw): + """Dummy no-op.""" + pass + + +def _restore_term_title(): + pass + + +def _set_term_title_xterm(title): + """ Change virtual terminal title in xterm-workalikes """ + # save the current title to the xterm "stack" + sys.stdout.write('\033[22;0t') + sys.stdout.write('\033]0;%s\007' % title) + + +def _restore_term_title_xterm(): + sys.stdout.write('\033[23;0t') + + +if os.name == 'posix': + TERM = os.environ.get('TERM','') + if TERM.startswith('xterm'): + _set_term_title = _set_term_title_xterm + _restore_term_title = _restore_term_title_xterm +elif sys.platform == 'win32': + try: + import ctypes + + SetConsoleTitleW = ctypes.windll.kernel32.SetConsoleTitleW + SetConsoleTitleW.argtypes = [ctypes.c_wchar_p] + + def _set_term_title(title): + """Set terminal title using ctypes to access the Win32 APIs.""" + SetConsoleTitleW(title) + except ImportError: + def _set_term_title(title): + """Set terminal title using the 'title' command.""" + global ignore_termtitle + + try: + # Cannot be on network share when issuing system commands + curr = os.getcwd() + os.chdir("C:") + ret = os.system("title " + title) + finally: + os.chdir(curr) + if ret: + # non-zero return code signals error, don't try again + ignore_termtitle = True + + +def set_term_title(title): + """Set terminal title using the necessary platform-dependent calls.""" + if ignore_termtitle: + return + _set_term_title(title) + + +def restore_term_title(): + """Restore, if possible, terminal title to the original state""" + if ignore_termtitle: + return + _restore_term_title() + + +def freeze_term_title(): + warnings.warn("This function is deprecated, use toggle_set_term_title()") + global ignore_termtitle + ignore_termtitle = True + + +def get_terminal_size(defaultx=80, defaulty=25): + return _get_terminal_size((defaultx, defaulty)) diff --git a/contrib/python/ipython/py3/IPython/utils/text.py b/contrib/python/ipython/py3/IPython/utils/text.py index f590cbc18a9..256fdab5843 100644 --- a/contrib/python/ipython/py3/IPython/utils/text.py +++ b/contrib/python/ipython/py3/IPython/utils/text.py @@ -1,763 +1,763 @@ -# encoding: utf-8 -""" -Utilities for working with strings and text. - -Inheritance diagram: - -.. inheritance-diagram:: IPython.utils.text - :parts: 3 -""" - -import os -import re -import string -import sys -import textwrap -from string import Formatter -from pathlib import Path - -from IPython.utils import py3compat - -# datetime.strftime date format for ipython -if sys.platform == 'win32': - date_format = "%B %d, %Y" -else: - date_format = "%B %-d, %Y" - -class LSString(str): - """String derivative with a special access attributes. - - These are normal strings, but with the special attributes: - - .l (or .list) : value as list (split on newlines). - .n (or .nlstr): original value (the string itself). - .s (or .spstr): value as whitespace-separated string. - .p (or .paths): list of path objects (requires path.py package) - - Any values which require transformations are computed only once and - cached. 
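# Sketch of LSString's cached views; the sample value is made up.
from IPython.utils.text import LSString

out = LSString("a.txt\nb.txt\nc.txt")
print(out.l)          # ['a.txt', 'b.txt', 'c.txt']  (split on newlines)
print(out.s)          # 'a.txt b.txt c.txt'          (newlines -> spaces)
print(out.n == out)   # True: .n is the original string itself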
- - Such strings are very useful to efficiently interact with the shell, which - typically only understands whitespace-separated options for commands.""" - - def get_list(self): - try: - return self.__list - except AttributeError: - self.__list = self.split('\n') - return self.__list - - l = list = property(get_list) - - def get_spstr(self): - try: - return self.__spstr - except AttributeError: - self.__spstr = self.replace('\n',' ') - return self.__spstr - - s = spstr = property(get_spstr) - - def get_nlstr(self): - return self - - n = nlstr = property(get_nlstr) - - def get_paths(self): - try: - return self.__paths - except AttributeError: - self.__paths = [Path(p) for p in self.split('\n') if os.path.exists(p)] - return self.__paths - - p = paths = property(get_paths) - -# FIXME: We need to reimplement type specific displayhook and then add this -# back as a custom printer. This should also be moved outside utils into the -# core. - -# def print_lsstring(arg): -# """ Prettier (non-repr-like) and more informative printer for LSString """ -# print "LSString (.p, .n, .l, .s available). Value:" -# print arg -# -# -# print_lsstring = result_display.register(LSString)(print_lsstring) - - -class SList(list): - """List derivative with a special access attributes. - - These are normal lists, but with the special attributes: - - * .l (or .list) : value as list (the list itself). - * .n (or .nlstr): value as a string, joined on newlines. - * .s (or .spstr): value as a string, joined on spaces. - * .p (or .paths): list of path objects (requires path.py package) - - Any values which require transformations are computed only once and - cached.""" - - def get_list(self): - return self - - l = list = property(get_list) - - def get_spstr(self): - try: - return self.__spstr - except AttributeError: - self.__spstr = ' '.join(self) - return self.__spstr - - s = spstr = property(get_spstr) - - def get_nlstr(self): - try: - return self.__nlstr - except AttributeError: - self.__nlstr = '\n'.join(self) - return self.__nlstr - - n = nlstr = property(get_nlstr) - - def get_paths(self): - try: - return self.__paths - except AttributeError: - self.__paths = [Path(p) for p in self if os.path.exists(p)] - return self.__paths - - p = paths = property(get_paths) - - def grep(self, pattern, prune = False, field = None): - """ Return all strings matching 'pattern' (a regex or callable) - - This is case-insensitive. If prune is true, return all items - NOT matching the pattern. - - If field is specified, the match must occur in the specified - whitespace-separated field. - - Examples:: - - a.grep( lambda x: x.startswith('C') ) - a.grep('Cha.*log', prune=1) - a.grep('chm', field=-1) - """ - - def match_target(s): - if field is None: - return s - parts = s.split() - try: - tgt = parts[field] - return tgt - except IndexError: - return "" - - if isinstance(pattern, str): - pred = lambda x : re.search(pattern, x, re.IGNORECASE) - else: - pred = pattern - if not prune: - return SList([el for el in self if pred(match_target(el))]) - else: - return SList([el for el in self if not pred(match_target(el))]) - - def fields(self, *fields): - """ Collect whitespace-separated fields from string list - - Allows quick awk-like usage of string lists. 
- - Example data (in var a, created by 'a = !ls -l'):: - - -rwxrwxrwx 1 ville None 18 Dec 14 2006 ChangeLog - drwxrwxrwx+ 6 ville None 0 Oct 24 18:05 IPython - - * ``a.fields(0)`` is ``['-rwxrwxrwx', 'drwxrwxrwx+']`` - * ``a.fields(1,0)`` is ``['1 -rwxrwxrwx', '6 drwxrwxrwx+']`` - (note the joining by space). - * ``a.fields(-1)`` is ``['ChangeLog', 'IPython']`` - - IndexErrors are ignored. - - Without args, fields() just split()'s the strings. - """ - if len(fields) == 0: - return [el.split() for el in self] - - res = SList() - for el in [f.split() for f in self]: - lineparts = [] - - for fd in fields: - try: - lineparts.append(el[fd]) - except IndexError: - pass - if lineparts: - res.append(" ".join(lineparts)) - - return res - - def sort(self,field= None, nums = False): - """ sort by specified fields (see fields()) - - Example:: - - a.sort(1, nums = True) - - Sorts a by second field, in numerical order (so that 21 > 3) - - """ - - #decorate, sort, undecorate - if field is not None: - dsu = [[SList([line]).fields(field), line] for line in self] - else: - dsu = [[line, line] for line in self] - if nums: - for i in range(len(dsu)): - numstr = "".join([ch for ch in dsu[i][0] if ch.isdigit()]) - try: - n = int(numstr) - except ValueError: - n = 0 - dsu[i][0] = n - - - dsu.sort() - return SList([t[1] for t in dsu]) - - -# FIXME: We need to reimplement type specific displayhook and then add this -# back as a custom printer. This should also be moved outside utils into the -# core. - -# def print_slist(arg): -# """ Prettier (non-repr-like) and more informative printer for SList """ -# print "SList (.p, .n, .l, .s, .grep(), .fields(), sort() available):" -# if hasattr(arg, 'hideonce') and arg.hideonce: -# arg.hideonce = False -# return -# -# nlprint(arg) # This was a nested list printer, now removed. -# -# print_slist = result_display.register(SList)(print_slist) - - -def indent(instr,nspaces=4, ntabs=0, flatten=False): - """Indent a string a given number of spaces or tabstops. - - indent(str,nspaces=4,ntabs=0) -> indent str by ntabs+nspaces. - - Parameters - ---------- - - instr : basestring - The string to be indented. - nspaces : int (default: 4) - The number of spaces to be indented. - ntabs : int (default: 0) - The number of tabs to be indented. - flatten : bool (default: False) - Whether to scrub existing indentation. If True, all lines will be - aligned to the same indentation. If False, existing indentation will - be strictly increased. - - Returns - ------- - - str|unicode : string indented by ntabs and nspaces. - - """ - if instr is None: - return - ind = '\t'*ntabs+' '*nspaces - if flatten: - pat = re.compile(r'^\s*', re.MULTILINE) - else: - pat = re.compile(r'^', re.MULTILINE) - outstr = re.sub(pat, ind, instr) - if outstr.endswith(os.linesep+ind): - return outstr[:-len(ind)] - else: - return outstr - - -def list_strings(arg): - """Always return a list of strings, given a string or list of strings - as input. - - Examples - -------- - :: - - In [7]: list_strings('A single string') - Out[7]: ['A single string'] - - In [8]: list_strings(['A single string in a list']) - Out[8]: ['A single string in a list'] - - In [9]: list_strings(['A','list','of','strings']) - Out[9]: ['A', 'list', 'of', 'strings'] - """ - - if isinstance(arg, str): - return [arg] - else: - return arg - - -def marquee(txt='',width=78,mark='*'): - """Return the input string centered in a 'marquee'. 
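# Sketch of the SList helpers documented above, with made-up "ls -l"-style rows.
from IPython.utils.text import SList

rows = SList(["-rw-r--r-- 1 ville None 18 ChangeLog",
              "drwxr-xr-x 6 ville None  0 IPython"])
print(rows.grep("^d"))           # regex match, case-insensitive
print(rows.fields(-1))           # ['ChangeLog', 'IPython']
print(rows.sort(4, nums=True))   # sort by the size column, numerically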
- - Examples - -------- - :: - - In [16]: marquee('A test',40) - Out[16]: '**************** A test ****************' - - In [17]: marquee('A test',40,'-') - Out[17]: '---------------- A test ----------------' - - In [18]: marquee('A test',40,' ') - Out[18]: ' A test ' - - """ - if not txt: - return (mark*width)[:width] - nmark = (width-len(txt)-2)//len(mark)//2 - if nmark < 0: nmark =0 - marks = mark*nmark - return '%s %s %s' % (marks,txt,marks) - - -ini_spaces_re = re.compile(r'^(\s+)') - -def num_ini_spaces(strng): - """Return the number of initial spaces in a string""" - - ini_spaces = ini_spaces_re.match(strng) - if ini_spaces: - return ini_spaces.end() - else: - return 0 - - -def format_screen(strng): - """Format a string for screen printing. - - This removes some latex-type format codes.""" - # Paragraph continue - par_re = re.compile(r'\\$',re.MULTILINE) - strng = par_re.sub('',strng) - return strng - - -def dedent(text): - """Equivalent of textwrap.dedent that ignores unindented first line. - - This means it will still dedent strings like: - '''foo - is a bar - ''' - - For use in wrap_paragraphs. - """ - - if text.startswith('\n'): - # text starts with blank line, don't ignore the first line - return textwrap.dedent(text) - - # split first line - splits = text.split('\n',1) - if len(splits) == 1: - # only one line - return textwrap.dedent(text) - - first, rest = splits - # dedent everything but the first line - rest = textwrap.dedent(rest) - return '\n'.join([first, rest]) - - -def wrap_paragraphs(text, ncols=80): - """Wrap multiple paragraphs to fit a specified width. - - This is equivalent to textwrap.wrap, but with support for multiple - paragraphs, as separated by empty lines. - - Returns - ------- - - list of complete paragraphs, wrapped to fill `ncols` columns. - """ - paragraph_re = re.compile(r'\n(\s*\n)+', re.MULTILINE) - text = dedent(text).strip() - paragraphs = paragraph_re.split(text)[::2] # every other entry is space - out_ps = [] - indent_re = re.compile(r'\n\s+', re.MULTILINE) - for p in paragraphs: - # presume indentation that survives dedent is meaningful formatting, - # so don't fill unless text is flush. - if indent_re.search(p) is None: - # wrap paragraph - p = textwrap.fill(p, ncols) - out_ps.append(p) - return out_ps - - -def strip_email_quotes(text): - """Strip leading email quotation characters ('>'). - - Removes any combination of leading '>' interspersed with whitespace that - appears *identically* in all lines of the input text. - - Parameters - ---------- - text : str - - Examples - -------- - - Simple uses:: - - In [2]: strip_email_quotes('> > text') - Out[2]: 'text' - - In [3]: strip_email_quotes('> > text\\n> > more') - Out[3]: 'text\\nmore' - - Note how only the common prefix that appears in all lines is stripped:: - - In [4]: strip_email_quotes('> > text\\n> > more\\n> more...') - Out[4]: '> text\\n> more\\nmore...' 
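# Sketch of wrap_paragraphs() as defined above: each blank-line-separated
# paragraph is refilled independently to the requested width.
from IPython.utils.text import wrap_paragraphs

doc = ("A first paragraph that is long enough to need refilling "
       "when the target width is small.\n\n"
       "A second, independent paragraph.")
for p in wrap_paragraphs(doc, ncols=40):
    print(p, end="\n\n")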
- - So if any line has no quote marks ('>'), then none are stripped from any - of them :: - - In [5]: strip_email_quotes('> > text\\n> > more\\nlast different') - Out[5]: '> > text\\n> > more\\nlast different' - """ - lines = text.splitlines() - strip_len = 0 - - for characters in zip(*lines): - # Check if all characters in this position are the same - if len(set(characters)) > 1: - break - prefix_char = characters[0] - - if prefix_char in string.whitespace or prefix_char == ">": - strip_len += 1 - else: - break - - text = "\n".join([ln[strip_len:] for ln in lines]) - return text - - -def strip_ansi(source): - """ - Remove ansi escape codes from text. - - Parameters - ---------- - source : str - Source to remove the ansi from - """ - return re.sub(r'\033\[(\d|;)+?m', '', source) - - -class EvalFormatter(Formatter): - """A String Formatter that allows evaluation of simple expressions. - - Note that this version interprets a : as specifying a format string (as per - standard string formatting), so if slicing is required, you must explicitly - create a slice. - - This is to be used in templating cases, such as the parallel batch - script templates, where simple arithmetic on arguments is useful. - - Examples - -------- - :: - - In [1]: f = EvalFormatter() - In [2]: f.format('{n//4}', n=8) - Out[2]: '2' - - In [3]: f.format("{greeting[slice(2,4)]}", greeting="Hello") - Out[3]: 'll' - """ - def get_field(self, name, args, kwargs): - v = eval(name, kwargs) - return v, name - -#XXX: As of Python 3.4, the format string parsing no longer splits on a colon -# inside [], so EvalFormatter can handle slicing. Once we only support 3.4 and -# above, it should be possible to remove FullEvalFormatter. - -class FullEvalFormatter(Formatter): - """A String Formatter that allows evaluation of simple expressions. - - Any time a format key is not found in the kwargs, - it will be tried as an expression in the kwargs namespace. - - Note that this version allows slicing using [1:2], so you cannot specify - a format string. Use :class:`EvalFormatter` to permit format strings. - - Examples - -------- - :: - - In [1]: f = FullEvalFormatter() - In [2]: f.format('{n//4}', n=8) - Out[2]: '2' - - In [3]: f.format('{list(range(5))[2:4]}') - Out[3]: '[2, 3]' - - In [4]: f.format('{3*2}') - Out[4]: '6' - """ - # copied from Formatter._vformat with minor changes to allow eval - # and replace the format_spec code with slicing - def vformat(self, format_string:str, args, kwargs)->str: - result = [] - for literal_text, field_name, format_spec, conversion in \ - self.parse(format_string): - - # output the literal text - if literal_text: - result.append(literal_text) - - # if there's a field, output it - if field_name is not None: - # this is some markup, find the object and do - # the formatting - - if format_spec: - # override format spec, to allow slicing: - field_name = ':'.join([field_name, format_spec]) - - # eval the contents of the field for the object - # to be formatted - obj = eval(field_name, kwargs) - - # do any conversion on the resulting object - obj = self.convert_field(obj, conversion) - - # format the object and append to the result - result.append(self.format_field(obj, '')) - - return ''.join(result) - - -class DollarFormatter(FullEvalFormatter): - """Formatter allowing Itpl style $foo replacement, for names and attribute - access only. Standard {foo} replacement also works, and allows full - evaluation of its arguments. 
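# Contrast of the two formatters above (both eval() their fields, so they are
# only meant for trusted templates); the calls repeat the docstring examples.
from IPython.utils.text import EvalFormatter, FullEvalFormatter

f = EvalFormatter()
print(f.format("{n//4}", n=8))                               # '2'
print(f.format("{greeting[slice(2,4)]}", greeting="Hello"))  # 'll'

g = FullEvalFormatter()
print(g.format("{list(range(5))[2:4]}"))                     # '[2, 3]'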
- - Examples - -------- - :: - - In [1]: f = DollarFormatter() - In [2]: f.format('{n//4}', n=8) - Out[2]: '2' - - In [3]: f.format('23 * 76 is $result', result=23*76) - Out[3]: '23 * 76 is 1748' - - In [4]: f.format('$a or {b}', a=1, b=2) - Out[4]: '1 or 2' - """ - _dollar_pattern_ignore_single_quote = re.compile(r"(.*?)\$(\$?[\w\.]+)(?=([^']*'[^']*')*[^']*$)") - def parse(self, fmt_string): - for literal_txt, field_name, format_spec, conversion \ - in Formatter.parse(self, fmt_string): - - # Find $foo patterns in the literal text. - continue_from = 0 - txt = "" - for m in self._dollar_pattern_ignore_single_quote.finditer(literal_txt): - new_txt, new_field = m.group(1,2) - # $$foo --> $foo - if new_field.startswith("$"): - txt += new_txt + new_field - else: - yield (txt + new_txt, new_field, "", None) - txt = "" - continue_from = m.end() - - # Re-yield the {foo} style pattern - yield (txt + literal_txt[continue_from:], field_name, format_spec, conversion) - - def __repr__(self): - return "<DollarFormatter>" - -#----------------------------------------------------------------------------- -# Utils to columnize a list of string -#----------------------------------------------------------------------------- - -def _col_chunks(l, max_rows, row_first=False): - """Yield successive max_rows-sized column chunks from l.""" - if row_first: - ncols = (len(l) // max_rows) + (len(l) % max_rows > 0) - for i in range(ncols): - yield [l[j] for j in range(i, len(l), ncols)] - else: - for i in range(0, len(l), max_rows): - yield l[i:(i + max_rows)] - - -def _find_optimal(rlist, row_first=False, separator_size=2, displaywidth=80): - """Calculate optimal info to columnize a list of string""" - for max_rows in range(1, len(rlist) + 1): - col_widths = list(map(max, _col_chunks(rlist, max_rows, row_first))) - sumlength = sum(col_widths) - ncols = len(col_widths) - if sumlength + separator_size * (ncols - 1) <= displaywidth: - break - return {'num_columns': ncols, - 'optimal_separator_width': (displaywidth - sumlength) // (ncols - 1) if (ncols - 1) else 0, - 'max_rows': max_rows, - 'column_widths': col_widths - } - - -def _get_or_default(mylist, i, default=None): - """return list item number, or default if don't exist""" - if i >= len(mylist): - return default - else : - return mylist[i] - - -def compute_item_matrix(items, row_first=False, empty=None, *args, **kwargs) : - """Returns a nested list, and info to columnize items - - Parameters - ---------- - - items - list of strings to columize - row_first : (default False) - Whether to compute columns for a row-first matrix instead of - column-first (default). - empty : (default None) - default value to fill list if needed - separator_size : int (default=2) - How much characters will be used as a separation between each columns. - displaywidth : int (default=80) - The width of the area onto which the columns should enter - - Returns - ------- - - strings_matrix - - nested list of string, the outer most list contains as many list as - rows, the innermost lists have each as many element as columns. If the - total number of elements in `items` does not equal the product of - rows*columns, the last element of some lists are filled with `None`. 
- - dict_info - some info to make columnize easier: - - num_columns - number of columns - max_rows - maximum number of rows (final number may be less) - column_widths - list of with of each columns - optimal_separator_width - best separator width between columns - - Examples - -------- - :: - - In [1]: l = ['aaa','b','cc','d','eeeee','f','g','h','i','j','k','l'] - In [2]: list, info = compute_item_matrix(l, displaywidth=12) - In [3]: list - Out[3]: [['aaa', 'f', 'k'], ['b', 'g', 'l'], ['cc', 'h', None], ['d', 'i', None], ['eeeee', 'j', None]] - In [4]: ideal = {'num_columns': 3, 'column_widths': [5, 1, 1], 'optimal_separator_width': 2, 'max_rows': 5} - In [5]: all((info[k] == ideal[k] for k in ideal.keys())) - Out[5]: True - """ - info = _find_optimal(list(map(len, items)), row_first, *args, **kwargs) - nrow, ncol = info['max_rows'], info['num_columns'] - if row_first: - return ([[_get_or_default(items, r * ncol + c, default=empty) for c in range(ncol)] for r in range(nrow)], info) - else: - return ([[_get_or_default(items, c * nrow + r, default=empty) for c in range(ncol)] for r in range(nrow)], info) - - -def columnize(items, row_first=False, separator=' ', displaywidth=80, spread=False): - """ Transform a list of strings into a single string with columns. - - Parameters - ---------- - items : sequence of strings - The strings to process. - - row_first : (default False) - Whether to compute columns for a row-first matrix instead of - column-first (default). - - separator : str, optional [default is two spaces] - The string that separates columns. - - displaywidth : int, optional [default is 80] - Width of the display in number of characters. - - Returns - ------- - The formatted string. - """ - if not items: - return '\n' - matrix, info = compute_item_matrix(items, row_first=row_first, separator_size=len(separator), displaywidth=displaywidth) - if spread: - separator = separator.ljust(int(info['optimal_separator_width'])) - fmatrix = [filter(None, x) for x in matrix] - sjoin = lambda x : separator.join([ y.ljust(w, ' ') for y, w in zip(x, info['column_widths'])]) - return '\n'.join(map(sjoin, fmatrix))+'\n' - - -def get_text_list(list_, last_sep=' and ', sep=", ", wrap_item_with=""): - """ - Return a string with a natural enumeration of items - - >>> get_text_list(['a', 'b', 'c', 'd']) - 'a, b, c and d' - >>> get_text_list(['a', 'b', 'c'], ' or ') - 'a, b or c' - >>> get_text_list(['a', 'b', 'c'], ', ') - 'a, b, c' - >>> get_text_list(['a', 'b'], ' or ') - 'a or b' - >>> get_text_list(['a']) - 'a' - >>> get_text_list([]) - '' - >>> get_text_list(['a', 'b'], wrap_item_with="`") - '`a` and `b`' - >>> get_text_list(['a', 'b', 'c', 'd'], " = ", sep=" + ") - 'a + b + c = d' - """ - if len(list_) == 0: - return '' - if wrap_item_with: - list_ = ['%s%s%s' % (wrap_item_with, item, wrap_item_with) for - item in list_] - if len(list_) == 1: - return list_[0] - return '%s%s%s' % ( - sep.join(i for i in list_[:-1]), - last_sep, list_[-1]) +# encoding: utf-8 +""" +Utilities for working with strings and text. + +Inheritance diagram: + +.. inheritance-diagram:: IPython.utils.text + :parts: 3 +""" + +import os +import re +import string +import sys +import textwrap +from string import Formatter +from pathlib import Path + +from IPython.utils import py3compat + +# datetime.strftime date format for ipython +if sys.platform == 'win32': + date_format = "%B %d, %Y" +else: + date_format = "%B %-d, %Y" + +class LSString(str): + """String derivative with a special access attributes. 
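# End-to-end sketch of columnize(), which wraps compute_item_matrix() above;
# the sample data reuses the docstring example.
from IPython.utils.text import columnize

names = ['aaa', 'b', 'cc', 'd', 'eeeee', 'f', 'g', 'h', 'i', 'j', 'k', 'l']
print(columnize(names, displaywidth=12), end="")
# aaa    f  k
# b      g  l
# cc     h
# d      i
# eeeee  j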
+ + These are normal strings, but with the special attributes: + + .l (or .list) : value as list (split on newlines). + .n (or .nlstr): original value (the string itself). + .s (or .spstr): value as whitespace-separated string. + .p (or .paths): list of path objects (requires path.py package) + + Any values which require transformations are computed only once and + cached. + + Such strings are very useful to efficiently interact with the shell, which + typically only understands whitespace-separated options for commands.""" + + def get_list(self): + try: + return self.__list + except AttributeError: + self.__list = self.split('\n') + return self.__list + + l = list = property(get_list) + + def get_spstr(self): + try: + return self.__spstr + except AttributeError: + self.__spstr = self.replace('\n',' ') + return self.__spstr + + s = spstr = property(get_spstr) + + def get_nlstr(self): + return self + + n = nlstr = property(get_nlstr) + + def get_paths(self): + try: + return self.__paths + except AttributeError: + self.__paths = [Path(p) for p in self.split('\n') if os.path.exists(p)] + return self.__paths + + p = paths = property(get_paths) + +# FIXME: We need to reimplement type specific displayhook and then add this +# back as a custom printer. This should also be moved outside utils into the +# core. + +# def print_lsstring(arg): +# """ Prettier (non-repr-like) and more informative printer for LSString """ +# print "LSString (.p, .n, .l, .s available). Value:" +# print arg +# +# +# print_lsstring = result_display.register(LSString)(print_lsstring) + + +class SList(list): + """List derivative with a special access attributes. + + These are normal lists, but with the special attributes: + + * .l (or .list) : value as list (the list itself). + * .n (or .nlstr): value as a string, joined on newlines. + * .s (or .spstr): value as a string, joined on spaces. + * .p (or .paths): list of path objects (requires path.py package) + + Any values which require transformations are computed only once and + cached.""" + + def get_list(self): + return self + + l = list = property(get_list) + + def get_spstr(self): + try: + return self.__spstr + except AttributeError: + self.__spstr = ' '.join(self) + return self.__spstr + + s = spstr = property(get_spstr) + + def get_nlstr(self): + try: + return self.__nlstr + except AttributeError: + self.__nlstr = '\n'.join(self) + return self.__nlstr + + n = nlstr = property(get_nlstr) + + def get_paths(self): + try: + return self.__paths + except AttributeError: + self.__paths = [Path(p) for p in self if os.path.exists(p)] + return self.__paths + + p = paths = property(get_paths) + + def grep(self, pattern, prune = False, field = None): + """ Return all strings matching 'pattern' (a regex or callable) + + This is case-insensitive. If prune is true, return all items + NOT matching the pattern. + + If field is specified, the match must occur in the specified + whitespace-separated field. 
+ + Examples:: + + a.grep( lambda x: x.startswith('C') ) + a.grep('Cha.*log', prune=1) + a.grep('chm', field=-1) + """ + + def match_target(s): + if field is None: + return s + parts = s.split() + try: + tgt = parts[field] + return tgt + except IndexError: + return "" + + if isinstance(pattern, str): + pred = lambda x : re.search(pattern, x, re.IGNORECASE) + else: + pred = pattern + if not prune: + return SList([el for el in self if pred(match_target(el))]) + else: + return SList([el for el in self if not pred(match_target(el))]) + + def fields(self, *fields): + """ Collect whitespace-separated fields from string list + + Allows quick awk-like usage of string lists. + + Example data (in var a, created by 'a = !ls -l'):: + + -rwxrwxrwx 1 ville None 18 Dec 14 2006 ChangeLog + drwxrwxrwx+ 6 ville None 0 Oct 24 18:05 IPython + + * ``a.fields(0)`` is ``['-rwxrwxrwx', 'drwxrwxrwx+']`` + * ``a.fields(1,0)`` is ``['1 -rwxrwxrwx', '6 drwxrwxrwx+']`` + (note the joining by space). + * ``a.fields(-1)`` is ``['ChangeLog', 'IPython']`` + + IndexErrors are ignored. + + Without args, fields() just split()'s the strings. + """ + if len(fields) == 0: + return [el.split() for el in self] + + res = SList() + for el in [f.split() for f in self]: + lineparts = [] + + for fd in fields: + try: + lineparts.append(el[fd]) + except IndexError: + pass + if lineparts: + res.append(" ".join(lineparts)) + + return res + + def sort(self,field= None, nums = False): + """ sort by specified fields (see fields()) + + Example:: + + a.sort(1, nums = True) + + Sorts a by second field, in numerical order (so that 21 > 3) + + """ + + #decorate, sort, undecorate + if field is not None: + dsu = [[SList([line]).fields(field), line] for line in self] + else: + dsu = [[line, line] for line in self] + if nums: + for i in range(len(dsu)): + numstr = "".join([ch for ch in dsu[i][0] if ch.isdigit()]) + try: + n = int(numstr) + except ValueError: + n = 0 + dsu[i][0] = n + + + dsu.sort() + return SList([t[1] for t in dsu]) + + +# FIXME: We need to reimplement type specific displayhook and then add this +# back as a custom printer. This should also be moved outside utils into the +# core. + +# def print_slist(arg): +# """ Prettier (non-repr-like) and more informative printer for SList """ +# print "SList (.p, .n, .l, .s, .grep(), .fields(), sort() available):" +# if hasattr(arg, 'hideonce') and arg.hideonce: +# arg.hideonce = False +# return +# +# nlprint(arg) # This was a nested list printer, now removed. +# +# print_slist = result_display.register(SList)(print_slist) + + +def indent(instr,nspaces=4, ntabs=0, flatten=False): + """Indent a string a given number of spaces or tabstops. + + indent(str,nspaces=4,ntabs=0) -> indent str by ntabs+nspaces. + + Parameters + ---------- + + instr : basestring + The string to be indented. + nspaces : int (default: 4) + The number of spaces to be indented. + ntabs : int (default: 0) + The number of tabs to be indented. + flatten : bool (default: False) + Whether to scrub existing indentation. If True, all lines will be + aligned to the same indentation. If False, existing indentation will + be strictly increased. + + Returns + ------- + + str|unicode : string indented by ntabs and nspaces. 
+ + """ + if instr is None: + return + ind = '\t'*ntabs+' '*nspaces + if flatten: + pat = re.compile(r'^\s*', re.MULTILINE) + else: + pat = re.compile(r'^', re.MULTILINE) + outstr = re.sub(pat, ind, instr) + if outstr.endswith(os.linesep+ind): + return outstr[:-len(ind)] + else: + return outstr + + +def list_strings(arg): + """Always return a list of strings, given a string or list of strings + as input. + + Examples + -------- + :: + + In [7]: list_strings('A single string') + Out[7]: ['A single string'] + + In [8]: list_strings(['A single string in a list']) + Out[8]: ['A single string in a list'] + + In [9]: list_strings(['A','list','of','strings']) + Out[9]: ['A', 'list', 'of', 'strings'] + """ + + if isinstance(arg, str): + return [arg] + else: + return arg + + +def marquee(txt='',width=78,mark='*'): + """Return the input string centered in a 'marquee'. + + Examples + -------- + :: + + In [16]: marquee('A test',40) + Out[16]: '**************** A test ****************' + + In [17]: marquee('A test',40,'-') + Out[17]: '---------------- A test ----------------' + + In [18]: marquee('A test',40,' ') + Out[18]: ' A test ' + + """ + if not txt: + return (mark*width)[:width] + nmark = (width-len(txt)-2)//len(mark)//2 + if nmark < 0: nmark =0 + marks = mark*nmark + return '%s %s %s' % (marks,txt,marks) + + +ini_spaces_re = re.compile(r'^(\s+)') + +def num_ini_spaces(strng): + """Return the number of initial spaces in a string""" + + ini_spaces = ini_spaces_re.match(strng) + if ini_spaces: + return ini_spaces.end() + else: + return 0 + + +def format_screen(strng): + """Format a string for screen printing. + + This removes some latex-type format codes.""" + # Paragraph continue + par_re = re.compile(r'\\$',re.MULTILINE) + strng = par_re.sub('',strng) + return strng + + +def dedent(text): + """Equivalent of textwrap.dedent that ignores unindented first line. + + This means it will still dedent strings like: + '''foo + is a bar + ''' + + For use in wrap_paragraphs. + """ + + if text.startswith('\n'): + # text starts with blank line, don't ignore the first line + return textwrap.dedent(text) + + # split first line + splits = text.split('\n',1) + if len(splits) == 1: + # only one line + return textwrap.dedent(text) + + first, rest = splits + # dedent everything but the first line + rest = textwrap.dedent(rest) + return '\n'.join([first, rest]) + + +def wrap_paragraphs(text, ncols=80): + """Wrap multiple paragraphs to fit a specified width. + + This is equivalent to textwrap.wrap, but with support for multiple + paragraphs, as separated by empty lines. + + Returns + ------- + + list of complete paragraphs, wrapped to fill `ncols` columns. + """ + paragraph_re = re.compile(r'\n(\s*\n)+', re.MULTILINE) + text = dedent(text).strip() + paragraphs = paragraph_re.split(text)[::2] # every other entry is space + out_ps = [] + indent_re = re.compile(r'\n\s+', re.MULTILINE) + for p in paragraphs: + # presume indentation that survives dedent is meaningful formatting, + # so don't fill unless text is flush. + if indent_re.search(p) is None: + # wrap paragraph + p = textwrap.fill(p, ncols) + out_ps.append(p) + return out_ps + + +def strip_email_quotes(text): + """Strip leading email quotation characters ('>'). + + Removes any combination of leading '>' interspersed with whitespace that + appears *identically* in all lines of the input text. 
+ + Parameters + ---------- + text : str + + Examples + -------- + + Simple uses:: + + In [2]: strip_email_quotes('> > text') + Out[2]: 'text' + + In [3]: strip_email_quotes('> > text\\n> > more') + Out[3]: 'text\\nmore' + + Note how only the common prefix that appears in all lines is stripped:: + + In [4]: strip_email_quotes('> > text\\n> > more\\n> more...') + Out[4]: '> text\\n> more\\nmore...' + + So if any line has no quote marks ('>'), then none are stripped from any + of them :: + + In [5]: strip_email_quotes('> > text\\n> > more\\nlast different') + Out[5]: '> > text\\n> > more\\nlast different' + """ + lines = text.splitlines() + strip_len = 0 + + for characters in zip(*lines): + # Check if all characters in this position are the same + if len(set(characters)) > 1: + break + prefix_char = characters[0] + + if prefix_char in string.whitespace or prefix_char == ">": + strip_len += 1 + else: + break + + text = "\n".join([ln[strip_len:] for ln in lines]) + return text + + +def strip_ansi(source): + """ + Remove ansi escape codes from text. + + Parameters + ---------- + source : str + Source to remove the ansi from + """ + return re.sub(r'\033\[(\d|;)+?m', '', source) + + +class EvalFormatter(Formatter): + """A String Formatter that allows evaluation of simple expressions. + + Note that this version interprets a : as specifying a format string (as per + standard string formatting), so if slicing is required, you must explicitly + create a slice. + + This is to be used in templating cases, such as the parallel batch + script templates, where simple arithmetic on arguments is useful. + + Examples + -------- + :: + + In [1]: f = EvalFormatter() + In [2]: f.format('{n//4}', n=8) + Out[2]: '2' + + In [3]: f.format("{greeting[slice(2,4)]}", greeting="Hello") + Out[3]: 'll' + """ + def get_field(self, name, args, kwargs): + v = eval(name, kwargs) + return v, name + +#XXX: As of Python 3.4, the format string parsing no longer splits on a colon +# inside [], so EvalFormatter can handle slicing. Once we only support 3.4 and +# above, it should be possible to remove FullEvalFormatter. + +class FullEvalFormatter(Formatter): + """A String Formatter that allows evaluation of simple expressions. + + Any time a format key is not found in the kwargs, + it will be tried as an expression in the kwargs namespace. + + Note that this version allows slicing using [1:2], so you cannot specify + a format string. Use :class:`EvalFormatter` to permit format strings. 
+ + Examples + -------- + :: + + In [1]: f = FullEvalFormatter() + In [2]: f.format('{n//4}', n=8) + Out[2]: '2' + + In [3]: f.format('{list(range(5))[2:4]}') + Out[3]: '[2, 3]' + + In [4]: f.format('{3*2}') + Out[4]: '6' + """ + # copied from Formatter._vformat with minor changes to allow eval + # and replace the format_spec code with slicing + def vformat(self, format_string:str, args, kwargs)->str: + result = [] + for literal_text, field_name, format_spec, conversion in \ + self.parse(format_string): + + # output the literal text + if literal_text: + result.append(literal_text) + + # if there's a field, output it + if field_name is not None: + # this is some markup, find the object and do + # the formatting + + if format_spec: + # override format spec, to allow slicing: + field_name = ':'.join([field_name, format_spec]) + + # eval the contents of the field for the object + # to be formatted + obj = eval(field_name, kwargs) + + # do any conversion on the resulting object + obj = self.convert_field(obj, conversion) + + # format the object and append to the result + result.append(self.format_field(obj, '')) + + return ''.join(result) + + +class DollarFormatter(FullEvalFormatter): + """Formatter allowing Itpl style $foo replacement, for names and attribute + access only. Standard {foo} replacement also works, and allows full + evaluation of its arguments. + + Examples + -------- + :: + + In [1]: f = DollarFormatter() + In [2]: f.format('{n//4}', n=8) + Out[2]: '2' + + In [3]: f.format('23 * 76 is $result', result=23*76) + Out[3]: '23 * 76 is 1748' + + In [4]: f.format('$a or {b}', a=1, b=2) + Out[4]: '1 or 2' + """ + _dollar_pattern_ignore_single_quote = re.compile(r"(.*?)\$(\$?[\w\.]+)(?=([^']*'[^']*')*[^']*$)") + def parse(self, fmt_string): + for literal_txt, field_name, format_spec, conversion \ + in Formatter.parse(self, fmt_string): + + # Find $foo patterns in the literal text. 
+ continue_from = 0 + txt = "" + for m in self._dollar_pattern_ignore_single_quote.finditer(literal_txt): + new_txt, new_field = m.group(1,2) + # $$foo --> $foo + if new_field.startswith("$"): + txt += new_txt + new_field + else: + yield (txt + new_txt, new_field, "", None) + txt = "" + continue_from = m.end() + + # Re-yield the {foo} style pattern + yield (txt + literal_txt[continue_from:], field_name, format_spec, conversion) + + def __repr__(self): + return "<DollarFormatter>" + +#----------------------------------------------------------------------------- +# Utils to columnize a list of string +#----------------------------------------------------------------------------- + +def _col_chunks(l, max_rows, row_first=False): + """Yield successive max_rows-sized column chunks from l.""" + if row_first: + ncols = (len(l) // max_rows) + (len(l) % max_rows > 0) + for i in range(ncols): + yield [l[j] for j in range(i, len(l), ncols)] + else: + for i in range(0, len(l), max_rows): + yield l[i:(i + max_rows)] + + +def _find_optimal(rlist, row_first=False, separator_size=2, displaywidth=80): + """Calculate optimal info to columnize a list of string""" + for max_rows in range(1, len(rlist) + 1): + col_widths = list(map(max, _col_chunks(rlist, max_rows, row_first))) + sumlength = sum(col_widths) + ncols = len(col_widths) + if sumlength + separator_size * (ncols - 1) <= displaywidth: + break + return {'num_columns': ncols, + 'optimal_separator_width': (displaywidth - sumlength) // (ncols - 1) if (ncols - 1) else 0, + 'max_rows': max_rows, + 'column_widths': col_widths + } + + +def _get_or_default(mylist, i, default=None): + """return list item number, or default if don't exist""" + if i >= len(mylist): + return default + else : + return mylist[i] + + +def compute_item_matrix(items, row_first=False, empty=None, *args, **kwargs) : + """Returns a nested list, and info to columnize items + + Parameters + ---------- + + items + list of strings to columize + row_first : (default False) + Whether to compute columns for a row-first matrix instead of + column-first (default). + empty : (default None) + default value to fill list if needed + separator_size : int (default=2) + How much characters will be used as a separation between each columns. + displaywidth : int (default=80) + The width of the area onto which the columns should enter + + Returns + ------- + + strings_matrix + + nested list of string, the outer most list contains as many list as + rows, the innermost lists have each as many element as columns. If the + total number of elements in `items` does not equal the product of + rows*columns, the last element of some lists are filled with `None`. 
+ + dict_info + some info to make columnize easier: + + num_columns + number of columns + max_rows + maximum number of rows (final number may be less) + column_widths + list of with of each columns + optimal_separator_width + best separator width between columns + + Examples + -------- + :: + + In [1]: l = ['aaa','b','cc','d','eeeee','f','g','h','i','j','k','l'] + In [2]: list, info = compute_item_matrix(l, displaywidth=12) + In [3]: list + Out[3]: [['aaa', 'f', 'k'], ['b', 'g', 'l'], ['cc', 'h', None], ['d', 'i', None], ['eeeee', 'j', None]] + In [4]: ideal = {'num_columns': 3, 'column_widths': [5, 1, 1], 'optimal_separator_width': 2, 'max_rows': 5} + In [5]: all((info[k] == ideal[k] for k in ideal.keys())) + Out[5]: True + """ + info = _find_optimal(list(map(len, items)), row_first, *args, **kwargs) + nrow, ncol = info['max_rows'], info['num_columns'] + if row_first: + return ([[_get_or_default(items, r * ncol + c, default=empty) for c in range(ncol)] for r in range(nrow)], info) + else: + return ([[_get_or_default(items, c * nrow + r, default=empty) for c in range(ncol)] for r in range(nrow)], info) + + +def columnize(items, row_first=False, separator=' ', displaywidth=80, spread=False): + """ Transform a list of strings into a single string with columns. + + Parameters + ---------- + items : sequence of strings + The strings to process. + + row_first : (default False) + Whether to compute columns for a row-first matrix instead of + column-first (default). + + separator : str, optional [default is two spaces] + The string that separates columns. + + displaywidth : int, optional [default is 80] + Width of the display in number of characters. + + Returns + ------- + The formatted string. + """ + if not items: + return '\n' + matrix, info = compute_item_matrix(items, row_first=row_first, separator_size=len(separator), displaywidth=displaywidth) + if spread: + separator = separator.ljust(int(info['optimal_separator_width'])) + fmatrix = [filter(None, x) for x in matrix] + sjoin = lambda x : separator.join([ y.ljust(w, ' ') for y, w in zip(x, info['column_widths'])]) + return '\n'.join(map(sjoin, fmatrix))+'\n' + + +def get_text_list(list_, last_sep=' and ', sep=", ", wrap_item_with=""): + """ + Return a string with a natural enumeration of items + + >>> get_text_list(['a', 'b', 'c', 'd']) + 'a, b, c and d' + >>> get_text_list(['a', 'b', 'c'], ' or ') + 'a, b or c' + >>> get_text_list(['a', 'b', 'c'], ', ') + 'a, b, c' + >>> get_text_list(['a', 'b'], ' or ') + 'a or b' + >>> get_text_list(['a']) + 'a' + >>> get_text_list([]) + '' + >>> get_text_list(['a', 'b'], wrap_item_with="`") + '`a` and `b`' + >>> get_text_list(['a', 'b', 'c', 'd'], " = ", sep=" + ") + 'a + b + c = d' + """ + if len(list_) == 0: + return '' + if wrap_item_with: + list_ = ['%s%s%s' % (wrap_item_with, item, wrap_item_with) for + item in list_] + if len(list_) == 1: + return list_[0] + return '%s%s%s' % ( + sep.join(i for i in list_[:-1]), + last_sep, list_[-1]) diff --git a/contrib/python/ipython/py3/IPython/utils/timing.py b/contrib/python/ipython/py3/IPython/utils/timing.py index 187cfb36529..92f6883c4af 100644 --- a/contrib/python/ipython/py3/IPython/utils/timing.py +++ b/contrib/python/ipython/py3/IPython/utils/timing.py @@ -1,122 +1,122 @@ -# encoding: utf-8 -""" -Utilities for timing code execution. -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. 
The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import time - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - -# If possible (Unix), use the resource module instead of time.clock() -try: - import resource -except ImportError: - resource = None - -# Some implementations (like jyputerlite) don't have getrusage -if resource is not None and hasattr(resource, "getrusage"): - def clocku(): - """clocku() -> floating point number - - Return the *USER* CPU time in seconds since the start of the process. - This is done via a call to resource.getrusage, so it avoids the - wraparound problems in time.clock().""" - - return resource.getrusage(resource.RUSAGE_SELF)[0] - - def clocks(): - """clocks() -> floating point number - - Return the *SYSTEM* CPU time in seconds since the start of the process. - This is done via a call to resource.getrusage, so it avoids the - wraparound problems in time.clock().""" - - return resource.getrusage(resource.RUSAGE_SELF)[1] - - def clock(): - """clock() -> floating point number - - Return the *TOTAL USER+SYSTEM* CPU time in seconds since the start of - the process. This is done via a call to resource.getrusage, so it - avoids the wraparound problems in time.clock().""" - - u,s = resource.getrusage(resource.RUSAGE_SELF)[:2] - return u+s - - def clock2(): - """clock2() -> (t_user,t_system) - - Similar to clock(), but return a tuple of user/system times.""" - return resource.getrusage(resource.RUSAGE_SELF)[:2] - -else: - # There is no distinction of user/system time under windows, so we just use - # time.perff_counter() for everything... - clocku = clocks = clock = time.perf_counter - def clock2(): - """Under windows, system CPU time can't be measured. - - This just returns perf_counter() and zero.""" - return time.perf_counter(),0.0 - - -def timings_out(reps,func,*args,**kw): - """timings_out(reps,func,*args,**kw) -> (t_total,t_per_call,output) - - Execute a function reps times, return a tuple with the elapsed total - CPU time in seconds, the time per call and the function's output. - - Under Unix, the return value is the sum of user+system time consumed by - the process, computed via the resource module. This prevents problems - related to the wraparound effect which the time.clock() function has. - - Under Windows the return value is in wall clock seconds. See the - documentation for the time module for more details.""" - - reps = int(reps) - assert reps >=1, 'reps must be >= 1' - if reps==1: - start = clock() - out = func(*args,**kw) - tot_time = clock()-start - else: - rng = range(reps-1) # the last time is executed separately to store output - start = clock() - for dummy in rng: func(*args,**kw) - out = func(*args,**kw) # one last time - tot_time = clock()-start - av_time = tot_time / reps - return tot_time,av_time,out - - -def timings(reps,func,*args,**kw): - """timings(reps,func,*args,**kw) -> (t_total,t_per_call) - - Execute a function reps times, return a tuple with the elapsed total CPU - time in seconds and the time per call. 
These are just the first two values - in timings_out().""" - - return timings_out(reps,func,*args,**kw)[0:2] - - -def timing(func,*args,**kw): - """timing(func,*args,**kw) -> t_total - - Execute a function once, return the elapsed total CPU time in - seconds. This is just the first value in timings_out().""" - - return timings_out(1,func,*args,**kw)[0] - +# encoding: utf-8 +""" +Utilities for timing code execution. +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import time + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +# If possible (Unix), use the resource module instead of time.clock() +try: + import resource +except ImportError: + resource = None + +# Some implementations (like jyputerlite) don't have getrusage +if resource is not None and hasattr(resource, "getrusage"): + def clocku(): + """clocku() -> floating point number + + Return the *USER* CPU time in seconds since the start of the process. + This is done via a call to resource.getrusage, so it avoids the + wraparound problems in time.clock().""" + + return resource.getrusage(resource.RUSAGE_SELF)[0] + + def clocks(): + """clocks() -> floating point number + + Return the *SYSTEM* CPU time in seconds since the start of the process. + This is done via a call to resource.getrusage, so it avoids the + wraparound problems in time.clock().""" + + return resource.getrusage(resource.RUSAGE_SELF)[1] + + def clock(): + """clock() -> floating point number + + Return the *TOTAL USER+SYSTEM* CPU time in seconds since the start of + the process. This is done via a call to resource.getrusage, so it + avoids the wraparound problems in time.clock().""" + + u,s = resource.getrusage(resource.RUSAGE_SELF)[:2] + return u+s + + def clock2(): + """clock2() -> (t_user,t_system) + + Similar to clock(), but return a tuple of user/system times.""" + return resource.getrusage(resource.RUSAGE_SELF)[:2] + +else: + # There is no distinction of user/system time under windows, so we just use + # time.perff_counter() for everything... + clocku = clocks = clock = time.perf_counter + def clock2(): + """Under windows, system CPU time can't be measured. + + This just returns perf_counter() and zero.""" + return time.perf_counter(),0.0 + + +def timings_out(reps,func,*args,**kw): + """timings_out(reps,func,*args,**kw) -> (t_total,t_per_call,output) + + Execute a function reps times, return a tuple with the elapsed total + CPU time in seconds, the time per call and the function's output. + + Under Unix, the return value is the sum of user+system time consumed by + the process, computed via the resource module. This prevents problems + related to the wraparound effect which the time.clock() function has. + + Under Windows the return value is in wall clock seconds. 
See the + documentation for the time module for more details.""" + + reps = int(reps) + assert reps >=1, 'reps must be >= 1' + if reps==1: + start = clock() + out = func(*args,**kw) + tot_time = clock()-start + else: + rng = range(reps-1) # the last time is executed separately to store output + start = clock() + for dummy in rng: func(*args,**kw) + out = func(*args,**kw) # one last time + tot_time = clock()-start + av_time = tot_time / reps + return tot_time,av_time,out + + +def timings(reps,func,*args,**kw): + """timings(reps,func,*args,**kw) -> (t_total,t_per_call) + + Execute a function reps times, return a tuple with the elapsed total CPU + time in seconds and the time per call. These are just the first two values + in timings_out().""" + + return timings_out(reps,func,*args,**kw)[0:2] + + +def timing(func,*args,**kw): + """timing(func,*args,**kw) -> t_total + + Execute a function once, return the elapsed total CPU time in + seconds. This is just the first value in timings_out().""" + + return timings_out(1,func,*args,**kw)[0] + diff --git a/contrib/python/ipython/py3/IPython/utils/tokenutil.py b/contrib/python/ipython/py3/IPython/utils/tokenutil.py index bccf969541a..28f8b6d5261 100644 --- a/contrib/python/ipython/py3/IPython/utils/tokenutil.py +++ b/contrib/python/ipython/py3/IPython/utils/tokenutil.py @@ -1,130 +1,130 @@ -"""Token-related utilities""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -from collections import namedtuple -from io import StringIO -from keyword import iskeyword - -import tokenize - - -Token = namedtuple('Token', ['token', 'text', 'start', 'end', 'line']) - -def generate_tokens(readline): - """wrap generate_tokens to catch EOF errors""" - try: - for token in tokenize.generate_tokens(readline): - yield token - except tokenize.TokenError: - # catch EOF error - return - -def line_at_cursor(cell, cursor_pos=0): - """Return the line in a cell at a given cursor position - - Used for calling line-based APIs that don't support multi-line input, yet. - - Parameters - ---------- - - cell: str - multiline block of text - cursor_pos: integer - the cursor position - - Returns - ------- - - (line, offset): (string, integer) - The line with the current cursor, and the character offset of the start of the line. - """ - offset = 0 - lines = cell.splitlines(True) - for line in lines: - next_offset = offset + len(line) - if not line.endswith('\n'): - # If the last line doesn't have a trailing newline, treat it as if - # it does so that the cursor at the end of the line still counts - # as being on that line. - next_offset += 1 - if next_offset > cursor_pos: - break - offset = next_offset - else: - line = "" - return (line, offset) - -def token_at_cursor(cell, cursor_pos=0): - """Get the token at a given cursor - - Used for introspection. - - Function calls are prioritized, so the token for the callable will be returned - if the cursor is anywhere inside the call. 
- - Parameters - ---------- - - cell : unicode - A block of Python code - cursor_pos : int - The location of the cursor in the block where the token should be found - """ - names = [] - tokens = [] - call_names = [] - - offsets = {1: 0} # lines start at 1 - for tup in generate_tokens(StringIO(cell).readline): - - tok = Token(*tup) - - # token, text, start, end, line = tup - start_line, start_col = tok.start - end_line, end_col = tok.end - if end_line + 1 not in offsets: - # keep track of offsets for each line - lines = tok.line.splitlines(True) - for lineno, line in enumerate(lines, start_line + 1): - if lineno not in offsets: - offsets[lineno] = offsets[lineno-1] + len(line) - - offset = offsets[start_line] - # allow '|foo' to find 'foo' at the beginning of a line - boundary = cursor_pos + 1 if start_col == 0 else cursor_pos - if offset + start_col >= boundary: - # current token starts after the cursor, - # don't consume it - break - - if tok.token == tokenize.NAME and not iskeyword(tok.text): - if names and tokens and tokens[-1].token == tokenize.OP and tokens[-1].text == '.': - names[-1] = "%s.%s" % (names[-1], tok.text) - else: - names.append(tok.text) - elif tok.token == tokenize.OP: - if tok.text == '=' and names: - # don't inspect the lhs of an assignment - names.pop(-1) - if tok.text == '(' and names: - # if we are inside a function call, inspect the function - call_names.append(names[-1]) - elif tok.text == ')' and call_names: - call_names.pop(-1) - - tokens.append(tok) - - if offsets[end_line] + end_col > cursor_pos: - # we found the cursor, stop reading - break - - if call_names: - return call_names[-1] - elif names: - return names[-1] - else: - return '' - - +"""Token-related utilities""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +from collections import namedtuple +from io import StringIO +from keyword import iskeyword + +import tokenize + + +Token = namedtuple('Token', ['token', 'text', 'start', 'end', 'line']) + +def generate_tokens(readline): + """wrap generate_tokens to catch EOF errors""" + try: + for token in tokenize.generate_tokens(readline): + yield token + except tokenize.TokenError: + # catch EOF error + return + +def line_at_cursor(cell, cursor_pos=0): + """Return the line in a cell at a given cursor position + + Used for calling line-based APIs that don't support multi-line input, yet. + + Parameters + ---------- + + cell: str + multiline block of text + cursor_pos: integer + the cursor position + + Returns + ------- + + (line, offset): (string, integer) + The line with the current cursor, and the character offset of the start of the line. + """ + offset = 0 + lines = cell.splitlines(True) + for line in lines: + next_offset = offset + len(line) + if not line.endswith('\n'): + # If the last line doesn't have a trailing newline, treat it as if + # it does so that the cursor at the end of the line still counts + # as being on that line. + next_offset += 1 + if next_offset > cursor_pos: + break + offset = next_offset + else: + line = "" + return (line, offset) + +def token_at_cursor(cell, cursor_pos=0): + """Get the token at a given cursor + + Used for introspection. + + Function calls are prioritized, so the token for the callable will be returned + if the cursor is anywhere inside the call. 
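A minimal usage sketch for the prioritization behaviour described above (illustrative inputs; assumes the module is importable as ``IPython.utils.tokenutil``, per the path in this diff)::

    >>> from IPython.utils.tokenutil import token_at_cursor
    >>> token_at_cursor("func(a, b.attr)", cursor_pos=8)  # cursor on 'b', inside the call
    'func'
    >>> token_at_cursor("a.real", cursor_pos=3)           # dotted names are joined
    'a.real'
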
+ + Parameters + ---------- + + cell : unicode + A block of Python code + cursor_pos : int + The location of the cursor in the block where the token should be found + """ + names = [] + tokens = [] + call_names = [] + + offsets = {1: 0} # lines start at 1 + for tup in generate_tokens(StringIO(cell).readline): + + tok = Token(*tup) + + # token, text, start, end, line = tup + start_line, start_col = tok.start + end_line, end_col = tok.end + if end_line + 1 not in offsets: + # keep track of offsets for each line + lines = tok.line.splitlines(True) + for lineno, line in enumerate(lines, start_line + 1): + if lineno not in offsets: + offsets[lineno] = offsets[lineno-1] + len(line) + + offset = offsets[start_line] + # allow '|foo' to find 'foo' at the beginning of a line + boundary = cursor_pos + 1 if start_col == 0 else cursor_pos + if offset + start_col >= boundary: + # current token starts after the cursor, + # don't consume it + break + + if tok.token == tokenize.NAME and not iskeyword(tok.text): + if names and tokens and tokens[-1].token == tokenize.OP and tokens[-1].text == '.': + names[-1] = "%s.%s" % (names[-1], tok.text) + else: + names.append(tok.text) + elif tok.token == tokenize.OP: + if tok.text == '=' and names: + # don't inspect the lhs of an assignment + names.pop(-1) + if tok.text == '(' and names: + # if we are inside a function call, inspect the function + call_names.append(names[-1]) + elif tok.text == ')' and call_names: + call_names.pop(-1) + + tokens.append(tok) + + if offsets[end_line] + end_col > cursor_pos: + # we found the cursor, stop reading + break + + if call_names: + return call_names[-1] + elif names: + return names[-1] + else: + return '' + + diff --git a/contrib/python/ipython/py3/IPython/utils/traitlets.py b/contrib/python/ipython/py3/IPython/utils/traitlets.py index de35a84b767..2f979fa7271 100644 --- a/contrib/python/ipython/py3/IPython/utils/traitlets.py +++ b/contrib/python/ipython/py3/IPython/utils/traitlets.py @@ -1,6 +1,6 @@ - -from warnings import warn - -warn("IPython.utils.traitlets has moved to a top-level traitlets package.", stacklevel=2) - -from traitlets import * + +from warnings import warn + +warn("IPython.utils.traitlets has moved to a top-level traitlets package.", stacklevel=2) + +from traitlets import * diff --git a/contrib/python/ipython/py3/IPython/utils/tz.py b/contrib/python/ipython/py3/IPython/utils/tz.py index 14172b2f4a7..b315d532d12 100644 --- a/contrib/python/ipython/py3/IPython/utils/tz.py +++ b/contrib/python/ipython/py3/IPython/utils/tz.py @@ -1,46 +1,46 @@ -# encoding: utf-8 -""" -Timezone utilities - -Just UTC-awareness right now -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2013 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. 
-#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -from datetime import tzinfo, timedelta, datetime - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- -# constant for zero offset -ZERO = timedelta(0) - -class tzUTC(tzinfo): - """tzinfo object for UTC (zero offset)""" - - def utcoffset(self, d): - return ZERO - - def dst(self, d): - return ZERO - -UTC = tzUTC() - -def utc_aware(unaware): - """decorator for adding UTC tzinfo to datetime's utcfoo methods""" - def utc_method(*args, **kwargs): - dt = unaware(*args, **kwargs) - return dt.replace(tzinfo=UTC) - return utc_method - -utcfromtimestamp = utc_aware(datetime.utcfromtimestamp) -utcnow = utc_aware(datetime.utcnow) +# encoding: utf-8 +""" +Timezone utilities + +Just UTC-awareness right now +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2013 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +from datetime import tzinfo, timedelta, datetime + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- +# constant for zero offset +ZERO = timedelta(0) + +class tzUTC(tzinfo): + """tzinfo object for UTC (zero offset)""" + + def utcoffset(self, d): + return ZERO + + def dst(self, d): + return ZERO + +UTC = tzUTC() + +def utc_aware(unaware): + """decorator for adding UTC tzinfo to datetime's utcfoo methods""" + def utc_method(*args, **kwargs): + dt = unaware(*args, **kwargs) + return dt.replace(tzinfo=UTC) + return utc_method + +utcfromtimestamp = utc_aware(datetime.utcfromtimestamp) +utcnow = utc_aware(datetime.utcnow) diff --git a/contrib/python/ipython/py3/IPython/utils/ulinecache.py b/contrib/python/ipython/py3/IPython/utils/ulinecache.py index e0e2abdef36..0b4ede08e6f 100644 --- a/contrib/python/ipython/py3/IPython/utils/ulinecache.py +++ b/contrib/python/ipython/py3/IPython/utils/ulinecache.py @@ -1,21 +1,21 @@ -""" -This module has been deprecated since IPython 6.0. - -Wrapper around linecache which decodes files to unicode according to PEP 263. -""" -import functools -import linecache -from warnings import warn - -getline = linecache.getline - -# getlines has to be looked up at runtime, because doctests monkeypatch it. -@functools.wraps(linecache.getlines) -def getlines(filename, module_globals=None): - """ - Deprecated since IPython 6.0 - """ - warn(("`IPython.utils.ulinecache.getlines` is deprecated since" - " IPython 6.0 and will be removed in future versions."), - DeprecationWarning, stacklevel=2) - return linecache.getlines(filename, module_globals=module_globals) +""" +This module has been deprecated since IPython 6.0. + +Wrapper around linecache which decodes files to unicode according to PEP 263. 
+""" +import functools +import linecache +from warnings import warn + +getline = linecache.getline + +# getlines has to be looked up at runtime, because doctests monkeypatch it. +@functools.wraps(linecache.getlines) +def getlines(filename, module_globals=None): + """ + Deprecated since IPython 6.0 + """ + warn(("`IPython.utils.ulinecache.getlines` is deprecated since" + " IPython 6.0 and will be removed in future versions."), + DeprecationWarning, stacklevel=2) + return linecache.getlines(filename, module_globals=module_globals) diff --git a/contrib/python/ipython/py3/IPython/utils/version.py b/contrib/python/ipython/py3/IPython/utils/version.py index 3d1018f7bd4..1de0047e6b4 100644 --- a/contrib/python/ipython/py3/IPython/utils/version.py +++ b/contrib/python/ipython/py3/IPython/utils/version.py @@ -1,36 +1,36 @@ -# encoding: utf-8 -""" -Utilities for version comparison - -It is a bit ridiculous that we need these. -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2013 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -from distutils.version import LooseVersion - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - -def check_version(v, check): - """check version string v >= check - - If dev/prerelease tags result in TypeError for string-number comparison, - it is assumed that the dependency is satisfied. - Users on dev branches are responsible for keeping their own packages up to date. - """ - try: - return LooseVersion(v) >= LooseVersion(check) - except TypeError: - return True - +# encoding: utf-8 +""" +Utilities for version comparison + +It is a bit ridiculous that we need these. +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2013 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +from distutils.version import LooseVersion + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def check_version(v, check): + """check version string v >= check + + If dev/prerelease tags result in TypeError for string-number comparison, + it is assumed that the dependency is satisfied. + Users on dev branches are responsible for keeping their own packages up to date. 
+ """ + try: + return LooseVersion(v) >= LooseVersion(check) + except TypeError: + return True + diff --git a/contrib/python/ipython/py3/IPython/utils/wildcard.py b/contrib/python/ipython/py3/IPython/utils/wildcard.py index 1df2540dd84..cbef8c5175b 100644 --- a/contrib/python/ipython/py3/IPython/utils/wildcard.py +++ b/contrib/python/ipython/py3/IPython/utils/wildcard.py @@ -1,111 +1,111 @@ -# -*- coding: utf-8 -*- -"""Support for wildcard pattern matching in object inspection. - -Authors -------- -- Jörgen Stenarson <jorgen.stenarson@bostream.nu> -- Thomas Kluyver -""" - -#***************************************************************************** -# Copyright (C) 2005 Jörgen Stenarson <jorgen.stenarson@bostream.nu> -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#***************************************************************************** - -import re -import types - -from IPython.utils.dir2 import dir2 - -def create_typestr2type_dicts(dont_include_in_type2typestr=["lambda"]): - """Return dictionaries mapping lower case typename (e.g. 'tuple') to type - objects from the types package, and vice versa.""" - typenamelist = [tname for tname in dir(types) if tname.endswith("Type")] - typestr2type, type2typestr = {}, {} - - for tname in typenamelist: - name = tname[:-4].lower() # Cut 'Type' off the end of the name - obj = getattr(types, tname) - typestr2type[name] = obj - if name not in dont_include_in_type2typestr: - type2typestr[obj] = name - return typestr2type, type2typestr - -typestr2type, type2typestr = create_typestr2type_dicts() - -def is_type(obj, typestr_or_type): - """is_type(obj, typestr_or_type) verifies if obj is of a certain type. It - can take strings or actual python types for the second argument, i.e. - 'tuple'<->TupleType. 'all' matches all types. - - TODO: Should be extended for choosing more than one type.""" - if typestr_or_type == "all": - return True - if type(typestr_or_type) == type: - test_type = typestr_or_type - else: - test_type = typestr2type.get(typestr_or_type, False) - if test_type: - return isinstance(obj, test_type) - return False - -def show_hidden(str, show_all=False): - """Return true for strings starting with single _ if show_all is true.""" - return show_all or str.startswith("__") or not str.startswith("_") - -def dict_dir(obj): - """Produce a dictionary of an object's attributes. Builds on dir2 by - checking that a getattr() call actually succeeds.""" - ns = {} - for key in dir2(obj): - # This seemingly unnecessary try/except is actually needed - # because there is code out there with metaclasses that - # create 'write only' attributes, where a getattr() call - # will fail even if the attribute appears listed in the - # object's dictionary. Properties can actually do the same - # thing. In particular, Traits use this pattern - try: - ns[key] = getattr(obj, key) - except AttributeError: - pass - return ns - -def filter_ns(ns, name_pattern="*", type_pattern="all", ignore_case=True, - show_all=True): - """Filter a namespace dictionary by name pattern and item type.""" - pattern = name_pattern.replace("*",".*").replace("?",".") - if ignore_case: - reg = re.compile(pattern+"$", re.I) - else: - reg = re.compile(pattern+"$") - - # Check each one matches regex; shouldn't be hidden; of correct type. 
- return dict((key,obj) for key, obj in ns.items() if reg.match(key) \ - and show_hidden(key, show_all) \ - and is_type(obj, type_pattern) ) - -def list_namespace(namespace, type_pattern, filter, ignore_case=False, show_all=False): - """Return dictionary of all objects in a namespace dictionary that match - type_pattern and filter.""" - pattern_list=filter.split(".") - if len(pattern_list) == 1: - return filter_ns(namespace, name_pattern=pattern_list[0], - type_pattern=type_pattern, - ignore_case=ignore_case, show_all=show_all) - else: - # This is where we can change if all objects should be searched or - # only modules. Just change the type_pattern to module to search only - # modules - filtered = filter_ns(namespace, name_pattern=pattern_list[0], - type_pattern="all", - ignore_case=ignore_case, show_all=show_all) - results = {} - for name, obj in filtered.items(): - ns = list_namespace(dict_dir(obj), type_pattern, - ".".join(pattern_list[1:]), - ignore_case=ignore_case, show_all=show_all) - for inner_name, inner_obj in ns.items(): - results["%s.%s"%(name,inner_name)] = inner_obj - return results +# -*- coding: utf-8 -*- +"""Support for wildcard pattern matching in object inspection. + +Authors +------- +- Jörgen Stenarson <jorgen.stenarson@bostream.nu> +- Thomas Kluyver +""" + +#***************************************************************************** +# Copyright (C) 2005 Jörgen Stenarson <jorgen.stenarson@bostream.nu> +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#***************************************************************************** + +import re +import types + +from IPython.utils.dir2 import dir2 + +def create_typestr2type_dicts(dont_include_in_type2typestr=["lambda"]): + """Return dictionaries mapping lower case typename (e.g. 'tuple') to type + objects from the types package, and vice versa.""" + typenamelist = [tname for tname in dir(types) if tname.endswith("Type")] + typestr2type, type2typestr = {}, {} + + for tname in typenamelist: + name = tname[:-4].lower() # Cut 'Type' off the end of the name + obj = getattr(types, tname) + typestr2type[name] = obj + if name not in dont_include_in_type2typestr: + type2typestr[obj] = name + return typestr2type, type2typestr + +typestr2type, type2typestr = create_typestr2type_dicts() + +def is_type(obj, typestr_or_type): + """is_type(obj, typestr_or_type) verifies if obj is of a certain type. It + can take strings or actual python types for the second argument, i.e. + 'tuple'<->TupleType. 'all' matches all types. + + TODO: Should be extended for choosing more than one type.""" + if typestr_or_type == "all": + return True + if type(typestr_or_type) == type: + test_type = typestr_or_type + else: + test_type = typestr2type.get(typestr_or_type, False) + if test_type: + return isinstance(obj, test_type) + return False + +def show_hidden(str, show_all=False): + """Return true for strings starting with single _ if show_all is true.""" + return show_all or str.startswith("__") or not str.startswith("_") + +def dict_dir(obj): + """Produce a dictionary of an object's attributes. Builds on dir2 by + checking that a getattr() call actually succeeds.""" + ns = {} + for key in dir2(obj): + # This seemingly unnecessary try/except is actually needed + # because there is code out there with metaclasses that + # create 'write only' attributes, where a getattr() call + # will fail even if the attribute appears listed in the + # object's dictionary. 
Properties can actually do the same + # thing. In particular, Traits use this pattern + try: + ns[key] = getattr(obj, key) + except AttributeError: + pass + return ns + +def filter_ns(ns, name_pattern="*", type_pattern="all", ignore_case=True, + show_all=True): + """Filter a namespace dictionary by name pattern and item type.""" + pattern = name_pattern.replace("*",".*").replace("?",".") + if ignore_case: + reg = re.compile(pattern+"$", re.I) + else: + reg = re.compile(pattern+"$") + + # Check each one matches regex; shouldn't be hidden; of correct type. + return dict((key,obj) for key, obj in ns.items() if reg.match(key) \ + and show_hidden(key, show_all) \ + and is_type(obj, type_pattern) ) + +def list_namespace(namespace, type_pattern, filter, ignore_case=False, show_all=False): + """Return dictionary of all objects in a namespace dictionary that match + type_pattern and filter.""" + pattern_list=filter.split(".") + if len(pattern_list) == 1: + return filter_ns(namespace, name_pattern=pattern_list[0], + type_pattern=type_pattern, + ignore_case=ignore_case, show_all=show_all) + else: + # This is where we can change if all objects should be searched or + # only modules. Just change the type_pattern to module to search only + # modules + filtered = filter_ns(namespace, name_pattern=pattern_list[0], + type_pattern="all", + ignore_case=ignore_case, show_all=show_all) + results = {} + for name, obj in filtered.items(): + ns = list_namespace(dict_dir(obj), type_pattern, + ".".join(pattern_list[1:]), + ignore_case=ignore_case, show_all=show_all) + for inner_name, inner_obj in ns.items(): + results["%s.%s"%(name,inner_name)] = inner_obj + return results diff --git a/contrib/python/ipython/py3/LICENSE b/contrib/python/ipython/py3/LICENSE index d682b2eaa0e..d4bb8d39dfe 100644 --- a/contrib/python/ipython/py3/LICENSE +++ b/contrib/python/ipython/py3/LICENSE @@ -1,33 +1,33 @@ -BSD 3-Clause License - -- Copyright (c) 2008-Present, IPython Development Team -- Copyright (c) 2001-2007, Fernando Perez <fernando.perez@colorado.edu> -- Copyright (c) 2001, Janko Hauser <jhauser@zscout.de> -- Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu> - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +BSD 3-Clause License + +- Copyright (c) 2008-Present, IPython Development Team +- Copyright (c) 2001-2007, Fernando Perez <fernando.perez@colorado.edu> +- Copyright (c) 2001, Janko Hauser <jhauser@zscout.de> +- Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu> + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/contrib/python/ipython/py3/README.rst b/contrib/python/ipython/py3/README.rst index 3f296c50025..940047656f2 100644 --- a/contrib/python/ipython/py3/README.rst +++ b/contrib/python/ipython/py3/README.rst @@ -1,148 +1,148 @@ -.. image:: https://codecov.io/github/ipython/ipython/coverage.svg?branch=master - :target: https://codecov.io/github/ipython/ipython?branch=master - -.. image:: https://img.shields.io/pypi/v/IPython.svg - :target: https://pypi.python.org/pypi/ipython - -.. image:: https://img.shields.io/travis/ipython/ipython.svg - :target: https://travis-ci.org/ipython/ipython - -.. image:: https://www.codetriage.com/ipython/ipython/badges/users.svg - :target: https://www.codetriage.com/ipython/ipython/ - -.. image:: https://raster.shields.io/badge/Follows-NEP29-brightgreen.png - :target: https://numpy.org/neps/nep-0029-deprecation_policy.html - - -=========================================== - IPython: Productive Interactive Computing -=========================================== - -Overview -======== - -Welcome to IPython. Our full documentation is available on `ipython.readthedocs.io -<https://ipython.readthedocs.io/en/stable/>`_ and contains information on how to install, use, and -contribute to the project. 
- -**IPython versions and Python Support** - -Starting with IPython 7.10, IPython follows `NEP 29 <https://numpy.org/neps/nep-0029-deprecation_policy.html>`_ - -**IPython 7.17+** requires Python version 3.7 and above. - -**IPython 7.10+** requires Python version 3.6 and above. - -**IPython 7.0** requires Python version 3.5 and above. - -**IPython 6.x** requires Python version 3.3 and above. - -**IPython 5.x LTS** is the compatible release for Python 2.7. -If you require Python 2 support, you **must** use IPython 5.x LTS. Please -update your project configurations and requirements as necessary. - - -The Notebook, Qt console and a number of other pieces are now parts of *Jupyter*. -See the `Jupyter installation docs <https://jupyter.readthedocs.io/en/latest/install.html>`__ -if you want to use these. - - - - -Development and Instant running -=============================== - -You can find the latest version of the development documentation on `readthedocs -<https://ipython.readthedocs.io/en/latest/>`_. - -You can run IPython from this directory without even installing it system-wide -by typing at the terminal:: - - $ python -m IPython - -Or see the `development installation docs -<https://ipython.readthedocs.io/en/latest/install/install.html#installing-the-development-version>`_ -for the latest revision on read the docs. - -Documentation and installation instructions for older version of IPython can be -found on the `IPython website <https://ipython.org/documentation.html>`_ - - - -IPython requires Python version 3 or above -========================================== - -Starting with version 6.0, IPython does not support Python 2.7, 3.0, 3.1, or -3.2. - -For a version compatible with Python 2.7, please install the 5.x LTS Long Term -Support version. - -If you are encountering this error message you are likely trying to install or -use IPython from source. You need to checkout the remote 5.x branch. If you are -using git the following should work:: - - $ git fetch origin - $ git checkout 5.x - -If you encounter this error message with a regular install of IPython, then you -likely need to update your package manager, for example if you are using `pip` -check the version of pip with:: - - $ pip --version - -You will need to update pip to the version 9.0.1 or greater. If you are not using -pip, please inquiry with the maintainers of the package for your package -manager. - -For more information see one of our blog posts: - - https://blog.jupyter.org/release-of-ipython-5-0-8ce60b8d2e8e - -As well as the following Pull-Request for discussion: - - https://github.com/ipython/ipython/pull/9900 - -This error does also occur if you are invoking ``setup.py`` directly – which you -should not – or are using ``easy_install`` If this is the case, use ``pip -install .`` instead of ``setup.py install`` , and ``pip install -e .`` instead -of ``setup.py develop`` If you are depending on IPython as a dependency you may -also want to have a conditional dependency on IPython depending on the Python -version:: - - install_req = ['ipython'] - if sys.version_info[0] < 3 and 'bdist_wheel' not in sys.argv: - install_req.remove('ipython') - install_req.append('ipython<6') - - setup( - ... - install_requires=install_req - ) - -Alternatives to IPython -======================= - -IPython may not be to your taste; if that's the case there might be similar -project that you might want to use: - -- the classic Python REPL. 
-- `bpython <https://bpython-interpreter.org/>`_ -- `mypython <https://www.asmeurer.com/mypython/>`_ -- `ptpython and ptipython <https://pypi.org/project/ptpython/>` -- `xonsh <https://xon.sh/>` - -Ignoring commits with git blame.ignoreRevsFile -============================================== - -As of git 2.23, it is possible to make formatting changes without breaking -``git blame``. See the `git documentation -<https://git-scm.com/docs/git-config#Documentation/git-config.txt-blameignoreRevsFile>`_ -for more details. - -To use this feature you must: - -- Install git >= 2.23 -- Configure your local git repo by running: - - POSIX: ``tools\configure-git-blame-ignore-revs.sh`` - - Windows: ``tools\configure-git-blame-ignore-revs.bat`` +.. image:: https://codecov.io/github/ipython/ipython/coverage.svg?branch=master + :target: https://codecov.io/github/ipython/ipython?branch=master + +.. image:: https://img.shields.io/pypi/v/IPython.svg + :target: https://pypi.python.org/pypi/ipython + +.. image:: https://img.shields.io/travis/ipython/ipython.svg + :target: https://travis-ci.org/ipython/ipython + +.. image:: https://www.codetriage.com/ipython/ipython/badges/users.svg + :target: https://www.codetriage.com/ipython/ipython/ + +.. image:: https://raster.shields.io/badge/Follows-NEP29-brightgreen.png + :target: https://numpy.org/neps/nep-0029-deprecation_policy.html + + +=========================================== + IPython: Productive Interactive Computing +=========================================== + +Overview +======== + +Welcome to IPython. Our full documentation is available on `ipython.readthedocs.io +<https://ipython.readthedocs.io/en/stable/>`_ and contains information on how to install, use, and +contribute to the project. + +**IPython versions and Python Support** + +Starting with IPython 7.10, IPython follows `NEP 29 <https://numpy.org/neps/nep-0029-deprecation_policy.html>`_ + +**IPython 7.17+** requires Python version 3.7 and above. + +**IPython 7.10+** requires Python version 3.6 and above. + +**IPython 7.0** requires Python version 3.5 and above. + +**IPython 6.x** requires Python version 3.3 and above. + +**IPython 5.x LTS** is the compatible release for Python 2.7. +If you require Python 2 support, you **must** use IPython 5.x LTS. Please +update your project configurations and requirements as necessary. + + +The Notebook, Qt console and a number of other pieces are now parts of *Jupyter*. +See the `Jupyter installation docs <https://jupyter.readthedocs.io/en/latest/install.html>`__ +if you want to use these. + + + + +Development and Instant running +=============================== + +You can find the latest version of the development documentation on `readthedocs +<https://ipython.readthedocs.io/en/latest/>`_. + +You can run IPython from this directory without even installing it system-wide +by typing at the terminal:: + + $ python -m IPython + +Or see the `development installation docs +<https://ipython.readthedocs.io/en/latest/install/install.html#installing-the-development-version>`_ +for the latest revision on read the docs. + +Documentation and installation instructions for older version of IPython can be +found on the `IPython website <https://ipython.org/documentation.html>`_ + + + +IPython requires Python version 3 or above +========================================== + +Starting with version 6.0, IPython does not support Python 2.7, 3.0, 3.1, or +3.2. + +For a version compatible with Python 2.7, please install the 5.x LTS Long Term +Support version. 
+ +If you are encountering this error message you are likely trying to install or +use IPython from source. You need to checkout the remote 5.x branch. If you are +using git the following should work:: + + $ git fetch origin + $ git checkout 5.x + +If you encounter this error message with a regular install of IPython, then you +likely need to update your package manager, for example if you are using `pip` +check the version of pip with:: + + $ pip --version + +You will need to update pip to the version 9.0.1 or greater. If you are not using +pip, please inquiry with the maintainers of the package for your package +manager. + +For more information see one of our blog posts: + + https://blog.jupyter.org/release-of-ipython-5-0-8ce60b8d2e8e + +As well as the following Pull-Request for discussion: + + https://github.com/ipython/ipython/pull/9900 + +This error does also occur if you are invoking ``setup.py`` directly – which you +should not – or are using ``easy_install`` If this is the case, use ``pip +install .`` instead of ``setup.py install`` , and ``pip install -e .`` instead +of ``setup.py develop`` If you are depending on IPython as a dependency you may +also want to have a conditional dependency on IPython depending on the Python +version:: + + install_req = ['ipython'] + if sys.version_info[0] < 3 and 'bdist_wheel' not in sys.argv: + install_req.remove('ipython') + install_req.append('ipython<6') + + setup( + ... + install_requires=install_req + ) + +Alternatives to IPython +======================= + +IPython may not be to your taste; if that's the case there might be similar +project that you might want to use: + +- the classic Python REPL. +- `bpython <https://bpython-interpreter.org/>`_ +- `mypython <https://www.asmeurer.com/mypython/>`_ +- `ptpython and ptipython <https://pypi.org/project/ptpython/>` +- `xonsh <https://xon.sh/>` + +Ignoring commits with git blame.ignoreRevsFile +============================================== + +As of git 2.23, it is possible to make formatting changes without breaking +``git blame``. See the `git documentation +<https://git-scm.com/docs/git-config#Documentation/git-config.txt-blameignoreRevsFile>`_ +for more details. 
+ +To use this feature you must: + +- Install git >= 2.23 +- Configure your local git repo by running: + - POSIX: ``tools\configure-git-blame-ignore-revs.sh`` + - Windows: ``tools\configure-git-blame-ignore-revs.bat`` diff --git a/contrib/python/ipython/py3/bin/ya.make b/contrib/python/ipython/py3/bin/ya.make index 6f110cfe21b..60ce6a4c955 100644 --- a/contrib/python/ipython/py3/bin/ya.make +++ b/contrib/python/ipython/py3/bin/ya.make @@ -1,11 +1,11 @@ -PY3_PROGRAM(ipython) - -OWNER(g:python-contrib borman nslus) - -PEERDIR( - contrib/python/ipython -) - -PY_MAIN(IPython:start_ipython) - -END() +PY3_PROGRAM(ipython) + +OWNER(g:python-contrib borman nslus) + +PEERDIR( + contrib/python/ipython +) + +PY_MAIN(IPython:start_ipython) + +END() diff --git a/contrib/python/ipython/py3/patches/01-arcadia.patch b/contrib/python/ipython/py3/patches/01-arcadia.patch index 72a1446d0e7..335f93f18ed 100644 --- a/contrib/python/ipython/py3/patches/01-arcadia.patch +++ b/contrib/python/ipython/py3/patches/01-arcadia.patch @@ -1,171 +1,171 @@ ---- contrib/python/ipython/py3/IPython/core/completer.py (index) -+++ contrib/python/ipython/py3/IPython/core/completer.py (working tree) -@@ -996,6 +996,7 @@ def _make_signature(completion)-> str: - - """ - -+ return '(%s)'% ', '.join([f for f in (_formatparamchildren(p) for p in completion.params) if f]) - # it looks like this might work on jedi 0.17 - if hasattr(completion, 'get_signatures'): - signatures = completion.get_signatures() -@@ -1388,7 +1389,7 @@ class IPCompleter(Completer): - else: - raise ValueError("Don't understand self.omit__names == {}".format(self.omit__names)) - -- interpreter = jedi.Interpreter(text[:offset], namespaces) -+ interpreter = jedi.Interpreter(text[:offset], namespaces, column=cursor_column, line=cursor_line + 1) - try_jedi = True - - try: -@@ -1415,7 +1416,7 @@ class IPCompleter(Completer): - if not try_jedi: - return [] - try: -- return filter(completion_filter, interpreter.complete(column=cursor_column, line=cursor_line + 1)) -+ return filter(completion_filter, interpreter.completions()) - except Exception as e: - if self.debug: - return [_FakeJediCompletion('Oops Jedi has crashed, please report a bug with the following:\n"""\n%s\ns"""' % (e))] ---- contrib/python/ipython/py3/IPython/core/completerlib.py (index) -+++ contrib/python/ipython/py3/IPython/core/completerlib.py (working tree) -@@ -18,6 +18,7 @@ These are all loaded by default by IPython. 
- # Stdlib imports - import glob - import inspect -+import itertools - import os - import re - import sys -@@ -39,6 +40,8 @@ from IPython import get_ipython - - from typing import List - -+from __res import importer -+ - #----------------------------------------------------------------------------- - # Globals and constants - #----------------------------------------------------------------------------- -@@ -64,6 +67,50 @@ magic_run_re = re.compile(r'.*(\.ipy|\.ipynb|\.py[w]?)$') - # Local utilities - #----------------------------------------------------------------------------- - -+arcadia_rootmodules_cache = None -+arcadia_modules_cache = None -+ -+ -+def arcadia_init_cache(): -+ global arcadia_rootmodules_cache, arcadia_modules_cache -+ arcadia_rootmodules_cache = set() -+ arcadia_modules_cache = {} -+ -+ all_modules = itertools.chain( -+ sys.builtin_module_names, -+ importer.memory -+ ) -+ -+ for name in all_modules: -+ path = name.split('.') -+ arcadia_rootmodules_cache.add(path[0]) -+ -+ prefix = path[0] -+ for element in path[1:]: -+ if element == '__init__': -+ continue -+ -+ arcadia_modules_cache.setdefault(prefix, set()).add(element) -+ prefix += '.' + element -+ -+ arcadia_rootmodules_cache = sorted(arcadia_rootmodules_cache) -+ arcadia_modules_cache = {k: sorted(v) for k, v in arcadia_modules_cache.items()} -+ -+ -+def arcadia_module_list(mod): -+ if arcadia_modules_cache is None: -+ arcadia_init_cache() -+ -+ return arcadia_modules_cache.get(mod, ()) -+ -+ -+def arcadia_get_root_modules(): -+ if arcadia_rootmodules_cache is None: -+ arcadia_init_cache() -+ -+ return arcadia_rootmodules_cache -+ -+ - def module_list(path): - """ - Return the list containing the names of the modules available in the given -@@ -165,7 +212,8 @@ def try_import(mod: str, only_modules=False) -> List[str]: - except: - return [] - -- m_is_init = '__init__' in (getattr(m, '__file__', '') or '') -+ filename = getattr(m, '__file__', '') -+ m_is_init = '__init__' in (filename or '') or filename == mod - - completions = [] - if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init: -@@ -174,10 +222,10 @@ def try_import(mod: str, only_modules=False) -> List[str]: - - completions.extend(getattr(m, '__all__', [])) - if m_is_init: -- completions.extend(module_list(os.path.dirname(m.__file__))) -+ completions.extend(arcadia_module_list(mod)) - completions_set = {c for c in completions if isinstance(c, str)} - completions_set.discard('__init__') -- return list(completions_set) -+ return sorted(completions_set) - - - #----------------------------------------------------------------------------- -@@ -226,10 +274,10 @@ def module_completion(line): - # 'from xy<tab>' or 'import xy<tab>' - if nwords < 3 and (words[0] in {'%aimport', 'import', 'from'}) : - if nwords == 1: -- return get_root_modules() -+ return arcadia_get_root_modules() - mod = words[1].split('.') - if len(mod) < 2: -- return get_root_modules() -+ return arcadia_get_root_modules() - completion_list = try_import('.'.join(mod[:-1]), True) - return ['.'.join(mod[:-1] + [el]) for el in completion_list] - ---- contrib/python/ipython/py3/IPython/core/extensions.py (index) -+++ contrib/python/ipython/py3/IPython/core/extensions.py (working tree) -@@ -72,11 +72,11 @@ class ExtensionManager(Configurable): - if module_str in self.loaded: - return "already loaded" - -- from IPython.utils.syspathcontext import prepended_to_syspath -- - with self.shell.builtin_trap: - if module_str not in sys.modules: -- with 
prepended_to_syspath(self.ipython_extension_dir): -+ try: -+ sys.modules[module_str] = __import__('IPython.extensions.' + module_str) -+ except ImportError: - mod = import_module(module_str) - if mod.__file__.startswith(self.ipython_extension_dir): - print(("Loading extensions from {dir} is deprecated. " ---- contrib/python/ipython/py3/IPython/core/profiledir.py (index) -+++ contrib/python/ipython/py3/IPython/core/profiledir.py (working tree) -@@ -111,13 +111,11 @@ class ProfileDir(LoggingConfigurable): - self._mkdir(self.startup_dir) - - readme = os.path.join(self.startup_dir, 'README') -- src = os.path.join(get_ipython_package_dir(), u'core', u'profile', u'README_STARTUP') - -- if not os.path.exists(src): -- self.log.warning("Could not copy README_STARTUP to startup dir. Source file %s does not exist.", src) -- -- if os.path.exists(src) and not os.path.exists(readme): -- shutil.copy(src, readme) -+ if not os.path.exists(readme): -+ import pkgutil -+ with open(readme, 'wb') as f: -+ f.write(pkgutil.get_data(__name__, 'profile/README_STARTUP')) - - @observe('security_dir') - def check_security_dir(self, change=None): +--- contrib/python/ipython/py3/IPython/core/completer.py (index) ++++ contrib/python/ipython/py3/IPython/core/completer.py (working tree) +@@ -996,6 +996,7 @@ def _make_signature(completion)-> str: + + """ + ++ return '(%s)'% ', '.join([f for f in (_formatparamchildren(p) for p in completion.params) if f]) + # it looks like this might work on jedi 0.17 + if hasattr(completion, 'get_signatures'): + signatures = completion.get_signatures() +@@ -1388,7 +1389,7 @@ class IPCompleter(Completer): + else: + raise ValueError("Don't understand self.omit__names == {}".format(self.omit__names)) + +- interpreter = jedi.Interpreter(text[:offset], namespaces) ++ interpreter = jedi.Interpreter(text[:offset], namespaces, column=cursor_column, line=cursor_line + 1) + try_jedi = True + + try: +@@ -1415,7 +1416,7 @@ class IPCompleter(Completer): + if not try_jedi: + return [] + try: +- return filter(completion_filter, interpreter.complete(column=cursor_column, line=cursor_line + 1)) ++ return filter(completion_filter, interpreter.completions()) + except Exception as e: + if self.debug: + return [_FakeJediCompletion('Oops Jedi has crashed, please report a bug with the following:\n"""\n%s\ns"""' % (e))] +--- contrib/python/ipython/py3/IPython/core/completerlib.py (index) ++++ contrib/python/ipython/py3/IPython/core/completerlib.py (working tree) +@@ -18,6 +18,7 @@ These are all loaded by default by IPython. 
+ # Stdlib imports + import glob + import inspect ++import itertools + import os + import re + import sys +@@ -39,6 +40,8 @@ from IPython import get_ipython + + from typing import List + ++from __res import importer ++ + #----------------------------------------------------------------------------- + # Globals and constants + #----------------------------------------------------------------------------- +@@ -64,6 +67,50 @@ magic_run_re = re.compile(r'.*(\.ipy|\.ipynb|\.py[w]?)$') + # Local utilities + #----------------------------------------------------------------------------- + ++arcadia_rootmodules_cache = None ++arcadia_modules_cache = None ++ ++ ++def arcadia_init_cache(): ++ global arcadia_rootmodules_cache, arcadia_modules_cache ++ arcadia_rootmodules_cache = set() ++ arcadia_modules_cache = {} ++ ++ all_modules = itertools.chain( ++ sys.builtin_module_names, ++ importer.memory ++ ) ++ ++ for name in all_modules: ++ path = name.split('.') ++ arcadia_rootmodules_cache.add(path[0]) ++ ++ prefix = path[0] ++ for element in path[1:]: ++ if element == '__init__': ++ continue ++ ++ arcadia_modules_cache.setdefault(prefix, set()).add(element) ++ prefix += '.' + element ++ ++ arcadia_rootmodules_cache = sorted(arcadia_rootmodules_cache) ++ arcadia_modules_cache = {k: sorted(v) for k, v in arcadia_modules_cache.items()} ++ ++ ++def arcadia_module_list(mod): ++ if arcadia_modules_cache is None: ++ arcadia_init_cache() ++ ++ return arcadia_modules_cache.get(mod, ()) ++ ++ ++def arcadia_get_root_modules(): ++ if arcadia_rootmodules_cache is None: ++ arcadia_init_cache() ++ ++ return arcadia_rootmodules_cache ++ ++ + def module_list(path): + """ + Return the list containing the names of the modules available in the given +@@ -165,7 +212,8 @@ def try_import(mod: str, only_modules=False) -> List[str]: + except: + return [] + +- m_is_init = '__init__' in (getattr(m, '__file__', '') or '') ++ filename = getattr(m, '__file__', '') ++ m_is_init = '__init__' in (filename or '') or filename == mod + + completions = [] + if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init: +@@ -174,10 +222,10 @@ def try_import(mod: str, only_modules=False) -> List[str]: + + completions.extend(getattr(m, '__all__', [])) + if m_is_init: +- completions.extend(module_list(os.path.dirname(m.__file__))) ++ completions.extend(arcadia_module_list(mod)) + completions_set = {c for c in completions if isinstance(c, str)} + completions_set.discard('__init__') +- return list(completions_set) ++ return sorted(completions_set) + + + #----------------------------------------------------------------------------- +@@ -226,10 +274,10 @@ def module_completion(line): + # 'from xy<tab>' or 'import xy<tab>' + if nwords < 3 and (words[0] in {'%aimport', 'import', 'from'}) : + if nwords == 1: +- return get_root_modules() ++ return arcadia_get_root_modules() + mod = words[1].split('.') + if len(mod) < 2: +- return get_root_modules() ++ return arcadia_get_root_modules() + completion_list = try_import('.'.join(mod[:-1]), True) + return ['.'.join(mod[:-1] + [el]) for el in completion_list] + +--- contrib/python/ipython/py3/IPython/core/extensions.py (index) ++++ contrib/python/ipython/py3/IPython/core/extensions.py (working tree) +@@ -72,11 +72,11 @@ class ExtensionManager(Configurable): + if module_str in self.loaded: + return "already loaded" + +- from IPython.utils.syspathcontext import prepended_to_syspath +- + with self.shell.builtin_trap: + if module_str not in sys.modules: +- with 
prepended_to_syspath(self.ipython_extension_dir): ++ try: ++ sys.modules[module_str] = __import__('IPython.extensions.' + module_str) ++ except ImportError: + mod = import_module(module_str) + if mod.__file__.startswith(self.ipython_extension_dir): + print(("Loading extensions from {dir} is deprecated. " +--- contrib/python/ipython/py3/IPython/core/profiledir.py (index) ++++ contrib/python/ipython/py3/IPython/core/profiledir.py (working tree) +@@ -111,13 +111,11 @@ class ProfileDir(LoggingConfigurable): + self._mkdir(self.startup_dir) + + readme = os.path.join(self.startup_dir, 'README') +- src = os.path.join(get_ipython_package_dir(), u'core', u'profile', u'README_STARTUP') + +- if not os.path.exists(src): +- self.log.warning("Could not copy README_STARTUP to startup dir. Source file %s does not exist.", src) +- +- if os.path.exists(src) and not os.path.exists(readme): +- shutil.copy(src, readme) ++ if not os.path.exists(readme): ++ import pkgutil ++ with open(readme, 'wb') as f: ++ f.write(pkgutil.get_data(__name__, 'profile/README_STARTUP')) + + @observe('security_dir') + def check_security_dir(self, change=None): diff --git a/contrib/python/ipython/py3/patches/02-fix-ya.make.patch b/contrib/python/ipython/py3/patches/02-fix-ya.make.patch index 7fbd38be92b..bfec2cb4983 100644 --- a/contrib/python/ipython/py3/patches/02-fix-ya.make.patch +++ b/contrib/python/ipython/py3/patches/02-fix-ya.make.patch @@ -1,54 +1,54 @@ ---- contrib/python/ipython/py3/.dist-info/METADATA (index) -+++ contrib/python/ipython/py3/.dist-info/METADATA (working tree) -@@ -24,7 +24,7 @@ Classifier: Programming Language :: Python :: 3 :: Only - Classifier: Topic :: System :: Shells - Requires-Python: >=3.7 - Requires-Dist: setuptools (>=18.5) --Requires-Dist: jedi (>=0.16) -+Requires-Dist: jedi (>=0.13) - Requires-Dist: decorator - Requires-Dist: pickleshare - Requires-Dist: traitlets (>=4.2) ---- contrib/python/ipython/py3/ya.make (index) -+++ contrib/python/ipython/py3/ya.make (working tree) -@@ -14,13 +14,28 @@ PEERDIR( - contrib/python/decorator - contrib/python/jedi - contrib/python/matplotlib-inline -- contrib/python/pexpect - contrib/python/pickleshare - contrib/python/prompt-toolkit - contrib/python/setuptools - contrib/python/traitlets - ) - -+IF (OS_WINDOWS) -+ PEERDIR( -+ contrib/python/colorama -+ ) -+ELSE() -+ PEERDIR( -+ contrib/python/pexpect -+ ) -+ENDIF() -+ -+IF (OS_DARWIN) -+ PEERDIR( -+ contrib/python/appnope -+ ) -+ENDIF() -+ - NO_LINT() - - NO_CHECK_IMPORTS( -@@ -255,10 +269,6 @@ RESOURCE_FILES( - .dist-info/entry_points.txt - .dist-info/top_level.txt - IPython/core/profile/README_STARTUP -- IPython/testing/plugin/README.txt -- IPython/testing/plugin/test_combo.txt -- IPython/testing/plugin/test_example.txt -- IPython/testing/plugin/test_exampleip.txt - ) - - END() +--- contrib/python/ipython/py3/.dist-info/METADATA (index) ++++ contrib/python/ipython/py3/.dist-info/METADATA (working tree) +@@ -24,7 +24,7 @@ Classifier: Programming Language :: Python :: 3 :: Only + Classifier: Topic :: System :: Shells + Requires-Python: >=3.7 + Requires-Dist: setuptools (>=18.5) +-Requires-Dist: jedi (>=0.16) ++Requires-Dist: jedi (>=0.13) + Requires-Dist: decorator + Requires-Dist: pickleshare + Requires-Dist: traitlets (>=4.2) +--- contrib/python/ipython/py3/ya.make (index) ++++ contrib/python/ipython/py3/ya.make (working tree) +@@ -14,13 +14,28 @@ PEERDIR( + contrib/python/decorator + contrib/python/jedi + contrib/python/matplotlib-inline +- contrib/python/pexpect + contrib/python/pickleshare + 
contrib/python/prompt-toolkit + contrib/python/setuptools + contrib/python/traitlets + ) + ++IF (OS_WINDOWS) ++ PEERDIR( ++ contrib/python/colorama ++ ) ++ELSE() ++ PEERDIR( ++ contrib/python/pexpect ++ ) ++ENDIF() ++ ++IF (OS_DARWIN) ++ PEERDIR( ++ contrib/python/appnope ++ ) ++ENDIF() ++ + NO_LINT() + + NO_CHECK_IMPORTS( +@@ -255,10 +269,6 @@ RESOURCE_FILES( + .dist-info/entry_points.txt + .dist-info/top_level.txt + IPython/core/profile/README_STARTUP +- IPython/testing/plugin/README.txt +- IPython/testing/plugin/test_combo.txt +- IPython/testing/plugin/test_example.txt +- IPython/testing/plugin/test_exampleip.txt + ) + + END() diff --git a/contrib/python/ipython/py3/ya.make b/contrib/python/ipython/py3/ya.make index d0cb2b74cb8..a0f1dd9e648 100644 --- a/contrib/python/ipython/py3/ya.make +++ b/contrib/python/ipython/py3/ya.make @@ -1,276 +1,276 @@ -PY3_LIBRARY() - -OWNER(borman nslus g:python-contrib) - -VERSION(7.31.1) - -LICENSE(BSD-3-Clause) - -PEERDIR( - contrib/python/Pygments - contrib/python/backcall - contrib/python/decorator - contrib/python/jedi - contrib/python/matplotlib-inline - contrib/python/pickleshare - contrib/python/prompt-toolkit - contrib/python/setuptools - contrib/python/traitlets -) - -IF (OS_WINDOWS) - PEERDIR( - contrib/python/colorama - ) -ELSE() - PEERDIR( - contrib/python/pexpect - ) -ENDIF() - -IF (OS_DARWIN) - PEERDIR( - contrib/python/appnope - ) -ENDIF() - +PY3_LIBRARY() + +OWNER(borman nslus g:python-contrib) + +VERSION(7.31.1) + +LICENSE(BSD-3-Clause) + +PEERDIR( + contrib/python/Pygments + contrib/python/backcall + contrib/python/decorator + contrib/python/jedi + contrib/python/matplotlib-inline + contrib/python/pickleshare + contrib/python/prompt-toolkit + contrib/python/setuptools + contrib/python/traitlets +) + +IF (OS_WINDOWS) + PEERDIR( + contrib/python/colorama + ) +ELSE() + PEERDIR( + contrib/python/pexpect + ) +ENDIF() + +IF (OS_DARWIN) + PEERDIR( + contrib/python/appnope + ) +ENDIF() + NO_LINT() -NO_CHECK_IMPORTS( - IPython.* -) - -PY_SRCS( - TOP_LEVEL - IPython/__init__.py +NO_CHECK_IMPORTS( + IPython.* +) + +PY_SRCS( + TOP_LEVEL + IPython/__init__.py IPython/__main__.py - IPython/config.py - IPython/consoleapp.py - IPython/core/__init__.py - IPython/core/alias.py - IPython/core/application.py - IPython/core/async_helpers.py - IPython/core/autocall.py - IPython/core/builtin_trap.py - IPython/core/compilerop.py - IPython/core/completer.py - IPython/core/completerlib.py - IPython/core/crashhandler.py - IPython/core/debugger.py - IPython/core/display.py - IPython/core/display_trap.py - IPython/core/displayhook.py - IPython/core/displaypub.py - IPython/core/error.py - IPython/core/events.py - IPython/core/excolors.py - IPython/core/extensions.py - IPython/core/formatters.py - IPython/core/getipython.py - IPython/core/history.py - IPython/core/historyapp.py - IPython/core/hooks.py - IPython/core/inputsplitter.py - IPython/core/inputtransformer.py - IPython/core/inputtransformer2.py - IPython/core/interactiveshell.py - IPython/core/latex_symbols.py - IPython/core/logger.py - IPython/core/macro.py - IPython/core/magic.py - IPython/core/magic_arguments.py - IPython/core/magics/__init__.py - IPython/core/magics/auto.py - IPython/core/magics/basic.py - IPython/core/magics/code.py - IPython/core/magics/config.py - IPython/core/magics/display.py - IPython/core/magics/execution.py - IPython/core/magics/extension.py - IPython/core/magics/history.py - IPython/core/magics/logging.py - IPython/core/magics/namespace.py - IPython/core/magics/osm.py - 
IPython/core/magics/packaging.py - IPython/core/magics/pylab.py - IPython/core/magics/script.py - IPython/core/oinspect.py - IPython/core/page.py - IPython/core/payload.py - IPython/core/payloadpage.py - IPython/core/prefilter.py - IPython/core/profileapp.py - IPython/core/profiledir.py - IPython/core/prompts.py - IPython/core/pylabtools.py - IPython/core/release.py - IPython/core/shellapp.py - IPython/core/splitinput.py - IPython/core/ultratb.py - IPython/core/usage.py - IPython/display.py - IPython/extensions/__init__.py - IPython/extensions/autoreload.py - IPython/extensions/cythonmagic.py - IPython/extensions/rmagic.py - IPython/extensions/storemagic.py - IPython/extensions/sympyprinting.py - IPython/external/__init__.py - IPython/external/decorators/__init__.py - IPython/external/decorators/_decorators.py - IPython/external/decorators/_numpy_testing_noseclasses.py - IPython/external/mathjax.py - IPython/external/qt_for_kernel.py - IPython/external/qt_loaders.py - IPython/frontend.py - IPython/html.py - IPython/kernel/__init__.py - IPython/kernel/__main__.py - IPython/kernel/adapter.py - IPython/kernel/channels.py - IPython/kernel/channelsabc.py - IPython/kernel/client.py - IPython/kernel/clientabc.py - IPython/kernel/connect.py - IPython/kernel/kernelspec.py - IPython/kernel/kernelspecapp.py - IPython/kernel/launcher.py - IPython/kernel/manager.py - IPython/kernel/managerabc.py - IPython/kernel/multikernelmanager.py - IPython/kernel/restarter.py - IPython/kernel/threaded.py - IPython/lib/__init__.py - IPython/lib/backgroundjobs.py - IPython/lib/clipboard.py - IPython/lib/deepreload.py - IPython/lib/demo.py - IPython/lib/display.py - IPython/lib/editorhooks.py - IPython/lib/guisupport.py - IPython/lib/inputhook.py - IPython/lib/inputhookglut.py - IPython/lib/inputhookgtk.py - IPython/lib/inputhookgtk3.py - IPython/lib/inputhookgtk4.py - IPython/lib/inputhookpyglet.py - IPython/lib/inputhookqt4.py - IPython/lib/inputhookwx.py - IPython/lib/kernel.py - IPython/lib/latextools.py - IPython/lib/lexers.py - IPython/lib/pretty.py - IPython/lib/security.py - IPython/nbconvert.py - IPython/nbformat.py - IPython/parallel.py - IPython/paths.py - IPython/qt.py - IPython/sphinxext/__init__.py - IPython/sphinxext/custom_doctests.py - IPython/sphinxext/ipython_console_highlighting.py - IPython/sphinxext/ipython_directive.py - IPython/terminal/__init__.py - IPython/terminal/console.py - IPython/terminal/debugger.py - IPython/terminal/embed.py - IPython/terminal/interactiveshell.py - IPython/terminal/ipapp.py - IPython/terminal/magics.py - IPython/terminal/prompts.py - IPython/terminal/pt_inputhooks/__init__.py - IPython/terminal/pt_inputhooks/asyncio.py - IPython/terminal/pt_inputhooks/glut.py - IPython/terminal/pt_inputhooks/gtk.py - IPython/terminal/pt_inputhooks/gtk3.py - IPython/terminal/pt_inputhooks/gtk4.py - IPython/terminal/pt_inputhooks/osx.py - IPython/terminal/pt_inputhooks/pyglet.py - IPython/terminal/pt_inputhooks/qt.py - IPython/terminal/pt_inputhooks/tk.py - IPython/terminal/pt_inputhooks/wx.py - IPython/terminal/ptshell.py - IPython/terminal/ptutils.py - IPython/terminal/shortcuts.py - IPython/testing/__init__.py - IPython/testing/__main__.py - IPython/testing/decorators.py - IPython/testing/globalipapp.py - IPython/testing/iptest.py - IPython/testing/iptestcontroller.py - IPython/testing/ipunittest.py - IPython/testing/plugin/__init__.py - IPython/testing/plugin/dtexample.py - IPython/testing/plugin/ipdoctest.py - IPython/testing/plugin/iptest.py - IPython/testing/plugin/setup.py - 
IPython/testing/plugin/show_refs.py - IPython/testing/plugin/simple.py - IPython/testing/plugin/simplevars.py - IPython/testing/plugin/test_ipdoctest.py - IPython/testing/plugin/test_refs.py - IPython/testing/skipdoctest.py - IPython/testing/tools.py - IPython/utils/PyColorize.py - IPython/utils/__init__.py - IPython/utils/_process_cli.py - IPython/utils/_process_common.py - IPython/utils/_process_posix.py - IPython/utils/_process_win32.py - IPython/utils/_process_win32_controller.py - IPython/utils/_sysinfo.py - IPython/utils/capture.py - IPython/utils/colorable.py - IPython/utils/coloransi.py - IPython/utils/contexts.py - IPython/utils/daemonize.py - IPython/utils/data.py - IPython/utils/decorators.py - IPython/utils/dir2.py - IPython/utils/encoding.py - IPython/utils/eventful.py - IPython/utils/frame.py - IPython/utils/generics.py - IPython/utils/importstring.py - IPython/utils/io.py - IPython/utils/ipstruct.py - IPython/utils/jsonutil.py - IPython/utils/localinterfaces.py - IPython/utils/log.py - IPython/utils/module_paths.py - IPython/utils/openpy.py - IPython/utils/path.py - IPython/utils/pickleutil.py - IPython/utils/process.py - IPython/utils/py3compat.py - IPython/utils/sentinel.py - IPython/utils/shimmodule.py - IPython/utils/signatures.py - IPython/utils/strdispatch.py - IPython/utils/sysinfo.py - IPython/utils/syspathcontext.py - IPython/utils/tempdir.py - IPython/utils/terminal.py - IPython/utils/text.py - IPython/utils/timing.py - IPython/utils/tokenutil.py - IPython/utils/traitlets.py - IPython/utils/tz.py - IPython/utils/ulinecache.py - IPython/utils/version.py - IPython/utils/wildcard.py -) - -RESOURCE_FILES( - PREFIX contrib/python/ipython/py3/ - .dist-info/METADATA - .dist-info/entry_points.txt - .dist-info/top_level.txt - IPython/core/profile/README_STARTUP -) - -END() - -RECURSE( - bin -) + IPython/config.py + IPython/consoleapp.py + IPython/core/__init__.py + IPython/core/alias.py + IPython/core/application.py + IPython/core/async_helpers.py + IPython/core/autocall.py + IPython/core/builtin_trap.py + IPython/core/compilerop.py + IPython/core/completer.py + IPython/core/completerlib.py + IPython/core/crashhandler.py + IPython/core/debugger.py + IPython/core/display.py + IPython/core/display_trap.py + IPython/core/displayhook.py + IPython/core/displaypub.py + IPython/core/error.py + IPython/core/events.py + IPython/core/excolors.py + IPython/core/extensions.py + IPython/core/formatters.py + IPython/core/getipython.py + IPython/core/history.py + IPython/core/historyapp.py + IPython/core/hooks.py + IPython/core/inputsplitter.py + IPython/core/inputtransformer.py + IPython/core/inputtransformer2.py + IPython/core/interactiveshell.py + IPython/core/latex_symbols.py + IPython/core/logger.py + IPython/core/macro.py + IPython/core/magic.py + IPython/core/magic_arguments.py + IPython/core/magics/__init__.py + IPython/core/magics/auto.py + IPython/core/magics/basic.py + IPython/core/magics/code.py + IPython/core/magics/config.py + IPython/core/magics/display.py + IPython/core/magics/execution.py + IPython/core/magics/extension.py + IPython/core/magics/history.py + IPython/core/magics/logging.py + IPython/core/magics/namespace.py + IPython/core/magics/osm.py + IPython/core/magics/packaging.py + IPython/core/magics/pylab.py + IPython/core/magics/script.py + IPython/core/oinspect.py + IPython/core/page.py + IPython/core/payload.py + IPython/core/payloadpage.py + IPython/core/prefilter.py + IPython/core/profileapp.py + IPython/core/profiledir.py + IPython/core/prompts.py + 
IPython/core/pylabtools.py + IPython/core/release.py + IPython/core/shellapp.py + IPython/core/splitinput.py + IPython/core/ultratb.py + IPython/core/usage.py + IPython/display.py + IPython/extensions/__init__.py + IPython/extensions/autoreload.py + IPython/extensions/cythonmagic.py + IPython/extensions/rmagic.py + IPython/extensions/storemagic.py + IPython/extensions/sympyprinting.py + IPython/external/__init__.py + IPython/external/decorators/__init__.py + IPython/external/decorators/_decorators.py + IPython/external/decorators/_numpy_testing_noseclasses.py + IPython/external/mathjax.py + IPython/external/qt_for_kernel.py + IPython/external/qt_loaders.py + IPython/frontend.py + IPython/html.py + IPython/kernel/__init__.py + IPython/kernel/__main__.py + IPython/kernel/adapter.py + IPython/kernel/channels.py + IPython/kernel/channelsabc.py + IPython/kernel/client.py + IPython/kernel/clientabc.py + IPython/kernel/connect.py + IPython/kernel/kernelspec.py + IPython/kernel/kernelspecapp.py + IPython/kernel/launcher.py + IPython/kernel/manager.py + IPython/kernel/managerabc.py + IPython/kernel/multikernelmanager.py + IPython/kernel/restarter.py + IPython/kernel/threaded.py + IPython/lib/__init__.py + IPython/lib/backgroundjobs.py + IPython/lib/clipboard.py + IPython/lib/deepreload.py + IPython/lib/demo.py + IPython/lib/display.py + IPython/lib/editorhooks.py + IPython/lib/guisupport.py + IPython/lib/inputhook.py + IPython/lib/inputhookglut.py + IPython/lib/inputhookgtk.py + IPython/lib/inputhookgtk3.py + IPython/lib/inputhookgtk4.py + IPython/lib/inputhookpyglet.py + IPython/lib/inputhookqt4.py + IPython/lib/inputhookwx.py + IPython/lib/kernel.py + IPython/lib/latextools.py + IPython/lib/lexers.py + IPython/lib/pretty.py + IPython/lib/security.py + IPython/nbconvert.py + IPython/nbformat.py + IPython/parallel.py + IPython/paths.py + IPython/qt.py + IPython/sphinxext/__init__.py + IPython/sphinxext/custom_doctests.py + IPython/sphinxext/ipython_console_highlighting.py + IPython/sphinxext/ipython_directive.py + IPython/terminal/__init__.py + IPython/terminal/console.py + IPython/terminal/debugger.py + IPython/terminal/embed.py + IPython/terminal/interactiveshell.py + IPython/terminal/ipapp.py + IPython/terminal/magics.py + IPython/terminal/prompts.py + IPython/terminal/pt_inputhooks/__init__.py + IPython/terminal/pt_inputhooks/asyncio.py + IPython/terminal/pt_inputhooks/glut.py + IPython/terminal/pt_inputhooks/gtk.py + IPython/terminal/pt_inputhooks/gtk3.py + IPython/terminal/pt_inputhooks/gtk4.py + IPython/terminal/pt_inputhooks/osx.py + IPython/terminal/pt_inputhooks/pyglet.py + IPython/terminal/pt_inputhooks/qt.py + IPython/terminal/pt_inputhooks/tk.py + IPython/terminal/pt_inputhooks/wx.py + IPython/terminal/ptshell.py + IPython/terminal/ptutils.py + IPython/terminal/shortcuts.py + IPython/testing/__init__.py + IPython/testing/__main__.py + IPython/testing/decorators.py + IPython/testing/globalipapp.py + IPython/testing/iptest.py + IPython/testing/iptestcontroller.py + IPython/testing/ipunittest.py + IPython/testing/plugin/__init__.py + IPython/testing/plugin/dtexample.py + IPython/testing/plugin/ipdoctest.py + IPython/testing/plugin/iptest.py + IPython/testing/plugin/setup.py + IPython/testing/plugin/show_refs.py + IPython/testing/plugin/simple.py + IPython/testing/plugin/simplevars.py + IPython/testing/plugin/test_ipdoctest.py + IPython/testing/plugin/test_refs.py + IPython/testing/skipdoctest.py + IPython/testing/tools.py + IPython/utils/PyColorize.py + IPython/utils/__init__.py + 
IPython/utils/_process_cli.py + IPython/utils/_process_common.py + IPython/utils/_process_posix.py + IPython/utils/_process_win32.py + IPython/utils/_process_win32_controller.py + IPython/utils/_sysinfo.py + IPython/utils/capture.py + IPython/utils/colorable.py + IPython/utils/coloransi.py + IPython/utils/contexts.py + IPython/utils/daemonize.py + IPython/utils/data.py + IPython/utils/decorators.py + IPython/utils/dir2.py + IPython/utils/encoding.py + IPython/utils/eventful.py + IPython/utils/frame.py + IPython/utils/generics.py + IPython/utils/importstring.py + IPython/utils/io.py + IPython/utils/ipstruct.py + IPython/utils/jsonutil.py + IPython/utils/localinterfaces.py + IPython/utils/log.py + IPython/utils/module_paths.py + IPython/utils/openpy.py + IPython/utils/path.py + IPython/utils/pickleutil.py + IPython/utils/process.py + IPython/utils/py3compat.py + IPython/utils/sentinel.py + IPython/utils/shimmodule.py + IPython/utils/signatures.py + IPython/utils/strdispatch.py + IPython/utils/sysinfo.py + IPython/utils/syspathcontext.py + IPython/utils/tempdir.py + IPython/utils/terminal.py + IPython/utils/text.py + IPython/utils/timing.py + IPython/utils/tokenutil.py + IPython/utils/traitlets.py + IPython/utils/tz.py + IPython/utils/ulinecache.py + IPython/utils/version.py + IPython/utils/wildcard.py +) + +RESOURCE_FILES( + PREFIX contrib/python/ipython/py3/ + .dist-info/METADATA + .dist-info/entry_points.txt + .dist-info/top_level.txt + IPython/core/profile/README_STARTUP +) + +END() + +RECURSE( + bin +) |
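
Note (not part of the patch): the profiledir.py hunk above replaces the get_ipython_package_dir() + shutil.copy of README_STARTUP with pkgutil.get_data, which reads the file out of the package's bundled resource data instead of requiring an on-disk IPython/core/profile/ directory; the same file is registered under RESOURCE_FILES in ya.make. A minimal standalone sketch of that technique follows, assuming nothing beyond the standard library; the helper name and the destination path are illustrative, not taken from the patch:

    import os
    import pkgutil

    def extract_bundled_resource(package, resource, dest):
        # pkgutil.get_data() returns the resource contents as bytes,
        # or None if the package's loader cannot locate it.
        data = pkgutil.get_data(package, resource)
        if data is None:
            raise FileNotFoundError("%s not found in package %s" % (resource, package))
        # Only materialize the file if it is not already present.
        if not os.path.exists(dest):
            with open(dest, "wb") as f:
                f.write(data)

    # Hypothetical usage mirroring the startup-dir hunk:
    # extract_bundled_resource("IPython.core", "profile/README_STARTUP", "/tmp/README")

pkgutil.get_data delegates to the package loader's get_data(), so it also works when the package is imported from a zip archive or from an embedded resource store such as Arcadia's importer, which is why the patch prefers it over copying a file from the source tree.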