path: root/contrib/python/ipython/py2/IPython
author     Mikhail Borisov <borisov.mikhail@gmail.com>  2022-02-10 16:45:40 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:45:40 +0300
commit     5d50718e66d9c037dc587a0211110b7d25a66185 (patch)
tree       e98df59de24d2ef7c77baed9f41e4875a2fef972  /contrib/python/ipython/py2/IPython
parent     a6a92afe03e02795227d2641b49819b687f088f8 (diff)
download   ydb-5d50718e66d9c037dc587a0211110b7d25a66185.tar.gz
Restoring authorship annotation for Mikhail Borisov <borisov.mikhail@gmail.com>. Commit 2 of 2.
Diffstat (limited to 'contrib/python/ipython/py2/IPython')
-rw-r--r--  contrib/python/ipython/py2/IPython/__init__.py  292
-rw-r--r--  contrib/python/ipython/py2/IPython/__main__.py  28
-rw-r--r--  contrib/python/ipython/py2/IPython/config.py  36
-rw-r--r--  contrib/python/ipython/py2/IPython/consoleapp.py  24
-rw-r--r--  contrib/python/ipython/py2/IPython/core/alias.py  508
-rw-r--r--  contrib/python/ipython/py2/IPython/core/application.py  754
-rw-r--r--  contrib/python/ipython/py2/IPython/core/autocall.py  140
-rw-r--r--  contrib/python/ipython/py2/IPython/core/builtin_trap.py  226
-rw-r--r--  contrib/python/ipython/py2/IPython/core/compilerop.py  286
-rw-r--r--  contrib/python/ipython/py2/IPython/core/completer.py  2128
-rw-r--r--  contrib/python/ipython/py2/IPython/core/completerlib.py  686
-rw-r--r--  contrib/python/ipython/py2/IPython/core/crashhandler.py  432
-rw-r--r--  contrib/python/ipython/py2/IPython/core/debugger.py  1070
-rw-r--r--  contrib/python/ipython/py2/IPython/core/display.py  1856
-rw-r--r--  contrib/python/ipython/py2/IPython/core/display_trap.py  140
-rw-r--r--  contrib/python/ipython/py2/IPython/core/displayhook.py  560
-rw-r--r--  contrib/python/ipython/py2/IPython/core/displaypub.py  206
-rw-r--r--  contrib/python/ipython/py2/IPython/core/error.py  120
-rw-r--r--  contrib/python/ipython/py2/IPython/core/events.py  260
-rw-r--r--  contrib/python/ipython/py2/IPython/core/excolors.py  290
-rw-r--r--  contrib/python/ipython/py2/IPython/core/extensions.py  322
-rw-r--r--  contrib/python/ipython/py2/IPython/core/formatters.py  1802
-rw-r--r--  contrib/python/ipython/py2/IPython/core/getipython.py  48
-rw-r--r--  contrib/python/ipython/py2/IPython/core/history.py  1686
-rw-r--r--  contrib/python/ipython/py2/IPython/core/historyapp.py  298
-rw-r--r--  contrib/python/ipython/py2/IPython/core/hooks.py  424
-rw-r--r--  contrib/python/ipython/py2/IPython/core/inputsplitter.py  1362
-rw-r--r--  contrib/python/ipython/py2/IPython/core/inputtransformer.py  1088
-rw-r--r--  contrib/python/ipython/py2/IPython/core/interactiveshell.py  5824
-rw-r--r--  contrib/python/ipython/py2/IPython/core/latex_symbols.py  2600
-rw-r--r--  contrib/python/ipython/py2/IPython/core/logger.py  442
-rw-r--r--  contrib/python/ipython/py2/IPython/core/macro.py  114
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magic.py  1344
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magic_arguments.py  556
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/__init__.py  82
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/auto.py  256
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/basic.py  1094
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/code.py  1420
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/config.py  314
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/display.py  98
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/execution.py  2616
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/extension.py  134
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/history.py  636
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/logging.py  366
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/namespace.py  1408
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/osm.py  1572
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/pylab.py  332
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/script.py  528
-rw-r--r--  contrib/python/ipython/py2/IPython/core/oinspect.py  1652
-rw-r--r--  contrib/python/ipython/py2/IPython/core/page.py  738
-rw-r--r--  contrib/python/ipython/py2/IPython/core/payload.py  110
-rw-r--r--  contrib/python/ipython/py2/IPython/core/payloadpage.py  104
-rw-r--r--  contrib/python/ipython/py2/IPython/core/prefilter.py  1356
-rw-r--r--  contrib/python/ipython/py2/IPython/core/profile/README_STARTUP  22
-rw-r--r--  contrib/python/ipython/py2/IPython/core/profileapp.py  606
-rw-r--r--  contrib/python/ipython/py2/IPython/core/profiledir.py  402
-rw-r--r--  contrib/python/ipython/py2/IPython/core/prompts.py  48
-rw-r--r--  contrib/python/ipython/py2/IPython/core/pylabtools.py  756
-rw-r--r--  contrib/python/ipython/py2/IPython/core/release.py  234
-rw-r--r--  contrib/python/ipython/py2/IPython/core/shadowns.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/core/shellapp.py  724
-rw-r--r--  contrib/python/ipython/py2/IPython/core/splitinput.py  274
-rw-r--r--  contrib/python/ipython/py2/IPython/core/ultratb.py  2874
-rw-r--r--  contrib/python/ipython/py2/IPython/core/usage.py  648
-rw-r--r--  contrib/python/ipython/py2/IPython/display.py  32
-rw-r--r--  contrib/python/ipython/py2/IPython/extensions/__init__.py  4
-rw-r--r--  contrib/python/ipython/py2/IPython/extensions/autoreload.py  1068
-rw-r--r--  contrib/python/ipython/py2/IPython/extensions/cythonmagic.py  42
-rw-r--r--  contrib/python/ipython/py2/IPython/extensions/rmagic.py  24
-rw-r--r--  contrib/python/ipython/py2/IPython/extensions/storemagic.py  448
-rw-r--r--  contrib/python/ipython/py2/IPython/extensions/sympyprinting.py  64
-rw-r--r--  contrib/python/ipython/py2/IPython/external/__init__.py  10
-rw-r--r--  contrib/python/ipython/py2/IPython/external/decorators/__init__.py  18
-rw-r--r--  contrib/python/ipython/py2/IPython/external/decorators/_decorators.py  562
-rw-r--r--  contrib/python/ipython/py2/IPython/external/decorators/_numpy_testing_noseclasses.py  82
-rw-r--r--  contrib/python/ipython/py2/IPython/external/decorators/_numpy_testing_utils.py  218
-rw-r--r--  contrib/python/ipython/py2/IPython/external/mathjax.py  26
-rw-r--r--  contrib/python/ipython/py2/IPython/external/qt_for_kernel.py  182
-rw-r--r--  contrib/python/ipython/py2/IPython/external/qt_loaders.py  562
-rw-r--r--  contrib/python/ipython/py2/IPython/frontend.py  56
-rw-r--r--  contrib/python/ipython/py2/IPython/html.py  54
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/__init__.py  68
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/__main__.py  6
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/adapter.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/channels.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/channelsabc.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/client.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/clientabc.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/connect.py  4
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/kernelspec.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/kernelspecapp.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/launcher.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/manager.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/managerabc.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/multikernelmanager.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/restarter.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/threaded.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/__init__.py  42
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/backgroundjobs.py  980
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/clipboard.py  144
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/deepreload.py  712
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/demo.py  1136
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/display.py  1106
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/editorhooks.py  256
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/guisupport.py  274
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/inputhook.py  1096
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/inputhookglut.py  346
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/inputhookgtk.py  70
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/inputhookgtk3.py  68
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/inputhookpyglet.py  222
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/inputhookqt4.py  360
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/inputhookwx.py  334
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/kernel.py  26
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/latextools.py  380
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/lexers.py  1000
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/pretty.py  1626
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/security.py  228
-rw-r--r--  contrib/python/ipython/py2/IPython/nbconvert.py  36
-rw-r--r--  contrib/python/ipython/py2/IPython/nbformat.py  36
-rw-r--r--  contrib/python/ipython/py2/IPython/parallel.py  38
-rw-r--r--  contrib/python/ipython/py2/IPython/paths.py  240
-rw-r--r--  contrib/python/ipython/py2/IPython/qt.py  46
-rw-r--r--  contrib/python/ipython/py2/IPython/sphinxext/custom_doctests.py  310
-rw-r--r--  contrib/python/ipython/py2/IPython/sphinxext/ipython_console_highlighting.py  56
-rw-r--r--  contrib/python/ipython/py2/IPython/sphinxext/ipython_directive.py  2322
-rw-r--r--  contrib/python/ipython/py2/IPython/terminal/console.py  36
-rw-r--r--  contrib/python/ipython/py2/IPython/terminal/embed.py  536
-rw-r--r--  contrib/python/ipython/py2/IPython/terminal/interactiveshell.py  236
-rwxr-xr-x  contrib/python/ipython/py2/IPython/terminal/ipapp.py  674
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/__init__.py  76
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/__main__.py  6
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/decorators.py  740
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/globalipapp.py  258
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/iptest.py  856
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/iptestcontroller.py  1064
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/ipunittest.py  352
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/plugin/Makefile  148
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/plugin/README.txt  78
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/plugin/dtexample.py  316
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/plugin/ipdoctest.py  1522
-rwxr-xr-x  contrib/python/ipython/py2/IPython/testing/plugin/iptest.py  38
-rwxr-xr-x  contrib/python/ipython/py2/IPython/testing/plugin/setup.py  36
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/plugin/show_refs.py  40
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/plugin/simple.py  68
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/plugin/simplevars.py  6
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/plugin/test_combo.txt  72
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/plugin/test_example.txt  48
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/plugin/test_exampleip.txt  60
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/plugin/test_ipdoctest.py  160
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/plugin/test_refs.py  92
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/skipdoctest.py  76
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/tools.py  892
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/PyColorize.py  680
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/_process_cli.py  156
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/_process_common.py  428
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/_process_posix.py  450
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/_process_win32.py  384
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/_process_win32_controller.py  1154
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/_signatures.py  1626
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/_sysinfo.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/_tokenize_py2.py  878
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/_tokenize_py3.py  1190
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/capture.py  312
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/coloransi.py  374
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/contexts.py  126
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/daemonize.py  8
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/data.py  74
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/decorators.py  116
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/dir2.py  90
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/encoding.py  142
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/eventful.py  14
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/frame.py  196
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/generics.py  68
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/importstring.py  78
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/io.py  440
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/ipstruct.py  782
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/jsonutil.py  10
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/localinterfaces.py  10
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/log.py  14
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/module_paths.py  250
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/openpy.py  496
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/path.py  862
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/pickleutil.py  10
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/process.py  212
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/py3compat.py  668
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/rlineimpl.py  148
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/sentinel.py  34
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/shimmodule.py  184
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/signatures.py  22
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/strdispatch.py  136
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/sysinfo.py  334
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/syspathcontext.py  142
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/tempdir.py  290
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/terminal.py  222
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/text.py  1552
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/timing.py  236
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/tokenize2.py  18
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/tokenutil.py  254
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/traitlets.py  14
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/tz.py  92
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/ulinecache.py  90
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/version.py  72
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/warn.py  92
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/wildcard.py  224
204 files changed, 45673 insertions, 45673 deletions
diff --git a/contrib/python/ipython/py2/IPython/__init__.py b/contrib/python/ipython/py2/IPython/__init__.py
index aa5121da24..9b450da6a0 100644
--- a/contrib/python/ipython/py2/IPython/__init__.py
+++ b/contrib/python/ipython/py2/IPython/__init__.py
@@ -1,146 +1,146 @@
-# encoding: utf-8
-"""
-IPython: tools for interactive and parallel computing in Python.
-
-http://ipython.org
-"""
-#-----------------------------------------------------------------------------
-# Copyright (c) 2008-2011, IPython Development Team.
-# Copyright (c) 2001-2007, Fernando Perez <fernando.perez@colorado.edu>
-# Copyright (c) 2001, Janko Hauser <jhauser@zscout.de>
-# Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-from __future__ import absolute_import
-
-import os
-import sys
-import warnings
-
-#-----------------------------------------------------------------------------
-# Setup everything
-#-----------------------------------------------------------------------------
-
-# Don't forget to also update setup.py when this changes!
-v = sys.version_info
-if v[:2] < (2,7) or (v[0] >= 3 and v[:2] < (3,3)):
- raise ImportError('IPython requires Python version 2.7 or 3.3 or above.')
-del v
-
-# Make it easy to import extensions - they are always directly on pythonpath.
-# Therefore, non-IPython modules can be added to extensions directory.
-# This should probably be in ipapp.py.
-sys.path.append(os.path.join(os.path.dirname(__file__), "extensions"))
-
-#-----------------------------------------------------------------------------
-# Setup the top level names
-#-----------------------------------------------------------------------------
-
-from .core.getipython import get_ipython
-from .core import release
-from .core.application import Application
-from .terminal.embed import embed
-
-from .core.interactiveshell import InteractiveShell
-from .testing import test
-from .utils.sysinfo import sys_info
-from .utils.frame import extract_module_locals
-
-# Release data
-__author__ = '%s <%s>' % (release.author, release.author_email)
-__license__ = release.license
-__version__ = release.version
-version_info = release.version_info
-
-def embed_kernel(module=None, local_ns=None, **kwargs):
- """Embed and start an IPython kernel in a given scope.
-
- If you don't want the kernel to initialize the namespace
- from the scope of the surrounding function,
- and/or you want to load full IPython configuration,
- you probably want `IPython.start_kernel()` instead.
-
- Parameters
- ----------
- module : ModuleType, optional
- The module to load into IPython globals (default: caller)
- local_ns : dict, optional
- The namespace to load into IPython user namespace (default: caller)
-
- kwargs : various, optional
- Further keyword args are relayed to the IPKernelApp constructor,
- allowing configuration of the Kernel. Will only have an effect
- on the first embed_kernel call for a given process.
- """
-
- (caller_module, caller_locals) = extract_module_locals(1)
- if module is None:
- module = caller_module
- if local_ns is None:
- local_ns = caller_locals
-
- # Only import .zmq when we really need it
- from ipykernel.embed import embed_kernel as real_embed_kernel
- real_embed_kernel(module=module, local_ns=local_ns, **kwargs)
-
-def start_ipython(argv=None, **kwargs):
- """Launch a normal IPython instance (as opposed to embedded)
-
- `IPython.embed()` puts a shell in a particular calling scope,
- such as a function or method for debugging purposes,
- which is often not desirable.
-
- `start_ipython()` does full, regular IPython initialization,
- including loading startup files, configuration, etc.
- much of which is skipped by `embed()`.
-
- This is a public API method, and will survive implementation changes.
-
- Parameters
- ----------
-
- argv : list or None, optional
- If unspecified or None, IPython will parse command-line options from sys.argv.
- To prevent any command-line parsing, pass an empty list: `argv=[]`.
- user_ns : dict, optional
- specify this dictionary to initialize the IPython user namespace with particular values.
- kwargs : various, optional
- Any other kwargs will be passed to the Application constructor,
- such as `config`.
- """
- from IPython.terminal.ipapp import launch_new_instance
- return launch_new_instance(argv=argv, **kwargs)
-
-def start_kernel(argv=None, **kwargs):
- """Launch a normal IPython kernel instance (as opposed to embedded)
-
- `IPython.embed_kernel()` puts a shell in a particular calling scope,
- such as a function or method for debugging purposes,
- which is often not desirable.
-
- `start_kernel()` does full, regular IPython initialization,
- including loading startup files, configuration, etc.
- much of which is skipped by `embed()`.
-
- Parameters
- ----------
-
- argv : list or None, optional
- If unspecified or None, IPython will parse command-line options from sys.argv.
- To prevent any command-line parsing, pass an empty list: `argv=[]`.
- user_ns : dict, optional
- specify this dictionary to initialize the IPython user namespace with particular values.
- kwargs : various, optional
- Any other kwargs will be passed to the Application constructor,
- such as `config`.
- """
- from IPython.kernel.zmq.kernelapp import launch_new_instance
- return launch_new_instance(argv=argv, **kwargs)
-
+# encoding: utf-8
+"""
+IPython: tools for interactive and parallel computing in Python.
+
+http://ipython.org
+"""
+#-----------------------------------------------------------------------------
+# Copyright (c) 2008-2011, IPython Development Team.
+# Copyright (c) 2001-2007, Fernando Perez <fernando.perez@colorado.edu>
+# Copyright (c) 2001, Janko Hauser <jhauser@zscout.de>
+# Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+from __future__ import absolute_import
+
+import os
+import sys
+import warnings
+
+#-----------------------------------------------------------------------------
+# Setup everything
+#-----------------------------------------------------------------------------
+
+# Don't forget to also update setup.py when this changes!
+v = sys.version_info
+if v[:2] < (2,7) or (v[0] >= 3 and v[:2] < (3,3)):
+ raise ImportError('IPython requires Python version 2.7 or 3.3 or above.')
+del v
+
+# Make it easy to import extensions - they are always directly on pythonpath.
+# Therefore, non-IPython modules can be added to extensions directory.
+# This should probably be in ipapp.py.
+sys.path.append(os.path.join(os.path.dirname(__file__), "extensions"))
+
+#-----------------------------------------------------------------------------
+# Setup the top level names
+#-----------------------------------------------------------------------------
+
+from .core.getipython import get_ipython
+from .core import release
+from .core.application import Application
+from .terminal.embed import embed
+
+from .core.interactiveshell import InteractiveShell
+from .testing import test
+from .utils.sysinfo import sys_info
+from .utils.frame import extract_module_locals
+
+# Release data
+__author__ = '%s <%s>' % (release.author, release.author_email)
+__license__ = release.license
+__version__ = release.version
+version_info = release.version_info
+
+def embed_kernel(module=None, local_ns=None, **kwargs):
+ """Embed and start an IPython kernel in a given scope.
+
+ If you don't want the kernel to initialize the namespace
+ from the scope of the surrounding function,
+ and/or you want to load full IPython configuration,
+ you probably want `IPython.start_kernel()` instead.
+
+ Parameters
+ ----------
+ module : ModuleType, optional
+ The module to load into IPython globals (default: caller)
+ local_ns : dict, optional
+ The namespace to load into IPython user namespace (default: caller)
+
+ kwargs : various, optional
+ Further keyword args are relayed to the IPKernelApp constructor,
+ allowing configuration of the Kernel. Will only have an effect
+ on the first embed_kernel call for a given process.
+ """
+
+ (caller_module, caller_locals) = extract_module_locals(1)
+ if module is None:
+ module = caller_module
+ if local_ns is None:
+ local_ns = caller_locals
+
+ # Only import .zmq when we really need it
+ from ipykernel.embed import embed_kernel as real_embed_kernel
+ real_embed_kernel(module=module, local_ns=local_ns, **kwargs)
+
+def start_ipython(argv=None, **kwargs):
+ """Launch a normal IPython instance (as opposed to embedded)
+
+ `IPython.embed()` puts a shell in a particular calling scope,
+ such as a function or method for debugging purposes,
+ which is often not desirable.
+
+ `start_ipython()` does full, regular IPython initialization,
+ including loading startup files, configuration, etc.
+ much of which is skipped by `embed()`.
+
+ This is a public API method, and will survive implementation changes.
+
+ Parameters
+ ----------
+
+ argv : list or None, optional
+ If unspecified or None, IPython will parse command-line options from sys.argv.
+ To prevent any command-line parsing, pass an empty list: `argv=[]`.
+ user_ns : dict, optional
+ specify this dictionary to initialize the IPython user namespace with particular values.
+ kwargs : various, optional
+ Any other kwargs will be passed to the Application constructor,
+ such as `config`.
+ """
+ from IPython.terminal.ipapp import launch_new_instance
+ return launch_new_instance(argv=argv, **kwargs)
+
+def start_kernel(argv=None, **kwargs):
+ """Launch a normal IPython kernel instance (as opposed to embedded)
+
+ `IPython.embed_kernel()` puts a shell in a particular calling scope,
+ such as a function or method for debugging purposes,
+ which is often not desirable.
+
+ `start_kernel()` does full, regular IPython initialization,
+ including loading startup files, configuration, etc.
+ much of which is skipped by `embed()`.
+
+ Parameters
+ ----------
+
+ argv : list or None, optional
+ If unspecified or None, IPython will parse command-line options from sys.argv.
+ To prevent any command-line parsing, pass an empty list: `argv=[]`.
+ user_ns : dict, optional
+ specify this dictionary to initialize the IPython user namespace with particular values.
+ kwargs : various, optional
+ Any other kwargs will be passed to the Application constructor,
+ such as `config`.
+ """
+ from IPython.kernel.zmq.kernelapp import launch_new_instance
+ return launch_new_instance(argv=argv, **kwargs)
+
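For reference, the public entry points whose docstrings appear in the __init__.py hunk above can be exercised roughly as follows. This is a minimal sketch based on those docstrings, not part of the diff; the debug_here helper and the 'answer' variable are illustrative only, and it assumes IPython (and ipykernel, for embed_kernel) is installed.

import IPython

# Inside a function, embed a kernel exposing the caller's locals so a
# frontend can connect to it, as described by embed_kernel() above.
def debug_here():
    local_value = 'inspect me'          # illustrative local variable
    IPython.embed_kernel(local_ns=locals())

# Start a full IPython session: argv=[] suppresses command-line parsing and
# user_ns seeds the interactive namespace, per the start_ipython() docstring.
IPython.start_ipython(argv=[], user_ns={'answer': 42, 'debug_here': debug_here})
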
diff --git a/contrib/python/ipython/py2/IPython/__main__.py b/contrib/python/ipython/py2/IPython/__main__.py
index 2e142249b5..d5123f33a2 100644
--- a/contrib/python/ipython/py2/IPython/__main__.py
+++ b/contrib/python/ipython/py2/IPython/__main__.py
@@ -1,14 +1,14 @@
-# encoding: utf-8
-"""Terminal-based IPython entry point.
-"""
-#-----------------------------------------------------------------------------
-# Copyright (c) 2012, IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from IPython import start_ipython
-
-start_ipython()
+# encoding: utf-8
+"""Terminal-based IPython entry point.
+"""
+#-----------------------------------------------------------------------------
+# Copyright (c) 2012, IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from IPython import start_ipython
+
+start_ipython()
diff --git a/contrib/python/ipython/py2/IPython/config.py b/contrib/python/ipython/py2/IPython/config.py
index ac8d0aa4bd..cf2bacafad 100644
--- a/contrib/python/ipython/py2/IPython/config.py
+++ b/contrib/python/ipython/py2/IPython/config.py
@@ -1,19 +1,19 @@
-"""
-Shim to maintain backwards compatibility with old IPython.config imports.
-"""
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import sys
-from warnings import warn
-
-from IPython.utils.shimmodule import ShimModule, ShimWarning
-
+"""
+Shim to maintain backwards compatibility with old IPython.config imports.
+"""
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import sys
+from warnings import warn
+
+from IPython.utils.shimmodule import ShimModule, ShimWarning
+
warn("The `IPython.config` package has been deprecated since IPython 4.0. "
- "You should import from traitlets.config instead.", ShimWarning)
-
-
-# Unconditionally insert the shim into sys.modules so that further import calls
-# trigger the custom attribute access above
-
-sys.modules['IPython.config'] = ShimModule(src='IPython.config', mirror='traitlets.config')
+ "You should import from traitlets.config instead.", ShimWarning)
+
+
+# Unconditionally insert the shim into sys.modules so that further import calls
+# trigger the custom attribute access above
+
+sys.modules['IPython.config'] = ShimModule(src='IPython.config', mirror='traitlets.config')
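The effect of the shim above can be sketched from a plain interpreter. This snippet is not part of the diff; it assumes traitlets is installed and that ShimModule mirrors attribute access onto traitlets.config as the comments above state.

import warnings
warnings.simplefilter('ignore')      # the import below emits a ShimWarning

import IPython.config               # served by the ShimModule in sys.modules
import traitlets.config

# Attribute access on the deprecated package resolves to traitlets.config.
print(IPython.config.Configurable is traitlets.config.Configurable)   # expected: True
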
diff --git a/contrib/python/ipython/py2/IPython/consoleapp.py b/contrib/python/ipython/py2/IPython/consoleapp.py
index e2ffbbf664..14903bdc74 100644
--- a/contrib/python/ipython/py2/IPython/consoleapp.py
+++ b/contrib/python/ipython/py2/IPython/consoleapp.py
@@ -1,12 +1,12 @@
-"""
-Shim to maintain backwards compatibility with old IPython.consoleapp imports.
-"""
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from warnings import warn
-
-warn("The `IPython.consoleapp` package has been deprecated. "
- "You should import from jupyter_client.consoleapp instead.")
-
-from jupyter_client.consoleapp import *
+"""
+Shim to maintain backwards compatibility with old IPython.consoleapp imports.
+"""
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from warnings import warn
+
+warn("The `IPython.consoleapp` package has been deprecated. "
+ "You should import from jupyter_client.consoleapp instead.")
+
+from jupyter_client.consoleapp import *
diff --git a/contrib/python/ipython/py2/IPython/core/alias.py b/contrib/python/ipython/py2/IPython/core/alias.py
index 66ba986b40..28a9ccb00d 100644
--- a/contrib/python/ipython/py2/IPython/core/alias.py
+++ b/contrib/python/ipython/py2/IPython/core/alias.py
@@ -1,257 +1,257 @@
-# encoding: utf-8
-"""
-System command aliases.
-
-Authors:
-
-* Fernando Perez
-* Brian Granger
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import os
-import re
-import sys
-
-from traitlets.config.configurable import Configurable
-from IPython.core.error import UsageError
-
-from IPython.utils.py3compat import string_types
-from traitlets import List, Instance
+# encoding: utf-8
+"""
+System command aliases.
+
+Authors:
+
+* Fernando Perez
+* Brian Granger
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import os
+import re
+import sys
+
+from traitlets.config.configurable import Configurable
+from IPython.core.error import UsageError
+
+from IPython.utils.py3compat import string_types
+from traitlets import List, Instance
from logging import error
-
-#-----------------------------------------------------------------------------
-# Utilities
-#-----------------------------------------------------------------------------
-
-# This is used as the pattern for calls to split_user_input.
-shell_line_split = re.compile(r'^(\s*)()(\S+)(.*$)')
-
-def default_aliases():
- """Return list of shell aliases to auto-define.
- """
- # Note: the aliases defined here should be safe to use on a kernel
- # regardless of what frontend it is attached to. Frontends that use a
- # kernel in-process can define additional aliases that will only work in
- # their case. For example, things like 'less' or 'clear' that manipulate
- # the terminal should NOT be declared here, as they will only work if the
- # kernel is running inside a true terminal, and not over the network.
-
- if os.name == 'posix':
- default_aliases = [('mkdir', 'mkdir'), ('rmdir', 'rmdir'),
- ('mv', 'mv'), ('rm', 'rm'), ('cp', 'cp'),
- ('cat', 'cat'),
- ]
- # Useful set of ls aliases. The GNU and BSD options are a little
- # different, so we make aliases that provide as similar as possible
- # behavior in ipython, by passing the right flags for each platform
- if sys.platform.startswith('linux'):
- ls_aliases = [('ls', 'ls -F --color'),
- # long ls
- ('ll', 'ls -F -o --color'),
- # ls normal files only
- ('lf', 'ls -F -o --color %l | grep ^-'),
- # ls symbolic links
- ('lk', 'ls -F -o --color %l | grep ^l'),
- # directories or links to directories,
- ('ldir', 'ls -F -o --color %l | grep /$'),
- # things which are executable
- ('lx', 'ls -F -o --color %l | grep ^-..x'),
- ]
- elif sys.platform.startswith('openbsd') or sys.platform.startswith('netbsd'):
- # OpenBSD, NetBSD. The ls implementation on these platforms do not support
- # the -G switch and lack the ability to use colorized output.
- ls_aliases = [('ls', 'ls -F'),
- # long ls
- ('ll', 'ls -F -l'),
- # ls normal files only
- ('lf', 'ls -F -l %l | grep ^-'),
- # ls symbolic links
- ('lk', 'ls -F -l %l | grep ^l'),
- # directories or links to directories,
- ('ldir', 'ls -F -l %l | grep /$'),
- # things which are executable
- ('lx', 'ls -F -l %l | grep ^-..x'),
- ]
- else:
- # BSD, OSX, etc.
- ls_aliases = [('ls', 'ls -F -G'),
- # long ls
- ('ll', 'ls -F -l -G'),
- # ls normal files only
- ('lf', 'ls -F -l -G %l | grep ^-'),
- # ls symbolic links
- ('lk', 'ls -F -l -G %l | grep ^l'),
- # directories or links to directories,
- ('ldir', 'ls -F -G -l %l | grep /$'),
- # things which are executable
- ('lx', 'ls -F -l -G %l | grep ^-..x'),
- ]
- default_aliases = default_aliases + ls_aliases
- elif os.name in ['nt', 'dos']:
- default_aliases = [('ls', 'dir /on'),
- ('ddir', 'dir /ad /on'), ('ldir', 'dir /ad /on'),
- ('mkdir', 'mkdir'), ('rmdir', 'rmdir'),
- ('echo', 'echo'), ('ren', 'ren'), ('copy', 'copy'),
- ]
- else:
- default_aliases = []
-
- return default_aliases
-
-
-class AliasError(Exception):
- pass
-
-
-class InvalidAliasError(AliasError):
- pass
-
-class Alias(object):
- """Callable object storing the details of one alias.
-
- Instances are registered as magic functions to allow use of aliases.
- """
-
- # Prepare blacklist
- blacklist = {'cd','popd','pushd','dhist','alias','unalias'}
-
- def __init__(self, shell, name, cmd):
- self.shell = shell
- self.name = name
- self.cmd = cmd
- self.__doc__ = "Alias for `!{}`".format(cmd)
- self.nargs = self.validate()
-
- def validate(self):
- """Validate the alias, and return the number of arguments."""
- if self.name in self.blacklist:
- raise InvalidAliasError("The name %s can't be aliased "
- "because it is a keyword or builtin." % self.name)
- try:
- caller = self.shell.magics_manager.magics['line'][self.name]
- except KeyError:
- pass
- else:
- if not isinstance(caller, Alias):
- raise InvalidAliasError("The name %s can't be aliased "
- "because it is another magic command." % self.name)
-
- if not (isinstance(self.cmd, string_types)):
- raise InvalidAliasError("An alias command must be a string, "
- "got: %r" % self.cmd)
-
- nargs = self.cmd.count('%s') - self.cmd.count('%%s')
-
- if (nargs > 0) and (self.cmd.find('%l') >= 0):
- raise InvalidAliasError('The %s and %l specifiers are mutually '
- 'exclusive in alias definitions.')
-
- return nargs
-
- def __repr__(self):
- return "<alias {} for {!r}>".format(self.name, self.cmd)
-
- def __call__(self, rest=''):
- cmd = self.cmd
- nargs = self.nargs
- # Expand the %l special to be the user's input line
- if cmd.find('%l') >= 0:
- cmd = cmd.replace('%l', rest)
- rest = ''
-
- if nargs==0:
- if cmd.find('%%s') >= 1:
- cmd = cmd.replace('%%s', '%s')
- # Simple, argument-less aliases
- cmd = '%s %s' % (cmd, rest)
- else:
- # Handle aliases with positional arguments
- args = rest.split(None, nargs)
- if len(args) < nargs:
- raise UsageError('Alias <%s> requires %s arguments, %s given.' %
- (self.name, nargs, len(args)))
- cmd = '%s %s' % (cmd % tuple(args[:nargs]),' '.join(args[nargs:]))
-
- self.shell.system(cmd)
-
-#-----------------------------------------------------------------------------
-# Main AliasManager class
-#-----------------------------------------------------------------------------
-
-class AliasManager(Configurable):
-
+
+#-----------------------------------------------------------------------------
+# Utilities
+#-----------------------------------------------------------------------------
+
+# This is used as the pattern for calls to split_user_input.
+shell_line_split = re.compile(r'^(\s*)()(\S+)(.*$)')
+
+def default_aliases():
+ """Return list of shell aliases to auto-define.
+ """
+ # Note: the aliases defined here should be safe to use on a kernel
+ # regardless of what frontend it is attached to. Frontends that use a
+ # kernel in-process can define additional aliases that will only work in
+ # their case. For example, things like 'less' or 'clear' that manipulate
+ # the terminal should NOT be declared here, as they will only work if the
+ # kernel is running inside a true terminal, and not over the network.
+
+ if os.name == 'posix':
+ default_aliases = [('mkdir', 'mkdir'), ('rmdir', 'rmdir'),
+ ('mv', 'mv'), ('rm', 'rm'), ('cp', 'cp'),
+ ('cat', 'cat'),
+ ]
+ # Useful set of ls aliases. The GNU and BSD options are a little
+ # different, so we make aliases that provide as similar as possible
+ # behavior in ipython, by passing the right flags for each platform
+ if sys.platform.startswith('linux'):
+ ls_aliases = [('ls', 'ls -F --color'),
+ # long ls
+ ('ll', 'ls -F -o --color'),
+ # ls normal files only
+ ('lf', 'ls -F -o --color %l | grep ^-'),
+ # ls symbolic links
+ ('lk', 'ls -F -o --color %l | grep ^l'),
+ # directories or links to directories,
+ ('ldir', 'ls -F -o --color %l | grep /$'),
+ # things which are executable
+ ('lx', 'ls -F -o --color %l | grep ^-..x'),
+ ]
+ elif sys.platform.startswith('openbsd') or sys.platform.startswith('netbsd'):
+ # OpenBSD, NetBSD. The ls implementation on these platforms do not support
+ # the -G switch and lack the ability to use colorized output.
+ ls_aliases = [('ls', 'ls -F'),
+ # long ls
+ ('ll', 'ls -F -l'),
+ # ls normal files only
+ ('lf', 'ls -F -l %l | grep ^-'),
+ # ls symbolic links
+ ('lk', 'ls -F -l %l | grep ^l'),
+ # directories or links to directories,
+ ('ldir', 'ls -F -l %l | grep /$'),
+ # things which are executable
+ ('lx', 'ls -F -l %l | grep ^-..x'),
+ ]
+ else:
+ # BSD, OSX, etc.
+ ls_aliases = [('ls', 'ls -F -G'),
+ # long ls
+ ('ll', 'ls -F -l -G'),
+ # ls normal files only
+ ('lf', 'ls -F -l -G %l | grep ^-'),
+ # ls symbolic links
+ ('lk', 'ls -F -l -G %l | grep ^l'),
+ # directories or links to directories,
+ ('ldir', 'ls -F -G -l %l | grep /$'),
+ # things which are executable
+ ('lx', 'ls -F -l -G %l | grep ^-..x'),
+ ]
+ default_aliases = default_aliases + ls_aliases
+ elif os.name in ['nt', 'dos']:
+ default_aliases = [('ls', 'dir /on'),
+ ('ddir', 'dir /ad /on'), ('ldir', 'dir /ad /on'),
+ ('mkdir', 'mkdir'), ('rmdir', 'rmdir'),
+ ('echo', 'echo'), ('ren', 'ren'), ('copy', 'copy'),
+ ]
+ else:
+ default_aliases = []
+
+ return default_aliases
+
+
+class AliasError(Exception):
+ pass
+
+
+class InvalidAliasError(AliasError):
+ pass
+
+class Alias(object):
+ """Callable object storing the details of one alias.
+
+ Instances are registered as magic functions to allow use of aliases.
+ """
+
+ # Prepare blacklist
+ blacklist = {'cd','popd','pushd','dhist','alias','unalias'}
+
+ def __init__(self, shell, name, cmd):
+ self.shell = shell
+ self.name = name
+ self.cmd = cmd
+ self.__doc__ = "Alias for `!{}`".format(cmd)
+ self.nargs = self.validate()
+
+ def validate(self):
+ """Validate the alias, and return the number of arguments."""
+ if self.name in self.blacklist:
+ raise InvalidAliasError("The name %s can't be aliased "
+ "because it is a keyword or builtin." % self.name)
+ try:
+ caller = self.shell.magics_manager.magics['line'][self.name]
+ except KeyError:
+ pass
+ else:
+ if not isinstance(caller, Alias):
+ raise InvalidAliasError("The name %s can't be aliased "
+ "because it is another magic command." % self.name)
+
+ if not (isinstance(self.cmd, string_types)):
+ raise InvalidAliasError("An alias command must be a string, "
+ "got: %r" % self.cmd)
+
+ nargs = self.cmd.count('%s') - self.cmd.count('%%s')
+
+ if (nargs > 0) and (self.cmd.find('%l') >= 0):
+ raise InvalidAliasError('The %s and %l specifiers are mutually '
+ 'exclusive in alias definitions.')
+
+ return nargs
+
+ def __repr__(self):
+ return "<alias {} for {!r}>".format(self.name, self.cmd)
+
+ def __call__(self, rest=''):
+ cmd = self.cmd
+ nargs = self.nargs
+ # Expand the %l special to be the user's input line
+ if cmd.find('%l') >= 0:
+ cmd = cmd.replace('%l', rest)
+ rest = ''
+
+ if nargs==0:
+ if cmd.find('%%s') >= 1:
+ cmd = cmd.replace('%%s', '%s')
+ # Simple, argument-less aliases
+ cmd = '%s %s' % (cmd, rest)
+ else:
+ # Handle aliases with positional arguments
+ args = rest.split(None, nargs)
+ if len(args) < nargs:
+ raise UsageError('Alias <%s> requires %s arguments, %s given.' %
+ (self.name, nargs, len(args)))
+ cmd = '%s %s' % (cmd % tuple(args[:nargs]),' '.join(args[nargs:]))
+
+ self.shell.system(cmd)
+
+#-----------------------------------------------------------------------------
+# Main AliasManager class
+#-----------------------------------------------------------------------------
+
+class AliasManager(Configurable):
+
default_aliases = List(default_aliases()).tag(config=True)
user_aliases = List(default_value=[]).tag(config=True)
- shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
-
- def __init__(self, shell=None, **kwargs):
- super(AliasManager, self).__init__(shell=shell, **kwargs)
- # For convenient access
- self.linemagics = self.shell.magics_manager.magics['line']
- self.init_aliases()
-
- def init_aliases(self):
- # Load default & user aliases
- for name, cmd in self.default_aliases + self.user_aliases:
- self.soft_define_alias(name, cmd)
-
- @property
- def aliases(self):
- return [(n, func.cmd) for (n, func) in self.linemagics.items()
- if isinstance(func, Alias)]
-
- def soft_define_alias(self, name, cmd):
- """Define an alias, but don't raise on an AliasError."""
- try:
- self.define_alias(name, cmd)
- except AliasError as e:
- error("Invalid alias: %s" % e)
-
- def define_alias(self, name, cmd):
- """Define a new alias after validating it.
-
- This will raise an :exc:`AliasError` if there are validation
- problems.
- """
- caller = Alias(shell=self.shell, name=name, cmd=cmd)
- self.shell.magics_manager.register_function(caller, magic_kind='line',
- magic_name=name)
-
- def get_alias(self, name):
- """Return an alias, or None if no alias by that name exists."""
- aname = self.linemagics.get(name, None)
- return aname if isinstance(aname, Alias) else None
-
- def is_alias(self, name):
- """Return whether or not a given name has been defined as an alias"""
- return self.get_alias(name) is not None
-
- def undefine_alias(self, name):
- if self.is_alias(name):
- del self.linemagics[name]
- else:
- raise ValueError('%s is not an alias' % name)
-
- def clear_aliases(self):
- for name, cmd in self.aliases:
- self.undefine_alias(name)
-
- def retrieve_alias(self, name):
- """Retrieve the command to which an alias expands."""
- caller = self.get_alias(name)
- if caller:
- return caller.cmd
- else:
- raise ValueError('%s is not an alias' % name)
+ shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
+
+ def __init__(self, shell=None, **kwargs):
+ super(AliasManager, self).__init__(shell=shell, **kwargs)
+ # For convenient access
+ self.linemagics = self.shell.magics_manager.magics['line']
+ self.init_aliases()
+
+ def init_aliases(self):
+ # Load default & user aliases
+ for name, cmd in self.default_aliases + self.user_aliases:
+ self.soft_define_alias(name, cmd)
+
+ @property
+ def aliases(self):
+ return [(n, func.cmd) for (n, func) in self.linemagics.items()
+ if isinstance(func, Alias)]
+
+ def soft_define_alias(self, name, cmd):
+ """Define an alias, but don't raise on an AliasError."""
+ try:
+ self.define_alias(name, cmd)
+ except AliasError as e:
+ error("Invalid alias: %s" % e)
+
+ def define_alias(self, name, cmd):
+ """Define a new alias after validating it.
+
+ This will raise an :exc:`AliasError` if there are validation
+ problems.
+ """
+ caller = Alias(shell=self.shell, name=name, cmd=cmd)
+ self.shell.magics_manager.register_function(caller, magic_kind='line',
+ magic_name=name)
+
+ def get_alias(self, name):
+ """Return an alias, or None if no alias by that name exists."""
+ aname = self.linemagics.get(name, None)
+ return aname if isinstance(aname, Alias) else None
+
+ def is_alias(self, name):
+ """Return whether or not a given name has been defined as an alias"""
+ return self.get_alias(name) is not None
+
+ def undefine_alias(self, name):
+ if self.is_alias(name):
+ del self.linemagics[name]
+ else:
+ raise ValueError('%s is not an alias' % name)
+
+ def clear_aliases(self):
+ for name, cmd in self.aliases:
+ self.undefine_alias(name)
+
+ def retrieve_alias(self, name):
+ """Retrieve the command to which an alias expands."""
+ caller = self.get_alias(name)
+ if caller:
+ return caller.cmd
+ else:
+ raise ValueError('%s is not an alias' % name)
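To illustrate the alias machinery defined above, a hypothetical interactive snippet (not part of the diff): it assumes it runs inside an IPython session, where get_ipython() returns the active shell and shell.alias_manager is the AliasManager instance; the alias names 'show' and 'lpy' are made up for the example.

ip = get_ipython()
am = ip.alias_manager

# Each %s in the command consumes one positional argument (counted in
# Alias.validate), so 'show' requires exactly one argument when invoked.
am.define_alias('show', 'cat %s')
print(am.retrieve_alias('show'))        # -> 'cat %s'

# A %l alias receives the whole remainder of the input line instead.
am.define_alias('lpy', 'ls -F %l | grep .py')

am.undefine_alias('show')               # raises ValueError if 'show' is not an alias
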
diff --git a/contrib/python/ipython/py2/IPython/core/application.py b/contrib/python/ipython/py2/IPython/core/application.py
index 99e94a342a..af28133945 100644
--- a/contrib/python/ipython/py2/IPython/core/application.py
+++ b/contrib/python/ipython/py2/IPython/core/application.py
@@ -1,49 +1,49 @@
-# encoding: utf-8
-"""
-An application for IPython.
-
-All top-level applications should use the classes in this module for
-handling configuration and creating configurables.
-
-The job of an :class:`Application` is to create the master configuration
-object and then create the configurable objects, passing the config to them.
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import atexit
+# encoding: utf-8
+"""
+An application for IPython.
+
+All top-level applications should use the classes in this module for
+handling configuration and creating configurables.
+
+The job of an :class:`Application` is to create the master configuration
+object and then create the configurable objects, passing the config to them.
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import atexit
from copy import deepcopy
-import glob
-import logging
-import os
-import shutil
-import sys
-
-from traitlets.config.application import Application, catch_config_error
-from traitlets.config.loader import ConfigFileNotFound, PyFileConfigLoader
-from IPython.core import release, crashhandler
-from IPython.core.profiledir import ProfileDir, ProfileDirError
-from IPython.paths import get_ipython_dir, get_ipython_package_dir
-from IPython.utils.path import ensure_dir_exists
-from IPython.utils import py3compat
+import glob
+import logging
+import os
+import shutil
+import sys
+
+from traitlets.config.application import Application, catch_config_error
+from traitlets.config.loader import ConfigFileNotFound, PyFileConfigLoader
+from IPython.core import release, crashhandler
+from IPython.core.profiledir import ProfileDir, ProfileDirError
+from IPython.paths import get_ipython_dir, get_ipython_package_dir
+from IPython.utils.path import ensure_dir_exists
+from IPython.utils import py3compat
from traitlets import (
List, Unicode, Type, Bool, Dict, Set, Instance, Undefined,
default, observe,
)
-
-if os.name == 'nt':
- programdata = os.environ.get('PROGRAMDATA', None)
- if programdata:
- SYSTEM_CONFIG_DIRS = [os.path.join(programdata, 'ipython')]
- else: # PROGRAMDATA is not defined by default on XP.
- SYSTEM_CONFIG_DIRS = []
-else:
- SYSTEM_CONFIG_DIRS = [
- "/usr/local/etc/ipython",
- "/etc/ipython",
- ]
-
+
+if os.name == 'nt':
+ programdata = os.environ.get('PROGRAMDATA', None)
+ if programdata:
+ SYSTEM_CONFIG_DIRS = [os.path.join(programdata, 'ipython')]
+ else: # PROGRAMDATA is not defined by default on XP.
+ SYSTEM_CONFIG_DIRS = []
+else:
+ SYSTEM_CONFIG_DIRS = [
+ "/usr/local/etc/ipython",
+ "/etc/ipython",
+ ]
+
ENV_CONFIG_DIRS = []
_env_config_dir = os.path.join(sys.prefix, 'etc', 'ipython')
@@ -62,241 +62,241 @@ else:
IPYTHON_SUPPRESS_CONFIG_ERRORS = False
else:
sys.exit("Unsupported value for environment variable: 'IPYTHON_SUPPRESS_CONFIG_ERRORS' is set to '%s' which is none of {'0', '1', 'false', 'true', ''}."% _envvar )
-
-# aliases and flags
-
-base_aliases = {
- 'profile-dir' : 'ProfileDir.location',
- 'profile' : 'BaseIPythonApplication.profile',
- 'ipython-dir' : 'BaseIPythonApplication.ipython_dir',
- 'log-level' : 'Application.log_level',
- 'config' : 'BaseIPythonApplication.extra_config_file',
-}
-
-base_flags = dict(
- debug = ({'Application' : {'log_level' : logging.DEBUG}},
- "set log level to logging.DEBUG (maximize logging output)"),
- quiet = ({'Application' : {'log_level' : logging.CRITICAL}},
- "set log level to logging.CRITICAL (minimize logging output)"),
- init = ({'BaseIPythonApplication' : {
- 'copy_config_files' : True,
- 'auto_create' : True}
- }, """Initialize profile with default config files. This is equivalent
- to running `ipython profile create <profile>` prior to startup.
- """)
-)
-
-class ProfileAwareConfigLoader(PyFileConfigLoader):
- """A Python file config loader that is aware of IPython profiles."""
- def load_subconfig(self, fname, path=None, profile=None):
- if profile is not None:
- try:
- profile_dir = ProfileDir.find_profile_dir_by_name(
- get_ipython_dir(),
- profile,
- )
- except ProfileDirError:
- return
- path = profile_dir.location
- return super(ProfileAwareConfigLoader, self).load_subconfig(fname, path=path)
-
-class BaseIPythonApplication(Application):
-
- name = Unicode(u'ipython')
- description = Unicode(u'IPython: an enhanced interactive Python shell.')
- version = Unicode(release.version)
-
- aliases = Dict(base_aliases)
- flags = Dict(base_flags)
- classes = List([ProfileDir])
-
- # enable `load_subconfig('cfg.py', profile='name')`
- python_config_loader_class = ProfileAwareConfigLoader
-
- # Track whether the config_file has changed,
- # because some logic happens only if we aren't using the default.
- config_file_specified = Set()
-
- config_file_name = Unicode()
+
+# aliases and flags
+
+base_aliases = {
+ 'profile-dir' : 'ProfileDir.location',
+ 'profile' : 'BaseIPythonApplication.profile',
+ 'ipython-dir' : 'BaseIPythonApplication.ipython_dir',
+ 'log-level' : 'Application.log_level',
+ 'config' : 'BaseIPythonApplication.extra_config_file',
+}
+
+base_flags = dict(
+ debug = ({'Application' : {'log_level' : logging.DEBUG}},
+ "set log level to logging.DEBUG (maximize logging output)"),
+ quiet = ({'Application' : {'log_level' : logging.CRITICAL}},
+ "set log level to logging.CRITICAL (minimize logging output)"),
+ init = ({'BaseIPythonApplication' : {
+ 'copy_config_files' : True,
+ 'auto_create' : True}
+ }, """Initialize profile with default config files. This is equivalent
+ to running `ipython profile create <profile>` prior to startup.
+ """)
+)
+
+class ProfileAwareConfigLoader(PyFileConfigLoader):
+ """A Python file config loader that is aware of IPython profiles."""
+ def load_subconfig(self, fname, path=None, profile=None):
+ if profile is not None:
+ try:
+ profile_dir = ProfileDir.find_profile_dir_by_name(
+ get_ipython_dir(),
+ profile,
+ )
+ except ProfileDirError:
+ return
+ path = profile_dir.location
+ return super(ProfileAwareConfigLoader, self).load_subconfig(fname, path=path)
+
+class BaseIPythonApplication(Application):
+
+ name = Unicode(u'ipython')
+ description = Unicode(u'IPython: an enhanced interactive Python shell.')
+ version = Unicode(release.version)
+
+ aliases = Dict(base_aliases)
+ flags = Dict(base_flags)
+ classes = List([ProfileDir])
+
+ # enable `load_subconfig('cfg.py', profile='name')`
+ python_config_loader_class = ProfileAwareConfigLoader
+
+ # Track whether the config_file has changed,
+ # because some logic happens only if we aren't using the default.
+ config_file_specified = Set()
+
+ config_file_name = Unicode()
@default('config_file_name')
- def _config_file_name_default(self):
- return self.name.replace('-','_') + u'_config.py'
+ def _config_file_name_default(self):
+ return self.name.replace('-','_') + u'_config.py'
@observe('config_file_name')
def _config_file_name_changed(self, change):
if change['new'] != change['old']:
self.config_file_specified.add(change['new'])
-
- # The directory that contains IPython's builtin profiles.
- builtin_profile_dir = Unicode(
- os.path.join(get_ipython_package_dir(), u'config', u'profile', u'default')
- )
-
- config_file_paths = List(Unicode())
+
+ # The directory that contains IPython's builtin profiles.
+ builtin_profile_dir = Unicode(
+ os.path.join(get_ipython_package_dir(), u'config', u'profile', u'default')
+ )
+
+ config_file_paths = List(Unicode())
@default('config_file_paths')
- def _config_file_paths_default(self):
- return [py3compat.getcwd()]
-
+ def _config_file_paths_default(self):
+ return [py3compat.getcwd()]
+
extra_config_file = Unicode(
- help="""Path to an extra config file to load.
-
- If specified, load this config file in addition to any other IPython config.
+ help="""Path to an extra config file to load.
+
+ If specified, load this config file in addition to any other IPython config.
""").tag(config=True)
@observe('extra_config_file')
def _extra_config_file_changed(self, change):
old = change['old']
new = change['new']
- try:
- self.config_files.remove(old)
- except ValueError:
- pass
- self.config_file_specified.add(new)
- self.config_files.append(new)
-
+ try:
+ self.config_files.remove(old)
+ except ValueError:
+ pass
+ self.config_file_specified.add(new)
+ self.config_files.append(new)
+
profile = Unicode(u'default',
- help="""The IPython profile to use."""
+ help="""The IPython profile to use."""
).tag(config=True)
@observe('profile')
def _profile_changed(self, change):
- self.builtin_profile_dir = os.path.join(
+ self.builtin_profile_dir = os.path.join(
get_ipython_package_dir(), u'config', u'profile', change['new']
- )
-
+ )
+
ipython_dir = Unicode(
- help="""
- The name of the IPython directory. This directory is used for logging
- configuration (through profiles), history storage, etc. The default
- is usually $HOME/.ipython. This option can also be specified through
- the environment variable IPYTHONDIR.
- """
+ help="""
+ The name of the IPython directory. This directory is used for logging
+ configuration (through profiles), history storage, etc. The default
+ is usually $HOME/.ipython. This option can also be specified through
+ the environment variable IPYTHONDIR.
+ """
).tag(config=True)
@default('ipython_dir')
- def _ipython_dir_default(self):
- d = get_ipython_dir()
+ def _ipython_dir_default(self):
+ d = get_ipython_dir()
self._ipython_dir_changed({
'name': 'ipython_dir',
'old': d,
'new': d,
})
- return d
-
- _in_init_profile_dir = False
- profile_dir = Instance(ProfileDir, allow_none=True)
+ return d
+
+ _in_init_profile_dir = False
+ profile_dir = Instance(ProfileDir, allow_none=True)
@default('profile_dir')
- def _profile_dir_default(self):
- # avoid recursion
- if self._in_init_profile_dir:
- return
- # profile_dir requested early, force initialization
- self.init_profile_dir()
- return self.profile_dir
-
+ def _profile_dir_default(self):
+ # avoid recursion
+ if self._in_init_profile_dir:
+ return
+ # profile_dir requested early, force initialization
+ self.init_profile_dir()
+ return self.profile_dir
+
overwrite = Bool(False,
help="""Whether to overwrite existing config files when copying"""
).tag(config=True)
auto_create = Bool(False,
help="""Whether to create profile dir if it doesn't exist"""
).tag(config=True)
-
- config_files = List(Unicode())
+
+ config_files = List(Unicode())
@default('config_files')
- def _config_files_default(self):
- return [self.config_file_name]
-
+ def _config_files_default(self):
+ return [self.config_file_name]
+
copy_config_files = Bool(False,
- help="""Whether to install the default config files into the profile dir.
- If a new profile is being created, and IPython contains config files for that
- profile, then they will be staged into the new directory. Otherwise,
- default config files will be automatically generated.
+ help="""Whether to install the default config files into the profile dir.
+ If a new profile is being created, and IPython contains config files for that
+ profile, then they will be staged into the new directory. Otherwise,
+ default config files will be automatically generated.
""").tag(config=True)
-
+
verbose_crash = Bool(False,
- help="""Create a massive crash report when IPython encounters what may be an
- internal error. The default is to append a short message to the
+ help="""Create a massive crash report when IPython encounters what may be an
+ internal error. The default is to append a short message to the
usual traceback""").tag(config=True)
-
- # The class to use as the crash handler.
- crash_handler_class = Type(crashhandler.CrashHandler)
-
- @catch_config_error
- def __init__(self, **kwargs):
- super(BaseIPythonApplication, self).__init__(**kwargs)
- # ensure current working directory exists
- try:
- py3compat.getcwd()
- except:
- # exit if cwd doesn't exist
- self.log.error("Current working directory doesn't exist.")
- self.exit(1)
-
- #-------------------------------------------------------------------------
- # Various stages of Application creation
- #-------------------------------------------------------------------------
-
- deprecated_subcommands = {}
-
- def initialize_subcommand(self, subc, argv=None):
- if subc in self.deprecated_subcommands:
- self.log.warning("Subcommand `ipython {sub}` is deprecated and will be removed "
- "in future versions.".format(sub=subc))
+
+ # The class to use as the crash handler.
+ crash_handler_class = Type(crashhandler.CrashHandler)
+
+ @catch_config_error
+ def __init__(self, **kwargs):
+ super(BaseIPythonApplication, self).__init__(**kwargs)
+ # ensure current working directory exists
+ try:
+ py3compat.getcwd()
+ except:
+ # exit if cwd doesn't exist
+ self.log.error("Current working directory doesn't exist.")
+ self.exit(1)
+
+ #-------------------------------------------------------------------------
+ # Various stages of Application creation
+ #-------------------------------------------------------------------------
+
+ deprecated_subcommands = {}
+
+ def initialize_subcommand(self, subc, argv=None):
+ if subc in self.deprecated_subcommands:
+ self.log.warning("Subcommand `ipython {sub}` is deprecated and will be removed "
+ "in future versions.".format(sub=subc))
self.log.warning("You likely want to use `jupyter {sub}` in the "
"future".format(sub=subc))
- return super(BaseIPythonApplication, self).initialize_subcommand(subc, argv)
-
- def init_crash_handler(self):
- """Create a crash handler, typically setting sys.excepthook to it."""
- self.crash_handler = self.crash_handler_class(self)
- sys.excepthook = self.excepthook
- def unset_crashhandler():
- sys.excepthook = sys.__excepthook__
- atexit.register(unset_crashhandler)
-
- def excepthook(self, etype, evalue, tb):
- """This is sys.excepthook after init_crash_handler has run.
-
- Set self.verbose_crash=True to use the full crash handler instead of
- a regular traceback with a short message (crash_handler_lite).
- """
-
- if self.verbose_crash:
- return self.crash_handler(etype, evalue, tb)
- else:
- return crashhandler.crash_handler_lite(etype, evalue, tb)
+ return super(BaseIPythonApplication, self).initialize_subcommand(subc, argv)
+
+ def init_crash_handler(self):
+ """Create a crash handler, typically setting sys.excepthook to it."""
+ self.crash_handler = self.crash_handler_class(self)
+ sys.excepthook = self.excepthook
+ def unset_crashhandler():
+ sys.excepthook = sys.__excepthook__
+ atexit.register(unset_crashhandler)
+
+ def excepthook(self, etype, evalue, tb):
+ """This is sys.excepthook after init_crash_handler has run.
+
+ Set self.verbose_crash=True to use the full crash handler instead of
+ a regular traceback with a short message (crash_handler_lite).
+ """
+
+ if self.verbose_crash:
+ return self.crash_handler(etype, evalue, tb)
+ else:
+ return crashhandler.crash_handler_lite(etype, evalue, tb)
@observe('ipython_dir')
def _ipython_dir_changed(self, change):
old = change['old']
new = change['new']
- if old is not Undefined:
- str_old = py3compat.cast_bytes_py2(os.path.abspath(old),
- sys.getfilesystemencoding()
- )
- if str_old in sys.path:
- sys.path.remove(str_old)
- str_path = py3compat.cast_bytes_py2(os.path.abspath(new),
- sys.getfilesystemencoding()
- )
- sys.path.append(str_path)
- ensure_dir_exists(new)
- readme = os.path.join(new, 'README')
- readme_src = os.path.join(get_ipython_package_dir(), u'config', u'profile', 'README')
- if not os.path.exists(readme) and os.path.exists(readme_src):
- shutil.copy(readme_src, readme)
- for d in ('extensions', 'nbextensions'):
- path = os.path.join(new, d)
- try:
- ensure_dir_exists(path)
- except OSError as e:
- # this will not be EEXIST
- self.log.error("couldn't create path %s: %s", path, e)
- self.log.debug("IPYTHONDIR set to: %s" % new)
-
+ if old is not Undefined:
+ str_old = py3compat.cast_bytes_py2(os.path.abspath(old),
+ sys.getfilesystemencoding()
+ )
+ if str_old in sys.path:
+ sys.path.remove(str_old)
+ str_path = py3compat.cast_bytes_py2(os.path.abspath(new),
+ sys.getfilesystemencoding()
+ )
+ sys.path.append(str_path)
+ ensure_dir_exists(new)
+ readme = os.path.join(new, 'README')
+ readme_src = os.path.join(get_ipython_package_dir(), u'config', u'profile', 'README')
+ if not os.path.exists(readme) and os.path.exists(readme_src):
+ shutil.copy(readme_src, readme)
+ for d in ('extensions', 'nbextensions'):
+ path = os.path.join(new, d)
+ try:
+ ensure_dir_exists(path)
+ except OSError as e:
+ # this will not be EEXIST
+ self.log.error("couldn't create path %s: %s", path, e)
+ self.log.debug("IPYTHONDIR set to: %s" % new)
+
def load_config_file(self, suppress_errors=IPYTHON_SUPPRESS_CONFIG_ERRORS):
- """Load the config file.
-
- By default, errors in loading config are handled, and a warning
- printed on screen. For testing, the suppress_errors option is set
- to False, so errors will make tests fail.
+ """Load the config file.
+
+ By default, errors in loading config are handled, and a warning
+ printed on screen. For testing, the suppress_errors option is set
+ to False, so errors will make tests fail.
`suppress_errors` defaults to `None`, in which case the behavior
falls back to that of `traitlets.Application`.
@@ -307,161 +307,161 @@ class BaseIPythonApplication(Application):
- to `None` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '' (empty string) or leaving it unset.
Any other value is invalid and will make IPython exit with a non-zero return code.
- """
+ """
- self.log.debug("Searching path %s for config files", self.config_file_paths)
- base_config = 'ipython_config.py'
- self.log.debug("Attempting to load config file: %s" %
- base_config)
- try:
+ self.log.debug("Searching path %s for config files", self.config_file_paths)
+ base_config = 'ipython_config.py'
+ self.log.debug("Attempting to load config file: %s" %
+ base_config)
+ try:
if suppress_errors is not None:
old_value = Application.raise_config_file_errors
Application.raise_config_file_errors = not suppress_errors
- Application.load_config_file(
- self,
- base_config,
- path=self.config_file_paths
- )
- except ConfigFileNotFound:
- # ignore errors loading parent
- self.log.debug("Config file %s not found", base_config)
- pass
+ Application.load_config_file(
+ self,
+ base_config,
+ path=self.config_file_paths
+ )
+ except ConfigFileNotFound:
+ # ignore errors loading parent
+ self.log.debug("Config file %s not found", base_config)
+ pass
if suppress_errors is not None:
Application.raise_config_file_errors = old_value
-
- for config_file_name in self.config_files:
- if not config_file_name or config_file_name == base_config:
- continue
- self.log.debug("Attempting to load config file: %s" %
- self.config_file_name)
- try:
- Application.load_config_file(
- self,
- config_file_name,
- path=self.config_file_paths
- )
- except ConfigFileNotFound:
- # Only warn if the default config file was NOT being used.
- if config_file_name in self.config_file_specified:
- msg = self.log.warning
- else:
- msg = self.log.debug
- msg("Config file not found, skipping: %s", config_file_name)
- except Exception:
- # For testing purposes.
- if not suppress_errors:
- raise
- self.log.warning("Error loading config file: %s" %
- self.config_file_name, exc_info=True)
-
- def init_profile_dir(self):
- """initialize the profile dir"""
- self._in_init_profile_dir = True
- if self.profile_dir is not None:
- # already ran
- return
- if 'ProfileDir.location' not in self.config:
- # location not specified, find by profile name
- try:
- p = ProfileDir.find_profile_dir_by_name(self.ipython_dir, self.profile, self.config)
- except ProfileDirError:
- # not found, maybe create it (always create default profile)
- if self.auto_create or self.profile == 'default':
- try:
- p = ProfileDir.create_profile_dir_by_name(self.ipython_dir, self.profile, self.config)
- except ProfileDirError:
- self.log.fatal("Could not create profile: %r"%self.profile)
- self.exit(1)
- else:
- self.log.info("Created profile dir: %r"%p.location)
- else:
- self.log.fatal("Profile %r not found."%self.profile)
- self.exit(1)
- else:
- self.log.debug("Using existing profile dir: %r"%p.location)
- else:
- location = self.config.ProfileDir.location
- # location is fully specified
- try:
- p = ProfileDir.find_profile_dir(location, self.config)
- except ProfileDirError:
- # not found, maybe create it
- if self.auto_create:
- try:
- p = ProfileDir.create_profile_dir(location, self.config)
- except ProfileDirError:
- self.log.fatal("Could not create profile directory: %r"%location)
- self.exit(1)
- else:
- self.log.debug("Creating new profile dir: %r"%location)
- else:
- self.log.fatal("Profile directory %r not found."%location)
- self.exit(1)
- else:
- self.log.info("Using existing profile dir: %r"%location)
- # if profile_dir is specified explicitly, set profile name
- dir_name = os.path.basename(p.location)
- if dir_name.startswith('profile_'):
- self.profile = dir_name[8:]
-
- self.profile_dir = p
- self.config_file_paths.append(p.location)
- self._in_init_profile_dir = False
-
- def init_config_files(self):
- """[optionally] copy default config files into profile dir."""
+
+ for config_file_name in self.config_files:
+ if not config_file_name or config_file_name == base_config:
+ continue
+ self.log.debug("Attempting to load config file: %s" %
+ self.config_file_name)
+ try:
+ Application.load_config_file(
+ self,
+ config_file_name,
+ path=self.config_file_paths
+ )
+ except ConfigFileNotFound:
+ # Only warn if the default config file was NOT being used.
+ if config_file_name in self.config_file_specified:
+ msg = self.log.warning
+ else:
+ msg = self.log.debug
+ msg("Config file not found, skipping: %s", config_file_name)
+ except Exception:
+ # For testing purposes.
+ if not suppress_errors:
+ raise
+ self.log.warning("Error loading config file: %s" %
+ self.config_file_name, exc_info=True)
+
+ def init_profile_dir(self):
+ """initialize the profile dir"""
+ self._in_init_profile_dir = True
+ if self.profile_dir is not None:
+ # already ran
+ return
+ if 'ProfileDir.location' not in self.config:
+ # location not specified, find by profile name
+ try:
+ p = ProfileDir.find_profile_dir_by_name(self.ipython_dir, self.profile, self.config)
+ except ProfileDirError:
+ # not found, maybe create it (always create default profile)
+ if self.auto_create or self.profile == 'default':
+ try:
+ p = ProfileDir.create_profile_dir_by_name(self.ipython_dir, self.profile, self.config)
+ except ProfileDirError:
+ self.log.fatal("Could not create profile: %r"%self.profile)
+ self.exit(1)
+ else:
+ self.log.info("Created profile dir: %r"%p.location)
+ else:
+ self.log.fatal("Profile %r not found."%self.profile)
+ self.exit(1)
+ else:
+ self.log.debug("Using existing profile dir: %r"%p.location)
+ else:
+ location = self.config.ProfileDir.location
+ # location is fully specified
+ try:
+ p = ProfileDir.find_profile_dir(location, self.config)
+ except ProfileDirError:
+ # not found, maybe create it
+ if self.auto_create:
+ try:
+ p = ProfileDir.create_profile_dir(location, self.config)
+ except ProfileDirError:
+ self.log.fatal("Could not create profile directory: %r"%location)
+ self.exit(1)
+ else:
+ self.log.debug("Creating new profile dir: %r"%location)
+ else:
+ self.log.fatal("Profile directory %r not found."%location)
+ self.exit(1)
+ else:
+ self.log.info("Using existing profile dir: %r"%location)
+ # if profile_dir is specified explicitly, set profile name
+ dir_name = os.path.basename(p.location)
+ if dir_name.startswith('profile_'):
+ self.profile = dir_name[8:]
+
+ self.profile_dir = p
+ self.config_file_paths.append(p.location)
+ self._in_init_profile_dir = False
+
+ def init_config_files(self):
+ """[optionally] copy default config files into profile dir."""
self.config_file_paths.extend(ENV_CONFIG_DIRS)
- self.config_file_paths.extend(SYSTEM_CONFIG_DIRS)
- # copy config files
- path = self.builtin_profile_dir
- if self.copy_config_files:
- src = self.profile
-
- cfg = self.config_file_name
- if path and os.path.exists(os.path.join(path, cfg)):
- self.log.warning("Staging %r from %s into %r [overwrite=%s]"%(
- cfg, src, self.profile_dir.location, self.overwrite)
- )
- self.profile_dir.copy_config_file(cfg, path=path, overwrite=self.overwrite)
- else:
- self.stage_default_config_file()
- else:
- # Still stage *bundled* config files, but not generated ones
- # This is necessary for `ipython profile=sympy` to load the profile
- # on the first go
- files = glob.glob(os.path.join(path, '*.py'))
- for fullpath in files:
- cfg = os.path.basename(fullpath)
- if self.profile_dir.copy_config_file(cfg, path=path, overwrite=False):
- # file was copied
- self.log.warning("Staging bundled %s from %s into %r"%(
- cfg, self.profile, self.profile_dir.location)
- )
-
-
- def stage_default_config_file(self):
- """auto generate default config file, and stage it into the profile."""
- s = self.generate_config_file()
- fname = os.path.join(self.profile_dir.location, self.config_file_name)
- if self.overwrite or not os.path.exists(fname):
- self.log.warning("Generating default config file: %r"%(fname))
- with open(fname, 'w') as f:
- f.write(s)
-
- @catch_config_error
- def initialize(self, argv=None):
- # don't hook up crash handler before parsing command-line
- self.parse_command_line(argv)
- self.init_crash_handler()
- if self.subapp is not None:
- # stop here if subapp is taking over
- return
+ self.config_file_paths.extend(SYSTEM_CONFIG_DIRS)
+ # copy config files
+ path = self.builtin_profile_dir
+ if self.copy_config_files:
+ src = self.profile
+
+ cfg = self.config_file_name
+ if path and os.path.exists(os.path.join(path, cfg)):
+ self.log.warning("Staging %r from %s into %r [overwrite=%s]"%(
+ cfg, src, self.profile_dir.location, self.overwrite)
+ )
+ self.profile_dir.copy_config_file(cfg, path=path, overwrite=self.overwrite)
+ else:
+ self.stage_default_config_file()
+ else:
+ # Still stage *bundled* config files, but not generated ones
+ # This is necessary for `ipython profile=sympy` to load the profile
+ # on the first go
+ files = glob.glob(os.path.join(path, '*.py'))
+ for fullpath in files:
+ cfg = os.path.basename(fullpath)
+ if self.profile_dir.copy_config_file(cfg, path=path, overwrite=False):
+ # file was copied
+ self.log.warning("Staging bundled %s from %s into %r"%(
+ cfg, self.profile, self.profile_dir.location)
+ )
+
+
+ def stage_default_config_file(self):
+ """auto generate default config file, and stage it into the profile."""
+ s = self.generate_config_file()
+ fname = os.path.join(self.profile_dir.location, self.config_file_name)
+ if self.overwrite or not os.path.exists(fname):
+ self.log.warning("Generating default config file: %r"%(fname))
+ with open(fname, 'w') as f:
+ f.write(s)
+
+ @catch_config_error
+ def initialize(self, argv=None):
+ # don't hook up crash handler before parsing command-line
+ self.parse_command_line(argv)
+ self.init_crash_handler()
+ if self.subapp is not None:
+ # stop here if subapp is taking over
+ return
# save a copy of CLI config to re-load after config files
# so that it has highest priority
cl_config = deepcopy(self.config)
- self.init_profile_dir()
- self.init_config_files()
- self.load_config_file()
- # enforce cl-opts override configfile opts:
- self.update_config(cl_config)
+ self.init_profile_dir()
+ self.init_config_files()
+ self.load_config_file()
+ # enforce cl-opts override configfile opts:
+ self.update_config(cl_config)
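
For orientation, a minimal usage sketch of the application code above (not part of the diff; assumes a standard IPython installation). It shows the initialization order implemented in initialize(): the command line is parsed first, the profile directory and config files are then resolved, and the CLI config is re-applied last so it keeps the highest priority.

from IPython.core.application import BaseIPythonApplication

app = BaseIPythonApplication()
app.initialize(argv=['--profile=default'])  # parse CLI, init profile dir, load config files
print(app.profile_dir.location)             # resolved profile directory
print(app.config_file_paths)                # search path used for ipython_config.py
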
diff --git a/contrib/python/ipython/py2/IPython/core/autocall.py b/contrib/python/ipython/py2/IPython/core/autocall.py
index 4ef2bce59c..bab7f859c9 100644
--- a/contrib/python/ipython/py2/IPython/core/autocall.py
+++ b/contrib/python/ipython/py2/IPython/core/autocall.py
@@ -1,70 +1,70 @@
-# encoding: utf-8
-"""
-Autocall capabilities for IPython.core.
-
-Authors:
-
-* Brian Granger
-* Fernando Perez
-* Thomas Kluyver
-
-Notes
------
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-class IPyAutocall(object):
- """ Instances of this class are always autocalled
-
- This happens regardless of 'autocall' variable state. Use this to
- develop macro-like mechanisms.
- """
- _ip = None
- rewrite = True
- def __init__(self, ip=None):
- self._ip = ip
-
- def set_ip(self, ip):
- """ Will be used to set _ip to point to the current IPython instance before the call.
-
- Override this method if you don't want this to happen.
-
- """
- self._ip = ip
-
-
-class ExitAutocall(IPyAutocall):
- """An autocallable object which will be added to the user namespace so that
- exit, exit(), quit or quit() are all valid ways to close the shell."""
- rewrite = False
-
- def __call__(self):
- self._ip.ask_exit()
-
-class ZMQExitAutocall(ExitAutocall):
- """Exit IPython. Autocallable, so it needn't be explicitly called.
-
- Parameters
- ----------
- keep_kernel : bool
- If True, leave the kernel alive. Otherwise, tell the kernel to exit too
- (default).
- """
- def __call__(self, keep_kernel=False):
- self._ip.keepkernel_on_exit = keep_kernel
- self._ip.ask_exit()
+# encoding: utf-8
+"""
+Autocall capabilities for IPython.core.
+
+Authors:
+
+* Brian Granger
+* Fernando Perez
+* Thomas Kluyver
+
+Notes
+-----
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+class IPyAutocall(object):
+ """ Instances of this class are always autocalled
+
+ This happens regardless of 'autocall' variable state. Use this to
+ develop macro-like mechanisms.
+ """
+ _ip = None
+ rewrite = True
+ def __init__(self, ip=None):
+ self._ip = ip
+
+ def set_ip(self, ip):
+ """ Will be used to set _ip to point to the current IPython instance before the call.
+
+ Override this method if you don't want this to happen.
+
+ """
+ self._ip = ip
+
+
+class ExitAutocall(IPyAutocall):
+ """An autocallable object which will be added to the user namespace so that
+ exit, exit(), quit or quit() are all valid ways to close the shell."""
+ rewrite = False
+
+ def __call__(self):
+ self._ip.ask_exit()
+
+class ZMQExitAutocall(ExitAutocall):
+ """Exit IPython. Autocallable, so it needn't be explicitly called.
+
+ Parameters
+ ----------
+ keep_kernel : bool
+ If True, leave the kernel alive. Otherwise, tell the kernel to exit too
+ (default).
+ """
+ def __call__(self, keep_kernel=False):
+ self._ip.keepkernel_on_exit = keep_kernel
+ self._ip.ask_exit()
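
As a usage illustration for the autocall classes above (not part of the diff; the Greet class is a hypothetical example), any IPyAutocall subclass typed at the prompt is invoked even without trailing parentheses:

from IPython.core.autocall import IPyAutocall

class Greet(IPyAutocall):
    # hypothetical example class, autocalled regardless of the autocall setting
    def __call__(self):
        print("autocalled; shell is", self._ip)

# Inside an IPython session:
#   In [1]: greet = Greet()
#   In [2]: greet        # autocalled -> prints the message
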
diff --git a/contrib/python/ipython/py2/IPython/core/builtin_trap.py b/contrib/python/ipython/py2/IPython/core/builtin_trap.py
index 011362599c..909a555c73 100644
--- a/contrib/python/ipython/py2/IPython/core/builtin_trap.py
+++ b/contrib/python/ipython/py2/IPython/core/builtin_trap.py
@@ -1,114 +1,114 @@
-"""
-A context manager for managing things injected into :mod:`__builtin__`.
-
-Authors:
-
-* Brian Granger
-* Fernando Perez
-"""
-#-----------------------------------------------------------------------------
-# Copyright (C) 2010-2011 The IPython Development Team.
-#
-# Distributed under the terms of the BSD License.
-#
-# Complete license in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-from traitlets.config.configurable import Configurable
-
-from IPython.utils.py3compat import builtin_mod, iteritems
-from traitlets import Instance
-
-#-----------------------------------------------------------------------------
-# Classes and functions
-#-----------------------------------------------------------------------------
-
-class __BuiltinUndefined(object): pass
-BuiltinUndefined = __BuiltinUndefined()
-
-class __HideBuiltin(object): pass
-HideBuiltin = __HideBuiltin()
-
-
-class BuiltinTrap(Configurable):
-
- shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
- allow_none=True)
-
- def __init__(self, shell=None):
- super(BuiltinTrap, self).__init__(shell=shell, config=None)
- self._orig_builtins = {}
- # We define this to track if a single BuiltinTrap is nested.
- # Only turn off the trap when the outermost call to __exit__ is made.
- self._nested_level = 0
- self.shell = shell
- # builtins we always add - if set to HideBuiltin, they will just
- # be removed instead of being replaced by something else
- self.auto_builtins = {'exit': HideBuiltin,
- 'quit': HideBuiltin,
- 'get_ipython': self.shell.get_ipython,
- }
- # Recursive reload function
- try:
- from IPython.lib import deepreload
- if self.shell.deep_reload:
- from warnings import warn
+"""
+A context manager for managing things injected into :mod:`__builtin__`.
+
+Authors:
+
+* Brian Granger
+* Fernando Perez
+"""
+#-----------------------------------------------------------------------------
+# Copyright (C) 2010-2011 The IPython Development Team.
+#
+# Distributed under the terms of the BSD License.
+#
+# Complete license in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+from traitlets.config.configurable import Configurable
+
+from IPython.utils.py3compat import builtin_mod, iteritems
+from traitlets import Instance
+
+#-----------------------------------------------------------------------------
+# Classes and functions
+#-----------------------------------------------------------------------------
+
+class __BuiltinUndefined(object): pass
+BuiltinUndefined = __BuiltinUndefined()
+
+class __HideBuiltin(object): pass
+HideBuiltin = __HideBuiltin()
+
+
+class BuiltinTrap(Configurable):
+
+ shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
+ allow_none=True)
+
+ def __init__(self, shell=None):
+ super(BuiltinTrap, self).__init__(shell=shell, config=None)
+ self._orig_builtins = {}
+ # We define this to track if a single BuiltinTrap is nested.
+ # Only turn off the trap when the outermost call to __exit__ is made.
+ self._nested_level = 0
+ self.shell = shell
+ # builtins we always add - if set to HideBuiltin, they will just
+ # be removed instead of being replaced by something else
+ self.auto_builtins = {'exit': HideBuiltin,
+ 'quit': HideBuiltin,
+ 'get_ipython': self.shell.get_ipython,
+ }
+ # Recursive reload function
+ try:
+ from IPython.lib import deepreload
+ if self.shell.deep_reload:
+ from warnings import warn
warn("Automatically replacing builtin `reload` by `deepreload.reload` is deprecated since IPython 4.0, please import `reload` explicitly from `IPython.lib.deepreload", DeprecationWarning)
- self.auto_builtins['reload'] = deepreload._dreload
- else:
- self.auto_builtins['dreload']= deepreload._dreload
- except ImportError:
- pass
-
- def __enter__(self):
- if self._nested_level == 0:
- self.activate()
- self._nested_level += 1
- # I return self, so callers can use add_builtin in a with clause.
- return self
-
- def __exit__(self, type, value, traceback):
- if self._nested_level == 1:
- self.deactivate()
- self._nested_level -= 1
- # Returning False will cause exceptions to propagate
- return False
-
- def add_builtin(self, key, value):
- """Add a builtin and save the original."""
- bdict = builtin_mod.__dict__
- orig = bdict.get(key, BuiltinUndefined)
- if value is HideBuiltin:
- if orig is not BuiltinUndefined: #same as 'key in bdict'
- self._orig_builtins[key] = orig
- del bdict[key]
- else:
- self._orig_builtins[key] = orig
- bdict[key] = value
-
- def remove_builtin(self, key, orig):
- """Remove an added builtin and re-set the original."""
- if orig is BuiltinUndefined:
- del builtin_mod.__dict__[key]
- else:
- builtin_mod.__dict__[key] = orig
-
- def activate(self):
- """Store ipython references in the __builtin__ namespace."""
-
- add_builtin = self.add_builtin
- for name, func in iteritems(self.auto_builtins):
- add_builtin(name, func)
-
- def deactivate(self):
- """Remove any builtins which might have been added by add_builtins, or
- restore overwritten ones to their previous values."""
- remove_builtin = self.remove_builtin
- for key, val in iteritems(self._orig_builtins):
- remove_builtin(key, val)
- self._orig_builtins.clear()
- self._builtins_added = False
+ self.auto_builtins['reload'] = deepreload._dreload
+ else:
+ self.auto_builtins['dreload']= deepreload._dreload
+ except ImportError:
+ pass
+
+ def __enter__(self):
+ if self._nested_level == 0:
+ self.activate()
+ self._nested_level += 1
+ # I return self, so callers can use add_builtin in a with clause.
+ return self
+
+ def __exit__(self, type, value, traceback):
+ if self._nested_level == 1:
+ self.deactivate()
+ self._nested_level -= 1
+ # Returning False will cause exceptions to propagate
+ return False
+
+ def add_builtin(self, key, value):
+ """Add a builtin and save the original."""
+ bdict = builtin_mod.__dict__
+ orig = bdict.get(key, BuiltinUndefined)
+ if value is HideBuiltin:
+ if orig is not BuiltinUndefined: #same as 'key in bdict'
+ self._orig_builtins[key] = orig
+ del bdict[key]
+ else:
+ self._orig_builtins[key] = orig
+ bdict[key] = value
+
+ def remove_builtin(self, key, orig):
+ """Remove an added builtin and re-set the original."""
+ if orig is BuiltinUndefined:
+ del builtin_mod.__dict__[key]
+ else:
+ builtin_mod.__dict__[key] = orig
+
+ def activate(self):
+ """Store ipython references in the __builtin__ namespace."""
+
+ add_builtin = self.add_builtin
+ for name, func in iteritems(self.auto_builtins):
+ add_builtin(name, func)
+
+ def deactivate(self):
+ """Remove any builtins which might have been added by add_builtins, or
+ restore overwritten ones to their previous values."""
+ remove_builtin = self.remove_builtin
+ for key, val in iteritems(self._orig_builtins):
+ remove_builtin(key, val)
+ self._orig_builtins.clear()
+ self._builtins_added = False
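
A rough sketch of how BuiltinTrap is driven in practice (assumes a running IPython session, whose shell owns the trap as builtin_trap): entering the context hides the stock exit/quit builtins and installs get_ipython; only the outermost exit restores the originals.

ip = get_ipython()        # only available inside an IPython session
with ip.builtin_trap:     # outermost __enter__ calls activate()
    # get_ipython is now reachable as a builtin; exit/quit are hidden so the
    # shell's own ExitAutocall objects in the user namespace take over
    pass                  # outermost __exit__ calls deactivate() and restores originals
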
diff --git a/contrib/python/ipython/py2/IPython/core/compilerop.py b/contrib/python/ipython/py2/IPython/core/compilerop.py
index f662e37e96..f529eb5224 100644
--- a/contrib/python/ipython/py2/IPython/core/compilerop.py
+++ b/contrib/python/ipython/py2/IPython/core/compilerop.py
@@ -1,144 +1,144 @@
-"""Compiler tools with improved interactive support.
-
-Provides compilation machinery similar to codeop, but with caching support so
-we can provide interactive tracebacks.
-
-Authors
--------
-* Robert Kern
-* Fernando Perez
-* Thomas Kluyver
-"""
-
-# Note: though it might be more natural to name this module 'compiler', that
-# name is in the stdlib and name collisions with the stdlib tend to produce
-# weird problems (often with third-party tools).
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2010-2011 The IPython Development Team.
-#
-# Distributed under the terms of the BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-from __future__ import print_function
-
-# Stdlib imports
-import __future__
-from ast import PyCF_ONLY_AST
-import codeop
-import functools
-import hashlib
-import linecache
-import operator
-import time
-
-#-----------------------------------------------------------------------------
-# Constants
-#-----------------------------------------------------------------------------
-
-# Roughly equal to PyCF_MASK | PyCF_MASK_OBSOLETE as defined in pythonrun.h,
-# this is used as a bitmask to extract future-related code flags.
-PyCF_MASK = functools.reduce(operator.or_,
- (getattr(__future__, fname).compiler_flag
- for fname in __future__.all_feature_names))
-
-#-----------------------------------------------------------------------------
-# Local utilities
-#-----------------------------------------------------------------------------
-
-def code_name(code, number=0):
- """ Compute a (probably) unique name for code for caching.
-
- This now expects code to be unicode.
- """
+"""Compiler tools with improved interactive support.
+
+Provides compilation machinery similar to codeop, but with caching support so
+we can provide interactive tracebacks.
+
+Authors
+-------
+* Robert Kern
+* Fernando Perez
+* Thomas Kluyver
+"""
+
+# Note: though it might be more natural to name this module 'compiler', that
+# name is in the stdlib and name collisions with the stdlib tend to produce
+# weird problems (often with third-party tools).
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2010-2011 The IPython Development Team.
+#
+# Distributed under the terms of the BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+from __future__ import print_function
+
+# Stdlib imports
+import __future__
+from ast import PyCF_ONLY_AST
+import codeop
+import functools
+import hashlib
+import linecache
+import operator
+import time
+
+#-----------------------------------------------------------------------------
+# Constants
+#-----------------------------------------------------------------------------
+
+# Roughly equal to PyCF_MASK | PyCF_MASK_OBSOLETE as defined in pythonrun.h,
+# this is used as a bitmask to extract future-related code flags.
+PyCF_MASK = functools.reduce(operator.or_,
+ (getattr(__future__, fname).compiler_flag
+ for fname in __future__.all_feature_names))
+
+#-----------------------------------------------------------------------------
+# Local utilities
+#-----------------------------------------------------------------------------
+
+def code_name(code, number=0):
+ """ Compute a (probably) unique name for code for caching.
+
+ This now expects code to be unicode.
+ """
hash_digest = hashlib.sha1(code.encode("utf-8")).hexdigest()
- # Include the number and 12 characters of the hash in the name. It's
- # pretty much impossible that in a single session we'll have collisions
- # even with truncated hashes, and the full one makes tracebacks too long
- return '<ipython-input-{0}-{1}>'.format(number, hash_digest[:12])
-
-#-----------------------------------------------------------------------------
-# Classes and functions
-#-----------------------------------------------------------------------------
-
-class CachingCompiler(codeop.Compile):
- """A compiler that caches code compiled from interactive statements.
- """
-
- def __init__(self):
- codeop.Compile.__init__(self)
-
- # This is ugly, but it must be done this way to allow multiple
- # simultaneous ipython instances to coexist. Since Python itself
- # directly accesses the data structures in the linecache module, and
- # the cache therein is global, we must work with that data structure.
- # We must hold a reference to the original checkcache routine and call
- # that in our own check_cache() below, but the special IPython cache
- # must also be shared by all IPython instances. If we were to hold
- # separate caches (one in each CachingCompiler instance), any call made
- # by Python itself to linecache.checkcache() would obliterate the
- # cached data from the other IPython instances.
- if not hasattr(linecache, '_ipython_cache'):
- linecache._ipython_cache = {}
- if not hasattr(linecache, '_checkcache_ori'):
- linecache._checkcache_ori = linecache.checkcache
- # Now, we must monkeypatch the linecache directly so that parts of the
- # stdlib that call it outside our control go through our codepath
- # (otherwise we'd lose our tracebacks).
- linecache.checkcache = check_linecache_ipython
-
- def ast_parse(self, source, filename='<unknown>', symbol='exec'):
- """Parse code to an AST with the current compiler flags active.
-
- Arguments are exactly the same as ast.parse (in the standard library),
- and are passed to the built-in compile function."""
- return compile(source, filename, symbol, self.flags | PyCF_ONLY_AST, 1)
-
- def reset_compiler_flags(self):
- """Reset compiler flags to default state."""
- # This value is copied from codeop.Compile.__init__, so if that ever
- # changes, it will need to be updated.
- self.flags = codeop.PyCF_DONT_IMPLY_DEDENT
-
- @property
- def compiler_flags(self):
- """Flags currently active in the compilation process.
- """
- return self.flags
-
- def cache(self, code, number=0):
- """Make a name for a block of code, and cache the code.
-
- Parameters
- ----------
- code : str
- The Python source code to cache.
- number : int
- A number which forms part of the code's name. Used for the execution
- counter.
-
- Returns
- -------
- The name of the cached code (as a string). Pass this as the filename
- argument to compilation, so that tracebacks are correctly hooked up.
- """
- name = code_name(code, number)
- entry = (len(code), time.time(),
- [line+'\n' for line in code.splitlines()], name)
- linecache.cache[name] = entry
- linecache._ipython_cache[name] = entry
- return name
-
-def check_linecache_ipython(*args):
- """Call linecache.checkcache() safely protecting our cached values.
- """
- # First call the original checkcache as intended
- linecache._checkcache_ori(*args)
- # Then, update back the cache with our data, so that tracebacks related
- # to our compiled codes can be produced.
- linecache.cache.update(linecache._ipython_cache)
+ # Include the number and 12 characters of the hash in the name. It's
+ # pretty much impossible that in a single session we'll have collisions
+ # even with truncated hashes, and the full one makes tracebacks too long
+ return '<ipython-input-{0}-{1}>'.format(number, hash_digest[:12])
+
+#-----------------------------------------------------------------------------
+# Classes and functions
+#-----------------------------------------------------------------------------
+
+class CachingCompiler(codeop.Compile):
+ """A compiler that caches code compiled from interactive statements.
+ """
+
+ def __init__(self):
+ codeop.Compile.__init__(self)
+
+ # This is ugly, but it must be done this way to allow multiple
+ # simultaneous ipython instances to coexist. Since Python itself
+ # directly accesses the data structures in the linecache module, and
+ # the cache therein is global, we must work with that data structure.
+ # We must hold a reference to the original checkcache routine and call
+ # that in our own check_cache() below, but the special IPython cache
+ # must also be shared by all IPython instances. If we were to hold
+ # separate caches (one in each CachingCompiler instance), any call made
+ # by Python itself to linecache.checkcache() would obliterate the
+ # cached data from the other IPython instances.
+ if not hasattr(linecache, '_ipython_cache'):
+ linecache._ipython_cache = {}
+ if not hasattr(linecache, '_checkcache_ori'):
+ linecache._checkcache_ori = linecache.checkcache
+ # Now, we must monkeypatch the linecache directly so that parts of the
+ # stdlib that call it outside our control go through our codepath
+ # (otherwise we'd lose our tracebacks).
+ linecache.checkcache = check_linecache_ipython
+
+ def ast_parse(self, source, filename='<unknown>', symbol='exec'):
+ """Parse code to an AST with the current compiler flags active.
+
+ Arguments are exactly the same as ast.parse (in the standard library),
+ and are passed to the built-in compile function."""
+ return compile(source, filename, symbol, self.flags | PyCF_ONLY_AST, 1)
+
+ def reset_compiler_flags(self):
+ """Reset compiler flags to default state."""
+ # This value is copied from codeop.Compile.__init__, so if that ever
+ # changes, it will need to be updated.
+ self.flags = codeop.PyCF_DONT_IMPLY_DEDENT
+
+ @property
+ def compiler_flags(self):
+ """Flags currently active in the compilation process.
+ """
+ return self.flags
+
+ def cache(self, code, number=0):
+ """Make a name for a block of code, and cache the code.
+
+ Parameters
+ ----------
+ code : str
+ The Python source code to cache.
+ number : int
+ A number which forms part of the code's name. Used for the execution
+ counter.
+
+ Returns
+ -------
+ The name of the cached code (as a string). Pass this as the filename
+ argument to compilation, so that tracebacks are correctly hooked up.
+ """
+ name = code_name(code, number)
+ entry = (len(code), time.time(),
+ [line+'\n' for line in code.splitlines()], name)
+ linecache.cache[name] = entry
+ linecache._ipython_cache[name] = entry
+ return name
+
+def check_linecache_ipython(*args):
+ """Call linecache.checkcache() safely protecting our cached values.
+ """
+ # First call the original checkcache as intended
+ linecache._checkcache_ori(*args)
+ # Then, update back the cache with our data, so that tracebacks related
+ # to our compiled codes can be produced.
+ linecache.cache.update(linecache._ipython_cache)
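
A short sketch of the caching workflow above (not part of the diff): cache() registers the source under a synthetic filename so that linecache, and therefore interactive tracebacks, can recover the lines later.

from IPython.core.compilerop import CachingCompiler

compiler = CachingCompiler()
src = u"def boom():\n    return 1 / 0\n"
fname = compiler.cache(src, number=1)  # e.g. '<ipython-input-1-...>'
code = compiler(src, fname, 'exec')    # codeop.Compile.__call__, honours accumulated __future__ flags
exec(code)                             # boom()'s traceback will point at the cached source
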
diff --git a/contrib/python/ipython/py2/IPython/core/completer.py b/contrib/python/ipython/py2/IPython/core/completer.py
index 46003de8d4..b386945e54 100644
--- a/contrib/python/ipython/py2/IPython/core/completer.py
+++ b/contrib/python/ipython/py2/IPython/core/completer.py
@@ -1,79 +1,79 @@
-# encoding: utf-8
-"""Word completion for IPython.
-
+# encoding: utf-8
+"""Word completion for IPython.
+
This module started as a fork of the rlcompleter module in the Python standard
-library. The original enhancements made to rlcompleter have been sent
+library. The original enhancements made to rlcompleter have been sent
upstream and were accepted as of Python 2.3.
-
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-#
-# Some of this code originated from rlcompleter in the Python standard library
-# Copyright (C) 2001 Python Software Foundation, www.python.org
-
+
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+#
+# Some of this code originated from rlcompleter in the Python standard library
+# Copyright (C) 2001 Python Software Foundation, www.python.org
+
from __future__ import print_function
-import __main__
-import glob
-import inspect
-import itertools
-import keyword
-import os
-import re
-import sys
-import unicodedata
-import string
+import __main__
+import glob
+import inspect
+import itertools
+import keyword
+import os
+import re
+import sys
+import unicodedata
+import string
import warnings
-
+
from traitlets.config.configurable import Configurable
-from IPython.core.error import TryNext
-from IPython.core.inputsplitter import ESC_MAGIC
-from IPython.core.latex_symbols import latex_symbols, reverse_latex_symbol
-from IPython.utils import generics
-from IPython.utils.decorators import undoc
+from IPython.core.error import TryNext
+from IPython.core.inputsplitter import ESC_MAGIC
+from IPython.core.latex_symbols import latex_symbols, reverse_latex_symbol
+from IPython.utils import generics
+from IPython.utils.decorators import undoc
from IPython.utils.dir2 import dir2, get_real_method
-from IPython.utils.process import arg_split
+from IPython.utils.process import arg_split
from IPython.utils.py3compat import builtin_mod, string_types, PY3, cast_unicode_py2
from traitlets import Bool, Enum, observe
-
-
-# Public API
-__all__ = ['Completer','IPCompleter']
-
-if sys.platform == 'win32':
- PROTECTABLES = ' '
-else:
- PROTECTABLES = ' ()[]{}?=\\|;:\'#*"^&'
-
+
+
+# Public API
+__all__ = ['Completer','IPCompleter']
+
+if sys.platform == 'win32':
+ PROTECTABLES = ' '
+else:
+ PROTECTABLES = ' ()[]{}?=\\|;:\'#*"^&'
+
# Protect against returning an enormous number of completions which the frontend
# may have trouble processing.
MATCHES_LIMIT = 500
-
-def has_open_quotes(s):
- """Return whether a string has open quotes.
-
- This simply counts whether the number of quote characters of either type in
- the string is odd.
-
- Returns
- -------
- If there is an open quote, the quote character is returned. Else, return
- False.
- """
- # We check " first, then ', so complex cases with nested quotes will get
- # the " to take precedence.
- if s.count('"') % 2:
- return '"'
- elif s.count("'") % 2:
- return "'"
- else:
- return False
-
-
-def protect_filename(s):
- """Escape a string to protect certain characters."""
+
+def has_open_quotes(s):
+ """Return whether a string has open quotes.
+
+ This simply counts whether the number of quote characters of either type in
+ the string is odd.
+
+ Returns
+ -------
+ If there is an open quote, the quote character is returned. Else, return
+ False.
+ """
+ # We check " first, then ', so complex cases with nested quotes will get
+ # the " to take precedence.
+ if s.count('"') % 2:
+ return '"'
+ elif s.count("'") % 2:
+ return "'"
+ else:
+ return False
+
+
+def protect_filename(s):
+ """Escape a string to protect certain characters."""
if set(s) & set(PROTECTABLES):
if sys.platform == "win32":
return '"' + s + '"'
@@ -81,62 +81,62 @@ def protect_filename(s):
return "".join(("\\" + c if c in PROTECTABLES else c) for c in s)
else:
return s
-
-
-def expand_user(path):
- """Expand '~'-style usernames in strings.
-
- This is similar to :func:`os.path.expanduser`, but it computes and returns
- extra information that will be useful if the input was being used in
- computing completions, and you wish to return the completions with the
- original '~' instead of its expanded value.
-
- Parameters
- ----------
- path : str
- String to be expanded. If no ~ is present, the output is the same as the
- input.
-
- Returns
- -------
- newpath : str
- Result of ~ expansion in the input path.
- tilde_expand : bool
- Whether any expansion was performed or not.
- tilde_val : str
- The value that ~ was replaced with.
- """
- # Default values
- tilde_expand = False
- tilde_val = ''
- newpath = path
-
- if path.startswith('~'):
- tilde_expand = True
- rest = len(path)-1
- newpath = os.path.expanduser(path)
- if rest:
- tilde_val = newpath[:-rest]
- else:
- tilde_val = newpath
-
- return newpath, tilde_expand, tilde_val
-
-
-def compress_user(path, tilde_expand, tilde_val):
- """Does the opposite of expand_user, with its outputs.
- """
- if tilde_expand:
- return path.replace(tilde_val, '~')
- else:
- return path
-
-
+
+
+def expand_user(path):
+ """Expand '~'-style usernames in strings.
+
+ This is similar to :func:`os.path.expanduser`, but it computes and returns
+ extra information that will be useful if the input was being used in
+ computing completions, and you wish to return the completions with the
+ original '~' instead of its expanded value.
+
+ Parameters
+ ----------
+ path : str
+ String to be expanded. If no ~ is present, the output is the same as the
+ input.
+
+ Returns
+ -------
+ newpath : str
+ Result of ~ expansion in the input path.
+ tilde_expand : bool
+ Whether any expansion was performed or not.
+ tilde_val : str
+ The value that ~ was replaced with.
+ """
+ # Default values
+ tilde_expand = False
+ tilde_val = ''
+ newpath = path
+
+ if path.startswith('~'):
+ tilde_expand = True
+ rest = len(path)-1
+ newpath = os.path.expanduser(path)
+ if rest:
+ tilde_val = newpath[:-rest]
+ else:
+ tilde_val = newpath
+
+ return newpath, tilde_expand, tilde_val
+
+
+def compress_user(path, tilde_expand, tilde_val):
+ """Does the opposite of expand_user, with its outputs.
+ """
+ if tilde_expand:
+ return path.replace(tilde_val, '~')
+ else:
+ return path
+
+
def completions_sorting_key(word):
"""key for sorting completions
-
+
This does several things:
-
+
- Lowercase all completions, so they are sorted alphabetically with
upper and lower case words mingled
- Demote any completions starting with underscores to the end
@@ -145,410 +145,410 @@ def completions_sorting_key(word):
"""
# Case insensitive sort
word = word.lower()
-
+
prio1, prio2 = 0, 0
-
+
if word.startswith('__'):
prio1 = 2
elif word.startswith('_'):
prio1 = 1
-
+
if word.endswith('='):
prio1 = -1
-
+
if word.startswith('%%'):
# If there's another % in there, this is something else, so leave it alone
- if not "%" in word[2:]:
+ if not "%" in word[2:]:
word = word[2:]
prio2 = 2
elif word.startswith('%'):
- if not "%" in word[1:]:
+ if not "%" in word[1:]:
word = word[1:]
prio2 = 1
-
+
return prio1, word, prio2
-
-@undoc
-class Bunch(object): pass
-
-
+
+@undoc
+class Bunch(object): pass
+
+
if sys.platform == 'win32':
DELIMS = ' \t\n`!@#$^&*()=+[{]}|;\'",<>?'
else:
DELIMS = ' \t\n`!@#$^&*()=+[{]}\\|;:\'",<>?'
-GREEDY_DELIMS = ' =\r\n'
-
-
-class CompletionSplitter(object):
- """An object to split an input line in a manner similar to readline.
-
- By having our own implementation, we can expose readline-like completion in
- a uniform manner to all frontends. This object only needs to be given the
- line of text to be split and the cursor position on said line, and it
- returns the 'word' to be completed on at the cursor after splitting the
- entire line.
-
- What characters are used as splitting delimiters can be controlled by
- setting the `delims` attribute (this is a property that internally
- automatically builds the necessary regular expression)"""
-
- # Private interface
-
- # A string of delimiter characters. The default value makes sense for
- # IPython's most typical usage patterns.
- _delims = DELIMS
-
- # The expression (a normal string) to be compiled into a regular expression
- # for actual splitting. We store it as an attribute mostly for ease of
- # debugging, since this type of code can be so tricky to debug.
- _delim_expr = None
-
- # The regular expression that does the actual splitting
- _delim_re = None
-
- def __init__(self, delims=None):
- delims = CompletionSplitter._delims if delims is None else delims
- self.delims = delims
-
- @property
- def delims(self):
- """Return the string of delimiter characters."""
- return self._delims
-
- @delims.setter
- def delims(self, delims):
- """Set the delimiters for line splitting."""
- expr = '[' + ''.join('\\'+ c for c in delims) + ']'
- self._delim_re = re.compile(expr)
- self._delims = delims
- self._delim_expr = expr
-
- def split_line(self, line, cursor_pos=None):
- """Split a line of text with a cursor at the given position.
- """
- l = line if cursor_pos is None else line[:cursor_pos]
- return self._delim_re.split(l)[-1]
-
-
-class Completer(Configurable):
-
+GREEDY_DELIMS = ' =\r\n'
+
+
+class CompletionSplitter(object):
+ """An object to split an input line in a manner similar to readline.
+
+ By having our own implementation, we can expose readline-like completion in
+ a uniform manner to all frontends. This object only needs to be given the
+ line of text to be split and the cursor position on said line, and it
+ returns the 'word' to be completed on at the cursor after splitting the
+ entire line.
+
+ What characters are used as splitting delimiters can be controlled by
+ setting the `delims` attribute (this is a property that internally
+ automatically builds the necessary regular expression)"""
+
+ # Private interface
+
+ # A string of delimiter characters. The default value makes sense for
+ # IPython's most typical usage patterns.
+ _delims = DELIMS
+
+ # The expression (a normal string) to be compiled into a regular expression
+ # for actual splitting. We store it as an attribute mostly for ease of
+ # debugging, since this type of code can be so tricky to debug.
+ _delim_expr = None
+
+ # The regular expression that does the actual splitting
+ _delim_re = None
+
+ def __init__(self, delims=None):
+ delims = CompletionSplitter._delims if delims is None else delims
+ self.delims = delims
+
+ @property
+ def delims(self):
+ """Return the string of delimiter characters."""
+ return self._delims
+
+ @delims.setter
+ def delims(self, delims):
+ """Set the delimiters for line splitting."""
+ expr = '[' + ''.join('\\'+ c for c in delims) + ']'
+ self._delim_re = re.compile(expr)
+ self._delims = delims
+ self._delim_expr = expr
+
+ def split_line(self, line, cursor_pos=None):
+ """Split a line of text with a cursor at the given position.
+ """
+ l = line if cursor_pos is None else line[:cursor_pos]
+ return self._delim_re.split(l)[-1]
+
+
+class Completer(Configurable):
+
greedy = Bool(False,
- help="""Activate greedy completion
+ help="""Activate greedy completion
PENDING DEPRECATION. This is now mostly taken care of with Jedi.
-
- This will enable completion on elements of lists, results of function calls, etc.,
- but can be unsafe because the code is actually evaluated on TAB.
- """
+
+ This will enable completion on elements of lists, results of function calls, etc.,
+ but can be unsafe because the code is actually evaluated on TAB.
+ """
).tag(config=True)
-
+
backslash_combining_completions = Bool(True,
help="Enable unicode completions, e.g. \\alpha<tab> . "
"Includes completion of latex commands, unicode names, and expanding "
"unicode characters back to latex commands.").tag(config=True)
- def __init__(self, namespace=None, global_namespace=None, **kwargs):
- """Create a new completer for the command line.
-
+ def __init__(self, namespace=None, global_namespace=None, **kwargs):
+ """Create a new completer for the command line.
+
Completer(namespace=ns, global_namespace=ns2) -> completer instance.
-
- If unspecified, the default namespace where completions are performed
- is __main__ (technically, __main__.__dict__). Namespaces should be
- given as dictionaries.
-
- An optional second namespace can be given. This allows the completer
- to handle cases where both the local and global scopes need to be
- distinguished.
-
- Completer instances should be used as the completion mechanism of
- readline via the set_completer() call:
-
- readline.set_completer(Completer(my_namespace).complete)
- """
-
- # Don't bind to namespace quite yet, but flag whether the user wants a
- # specific namespace or to use __main__.__dict__. This will allow us
- # to bind to __main__.__dict__ at completion time, not now.
- if namespace is None:
- self.use_main_ns = 1
- else:
- self.use_main_ns = 0
- self.namespace = namespace
-
- # The global namespace, if given, can be bound directly
- if global_namespace is None:
- self.global_namespace = {}
- else:
- self.global_namespace = global_namespace
-
- super(Completer, self).__init__(**kwargs)
-
- def complete(self, text, state):
- """Return the next possible completion for 'text'.
-
- This is called successively with state == 0, 1, 2, ... until it
- returns None. The completion should begin with 'text'.
-
- """
- if self.use_main_ns:
- self.namespace = __main__.__dict__
-
- if state == 0:
- if "." in text:
- self.matches = self.attr_matches(text)
- else:
- self.matches = self.global_matches(text)
- try:
- return self.matches[state]
- except IndexError:
- return None
-
- def global_matches(self, text):
- """Compute matches when text is a simple name.
-
- Return a list of all keywords, built-in functions and names currently
- defined in self.namespace or self.global_namespace that match.
-
- """
- matches = []
- match_append = matches.append
- n = len(text)
- for lst in [keyword.kwlist,
- builtin_mod.__dict__.keys(),
- self.namespace.keys(),
- self.global_namespace.keys()]:
- for word in lst:
- if word[:n] == text and word != "__builtins__":
- match_append(word)
+
+ If unspecified, the default namespace where completions are performed
+ is __main__ (technically, __main__.__dict__). Namespaces should be
+ given as dictionaries.
+
+ An optional second namespace can be given. This allows the completer
+ to handle cases where both the local and global scopes need to be
+ distinguished.
+
+ Completer instances should be used as the completion mechanism of
+ readline via the set_completer() call:
+
+ readline.set_completer(Completer(my_namespace).complete)
+ """
+
+ # Don't bind to namespace quite yet, but flag whether the user wants a
+ # specific namespace or to use __main__.__dict__. This will allow us
+ # to bind to __main__.__dict__ at completion time, not now.
+ if namespace is None:
+ self.use_main_ns = 1
+ else:
+ self.use_main_ns = 0
+ self.namespace = namespace
+
+ # The global namespace, if given, can be bound directly
+ if global_namespace is None:
+ self.global_namespace = {}
+ else:
+ self.global_namespace = global_namespace
+
+ super(Completer, self).__init__(**kwargs)
+
+ def complete(self, text, state):
+ """Return the next possible completion for 'text'.
+
+ This is called successively with state == 0, 1, 2, ... until it
+ returns None. The completion should begin with 'text'.
+
+ """
+ if self.use_main_ns:
+ self.namespace = __main__.__dict__
+
+ if state == 0:
+ if "." in text:
+ self.matches = self.attr_matches(text)
+ else:
+ self.matches = self.global_matches(text)
+ try:
+ return self.matches[state]
+ except IndexError:
+ return None
+
+ def global_matches(self, text):
+ """Compute matches when text is a simple name.
+
+ Return a list of all keywords, built-in functions and names currently
+ defined in self.namespace or self.global_namespace that match.
+
+ """
+ matches = []
+ match_append = matches.append
+ n = len(text)
+ for lst in [keyword.kwlist,
+ builtin_mod.__dict__.keys(),
+ self.namespace.keys(),
+ self.global_namespace.keys()]:
+ for word in lst:
+ if word[:n] == text and word != "__builtins__":
+ match_append(word)
return [cast_unicode_py2(m) for m in matches]
-
- def attr_matches(self, text):
- """Compute matches when text contains a dot.
-
- Assuming the text is of the form NAME.NAME....[NAME], and is
- evaluatable in self.namespace or self.global_namespace, it will be
- evaluated and its attributes (as revealed by dir()) are used as
- possible completions. (For class instances, class members are
- also considered.)
-
- WARNING: this can still invoke arbitrary C code, if an object
- with a __getattr__ hook is evaluated.
-
- """
-
- # Another option, seems to work great. Catches things like ''.<tab>
- m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text)
-
- if m:
- expr, attr = m.group(1, 3)
- elif self.greedy:
- m2 = re.match(r"(.+)\.(\w*)$", self.line_buffer)
- if not m2:
- return []
- expr, attr = m2.group(1,2)
+
+ def attr_matches(self, text):
+ """Compute matches when text contains a dot.
+
+ Assuming the text is of the form NAME.NAME....[NAME], and is
+ evaluatable in self.namespace or self.global_namespace, it will be
+ evaluated and its attributes (as revealed by dir()) are used as
+        possible completions. (For class instances, class members are
+ also considered.)
+
+ WARNING: this can still invoke arbitrary C code, if an object
+ with a __getattr__ hook is evaluated.
+
+ """
+
+ # Another option, seems to work great. Catches things like ''.<tab>
+ m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text)
+
+ if m:
+ expr, attr = m.group(1, 3)
+ elif self.greedy:
+ m2 = re.match(r"(.+)\.(\w*)$", self.line_buffer)
+ if not m2:
+ return []
+ expr, attr = m2.group(1,2)
+ else:
+ return []
+
+ try:
+ obj = eval(expr, self.namespace)
+ except:
+ try:
+ obj = eval(expr, self.global_namespace)
+ except:
+ return []
+
+ if self.limit_to__all__ and hasattr(obj, '__all__'):
+ words = get__all__entries(obj)
else:
- return []
-
- try:
- obj = eval(expr, self.namespace)
- except:
- try:
- obj = eval(expr, self.global_namespace)
- except:
- return []
-
- if self.limit_to__all__ and hasattr(obj, '__all__'):
- words = get__all__entries(obj)
- else:
- words = dir2(obj)
-
- try:
- words = generics.complete_object(obj, words)
- except TryNext:
- pass
- except Exception:
- # Silence errors from completion function
- #raise # dbg
- pass
- # Build match list to return
- n = len(attr)
+ words = dir2(obj)
+
+ try:
+ words = generics.complete_object(obj, words)
+ except TryNext:
+ pass
+ except Exception:
+ # Silence errors from completion function
+ #raise # dbg
+ pass
+ # Build match list to return
+ n = len(attr)
return [u"%s.%s" % (expr, w) for w in words if w[:n] == attr ]
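
# Illustrative sketch: how attr_matches() above splits the completion text into
# an evaluatable expression and a trailing attribute prefix, then filters the
# object's attributes. 'sys.pa' is just an example input.
import re
import sys
m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", "sys.pa")
expr, attr = m.group(1, 3)                      # expr == 'sys', attr == 'pa'
obj = eval(expr, {'sys': sys})
words = [w for w in dir(obj) if w.startswith(attr)]
completions = ["%s.%s" % (expr, w) for w in words]   # 'sys.path', 'sys.path_hooks', ...
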
-
-
-def get__all__entries(obj):
- """returns the strings in the __all__ attribute"""
- try:
- words = getattr(obj, '__all__')
- except:
- return []
-
+
+
+def get__all__entries(obj):
+ """returns the strings in the __all__ attribute"""
+ try:
+ words = getattr(obj, '__all__')
+ except:
+ return []
+
return [cast_unicode_py2(w) for w in words if isinstance(w, string_types)]
-
-
-def match_dict_keys(keys, prefix, delims):
- """Used by dict_key_matches, matching the prefix to a list of keys"""
- if not prefix:
- return None, 0, [repr(k) for k in keys
- if isinstance(k, (string_types, bytes))]
- quote_match = re.search('["\']', prefix)
- quote = quote_match.group()
- try:
- prefix_str = eval(prefix + quote, {})
- except Exception:
- return None, 0, []
-
- pattern = '[^' + ''.join('\\' + c for c in delims) + ']*$'
- token_match = re.search(pattern, prefix, re.UNICODE)
- token_start = token_match.start()
- token_prefix = token_match.group()
-
- # TODO: support bytes in Py3k
- matched = []
- for key in keys:
- try:
- if not key.startswith(prefix_str):
- continue
- except (AttributeError, TypeError, UnicodeError):
- # Python 3+ TypeError on b'a'.startswith('a') or vice-versa
- continue
-
- # reformat remainder of key to begin with prefix
- rem = key[len(prefix_str):]
- # force repr wrapped in '
- rem_repr = repr(rem + '"')
- if rem_repr.startswith('u') and prefix[0] not in 'uU':
- # Found key is unicode, but prefix is Py2 string.
- # Therefore attempt to interpret key as string.
- try:
- rem_repr = repr(rem.encode('ascii') + '"')
- except UnicodeEncodeError:
- continue
-
- rem_repr = rem_repr[1 + rem_repr.index("'"):-2]
- if quote == '"':
- # The entered prefix is quoted with ",
- # but the match is quoted with '.
- # A contained " hence needs escaping for comparison:
- rem_repr = rem_repr.replace('"', '\\"')
-
- # then reinsert prefix from start of token
- matched.append('%s%s' % (token_prefix, rem_repr))
- return quote, token_start, matched
-
-
-def _safe_isinstance(obj, module, class_name):
- """Checks if obj is an instance of module.class_name if loaded
- """
- return (module in sys.modules and
- isinstance(obj, getattr(__import__(module), class_name)))
-
-
-def back_unicode_name_matches(text):
- u"""Match unicode characters back to unicode name
-
- This does ☃ -> \\snowman
-
- Note that snowman is not a valid python3 combining character but will be expanded.
-    It will not, however, be recombined back into the snowman character by the completion machinery.
-
-    Nor will it back-complete standard escape sequences like \\n, \\b ...
-
- Used on Python 3 only.
- """
- if len(text)<2:
- return u'', ()
- maybe_slash = text[-2]
- if maybe_slash != '\\':
- return u'', ()
-
- char = text[-1]
- # no expand on quote for completion in strings.
- # nor backcomplete standard ascii keys
- if char in string.ascii_letters or char in ['"',"'"]:
- return u'', ()
- try :
- unic = unicodedata.name(char)
- return '\\'+char,['\\'+unic]
+
+
+def match_dict_keys(keys, prefix, delims):
+ """Used by dict_key_matches, matching the prefix to a list of keys"""
+ if not prefix:
+ return None, 0, [repr(k) for k in keys
+ if isinstance(k, (string_types, bytes))]
+ quote_match = re.search('["\']', prefix)
+ quote = quote_match.group()
+ try:
+ prefix_str = eval(prefix + quote, {})
+ except Exception:
+ return None, 0, []
+
+ pattern = '[^' + ''.join('\\' + c for c in delims) + ']*$'
+ token_match = re.search(pattern, prefix, re.UNICODE)
+ token_start = token_match.start()
+ token_prefix = token_match.group()
+
+ # TODO: support bytes in Py3k
+ matched = []
+ for key in keys:
+ try:
+ if not key.startswith(prefix_str):
+ continue
+ except (AttributeError, TypeError, UnicodeError):
+ # Python 3+ TypeError on b'a'.startswith('a') or vice-versa
+ continue
+
+ # reformat remainder of key to begin with prefix
+ rem = key[len(prefix_str):]
+ # force repr wrapped in '
+ rem_repr = repr(rem + '"')
+ if rem_repr.startswith('u') and prefix[0] not in 'uU':
+ # Found key is unicode, but prefix is Py2 string.
+ # Therefore attempt to interpret key as string.
+ try:
+ rem_repr = repr(rem.encode('ascii') + '"')
+ except UnicodeEncodeError:
+ continue
+
+ rem_repr = rem_repr[1 + rem_repr.index("'"):-2]
+ if quote == '"':
+ # The entered prefix is quoted with ",
+ # but the match is quoted with '.
+ # A contained " hence needs escaping for comparison:
+ rem_repr = rem_repr.replace('"', '\\"')
+
+ # then reinsert prefix from start of token
+ matched.append('%s%s' % (token_prefix, rem_repr))
+ return quote, token_start, matched
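
# Illustrative sketch: the simplest branch of match_dict_keys() above. With an
# empty prefix every string-like key is offered, already wrapped in repr() so
# the completion inserts a properly quoted key. The keys are hypothetical and
# (str, bytes) stands in for py3compat's string_types.
keys = ['host', 'port', 42]
prefix = ''
matched = [repr(k) for k in keys if isinstance(k, (str, bytes))]
# matched -> ["'host'", "'port'"]; the integer key 42 is not offered
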
+
+
+def _safe_isinstance(obj, module, class_name):
+ """Checks if obj is an instance of module.class_name if loaded
+ """
+ return (module in sys.modules and
+ isinstance(obj, getattr(__import__(module), class_name)))
+
+
+def back_unicode_name_matches(text):
+ u"""Match unicode characters back to unicode name
+
+ This does ☃ -> \\snowman
+
+ Note that snowman is not a valid python3 combining character but will be expanded.
+    It will not, however, be recombined back into the snowman character by the completion machinery.
+
+    Nor will it back-complete standard escape sequences like \\n, \\b ...
+
+ Used on Python 3 only.
+ """
+ if len(text)<2:
+ return u'', ()
+ maybe_slash = text[-2]
+ if maybe_slash != '\\':
+ return u'', ()
+
+ char = text[-1]
+ # no expand on quote for completion in strings.
+ # nor backcomplete standard ascii keys
+ if char in string.ascii_letters or char in ['"',"'"]:
+ return u'', ()
+ try :
+ unic = unicodedata.name(char)
+ return '\\'+char,['\\'+unic]
except KeyError:
- pass
- return u'', ()
-
-def back_latex_name_matches(text):
- u"""Match latex characters back to unicode name
-
- This does ->\\sqrt
-
- Used on Python 3 only.
- """
- if len(text)<2:
- return u'', ()
- maybe_slash = text[-2]
- if maybe_slash != '\\':
- return u'', ()
-
-
- char = text[-1]
- # no expand on quote for completion in strings.
- # nor backcomplete standard ascii keys
- if char in string.ascii_letters or char in ['"',"'"]:
- return u'', ()
- try :
- latex = reverse_latex_symbol[char]
- # '\\' replace the \ as well
- return '\\'+char,[latex]
+ pass
+ return u'', ()
+
+def back_latex_name_matches(text):
+ u"""Match latex characters back to unicode name
+
+ This does ->\\sqrt
+
+ Used on Python 3 only.
+ """
+ if len(text)<2:
+ return u'', ()
+ maybe_slash = text[-2]
+ if maybe_slash != '\\':
+ return u'', ()
+
+
+ char = text[-1]
+ # no expand on quote for completion in strings.
+ # nor backcomplete standard ascii keys
+ if char in string.ascii_letters or char in ['"',"'"]:
+ return u'', ()
+ try :
+ latex = reverse_latex_symbol[char]
+ # '\\' replace the \ as well
+ return '\\'+char,[latex]
except KeyError:
- pass
- return u'', ()
-
-
-class IPCompleter(Completer):
- """Extension of the completer class with IPython-specific features"""
+ pass
+ return u'', ()
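
# Illustrative sketch: what the two back-completion helpers above return for a
# single character preceded by a backslash, assuming reverse_latex_symbol maps
# the character back to its latex name.
import unicodedata
unicodedata.name(u'\u2603')                   # 'SNOWMAN'
# back_unicode_name_matches(u'\\\u2603')  -> (u'\\\u2603', [u'\\SNOWMAN'])
# back_latex_name_matches(u'\\\u03b1')    -> (u'\\\u03b1', [u'\\alpha'])
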
+
+
+class IPCompleter(Completer):
+ """Extension of the completer class with IPython-specific features"""
@observe('greedy')
def _greedy_changed(self, change):
- """update the splitter and readline delims when greedy is changed"""
+ """update the splitter and readline delims when greedy is changed"""
if change['new']:
- self.splitter.delims = GREEDY_DELIMS
- else:
- self.splitter.delims = DELIMS
-
- if self.readline:
- self.readline.set_completer_delims(self.splitter.delims)
-
+ self.splitter.delims = GREEDY_DELIMS
+ else:
+ self.splitter.delims = DELIMS
+
+ if self.readline:
+ self.readline.set_completer_delims(self.splitter.delims)
+
merge_completions = Bool(True,
- help="""Whether to merge completion results into a single list
-
- If False, only the completion results from the first non-empty
- completer will be returned.
- """
+ help="""Whether to merge completion results into a single list
+
+ If False, only the completion results from the first non-empty
+ completer will be returned.
+ """
).tag(config=True)
omit__names = Enum((0,1,2), default_value=2,
- help="""Instruct the completer to omit private method names
-
- Specifically, when completing on ``object.<tab>``.
-
- When 2 [default]: all names that start with '_' will be excluded.
-
- When 1: all 'magic' names (``__foo__``) will be excluded.
-
- When 0: nothing will be excluded.
- """
+ help="""Instruct the completer to omit private method names
+
+ Specifically, when completing on ``object.<tab>``.
+
+ When 2 [default]: all names that start with '_' will be excluded.
+
+ When 1: all 'magic' names (``__foo__``) will be excluded.
+
+ When 0: nothing will be excluded.
+ """
).tag(config=True)
limit_to__all__ = Bool(False,
help="""
DEPRECATED as of version 5.0.
-
+
Instruct the completer to use __all__ for the completion
- Specifically, when completing on ``object.<tab>``.
-
- When True: only those names in obj.__all__ will be included.
-
- When False [default]: the __all__ attribute is ignored
+ Specifically, when completing on ``object.<tab>``.
+
+ When True: only those names in obj.__all__ will be included.
+
+ When False [default]: the __all__ attribute is ignored
""",
).tag(config=True)
-
+
@observe('limit_to__all__')
def _limit_to_all_changed(self, change):
warnings.warn('`IPython.core.IPCompleter.limit_to__all__` configuration '
@@ -556,352 +556,352 @@ class IPCompleter(Completer):
            'no effect and will be removed in a future version of IPython.',
UserWarning)
- def __init__(self, shell=None, namespace=None, global_namespace=None,
- use_readline=True, config=None, **kwargs):
- """IPCompleter() -> completer
-
- Return a completer object suitable for use by the readline library
- via readline.set_completer().
-
- Inputs:
-
- - shell: a pointer to the ipython shell itself. This is needed
- because this completer knows about magic functions, and those can
- only be accessed via the ipython instance.
-
- - namespace: an optional dict where completions are performed.
-
- - global_namespace: secondary optional dict for completions, to
- handle cases (such as IPython embedded inside functions) where
- both Python scopes are visible.
-
- use_readline : bool, optional
- If true, use the readline library. This completer can still function
- without readline, though in that case callers must provide some extra
- information on each call about the current line."""
-
- self.magic_escape = ESC_MAGIC
- self.splitter = CompletionSplitter()
-
- # Readline configuration, only used by the rlcompleter method.
- if use_readline:
- # We store the right version of readline so that later code
- import IPython.utils.rlineimpl as readline
- self.readline = readline
- else:
- self.readline = None
-
- # _greedy_changed() depends on splitter and readline being defined:
- Completer.__init__(self, namespace=namespace, global_namespace=global_namespace,
- config=config, **kwargs)
-
- # List where completion matches will be stored
- self.matches = []
- self.shell = shell
- # Regexp to split filenames with spaces in them
- self.space_name_re = re.compile(r'([^\\] )')
- # Hold a local ref. to glob.glob for speed
- self.glob = glob.glob
-
- # Determine if we are running on 'dumb' terminals, like (X)Emacs
- # buffers, to avoid completion problems.
- term = os.environ.get('TERM','xterm')
- self.dumb_terminal = term in ['dumb','emacs']
-
- # Special handling of backslashes needed in win32 platforms
- if sys.platform == "win32":
- self.clean_glob = self._clean_glob_win32
- else:
- self.clean_glob = self._clean_glob
-
- #regexp to parse docstring for function signature
- self.docstring_sig_re = re.compile(r'^[\w|\s.]+\(([^)]*)\).*')
- self.docstring_kwd_re = re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)')
- #use this if positional argument name is also needed
- #= re.compile(r'[\s|\[]*(\w+)(?:\s*=?\s*.*)')
-
- # All active matcher routines for completion
+ def __init__(self, shell=None, namespace=None, global_namespace=None,
+ use_readline=True, config=None, **kwargs):
+ """IPCompleter() -> completer
+
+ Return a completer object suitable for use by the readline library
+ via readline.set_completer().
+
+ Inputs:
+
+ - shell: a pointer to the ipython shell itself. This is needed
+ because this completer knows about magic functions, and those can
+ only be accessed via the ipython instance.
+
+ - namespace: an optional dict where completions are performed.
+
+ - global_namespace: secondary optional dict for completions, to
+ handle cases (such as IPython embedded inside functions) where
+ both Python scopes are visible.
+
+ use_readline : bool, optional
+ If true, use the readline library. This completer can still function
+ without readline, though in that case callers must provide some extra
+ information on each call about the current line."""
+
+ self.magic_escape = ESC_MAGIC
+ self.splitter = CompletionSplitter()
+
+ # Readline configuration, only used by the rlcompleter method.
+ if use_readline:
+ # We store the right version of readline so that later code
+ import IPython.utils.rlineimpl as readline
+ self.readline = readline
+ else:
+ self.readline = None
+
+ # _greedy_changed() depends on splitter and readline being defined:
+ Completer.__init__(self, namespace=namespace, global_namespace=global_namespace,
+ config=config, **kwargs)
+
+ # List where completion matches will be stored
+ self.matches = []
+ self.shell = shell
+ # Regexp to split filenames with spaces in them
+ self.space_name_re = re.compile(r'([^\\] )')
+ # Hold a local ref. to glob.glob for speed
+ self.glob = glob.glob
+
+ # Determine if we are running on 'dumb' terminals, like (X)Emacs
+ # buffers, to avoid completion problems.
+ term = os.environ.get('TERM','xterm')
+ self.dumb_terminal = term in ['dumb','emacs']
+
+ # Special handling of backslashes needed in win32 platforms
+ if sys.platform == "win32":
+ self.clean_glob = self._clean_glob_win32
+ else:
+ self.clean_glob = self._clean_glob
+
+ #regexp to parse docstring for function signature
+ self.docstring_sig_re = re.compile(r'^[\w|\s.]+\(([^)]*)\).*')
+ self.docstring_kwd_re = re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)')
+ #use this if positional argument name is also needed
+ #= re.compile(r'[\s|\[]*(\w+)(?:\s*=?\s*.*)')
+
+ # All active matcher routines for completion
self.matchers = [
self.python_matches,
- self.file_matches,
- self.magic_matches,
- self.python_func_kw_matches,
- self.dict_key_matches,
- ]
-
+ self.file_matches,
+ self.magic_matches,
+ self.python_func_kw_matches,
+ self.dict_key_matches,
+ ]
+
# This is set externally by InteractiveShell
self.custom_completers = None
- def all_completions(self, text):
- """
+ def all_completions(self, text):
+ """
Wrapper around the complete method for the benefit of emacs.
- """
- return self.complete(text)[1]
-
+ """
+ return self.complete(text)[1]
+
def _clean_glob(self, text):
- return self.glob("%s*" % text)
-
- def _clean_glob_win32(self,text):
- return [f.replace("\\","/")
- for f in self.glob("%s*" % text)]
-
- def file_matches(self, text):
- """Match filenames, expanding ~USER type strings.
-
- Most of the seemingly convoluted logic in this completer is an
- attempt to handle filenames with spaces in them. And yet it's not
- quite perfect, because Python's readline doesn't expose all of the
- GNU readline details needed for this to be done correctly.
-
- For a filename with a space in it, the printed completions will be
- only the parts after what's already been typed (instead of the
- full completions, as is normally done). I don't think with the
- current (as of Python 2.3) Python readline it's possible to do
- better."""
-
- # chars that require escaping with backslash - i.e. chars
- # that readline treats incorrectly as delimiters, but we
- # don't want to treat as delimiters in filename matching
- # when escaped with backslash
- if text.startswith('!'):
- text = text[1:]
+ return self.glob("%s*" % text)
+
+ def _clean_glob_win32(self,text):
+ return [f.replace("\\","/")
+ for f in self.glob("%s*" % text)]
+
+ def file_matches(self, text):
+ """Match filenames, expanding ~USER type strings.
+
+ Most of the seemingly convoluted logic in this completer is an
+ attempt to handle filenames with spaces in them. And yet it's not
+ quite perfect, because Python's readline doesn't expose all of the
+ GNU readline details needed for this to be done correctly.
+
+ For a filename with a space in it, the printed completions will be
+ only the parts after what's already been typed (instead of the
+ full completions, as is normally done). I don't think with the
+ current (as of Python 2.3) Python readline it's possible to do
+ better."""
+
+ # chars that require escaping with backslash - i.e. chars
+ # that readline treats incorrectly as delimiters, but we
+ # don't want to treat as delimiters in filename matching
+ # when escaped with backslash
+ if text.startswith('!'):
+ text = text[1:]
text_prefix = u'!'
- else:
+ else:
text_prefix = u''
-
- text_until_cursor = self.text_until_cursor
- # track strings with open quotes
- open_quotes = has_open_quotes(text_until_cursor)
-
- if '(' in text_until_cursor or '[' in text_until_cursor:
- lsplit = text
- else:
- try:
- # arg_split ~ shlex.split, but with unicode bugs fixed by us
- lsplit = arg_split(text_until_cursor)[-1]
- except ValueError:
- # typically an unmatched ", or backslash without escaped char.
- if open_quotes:
- lsplit = text_until_cursor.split(open_quotes)[-1]
- else:
- return []
- except IndexError:
- # tab pressed on empty line
- lsplit = ""
-
- if not open_quotes and lsplit != protect_filename(lsplit):
- # if protectables are found, do matching on the whole escaped name
- has_protectables = True
- text0,text = text,lsplit
- else:
- has_protectables = False
- text = os.path.expanduser(text)
-
- if text == "":
+
+ text_until_cursor = self.text_until_cursor
+ # track strings with open quotes
+ open_quotes = has_open_quotes(text_until_cursor)
+
+ if '(' in text_until_cursor or '[' in text_until_cursor:
+ lsplit = text
+ else:
+ try:
+ # arg_split ~ shlex.split, but with unicode bugs fixed by us
+ lsplit = arg_split(text_until_cursor)[-1]
+ except ValueError:
+ # typically an unmatched ", or backslash without escaped char.
+ if open_quotes:
+ lsplit = text_until_cursor.split(open_quotes)[-1]
+ else:
+ return []
+ except IndexError:
+ # tab pressed on empty line
+ lsplit = ""
+
+ if not open_quotes and lsplit != protect_filename(lsplit):
+ # if protectables are found, do matching on the whole escaped name
+ has_protectables = True
+ text0,text = text,lsplit
+ else:
+ has_protectables = False
+ text = os.path.expanduser(text)
+
+ if text == "":
return [text_prefix + cast_unicode_py2(protect_filename(f)) for f in self.glob("*")]
-
- # Compute the matches from the filesystem
+
+ # Compute the matches from the filesystem
if sys.platform == 'win32':
m0 = self.clean_glob(text)
else:
m0 = self.clean_glob(text.replace('\\', ''))
-
- if has_protectables:
- # If we had protectables, we need to revert our changes to the
- # beginning of filename so that we don't double-write the part
- # of the filename we have so far
- len_lsplit = len(lsplit)
- matches = [text_prefix + text0 +
- protect_filename(f[len_lsplit:]) for f in m0]
- else:
- if open_quotes:
- # if we have a string with an open quote, we don't need to
- # protect the names at all (and we _shouldn't_, as it
- # would cause bugs when the filesystem call is made).
- matches = m0
- else:
- matches = [text_prefix +
- protect_filename(f) for f in m0]
-
- # Mark directories in input list by appending '/' to their names.
+
+ if has_protectables:
+ # If we had protectables, we need to revert our changes to the
+ # beginning of filename so that we don't double-write the part
+ # of the filename we have so far
+ len_lsplit = len(lsplit)
+ matches = [text_prefix + text0 +
+ protect_filename(f[len_lsplit:]) for f in m0]
+ else:
+ if open_quotes:
+ # if we have a string with an open quote, we don't need to
+ # protect the names at all (and we _shouldn't_, as it
+ # would cause bugs when the filesystem call is made).
+ matches = m0
+ else:
+ matches = [text_prefix +
+ protect_filename(f) for f in m0]
+
+ # Mark directories in input list by appending '/' to their names.
return [cast_unicode_py2(x+'/') if os.path.isdir(x) else x for x in matches]
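
# Illustrative sketch: the final step of file_matches() above, which marks
# directories by appending '/' so they stand out from plain files. The glob
# pattern is arbitrary.
import glob
import os
marked = [p + '/' if os.path.isdir(p) else p for p in glob.glob('IP*')]
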
-
- def magic_matches(self, text):
- """Match magics"""
- # Get all shell magics now rather than statically, so magics loaded at
- # runtime show up too.
- lsm = self.shell.magics_manager.lsmagic()
- line_magics = lsm['line']
- cell_magics = lsm['cell']
- pre = self.magic_escape
- pre2 = pre+pre
-
- # Completion logic:
- # - user gives %%: only do cell magics
- # - user gives %: do both line and cell magics
- # - no prefix: do both
- # In other words, line magics are skipped if the user gives %% explicitly
- bare_text = text.lstrip(pre)
- comp = [ pre2+m for m in cell_magics if m.startswith(bare_text)]
- if not text.startswith(pre2):
- comp += [ pre+m for m in line_magics if m.startswith(bare_text)]
+
+ def magic_matches(self, text):
+ """Match magics"""
+ # Get all shell magics now rather than statically, so magics loaded at
+ # runtime show up too.
+ lsm = self.shell.magics_manager.lsmagic()
+ line_magics = lsm['line']
+ cell_magics = lsm['cell']
+ pre = self.magic_escape
+ pre2 = pre+pre
+
+ # Completion logic:
+ # - user gives %%: only do cell magics
+ # - user gives %: do both line and cell magics
+ # - no prefix: do both
+ # In other words, line magics are skipped if the user gives %% explicitly
+ bare_text = text.lstrip(pre)
+ comp = [ pre2+m for m in cell_magics if m.startswith(bare_text)]
+ if not text.startswith(pre2):
+ comp += [ pre+m for m in line_magics if m.startswith(bare_text)]
return [cast_unicode_py2(c) for c in comp]
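
# Illustrative sketch of the prefix rules implemented by magic_matches() above,
# with '%' as the escape character and a small example set of magic names:
# '%%' restricts matches to cell magics, while '%' (or no prefix) offers both.
line_magics, cell_magics = ['time', 'timeit'], ['timeit', 'writefile']
pre, text = '%', '%ti'
bare = text.lstrip(pre)
comp = [pre * 2 + m for m in cell_magics if m.startswith(bare)]
if not text.startswith(pre * 2):
    comp += [pre + m for m in line_magics if m.startswith(bare)]
# comp -> ['%%timeit', '%time', '%timeit']
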
-
+
def python_matches(self, text):
- """Match attributes or global python names"""
- if "." in text:
- try:
- matches = self.attr_matches(text)
- if text.endswith('.') and self.omit__names:
- if self.omit__names == 1:
- # true if txt is _not_ a __ name, false otherwise:
- no__name = (lambda txt:
- re.match(r'.*\.__.*?__',txt) is None)
- else:
- # true if txt is _not_ a _ name, false otherwise:
- no__name = (lambda txt:
- re.match(r'\._.*?',txt[txt.rindex('.'):]) is None)
- matches = filter(no__name, matches)
- except NameError:
- # catches <undefined attributes>.<tab>
- matches = []
- else:
- matches = self.global_matches(text)
- return matches
-
- def _default_arguments_from_docstring(self, doc):
- """Parse the first line of docstring for call signature.
-
- Docstring should be of the form 'min(iterable[, key=func])\n'.
- It can also parse cython docstring of the form
- 'Minuit.migrad(self, int ncall=10000, resume=True, int nsplit=1)'.
- """
- if doc is None:
- return []
-
-        # care only about the first line
- line = doc.lstrip().splitlines()[0]
-
- #p = re.compile(r'^[\w|\s.]+\(([^)]*)\).*')
- #'min(iterable[, key=func])\n' -> 'iterable[, key=func]'
- sig = self.docstring_sig_re.search(line)
- if sig is None:
- return []
- # iterable[, key=func]' -> ['iterable[' ,' key=func]']
- sig = sig.groups()[0].split(',')
- ret = []
- for s in sig:
- #re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)')
- ret += self.docstring_kwd_re.findall(s)
- return ret
-
- def _default_arguments(self, obj):
- """Return the list of default arguments of obj if it is callable,
- or empty list otherwise."""
- call_obj = obj
- ret = []
- if inspect.isbuiltin(obj):
- pass
- elif not (inspect.isfunction(obj) or inspect.ismethod(obj)):
- if inspect.isclass(obj):
-                #for cython embedsignature=True the constructor docstring
- #belongs to the object itself not __init__
- ret += self._default_arguments_from_docstring(
- getattr(obj, '__doc__', ''))
- # for classes, check for __init__,__new__
- call_obj = (getattr(obj, '__init__', None) or
- getattr(obj, '__new__', None))
- # for all others, check if they are __call__able
- elif hasattr(obj, '__call__'):
- call_obj = obj.__call__
- ret += self._default_arguments_from_docstring(
- getattr(call_obj, '__doc__', ''))
-
- if PY3:
- _keeps = (inspect.Parameter.KEYWORD_ONLY,
- inspect.Parameter.POSITIONAL_OR_KEYWORD)
- signature = inspect.signature
- else:
- import IPython.utils.signatures
- _keeps = (IPython.utils.signatures.Parameter.KEYWORD_ONLY,
- IPython.utils.signatures.Parameter.POSITIONAL_OR_KEYWORD)
- signature = IPython.utils.signatures.signature
-
- try:
- sig = signature(call_obj)
- ret.extend(k for k, v in sig.parameters.items() if
- v.kind in _keeps)
- except ValueError:
- pass
-
- return list(set(ret))
-
- def python_func_kw_matches(self,text):
- """Match named parameters (kwargs) of the last open function"""
-
- if "." in text: # a parameter cannot be dotted
- return []
- try: regexp = self.__funcParamsRegex
- except AttributeError:
- regexp = self.__funcParamsRegex = re.compile(r'''
- '.*?(?<!\\)' | # single quoted strings or
- ".*?(?<!\\)" | # double quoted strings or
- \w+ | # identifier
- \S # other characters
- ''', re.VERBOSE | re.DOTALL)
- # 1. find the nearest identifier that comes before an unclosed
- # parenthesis before the cursor
- # e.g. for "foo (1+bar(x), pa<cursor>,a=1)", the candidate is "foo"
- tokens = regexp.findall(self.text_until_cursor)
- tokens.reverse()
- iterTokens = iter(tokens); openPar = 0
-
- for token in iterTokens:
- if token == ')':
- openPar -= 1
- elif token == '(':
- openPar += 1
- if openPar > 0:
- # found the last unclosed parenthesis
- break
- else:
- return []
- # 2. Concatenate dotted names ("foo.bar" for "foo.bar(x, pa" )
- ids = []
- isId = re.compile(r'\w+$').match
-
- while True:
- try:
- ids.append(next(iterTokens))
- if not isId(ids[-1]):
- ids.pop(); break
- if not next(iterTokens) == '.':
- break
- except StopIteration:
- break
- # lookup the candidate callable matches either using global_matches
- # or attr_matches for dotted names
- if len(ids) == 1:
- callableMatches = self.global_matches(ids[0])
- else:
- callableMatches = self.attr_matches('.'.join(ids[::-1]))
- argMatches = []
- for callableMatch in callableMatches:
- try:
- namedArgs = self._default_arguments(eval(callableMatch,
- self.namespace))
- except:
- continue
-
- for namedArg in namedArgs:
- if namedArg.startswith(text):
+ """Match attributes or global python names"""
+ if "." in text:
+ try:
+ matches = self.attr_matches(text)
+ if text.endswith('.') and self.omit__names:
+ if self.omit__names == 1:
+ # true if txt is _not_ a __ name, false otherwise:
+ no__name = (lambda txt:
+ re.match(r'.*\.__.*?__',txt) is None)
+ else:
+ # true if txt is _not_ a _ name, false otherwise:
+ no__name = (lambda txt:
+ re.match(r'\._.*?',txt[txt.rindex('.'):]) is None)
+ matches = filter(no__name, matches)
+ except NameError:
+ # catches <undefined attributes>.<tab>
+ matches = []
+ else:
+ matches = self.global_matches(text)
+ return matches
+
+ def _default_arguments_from_docstring(self, doc):
+ """Parse the first line of docstring for call signature.
+
+ Docstring should be of the form 'min(iterable[, key=func])\n'.
+ It can also parse cython docstring of the form
+ 'Minuit.migrad(self, int ncall=10000, resume=True, int nsplit=1)'.
+ """
+ if doc is None:
+ return []
+
+        # care only about the first line
+ line = doc.lstrip().splitlines()[0]
+
+ #p = re.compile(r'^[\w|\s.]+\(([^)]*)\).*')
+ #'min(iterable[, key=func])\n' -> 'iterable[, key=func]'
+ sig = self.docstring_sig_re.search(line)
+ if sig is None:
+ return []
+ # iterable[, key=func]' -> ['iterable[' ,' key=func]']
+ sig = sig.groups()[0].split(',')
+ ret = []
+ for s in sig:
+ #re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)')
+ ret += self.docstring_kwd_re.findall(s)
+ return ret
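
# Illustrative sketch: the two regexps used by _default_arguments_from_docstring()
# above, applied to the builtin-style signature named in its docstring.
import re
docstring_sig_re = re.compile(r'^[\w|\s.]+\(([^)]*)\).*')
docstring_kwd_re = re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)')
line = 'min(iterable[, key=func])'
sig = docstring_sig_re.search(line).groups()[0]   # 'iterable[, key=func]'
kwargs = []
for part in sig.split(','):
    kwargs += docstring_kwd_re.findall(part)
# kwargs -> ['key']  (only keyword-style entries are recovered)
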
+
+ def _default_arguments(self, obj):
+ """Return the list of default arguments of obj if it is callable,
+ or empty list otherwise."""
+ call_obj = obj
+ ret = []
+ if inspect.isbuiltin(obj):
+ pass
+ elif not (inspect.isfunction(obj) or inspect.ismethod(obj)):
+ if inspect.isclass(obj):
+                #for cython embedsignature=True the constructor docstring
+ #belongs to the object itself not __init__
+ ret += self._default_arguments_from_docstring(
+ getattr(obj, '__doc__', ''))
+ # for classes, check for __init__,__new__
+ call_obj = (getattr(obj, '__init__', None) or
+ getattr(obj, '__new__', None))
+ # for all others, check if they are __call__able
+ elif hasattr(obj, '__call__'):
+ call_obj = obj.__call__
+ ret += self._default_arguments_from_docstring(
+ getattr(call_obj, '__doc__', ''))
+
+ if PY3:
+ _keeps = (inspect.Parameter.KEYWORD_ONLY,
+ inspect.Parameter.POSITIONAL_OR_KEYWORD)
+ signature = inspect.signature
+ else:
+ import IPython.utils.signatures
+ _keeps = (IPython.utils.signatures.Parameter.KEYWORD_ONLY,
+ IPython.utils.signatures.Parameter.POSITIONAL_OR_KEYWORD)
+ signature = IPython.utils.signatures.signature
+
+ try:
+ sig = signature(call_obj)
+ ret.extend(k for k, v in sig.parameters.items() if
+ v.kind in _keeps)
+ except ValueError:
+ pass
+
+ return list(set(ret))
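
# Illustrative sketch of the PY3 branch above: inspect.signature() yields the
# positional-or-keyword and keyword-only parameter names that _default_arguments()
# offers as completions. The function f is hypothetical.
import inspect
def f(a, b=1, *args, c=2, **kwargs):
    pass
keep = (inspect.Parameter.KEYWORD_ONLY, inspect.Parameter.POSITIONAL_OR_KEYWORD)
names = [k for k, v in inspect.signature(f).parameters.items() if v.kind in keep]
# names -> ['a', 'b', 'c']; *args and **kwargs are excluded
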
+
+ def python_func_kw_matches(self,text):
+ """Match named parameters (kwargs) of the last open function"""
+
+ if "." in text: # a parameter cannot be dotted
+ return []
+ try: regexp = self.__funcParamsRegex
+ except AttributeError:
+ regexp = self.__funcParamsRegex = re.compile(r'''
+ '.*?(?<!\\)' | # single quoted strings or
+ ".*?(?<!\\)" | # double quoted strings or
+ \w+ | # identifier
+ \S # other characters
+ ''', re.VERBOSE | re.DOTALL)
+ # 1. find the nearest identifier that comes before an unclosed
+ # parenthesis before the cursor
+ # e.g. for "foo (1+bar(x), pa<cursor>,a=1)", the candidate is "foo"
+ tokens = regexp.findall(self.text_until_cursor)
+ tokens.reverse()
+ iterTokens = iter(tokens); openPar = 0
+
+ for token in iterTokens:
+ if token == ')':
+ openPar -= 1
+ elif token == '(':
+ openPar += 1
+ if openPar > 0:
+ # found the last unclosed parenthesis
+ break
+ else:
+ return []
+ # 2. Concatenate dotted names ("foo.bar" for "foo.bar(x, pa" )
+ ids = []
+ isId = re.compile(r'\w+$').match
+
+ while True:
+ try:
+ ids.append(next(iterTokens))
+ if not isId(ids[-1]):
+ ids.pop(); break
+ if not next(iterTokens) == '.':
+ break
+ except StopIteration:
+ break
+ # lookup the candidate callable matches either using global_matches
+ # or attr_matches for dotted names
+ if len(ids) == 1:
+ callableMatches = self.global_matches(ids[0])
+ else:
+ callableMatches = self.attr_matches('.'.join(ids[::-1]))
+ argMatches = []
+ for callableMatch in callableMatches:
+ try:
+ namedArgs = self._default_arguments(eval(callableMatch,
+ self.namespace))
+ except:
+ continue
+
+ for namedArg in namedArgs:
+ if namedArg.startswith(text):
argMatches.append(u"%s=" %namedArg)
- return argMatches
-
- def dict_key_matches(self, text):
- "Match string keys in a dictionary, after e.g. 'foo[' "
- def get_keys(obj):
+ return argMatches
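
# Illustrative sketch: the backwards token scan that python_func_kw_matches()
# above performs to find the callable whose argument list is still open; for
# this example buffer the candidate is 'foo', whose keyword arguments are then
# offered as completions.
import re
regexp = re.compile(r'''
    '.*?(?<!\\)' |   # single quoted strings or
    ".*?(?<!\\)" |   # double quoted strings or
    \w+          |   # identifier
    \S               # other characters
    ''', re.VERBOSE | re.DOTALL)
tokens = regexp.findall("foo(1 + bar(x), pa")
tokens.reverse()
open_par, candidate = 0, None
it = iter(tokens)
for token in it:
    if token == ')':
        open_par -= 1
    elif token == '(':
        open_par += 1
        if open_par > 0:
            candidate = next(it)     # 'foo' -- the last unclosed call
            break
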
+
+ def dict_key_matches(self, text):
+ "Match string keys in a dictionary, after e.g. 'foo[' "
+ def get_keys(obj):
# Objects can define their own completions by defining an
            # _ipython_key_completions_() method.
method = get_real_method(obj, '_ipython_key_completions_')
@@ -909,289 +909,289 @@ class IPCompleter(Completer):
return method()
# Special case some common in-memory dict-like types
- if isinstance(obj, dict) or\
- _safe_isinstance(obj, 'pandas', 'DataFrame'):
- try:
- return list(obj.keys())
- except Exception:
- return []
- elif _safe_isinstance(obj, 'numpy', 'ndarray') or\
- _safe_isinstance(obj, 'numpy', 'void'):
- return obj.dtype.names or []
- return []
-
- try:
- regexps = self.__dict_key_regexps
- except AttributeError:
- dict_key_re_fmt = r'''(?x)
- ( # match dict-referring expression wrt greedy setting
- %s
- )
- \[ # open bracket
- \s* # and optional whitespace
- ([uUbB]? # string prefix (r not handled)
- (?: # unclosed string
- '(?:[^']|(?<!\\)\\')*
- |
- "(?:[^"]|(?<!\\)\\")*
- )
- )?
- $
- '''
- regexps = self.__dict_key_regexps = {
- False: re.compile(dict_key_re_fmt % '''
- # identifiers separated by .
- (?!\d)\w+
- (?:\.(?!\d)\w+)*
- '''),
- True: re.compile(dict_key_re_fmt % '''
- .+
- ''')
- }
-
- match = regexps[self.greedy].search(self.text_until_cursor)
- if match is None:
- return []
-
- expr, prefix = match.groups()
- try:
- obj = eval(expr, self.namespace)
- except Exception:
- try:
- obj = eval(expr, self.global_namespace)
- except Exception:
- return []
-
- keys = get_keys(obj)
- if not keys:
- return keys
- closing_quote, token_offset, matches = match_dict_keys(keys, prefix, self.splitter.delims)
- if not matches:
- return matches
-
- # get the cursor position of
- # - the text being completed
- # - the start of the key text
- # - the start of the completion
- text_start = len(self.text_until_cursor) - len(text)
- if prefix:
- key_start = match.start(2)
- completion_start = key_start + token_offset
- else:
- key_start = completion_start = match.end()
-
- # grab the leading prefix, to make sure all completions start with `text`
- if text_start > key_start:
- leading = ''
- else:
- leading = text[text_start:completion_start]
-
- # the index of the `[` character
- bracket_idx = match.end(1)
-
- # append closing quote and bracket as appropriate
- # this is *not* appropriate if the opening quote or bracket is outside
- # the text given to this method
- suf = ''
- continuation = self.line_buffer[len(self.text_until_cursor):]
- if key_start > text_start and closing_quote:
- # quotes were opened inside text, maybe close them
- if continuation.startswith(closing_quote):
- continuation = continuation[len(closing_quote):]
- else:
- suf += closing_quote
- if bracket_idx > text_start:
- # brackets were opened inside text, maybe close them
- if not continuation.startswith(']'):
- suf += ']'
-
- return [leading + k + suf for k in matches]
-
- def unicode_name_matches(self, text):
-        u"""Match Latex-like syntax for unicode characters based
- on the name of the character.
-
- This does \\GREEK SMALL LETTER ETA -> η
-
-        Works only on valid python 3 identifiers, or on combining characters that
- will combine to form a valid identifier.
-
- Used on Python 3 only.
- """
- slashpos = text.rfind('\\')
- if slashpos > -1:
- s = text[slashpos+1:]
- try :
- unic = unicodedata.lookup(s)
- # allow combining chars
- if ('a'+unic).isidentifier():
- return '\\'+s,[unic]
+ if isinstance(obj, dict) or\
+ _safe_isinstance(obj, 'pandas', 'DataFrame'):
+ try:
+ return list(obj.keys())
+ except Exception:
+ return []
+ elif _safe_isinstance(obj, 'numpy', 'ndarray') or\
+ _safe_isinstance(obj, 'numpy', 'void'):
+ return obj.dtype.names or []
+ return []
+
+ try:
+ regexps = self.__dict_key_regexps
+ except AttributeError:
+ dict_key_re_fmt = r'''(?x)
+ ( # match dict-referring expression wrt greedy setting
+ %s
+ )
+ \[ # open bracket
+ \s* # and optional whitespace
+ ([uUbB]? # string prefix (r not handled)
+ (?: # unclosed string
+ '(?:[^']|(?<!\\)\\')*
+ |
+ "(?:[^"]|(?<!\\)\\")*
+ )
+ )?
+ $
+ '''
+ regexps = self.__dict_key_regexps = {
+ False: re.compile(dict_key_re_fmt % '''
+ # identifiers separated by .
+ (?!\d)\w+
+ (?:\.(?!\d)\w+)*
+ '''),
+ True: re.compile(dict_key_re_fmt % '''
+ .+
+ ''')
+ }
+
+ match = regexps[self.greedy].search(self.text_until_cursor)
+ if match is None:
+ return []
+
+ expr, prefix = match.groups()
+ try:
+ obj = eval(expr, self.namespace)
+ except Exception:
+ try:
+ obj = eval(expr, self.global_namespace)
+ except Exception:
+ return []
+
+ keys = get_keys(obj)
+ if not keys:
+ return keys
+ closing_quote, token_offset, matches = match_dict_keys(keys, prefix, self.splitter.delims)
+ if not matches:
+ return matches
+
+ # get the cursor position of
+ # - the text being completed
+ # - the start of the key text
+ # - the start of the completion
+ text_start = len(self.text_until_cursor) - len(text)
+ if prefix:
+ key_start = match.start(2)
+ completion_start = key_start + token_offset
+ else:
+ key_start = completion_start = match.end()
+
+ # grab the leading prefix, to make sure all completions start with `text`
+ if text_start > key_start:
+ leading = ''
+ else:
+ leading = text[text_start:completion_start]
+
+ # the index of the `[` character
+ bracket_idx = match.end(1)
+
+ # append closing quote and bracket as appropriate
+ # this is *not* appropriate if the opening quote or bracket is outside
+ # the text given to this method
+ suf = ''
+ continuation = self.line_buffer[len(self.text_until_cursor):]
+ if key_start > text_start and closing_quote:
+ # quotes were opened inside text, maybe close them
+ if continuation.startswith(closing_quote):
+ continuation = continuation[len(closing_quote):]
+ else:
+ suf += closing_quote
+ if bracket_idx > text_start:
+ # brackets were opened inside text, maybe close them
+ if not continuation.startswith(']'):
+ suf += ']'
+
+ return [leading + k + suf for k in matches]
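
# Illustrative sketch: get_keys() above first looks for an
# _ipython_key_completions_() method, so arbitrary containers (this one is
# hypothetical) can advertise their own key completions for ``obj[<tab>``.
class Settings(object):
    def __init__(self):
        self._data = {'host': 'localhost', 'port': 8080}
    def __getitem__(self, key):
        return self._data[key]
    def _ipython_key_completions_(self):
        return list(self._data)
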
+
+ def unicode_name_matches(self, text):
+        u"""Match Latex-like syntax for unicode characters based
+ on the name of the character.
+
+ This does \\GREEK SMALL LETTER ETA -> η
+
+        Works only on valid python 3 identifiers, or on combining characters that
+ will combine to form a valid identifier.
+
+ Used on Python 3 only.
+ """
+ slashpos = text.rfind('\\')
+ if slashpos > -1:
+ s = text[slashpos+1:]
+ try :
+ unic = unicodedata.lookup(s)
+ # allow combining chars
+ if ('a'+unic).isidentifier():
+ return '\\'+s,[unic]
except KeyError:
- pass
- return u'', []
-
-
-
-
- def latex_matches(self, text):
- u"""Match Latex syntax for unicode characters.
-
- This does both \\alp -> \\alpha and \\alpha -> α
-
- Used on Python 3 only.
- """
- slashpos = text.rfind('\\')
- if slashpos > -1:
- s = text[slashpos:]
- if s in latex_symbols:
- # Try to complete a full latex symbol to unicode
- # \\alpha -> α
- return s, [latex_symbols[s]]
- else:
- # If a user has partially typed a latex symbol, give them
- # a full list of options \al -> [\aleph, \alpha]
- matches = [k for k in latex_symbols if k.startswith(s)]
- return s, matches
- return u'', []
-
- def dispatch_custom_completer(self, text):
+ pass
+ return u'', []
+
+
+
+
+ def latex_matches(self, text):
+ u"""Match Latex syntax for unicode characters.
+
+ This does both \\alp -> \\alpha and \\alpha -> α
+
+ Used on Python 3 only.
+ """
+ slashpos = text.rfind('\\')
+ if slashpos > -1:
+ s = text[slashpos:]
+ if s in latex_symbols:
+ # Try to complete a full latex symbol to unicode
+ # \\alpha -> α
+ return s, [latex_symbols[s]]
+ else:
+ # If a user has partially typed a latex symbol, give them
+ # a full list of options \al -> [\aleph, \alpha]
+ matches = [k for k in latex_symbols if k.startswith(s)]
+ return s, matches
+ return u'', []
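
# Illustrative sketch of the forward direction documented above, assuming the
# latex_symbols table maps '\\alpha' to the Greek letter:
#   latex_matches(u'\\alpha') -> (u'\\alpha', [u'\u03b1'])        # full symbol
#   latex_matches(u'\\al')    -> (u'\\al', [u'\\aleph', u'\\alpha', ...])
import unicodedata
unicodedata.lookup('GREEK SMALL LETTER ETA')   # u'\u03b7', the completion that
                                               # unicode_name_matches() returns for
                                               # u'\\GREEK SMALL LETTER ETA'
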
+
+ def dispatch_custom_completer(self, text):
if not self.custom_completers:
return
- line = self.line_buffer
- if not line.strip():
- return None
-
- # Create a little structure to pass all the relevant information about
- # the current completion to any custom completer.
- event = Bunch()
- event.line = line
- event.symbol = text
- cmd = line.split(None,1)[0]
- event.command = cmd
- event.text_until_cursor = self.text_until_cursor
-
- # for foo etc, try also to find completer for %foo
- if not cmd.startswith(self.magic_escape):
- try_magic = self.custom_completers.s_matches(
- self.magic_escape + cmd)
- else:
- try_magic = []
-
- for c in itertools.chain(self.custom_completers.s_matches(cmd),
- try_magic,
- self.custom_completers.flat_matches(self.text_until_cursor)):
- try:
- res = c(event)
- if res:
- # first, try case sensitive match
+ line = self.line_buffer
+ if not line.strip():
+ return None
+
+ # Create a little structure to pass all the relevant information about
+ # the current completion to any custom completer.
+ event = Bunch()
+ event.line = line
+ event.symbol = text
+ cmd = line.split(None,1)[0]
+ event.command = cmd
+ event.text_until_cursor = self.text_until_cursor
+
+ # for foo etc, try also to find completer for %foo
+ if not cmd.startswith(self.magic_escape):
+ try_magic = self.custom_completers.s_matches(
+ self.magic_escape + cmd)
+ else:
+ try_magic = []
+
+ for c in itertools.chain(self.custom_completers.s_matches(cmd),
+ try_magic,
+ self.custom_completers.flat_matches(self.text_until_cursor)):
+ try:
+ res = c(event)
+ if res:
+ # first, try case sensitive match
withcase = [cast_unicode_py2(r) for r in res if r.startswith(text)]
- if withcase:
- return withcase
- # if none, then case insensitive ones are ok too
- text_low = text.lower()
+ if withcase:
+ return withcase
+ # if none, then case insensitive ones are ok too
+ text_low = text.lower()
return [cast_unicode_py2(r) for r in res if r.lower().startswith(text_low)]
- except TryNext:
- pass
+ except TryNext:
+ pass
except KeyboardInterrupt:
"""
                If a custom completer takes too long,
let keyboard interrupt abort and return nothing.
"""
break
-
- return None
-
- def complete(self, text=None, line_buffer=None, cursor_pos=None):
- """Find completions for the given text and line context.
-
- Note that both the text and the line_buffer are optional, but at least
- one of them must be given.
-
- Parameters
- ----------
- text : string, optional
- Text to perform the completion on. If not given, the line buffer
- is split using the instance's CompletionSplitter object.
-
- line_buffer : string, optional
- If not given, the completer attempts to obtain the current line
- buffer via readline. This keyword allows clients which are
- requesting for text completions in non-readline contexts to inform
- the completer of the entire text.
-
- cursor_pos : int, optional
- Index of the cursor in the full line buffer. Should be provided by
- remote frontends where kernel has no access to frontend state.
-
- Returns
- -------
- text : str
- Text that was actually used in the completion.
-
- matches : list
- A list of completion matches.
- """
- # if the cursor position isn't given, the only sane assumption we can
- # make is that it's at the end of the line (the common case)
- if cursor_pos is None:
- cursor_pos = len(line_buffer) if text is None else len(text)
-
+
+ return None
+
+ def complete(self, text=None, line_buffer=None, cursor_pos=None):
+ """Find completions for the given text and line context.
+
+ Note that both the text and the line_buffer are optional, but at least
+ one of them must be given.
+
+ Parameters
+ ----------
+ text : string, optional
+ Text to perform the completion on. If not given, the line buffer
+ is split using the instance's CompletionSplitter object.
+
+ line_buffer : string, optional
+ If not given, the completer attempts to obtain the current line
+ buffer via readline. This keyword allows clients which are
+ requesting for text completions in non-readline contexts to inform
+ the completer of the entire text.
+
+ cursor_pos : int, optional
+ Index of the cursor in the full line buffer. Should be provided by
+ remote frontends where kernel has no access to frontend state.
+
+ Returns
+ -------
+ text : str
+ Text that was actually used in the completion.
+
+ matches : list
+ A list of completion matches.
+ """
+ # if the cursor position isn't given, the only sane assumption we can
+ # make is that it's at the end of the line (the common case)
+ if cursor_pos is None:
+ cursor_pos = len(line_buffer) if text is None else len(text)
+
if self.use_main_ns:
self.namespace = __main__.__dict__
-
+
if PY3 and self.backslash_combining_completions:
- base_text = text if not line_buffer else line_buffer[:cursor_pos]
- latex_text, latex_matches = self.latex_matches(base_text)
- if latex_matches:
+ base_text = text if not line_buffer else line_buffer[:cursor_pos]
+ latex_text, latex_matches = self.latex_matches(base_text)
+ if latex_matches:
return latex_text, latex_matches
- name_text = ''
- name_matches = []
- for meth in (self.unicode_name_matches, back_latex_name_matches, back_unicode_name_matches):
- name_text, name_matches = meth(base_text)
- if name_text:
+ name_text = ''
+ name_matches = []
+ for meth in (self.unicode_name_matches, back_latex_name_matches, back_unicode_name_matches):
+ name_text, name_matches = meth(base_text)
+ if name_text:
return name_text, name_matches[:MATCHES_LIMIT]
-
- # if text is either None or an empty string, rely on the line buffer
- if not text:
- text = self.splitter.split_line(line_buffer, cursor_pos)
-
- # If no line buffer is given, assume the input text is all there was
- if line_buffer is None:
- line_buffer = text
-
- self.line_buffer = line_buffer
- self.text_until_cursor = self.line_buffer[:cursor_pos]
-
- # Start with a clean slate of completions
- self.matches[:] = []
- custom_res = self.dispatch_custom_completer(text)
- if custom_res is not None:
- # did custom completers produce something?
- self.matches = custom_res
- else:
- # Extend the list of completions with the results of each
- # matcher, so we return results to the user from all
- # namespaces.
- if self.merge_completions:
- self.matches = []
- for matcher in self.matchers:
- try:
- self.matches.extend(matcher(text))
- except:
- # Show the ugly traceback if the matcher causes an
- # exception, but do NOT crash the kernel!
- sys.excepthook(*sys.exc_info())
- else:
- for matcher in self.matchers:
- self.matches = matcher(text)
- if self.matches:
- break
- # FIXME: we should extend our api to return a dict with completions for
- # different types of objects. The rlcomplete() method could then
- # simply collapse the dict into a list for readline, but we'd have
-        # richer completion semantics in other environments.
+
+ # if text is either None or an empty string, rely on the line buffer
+ if not text:
+ text = self.splitter.split_line(line_buffer, cursor_pos)
+
+ # If no line buffer is given, assume the input text is all there was
+ if line_buffer is None:
+ line_buffer = text
+
+ self.line_buffer = line_buffer
+ self.text_until_cursor = self.line_buffer[:cursor_pos]
+
+ # Start with a clean slate of completions
+ self.matches[:] = []
+ custom_res = self.dispatch_custom_completer(text)
+ if custom_res is not None:
+ # did custom completers produce something?
+ self.matches = custom_res
+ else:
+ # Extend the list of completions with the results of each
+ # matcher, so we return results to the user from all
+ # namespaces.
+ if self.merge_completions:
+ self.matches = []
+ for matcher in self.matchers:
+ try:
+ self.matches.extend(matcher(text))
+ except:
+ # Show the ugly traceback if the matcher causes an
+ # exception, but do NOT crash the kernel!
+ sys.excepthook(*sys.exc_info())
+ else:
+ for matcher in self.matchers:
+ self.matches = matcher(text)
+ if self.matches:
+ break
+ # FIXME: we should extend our api to return a dict with completions for
+ # different types of objects. The rlcomplete() method could then
+ # simply collapse the dict into a list for readline, but we'd have
+        # richer completion semantics in other environments.
self.matches = sorted(set(self.matches), key=completions_sorting_key)[:MATCHES_LIMIT]
-
- return text, self.matches
+
+ return text, self.matches
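
# Illustrative sketch: complete() above is the public entry point. Inside a
# running IPython session it returns the fragment that was actually completed
# plus the merged, sorted and capped match list from all registered matchers.
from IPython import get_ipython
ip = get_ipython()
if ip is not None:    # only meaningful inside an IPython session
    text, matches = ip.Completer.complete(line_buffer='import o', cursor_pos=8)
    # text == u'o'; matches typically include module names such as u'os'
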
diff --git a/contrib/python/ipython/py2/IPython/core/completerlib.py b/contrib/python/ipython/py2/IPython/core/completerlib.py
index 4e7ae41941..e736ca73d1 100644
--- a/contrib/python/ipython/py2/IPython/core/completerlib.py
+++ b/contrib/python/ipython/py2/IPython/core/completerlib.py
@@ -1,76 +1,76 @@
-# encoding: utf-8
-"""Implementations for various useful completers.
-
-These are all loaded by default by IPython.
-"""
-#-----------------------------------------------------------------------------
-# Copyright (C) 2010-2011 The IPython Development Team.
-#
-# Distributed under the terms of the BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-from __future__ import print_function
-
-# Stdlib imports
-import glob
-import inspect
+# encoding: utf-8
+"""Implementations for various useful completers.
+
+These are all loaded by default by IPython.
+"""
+#-----------------------------------------------------------------------------
+# Copyright (C) 2010-2011 The IPython Development Team.
+#
+# Distributed under the terms of the BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+from __future__ import print_function
+
+# Stdlib imports
+import glob
+import inspect
import itertools
-import os
-import re
-import sys
-
-try:
- # Python >= 3.3
- from importlib.machinery import all_suffixes
- _suffixes = all_suffixes()
-except ImportError:
- from imp import get_suffixes
- _suffixes = [ s[0] for s in get_suffixes() ]
-
-# Third-party imports
-from time import time
-from zipimport import zipimporter
-
-# Our own imports
-from IPython.core.completer import expand_user, compress_user
-from IPython.core.error import TryNext
-from IPython.utils._process_common import arg_split
-from IPython.utils.py3compat import string_types
-
-# FIXME: this should be pulled in with the right call via the component system
-from IPython import get_ipython
-
+import os
+import re
+import sys
+
+try:
+ # Python >= 3.3
+ from importlib.machinery import all_suffixes
+ _suffixes = all_suffixes()
+except ImportError:
+ from imp import get_suffixes
+ _suffixes = [ s[0] for s in get_suffixes() ]
+
+# Third-party imports
+from time import time
+from zipimport import zipimporter
+
+# Our own imports
+from IPython.core.completer import expand_user, compress_user
+from IPython.core.error import TryNext
+from IPython.utils._process_common import arg_split
+from IPython.utils.py3compat import string_types
+
+# FIXME: this should be pulled in with the right call via the component system
+from IPython import get_ipython
+
from __res import importer
-#-----------------------------------------------------------------------------
-# Globals and constants
-#-----------------------------------------------------------------------------
-
-# Time in seconds after which the rootmodules will be stored permanently in the
-# ipython ip.db database (kept in the user's .ipython dir).
-TIMEOUT_STORAGE = 2
-
-# Time in seconds after which we give up
-TIMEOUT_GIVEUP = 20
-
-# Regular expression for the python import statement
-import_re = re.compile(r'(?P<name>[a-zA-Z_][a-zA-Z0-9_]*?)'
- r'(?P<package>[/\\]__init__)?'
- r'(?P<suffix>%s)$' %
- r'|'.join(re.escape(s) for s in _suffixes))
-
-# RE for the ipython %run command (python + ipython scripts)
-magic_run_re = re.compile(r'.*(\.ipy|\.ipynb|\.py[w]?)$')
-
-#-----------------------------------------------------------------------------
-# Local utilities
-#-----------------------------------------------------------------------------
-
+#-----------------------------------------------------------------------------
+# Globals and constants
+#-----------------------------------------------------------------------------
+
+# Time in seconds after which the rootmodules will be stored permanently in the
+# ipython ip.db database (kept in the user's .ipython dir).
+TIMEOUT_STORAGE = 2
+
+# Time in seconds after which we give up
+TIMEOUT_GIVEUP = 20
+
+# Regular expression for the python import statement
+import_re = re.compile(r'(?P<name>[a-zA-Z_][a-zA-Z0-9_]*?)'
+ r'(?P<package>[/\\]__init__)?'
+ r'(?P<suffix>%s)$' %
+ r'|'.join(re.escape(s) for s in _suffixes))
+
+# RE for the ipython %run command (python + ipython scripts)
+magic_run_re = re.compile(r'.*(\.ipy|\.ipynb|\.py[w]?)$')
+
+#-----------------------------------------------------------------------------
+# Local utilities
+#-----------------------------------------------------------------------------
+
arcadia_rootmodules_cache = None
arcadia_modules_cache = None
@@ -116,291 +116,291 @@ def arcadia_get_root_modules():
-def module_list(path):
- """
- Return the list containing the names of the modules available in the given
- folder.
- """
- # sys.path has the cwd as an empty string, but isdir/listdir need it as '.'
- if path == '':
- path = '.'
-
- # A few local constants to be used in loops below
- pjoin = os.path.join
-
- if os.path.isdir(path):
- # Build a list of all files in the directory and all files
- # in its subdirectories. For performance reasons, do not
- # recurse more than one level into subdirectories.
- files = []
- for root, dirs, nondirs in os.walk(path, followlinks=True):
- subdir = root[len(path)+1:]
- if subdir:
- files.extend(pjoin(subdir, f) for f in nondirs)
- dirs[:] = [] # Do not recurse into additional subdirectories.
- else:
- files.extend(nondirs)
-
- else:
- try:
- files = list(zipimporter(path)._files.keys())
- except:
- files = []
-
- # Build a list of modules which match the import_re regex.
- modules = []
- for f in files:
- m = import_re.match(f)
- if m:
- modules.append(m.group('name'))
- return list(set(modules))
-
-
-def get_root_modules():
- """
- Returns a list containing the names of all the modules available in the
- folders of the pythonpath.
-
- ip.db['rootmodules_cache'] maps sys.path entries to list of modules.
- """
- ip = get_ipython()
+def module_list(path):
+ """
+ Return the list containing the names of the modules available in the given
+ folder.
+ """
+ # sys.path has the cwd as an empty string, but isdir/listdir need it as '.'
+ if path == '':
+ path = '.'
+
+ # A few local constants to be used in loops below
+ pjoin = os.path.join
+
+ if os.path.isdir(path):
+ # Build a list of all files in the directory and all files
+ # in its subdirectories. For performance reasons, do not
+ # recurse more than one level into subdirectories.
+ files = []
+ for root, dirs, nondirs in os.walk(path, followlinks=True):
+ subdir = root[len(path)+1:]
+ if subdir:
+ files.extend(pjoin(subdir, f) for f in nondirs)
+ dirs[:] = [] # Do not recurse into additional subdirectories.
+ else:
+ files.extend(nondirs)
+
+ else:
+ try:
+ files = list(zipimporter(path)._files.keys())
+ except:
+ files = []
+
+ # Build a list of modules which match the import_re regex.
+ modules = []
+ for f in files:
+ m = import_re.match(f)
+ if m:
+ modules.append(m.group('name'))
+ return list(set(modules))
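
# Illustrative sketch: how module_list() above turns file names into importable
# module names via import_re. The suffix list here is a hypothetical subset of
# importlib.machinery.all_suffixes().
import re
_suffixes = ['.py', '.pyc', '.so']
import_re = re.compile(r'(?P<name>[a-zA-Z_][a-zA-Z0-9_]*?)'
                       r'(?P<package>[/\\]__init__)?'
                       r'(?P<suffix>%s)$' %
                       r'|'.join(re.escape(s) for s in _suffixes))
import_re.match('spam.py').group('name')            # 'spam'  -- top-level module
import_re.match('eggs/__init__.py').group('name')   # 'eggs'  -- package directory
import_re.match('eggs/utils.py')                    # None    -- nested modules skipped
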
+
+
+def get_root_modules():
+ """
+ Returns a list containing the names of all the modules available in the
+ folders of the pythonpath.
+
+ ip.db['rootmodules_cache'] maps sys.path entries to list of modules.
+ """
+ ip = get_ipython()
if ip is None:
# No global shell instance to store cached list of modules.
# Don't try to scan for modules every time.
return list(sys.builtin_module_names)
- rootmodules_cache = ip.db.get('rootmodules_cache', {})
- rootmodules = list(sys.builtin_module_names)
- start_time = time()
- store = False
- for path in sys.path:
- try:
- modules = rootmodules_cache[path]
- except KeyError:
- modules = module_list(path)
- try:
- modules.remove('__init__')
- except ValueError:
- pass
- if path not in ('', '.'): # cwd modules should not be cached
- rootmodules_cache[path] = modules
- if time() - start_time > TIMEOUT_STORAGE and not store:
- store = True
- print("\nCaching the list of root modules, please wait!")
- print("(This will only be done once - type '%rehashx' to "
- "reset cache!)\n")
- sys.stdout.flush()
- if time() - start_time > TIMEOUT_GIVEUP:
- print("This is taking too long, we give up.\n")
- return []
- rootmodules.extend(modules)
- if store:
- ip.db['rootmodules_cache'] = rootmodules_cache
+ rootmodules_cache = ip.db.get('rootmodules_cache', {})
+ rootmodules = list(sys.builtin_module_names)
+ start_time = time()
+ store = False
+ for path in sys.path:
+ try:
+ modules = rootmodules_cache[path]
+ except KeyError:
+ modules = module_list(path)
+ try:
+ modules.remove('__init__')
+ except ValueError:
+ pass
+ if path not in ('', '.'): # cwd modules should not be cached
+ rootmodules_cache[path] = modules
+ if time() - start_time > TIMEOUT_STORAGE and not store:
+ store = True
+ print("\nCaching the list of root modules, please wait!")
+ print("(This will only be done once - type '%rehashx' to "
+ "reset cache!)\n")
+ sys.stdout.flush()
+ if time() - start_time > TIMEOUT_GIVEUP:
+ print("This is taking too long, we give up.\n")
+ return []
+ rootmodules.extend(modules)
+ if store:
+ ip.db['rootmodules_cache'] = rootmodules_cache
rootmodules = list(set(rootmodules))
- return rootmodules
-
-
-def is_importable(module, attr, only_modules):
- if only_modules:
- return inspect.ismodule(getattr(module, attr))
- else:
- return not(attr[:2] == '__' and attr[-2:] == '__')
-
-def try_import(mod, only_modules=False):
+ return rootmodules
+
+
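
get_root_modules above reduces to a simple pattern: scan each sys.path entry once, remember the result in ip.db['rootmodules_cache'] keyed by path, never cache '' or '.' (their contents change with the working directory), and give up entirely if scanning takes too long. A minimal, self-contained sketch of that pattern follows; cached_root_modules and scan_path_entry are illustrative names, and TIMEOUT_GIVEUP stands in for the constant defined elsewhere in completerlib.py::

    import os
    import sys
    from time import time

    TIMEOUT_GIVEUP = 20  # assumed value; the real constant lives earlier in completerlib.py

    def scan_path_entry(path):
        """Rough stand-in for module_list(): top-level *.py files only."""
        path = path or '.'
        if not os.path.isdir(path):
            return []
        return [f[:-3] for f in os.listdir(path) if f.endswith('.py')]

    def cached_root_modules(paths, cache):
        """Collect module names for every entry in `paths`, reusing `cache` (path -> [names])."""
        rootmodules = list(sys.builtin_module_names)
        start = time()
        for path in paths:
            try:
                modules = cache[path]
            except KeyError:
                modules = scan_path_entry(path)
                if path not in ('', '.'):   # cwd modules should not be cached
                    cache[path] = modules
                if time() - start > TIMEOUT_GIVEUP:
                    return []               # taking too long: give up, as the code above does
            rootmodules.extend(modules)
        return sorted(set(rootmodules))

    cache = {}
    cached_root_modules(sys.path, cache)  # first call populates the cache
    cached_root_modules(sys.path, cache)  # second call is served from the cache
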
+def is_importable(module, attr, only_modules):
+ if only_modules:
+ return inspect.ismodule(getattr(module, attr))
+ else:
+ return not(attr[:2] == '__' and attr[-2:] == '__')
+
+def try_import(mod, only_modules=False):
mod = mod.rstrip('.')
- try:
- m = __import__(mod)
- except:
- return []
- mods = mod.split('.')
- for module in mods[1:]:
- m = getattr(m, module)
-
+ try:
+ m = __import__(mod)
+ except:
+ return []
+ mods = mod.split('.')
+ for module in mods[1:]:
+ m = getattr(m, module)
+
filename = getattr(m, '__file__', '')
m_is_init = '__init__' in (filename or '') or filename == mod
-
- completions = []
- if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init:
- completions.extend( [attr for attr in dir(m) if
- is_importable(m, attr, only_modules)])
-
- completions.extend(getattr(m, '__all__', []))
- if m_is_init:
+
+ completions = []
+ if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init:
+ completions.extend( [attr for attr in dir(m) if
+ is_importable(m, attr, only_modules)])
+
+ completions.extend(getattr(m, '__all__', []))
+ if m_is_init:
completions.extend(arcadia_module_list(mod))
completions = {c for c in completions if isinstance(c, string_types)}
completions.discard('__init__')
return sorted(completions)
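
try_import builds its candidates from two sources: dir(m) filtered through is_importable (keep only submodules when only_modules is set, otherwise drop __dunder__ names), plus whatever the module lists in __all__. A standalone sketch of that filtering; importable_attrs is an illustrative name and json is used purely as an example target::

    import inspect
    import json

    def importable_attrs(module, only_modules=False):
        """Names worth offering as completions on `module`, per the is_importable rule above."""
        names = []
        for attr in dir(module):
            if only_modules:
                keep = inspect.ismodule(getattr(module, attr, None))
            else:
                keep = not (attr.startswith('__') and attr.endswith('__'))
            if keep:
                names.append(attr)
        names.extend(getattr(module, '__all__', []))   # honour an explicit __all__ as well
        return sorted(set(names))

    print(importable_attrs(json, only_modules=True))   # submodules such as 'decoder', 'encoder'
    print(importable_attrs(json)[:5])                  # any non-dunder attribute otherwise
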
-
-
-#-----------------------------------------------------------------------------
-# Completion-related functions.
-#-----------------------------------------------------------------------------
-
-def quick_completer(cmd, completions):
- """ Easily create a trivial completer for a command.
-
- Takes either a list of completions, or all completions in string (that will
- be split on whitespace).
-
- Example::
-
- [d:\ipython]|1> import ipy_completers
- [d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz'])
- [d:\ipython]|3> foo b<TAB>
- bar baz
- [d:\ipython]|3> foo ba
- """
-
- if isinstance(completions, string_types):
- completions = completions.split()
-
- def do_complete(self, event):
- return completions
-
- get_ipython().set_hook('complete_command',do_complete, str_key = cmd)
-
-def module_completion(line):
- """
- Returns a list containing the completion possibilities for an import line.
-
- The line looks like this :
- 'import xml.d'
- 'from xml.dom import'
- """
-
- words = line.split(' ')
- nwords = len(words)
-
- # from whatever <tab> -> 'import '
- if nwords == 3 and words[0] == 'from':
- return ['import ']
-
- # 'from xy<tab>' or 'import xy<tab>'
- if nwords < 3 and (words[0] in {'%aimport', 'import', 'from'}) :
- if nwords == 1:
+
+
+#-----------------------------------------------------------------------------
+# Completion-related functions.
+#-----------------------------------------------------------------------------
+
+def quick_completer(cmd, completions):
+ """ Easily create a trivial completer for a command.
+
+    Takes either a list of completions, or all completions as a single string
+    (which will be split on whitespace).
+
+ Example::
+
+ [d:\ipython]|1> import ipy_completers
+ [d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz'])
+ [d:\ipython]|3> foo b<TAB>
+ bar baz
+ [d:\ipython]|3> foo ba
+ """
+
+ if isinstance(completions, string_types):
+ completions = completions.split()
+
+ def do_complete(self, event):
+ return completions
+
+ get_ipython().set_hook('complete_command',do_complete, str_key = cmd)
+
+def module_completion(line):
+ """
+ Returns a list containing the completion possibilities for an import line.
+
+    The line looks like this:
+ 'import xml.d'
+ 'from xml.dom import'
+ """
+
+ words = line.split(' ')
+ nwords = len(words)
+
+ # from whatever <tab> -> 'import '
+ if nwords == 3 and words[0] == 'from':
+ return ['import ']
+
+ # 'from xy<tab>' or 'import xy<tab>'
+ if nwords < 3 and (words[0] in {'%aimport', 'import', 'from'}) :
+ if nwords == 1:
return arcadia_get_root_modules()
- mod = words[1].split('.')
- if len(mod) < 2:
+ mod = words[1].split('.')
+ if len(mod) < 2:
return arcadia_get_root_modules()
- completion_list = try_import('.'.join(mod[:-1]), True)
- return ['.'.join(mod[:-1] + [el]) for el in completion_list]
-
- # 'from xyz import abc<tab>'
- if nwords >= 3 and words[0] == 'from':
- mod = words[1]
- return try_import(mod)
-
-#-----------------------------------------------------------------------------
-# Completers
-#-----------------------------------------------------------------------------
-# These all have the func(self, event) signature to be used as custom
-# completers
-
-def module_completer(self,event):
- """Give completions after user has typed 'import ...' or 'from ...'"""
-
- # This works in all versions of python. While 2.5 has
- # pkgutil.walk_packages(), that particular routine is fairly dangerous,
- # since it imports *EVERYTHING* on sys.path. That is: a) very slow b) full
- # of possibly problematic side effects.
- # This search the folders in the sys.path for available modules.
-
- return module_completion(event.line)
-
-# FIXME: there's a lot of logic common to the run, cd and builtin file
-# completers, that is currently reimplemented in each.
-
-def magic_run_completer(self, event):
- """Complete files that end in .py or .ipy or .ipynb for the %run command.
- """
- comps = arg_split(event.line, strict=False)
- # relpath should be the current token that we need to complete.
- if (len(comps) > 1) and (not event.line.endswith(' ')):
- relpath = comps[-1].strip("'\"")
- else:
- relpath = ''
-
- #print("\nev=", event) # dbg
- #print("rp=", relpath) # dbg
- #print('comps=', comps) # dbg
-
- lglob = glob.glob
- isdir = os.path.isdir
- relpath, tilde_expand, tilde_val = expand_user(relpath)
-
- # Find if the user has already typed the first filename, after which we
- # should complete on all files, since after the first one other files may
- # be arguments to the input script.
-
- if any(magic_run_re.match(c) for c in comps):
- matches = [f.replace('\\','/') + ('/' if isdir(f) else '')
- for f in lglob(relpath+'*')]
- else:
- dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*') if isdir(f)]
- pys = [f.replace('\\','/')
- for f in lglob(relpath+'*.py') + lglob(relpath+'*.ipy') +
- lglob(relpath+'*.ipynb') + lglob(relpath + '*.pyw')]
-
- matches = dirs + pys
-
- #print('run comp:', dirs+pys) # dbg
- return [compress_user(p, tilde_expand, tilde_val) for p in matches]
-
-
-def cd_completer(self, event):
- """Completer function for cd, which only returns directories."""
- ip = get_ipython()
- relpath = event.symbol
-
- #print(event) # dbg
- if event.line.endswith('-b') or ' -b ' in event.line:
- # return only bookmark completions
- bkms = self.db.get('bookmarks', None)
- if bkms:
- return bkms.keys()
- else:
- return []
-
- if event.symbol == '-':
- width_dh = str(len(str(len(ip.user_ns['_dh']) + 1)))
- # jump in directory history by number
- fmt = '-%0' + width_dh +'d [%s]'
- ents = [ fmt % (i,s) for i,s in enumerate(ip.user_ns['_dh'])]
- if len(ents) > 1:
- return ents
- return []
-
- if event.symbol.startswith('--'):
- return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']]
-
- # Expand ~ in path and normalize directory separators.
- relpath, tilde_expand, tilde_val = expand_user(relpath)
- relpath = relpath.replace('\\','/')
-
- found = []
- for d in [f.replace('\\','/') + '/' for f in glob.glob(relpath+'*')
- if os.path.isdir(f)]:
- if ' ' in d:
- # we don't want to deal with any of that, complex code
- # for this is elsewhere
- raise TryNext
-
- found.append(d)
-
- if not found:
- if os.path.isdir(relpath):
- return [compress_user(relpath, tilde_expand, tilde_val)]
-
- # if no completions so far, try bookmarks
- bks = self.db.get('bookmarks',{})
- bkmatches = [s for s in bks if s.startswith(event.symbol)]
- if bkmatches:
- return bkmatches
-
- raise TryNext
-
- return [compress_user(p, tilde_expand, tilde_val) for p in found]
-
-def reset_completer(self, event):
- "A completer for %reset magic"
- return '-f -s in out array dhist'.split()
+ completion_list = try_import('.'.join(mod[:-1]), True)
+ return ['.'.join(mod[:-1] + [el]) for el in completion_list]
+
+ # 'from xyz import abc<tab>'
+ if nwords >= 3 and words[0] == 'from':
+ mod = words[1]
+ return try_import(mod)
+
+#-----------------------------------------------------------------------------
+# Completers
+#-----------------------------------------------------------------------------
+# These all have the func(self, event) signature to be used as custom
+# completers
+
+def module_completer(self,event):
+ """Give completions after user has typed 'import ...' or 'from ...'"""
+
+ # This works in all versions of python. While 2.5 has
+ # pkgutil.walk_packages(), that particular routine is fairly dangerous,
+ # since it imports *EVERYTHING* on sys.path. That is: a) very slow b) full
+ # of possibly problematic side effects.
+    # This searches the folders in sys.path for available modules.
+
+ return module_completion(event.line)
+
+# FIXME: there's a lot of logic common to the run, cd and builtin file
+# completers, that is currently reimplemented in each.
+
+def magic_run_completer(self, event):
+ """Complete files that end in .py or .ipy or .ipynb for the %run command.
+ """
+ comps = arg_split(event.line, strict=False)
+ # relpath should be the current token that we need to complete.
+ if (len(comps) > 1) and (not event.line.endswith(' ')):
+ relpath = comps[-1].strip("'\"")
+ else:
+ relpath = ''
+
+ #print("\nev=", event) # dbg
+ #print("rp=", relpath) # dbg
+ #print('comps=', comps) # dbg
+
+ lglob = glob.glob
+ isdir = os.path.isdir
+ relpath, tilde_expand, tilde_val = expand_user(relpath)
+
+ # Find if the user has already typed the first filename, after which we
+ # should complete on all files, since after the first one other files may
+ # be arguments to the input script.
+
+ if any(magic_run_re.match(c) for c in comps):
+ matches = [f.replace('\\','/') + ('/' if isdir(f) else '')
+ for f in lglob(relpath+'*')]
+ else:
+ dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*') if isdir(f)]
+ pys = [f.replace('\\','/')
+ for f in lglob(relpath+'*.py') + lglob(relpath+'*.ipy') +
+ lglob(relpath+'*.ipynb') + lglob(relpath + '*.pyw')]
+
+ matches = dirs + pys
+
+ #print('run comp:', dirs+pys) # dbg
+ return [compress_user(p, tilde_expand, tilde_val) for p in matches]
+
+
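
Before the first script argument is seen, magic_run_completer above only offers directories and files ending in .py, .ipy, .ipynb or .pyw, all found by globbing the current token; afterwards any file is fair game. The first branch, reduced to a standalone helper (list_run_candidates is a hypothetical name)::

    import glob
    import os

    def list_run_candidates(prefix):
        """Directories plus runnable scripts matching `prefix`, roughly as %run completes them."""
        dirs = [d.replace('\\', '/') + '/'
                for d in glob.glob(prefix + '*') if os.path.isdir(d)]
        scripts = [f.replace('\\', '/')
                   for pattern in ('*.py', '*.ipy', '*.ipynb', '*.pyw')
                   for f in glob.glob(prefix + pattern)]
        return dirs + scripts

    print(list_run_candidates(''))   # everything runnable in the current directory
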
+def cd_completer(self, event):
+ """Completer function for cd, which only returns directories."""
+ ip = get_ipython()
+ relpath = event.symbol
+
+ #print(event) # dbg
+ if event.line.endswith('-b') or ' -b ' in event.line:
+ # return only bookmark completions
+ bkms = self.db.get('bookmarks', None)
+ if bkms:
+ return bkms.keys()
+ else:
+ return []
+
+ if event.symbol == '-':
+ width_dh = str(len(str(len(ip.user_ns['_dh']) + 1)))
+ # jump in directory history by number
+ fmt = '-%0' + width_dh +'d [%s]'
+ ents = [ fmt % (i,s) for i,s in enumerate(ip.user_ns['_dh'])]
+ if len(ents) > 1:
+ return ents
+ return []
+
+ if event.symbol.startswith('--'):
+ return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']]
+
+ # Expand ~ in path and normalize directory separators.
+ relpath, tilde_expand, tilde_val = expand_user(relpath)
+ relpath = relpath.replace('\\','/')
+
+ found = []
+ for d in [f.replace('\\','/') + '/' for f in glob.glob(relpath+'*')
+ if os.path.isdir(f)]:
+ if ' ' in d:
+            # we don't want to deal with paths containing spaces here; the
+            # more complex code that handles them lives elsewhere
+ raise TryNext
+
+ found.append(d)
+
+ if not found:
+ if os.path.isdir(relpath):
+ return [compress_user(relpath, tilde_expand, tilde_val)]
+
+ # if no completions so far, try bookmarks
+ bks = self.db.get('bookmarks',{})
+ bkmatches = [s for s in bks if s.startswith(event.symbol)]
+ if bkmatches:
+ return bkmatches
+
+ raise TryNext
+
+ return [compress_user(p, tilde_expand, tilde_val) for p in found]
+
+def reset_completer(self, event):
+ "A completer for %reset magic"
+ return '-f -s in out array dhist'.split()
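
Every completer in this hunk has the func(self, event) signature and is attached through the 'complete_command' hook, exactly the way quick_completer registers its trivial callback. A hedged sketch of wiring a custom one up by hand inside an IPython session (git_completer and its subcommand list are purely illustrative)::

    from IPython import get_ipython

    def git_completer(self, event):
        """Offer a fixed, illustrative set of subcommands after 'git '."""
        subcommands = ['status', 'log', 'diff', 'checkout']
        return [c for c in subcommands if c.startswith(event.symbol)]

    ip = get_ipython()
    if ip is not None:   # get_ipython() returns None outside an IPython session
        # str_key ties the completer to command lines starting with 'git'.
        ip.set_hook('complete_command', git_completer, str_key='git')
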
diff --git a/contrib/python/ipython/py2/IPython/core/crashhandler.py b/contrib/python/ipython/py2/IPython/core/crashhandler.py
index 8341a61db8..22bbd7ae81 100644
--- a/contrib/python/ipython/py2/IPython/core/crashhandler.py
+++ b/contrib/python/ipython/py2/IPython/core/crashhandler.py
@@ -1,59 +1,59 @@
-# encoding: utf-8
-"""sys.excepthook for IPython itself, leaves a detailed report on disk.
-
-Authors:
-
-* Fernando Perez
-* Brian E. Granger
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu>
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-from __future__ import print_function
-
-import os
-import sys
-import traceback
-from pprint import pformat
-
-from IPython.core import ultratb
-from IPython.core.release import author_email
-from IPython.utils.sysinfo import sys_info
-from IPython.utils.py3compat import input, getcwd
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-# Template for the user message.
-_default_message_template = """\
-Oops, {app_name} crashed. We do our best to make it stable, but...
-
-A crash report was automatically generated with the following information:
- - A verbatim copy of the crash traceback.
- - A copy of your input history during this session.
- - Data on your current {app_name} configuration.
-
-It was left in the file named:
-\t'{crash_report_fname}'
-If you can email this file to the developers, the information in it will help
-them in understanding and correcting the problem.
-
-You can mail it to: {contact_name} at {contact_email}
-with the subject '{app_name} Crash Report'.
-
-If you want to do it now, the following command will work (under Unix):
-mail -s '{app_name} Crash Report' {contact_email} < {crash_report_fname}
-
+# encoding: utf-8
+"""sys.excepthook for IPython itself, leaves a detailed report on disk.
+
+Authors:
+
+* Fernando Perez
+* Brian E. Granger
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu>
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+from __future__ import print_function
+
+import os
+import sys
+import traceback
+from pprint import pformat
+
+from IPython.core import ultratb
+from IPython.core.release import author_email
+from IPython.utils.sysinfo import sys_info
+from IPython.utils.py3compat import input, getcwd
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+# Template for the user message.
+_default_message_template = """\
+Oops, {app_name} crashed. We do our best to make it stable, but...
+
+A crash report was automatically generated with the following information:
+ - A verbatim copy of the crash traceback.
+ - A copy of your input history during this session.
+ - Data on your current {app_name} configuration.
+
+It was left in the file named:
+\t'{crash_report_fname}'
+If you can email this file to the developers, the information in it will help
+them in understanding and correcting the problem.
+
+You can mail it to: {contact_name} at {contact_email}
+with the subject '{app_name} Crash Report'.
+
+If you want to do it now, the following command will work (under Unix):
+mail -s '{app_name} Crash Report' {contact_email} < {crash_report_fname}
+
In your email, please also include information about:
- The operating system under which the crash happened: Linux, macOS, Windows,
other, and which exact version (for example: Ubuntu 16.04.3, macOS 10.13.2,
@@ -64,163 +64,163 @@ In your email, please also include information about:
input to get the same crash? Ideally, find a minimal yet complete sequence
of instructions that yields the crash.
-To ensure accurate tracking of this issue, please file a report about it at:
-{bug_tracker}
-"""
-
-_lite_message_template = """
-If you suspect this is an IPython bug, please report it at:
- https://github.com/ipython/ipython/issues
-or send an email to the mailing list at {email}
-
-You can print a more detailed traceback right now with "%tb", or use "%debug"
-to interactively debug it.
-
-Extra-detailed tracebacks for bug-reporting purposes can be enabled via:
- {config}Application.verbose_crash=True
-"""
-
-
-class CrashHandler(object):
- """Customizable crash handlers for IPython applications.
-
- Instances of this class provide a :meth:`__call__` method which can be
- used as a ``sys.excepthook``. The :meth:`__call__` signature is::
-
- def __call__(self, etype, evalue, etb)
- """
-
- message_template = _default_message_template
- section_sep = '\n\n'+'*'*75+'\n\n'
-
- def __init__(self, app, contact_name=None, contact_email=None,
- bug_tracker=None, show_crash_traceback=True, call_pdb=False):
- """Create a new crash handler
-
- Parameters
- ----------
- app : Application
- A running :class:`Application` instance, which will be queried at
- crash time for internal information.
-
- contact_name : str
- A string with the name of the person to contact.
-
- contact_email : str
- A string with the email address of the contact.
-
- bug_tracker : str
- A string with the URL for your project's bug tracker.
-
- show_crash_traceback : bool
- If false, don't print the crash traceback on stderr, only generate
- the on-disk report
-
- Non-argument instance attributes:
-
- These instances contain some non-argument attributes which allow for
- further customization of the crash handler's behavior. Please see the
- source for further details.
- """
- self.crash_report_fname = "Crash_report_%s.txt" % app.name
- self.app = app
- self.call_pdb = call_pdb
- #self.call_pdb = True # dbg
- self.show_crash_traceback = show_crash_traceback
- self.info = dict(app_name = app.name,
- contact_name = contact_name,
- contact_email = contact_email,
- bug_tracker = bug_tracker,
- crash_report_fname = self.crash_report_fname)
-
-
- def __call__(self, etype, evalue, etb):
- """Handle an exception, call for compatible with sys.excepthook"""
-
- # do not allow the crash handler to be called twice without reinstalling it
- # this prevents unlikely errors in the crash handling from entering an
- # infinite loop.
- sys.excepthook = sys.__excepthook__
-
- # Report tracebacks shouldn't use color in general (safer for users)
- color_scheme = 'NoColor'
-
- # Use this ONLY for developer debugging (keep commented out for release)
- #color_scheme = 'Linux' # dbg
- try:
- rptdir = self.app.ipython_dir
- except:
- rptdir = getcwd()
- if rptdir is None or not os.path.isdir(rptdir):
- rptdir = getcwd()
- report_name = os.path.join(rptdir,self.crash_report_fname)
- # write the report filename into the instance dict so it can get
- # properly expanded out in the user message template
- self.crash_report_fname = report_name
- self.info['crash_report_fname'] = report_name
- TBhandler = ultratb.VerboseTB(
- color_scheme=color_scheme,
- long_header=1,
- call_pdb=self.call_pdb,
- )
- if self.call_pdb:
- TBhandler(etype,evalue,etb)
- return
- else:
- traceback = TBhandler.text(etype,evalue,etb,context=31)
-
- # print traceback to screen
- if self.show_crash_traceback:
- print(traceback, file=sys.stderr)
-
- # and generate a complete report on disk
- try:
- report = open(report_name,'w')
- except:
- print('Could not create crash report on disk.', file=sys.stderr)
- return
-
- # Inform user on stderr of what happened
- print('\n'+'*'*70+'\n', file=sys.stderr)
- print(self.message_template.format(**self.info), file=sys.stderr)
-
- # Construct report on disk
- report.write(self.make_report(traceback))
- report.close()
- input("Hit <Enter> to quit (your terminal may close):")
-
- def make_report(self,traceback):
- """Return a string containing a crash report."""
-
- sec_sep = self.section_sep
-
- report = ['*'*75+'\n\n'+'IPython post-mortem report\n\n']
- rpt_add = report.append
- rpt_add(sys_info())
-
- try:
- config = pformat(self.app.config)
- rpt_add(sec_sep)
- rpt_add('Application name: %s\n\n' % self.app_name)
- rpt_add('Current user configuration structure:\n\n')
- rpt_add(config)
- except:
- pass
- rpt_add(sec_sep+'Crash traceback:\n\n' + traceback)
-
- return ''.join(report)
-
-
-def crash_handler_lite(etype, evalue, tb):
- """a light excepthook, adding a small message to the usual traceback"""
- traceback.print_exception(etype, evalue, tb)
-
- from IPython.core.interactiveshell import InteractiveShell
- if InteractiveShell.initialized():
- # we are in a Shell environment, give %magic example
- config = "%config "
- else:
- # we are not in a shell, show generic config
- config = "c."
- print(_lite_message_template.format(email=author_email, config=config), file=sys.stderr)
-
+To ensure accurate tracking of this issue, please file a report about it at:
+{bug_tracker}
+"""
+
+_lite_message_template = """
+If you suspect this is an IPython bug, please report it at:
+ https://github.com/ipython/ipython/issues
+or send an email to the mailing list at {email}
+
+You can print a more detailed traceback right now with "%tb", or use "%debug"
+to interactively debug it.
+
+Extra-detailed tracebacks for bug-reporting purposes can be enabled via:
+ {config}Application.verbose_crash=True
+"""
+
+
+class CrashHandler(object):
+ """Customizable crash handlers for IPython applications.
+
+ Instances of this class provide a :meth:`__call__` method which can be
+ used as a ``sys.excepthook``. The :meth:`__call__` signature is::
+
+ def __call__(self, etype, evalue, etb)
+ """
+
+ message_template = _default_message_template
+ section_sep = '\n\n'+'*'*75+'\n\n'
+
+ def __init__(self, app, contact_name=None, contact_email=None,
+ bug_tracker=None, show_crash_traceback=True, call_pdb=False):
+ """Create a new crash handler
+
+ Parameters
+ ----------
+ app : Application
+ A running :class:`Application` instance, which will be queried at
+ crash time for internal information.
+
+ contact_name : str
+ A string with the name of the person to contact.
+
+ contact_email : str
+ A string with the email address of the contact.
+
+ bug_tracker : str
+ A string with the URL for your project's bug tracker.
+
+ show_crash_traceback : bool
+            If False, don't print the crash traceback on stderr; only generate
+            the on-disk report.
+
+ Non-argument instance attributes:
+
+ These instances contain some non-argument attributes which allow for
+ further customization of the crash handler's behavior. Please see the
+ source for further details.
+ """
+ self.crash_report_fname = "Crash_report_%s.txt" % app.name
+ self.app = app
+ self.call_pdb = call_pdb
+ #self.call_pdb = True # dbg
+ self.show_crash_traceback = show_crash_traceback
+ self.info = dict(app_name = app.name,
+ contact_name = contact_name,
+ contact_email = contact_email,
+ bug_tracker = bug_tracker,
+ crash_report_fname = self.crash_report_fname)
+
+
+ def __call__(self, etype, evalue, etb):
+ """Handle an exception, call for compatible with sys.excepthook"""
+
+ # do not allow the crash handler to be called twice without reinstalling it
+ # this prevents unlikely errors in the crash handling from entering an
+ # infinite loop.
+ sys.excepthook = sys.__excepthook__
+
+ # Report tracebacks shouldn't use color in general (safer for users)
+ color_scheme = 'NoColor'
+
+ # Use this ONLY for developer debugging (keep commented out for release)
+ #color_scheme = 'Linux' # dbg
+ try:
+ rptdir = self.app.ipython_dir
+ except:
+ rptdir = getcwd()
+ if rptdir is None or not os.path.isdir(rptdir):
+ rptdir = getcwd()
+ report_name = os.path.join(rptdir,self.crash_report_fname)
+ # write the report filename into the instance dict so it can get
+ # properly expanded out in the user message template
+ self.crash_report_fname = report_name
+ self.info['crash_report_fname'] = report_name
+ TBhandler = ultratb.VerboseTB(
+ color_scheme=color_scheme,
+ long_header=1,
+ call_pdb=self.call_pdb,
+ )
+ if self.call_pdb:
+ TBhandler(etype,evalue,etb)
+ return
+ else:
+ traceback = TBhandler.text(etype,evalue,etb,context=31)
+
+ # print traceback to screen
+ if self.show_crash_traceback:
+ print(traceback, file=sys.stderr)
+
+ # and generate a complete report on disk
+ try:
+ report = open(report_name,'w')
+ except:
+ print('Could not create crash report on disk.', file=sys.stderr)
+ return
+
+ # Inform user on stderr of what happened
+ print('\n'+'*'*70+'\n', file=sys.stderr)
+ print(self.message_template.format(**self.info), file=sys.stderr)
+
+ # Construct report on disk
+ report.write(self.make_report(traceback))
+ report.close()
+ input("Hit <Enter> to quit (your terminal may close):")
+
+ def make_report(self,traceback):
+ """Return a string containing a crash report."""
+
+ sec_sep = self.section_sep
+
+ report = ['*'*75+'\n\n'+'IPython post-mortem report\n\n']
+ rpt_add = report.append
+ rpt_add(sys_info())
+
+ try:
+ config = pformat(self.app.config)
+ rpt_add(sec_sep)
+            rpt_add('Application name: %s\n\n' % self.app.name)
+ rpt_add('Current user configuration structure:\n\n')
+ rpt_add(config)
+ except:
+ pass
+ rpt_add(sec_sep+'Crash traceback:\n\n' + traceback)
+
+ return ''.join(report)
+
+
+def crash_handler_lite(etype, evalue, tb):
+ """a light excepthook, adding a small message to the usual traceback"""
+ traceback.print_exception(etype, evalue, tb)
+
+ from IPython.core.interactiveshell import InteractiveShell
+ if InteractiveShell.initialized():
+ # we are in a Shell environment, give %magic example
+ config = "%config "
+ else:
+ # we are not in a shell, show generic config
+ config = "c."
+ print(_lite_message_template.format(email=author_email, config=config), file=sys.stderr)
+
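
Both entry points in this file are ordinary sys.excepthook-compatible callables: a CrashHandler instance writes a full report to disk, while crash_handler_lite only appends a short notice to the usual traceback. A minimal sketch of installing the lite variant in a standalone script (boom is just a demonstration function; outside IPython the {config} placeholder renders as 'c.', as the branch above shows)::

    import sys
    from IPython.core.crashhandler import crash_handler_lite

    # crash_handler_lite already prints the ordinary traceback via
    # traceback.print_exception(), so no chaining to the previous hook is needed.
    sys.excepthook = crash_handler_lite

    def boom():
        raise RuntimeError("demonstration failure")

    if __name__ == '__main__':
        boom()   # traceback followed by the "If you suspect this is an IPython bug..." notice
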
diff --git a/contrib/python/ipython/py2/IPython/core/debugger.py b/contrib/python/ipython/py2/IPython/core/debugger.py
index c5a443eb5b..f08cfb1a78 100644
--- a/contrib/python/ipython/py2/IPython/core/debugger.py
+++ b/contrib/python/ipython/py2/IPython/core/debugger.py
@@ -1,53 +1,53 @@
-# -*- coding: utf-8 -*-
-"""
-Pdb debugger class.
-
-Modified from the standard pdb.Pdb class to avoid including readline, so that
-the command line completion of other programs which include this isn't
-damaged.
-
-In the future, this class will be expanded with improvements over the standard
-pdb.
-
-The code in this file is mainly lifted out of cmd.py in Python 2.2, with minor
-changes. Licensing should therefore be under the standard Python terms. For
-details on the PSF (Python Software Foundation) standard license, see:
-
+# -*- coding: utf-8 -*-
+"""
+Pdb debugger class.
+
+Modified from the standard pdb.Pdb class to avoid including readline, so that
+the command line completion of other programs which include this isn't
+damaged.
+
+In the future, this class will be expanded with improvements over the standard
+pdb.
+
+The code in this file is mainly lifted out of cmd.py in Python 2.2, with minor
+changes. Licensing should therefore be under the standard Python terms. For
+details on the PSF (Python Software Foundation) standard license, see:
+
https://docs.python.org/2/license.html
"""
-
-#*****************************************************************************
-#
-# This file is licensed under the PSF license.
-#
-# Copyright (C) 2001 Python Software Foundation, www.python.org
-# Copyright (C) 2005-2006 Fernando Perez. <fperez@colorado.edu>
-#
-#
-#*****************************************************************************
-from __future__ import print_function
-
-import bdb
-import functools
-import inspect
-import sys
+
+#*****************************************************************************
+#
+# This file is licensed under the PSF license.
+#
+# Copyright (C) 2001 Python Software Foundation, www.python.org
+# Copyright (C) 2005-2006 Fernando Perez. <fperez@colorado.edu>
+#
+#
+#*****************************************************************************
+from __future__ import print_function
+
+import bdb
+import functools
+import inspect
+import sys
import warnings
-
-from IPython import get_ipython
-from IPython.utils import PyColorize, ulinecache
+
+from IPython import get_ipython
+from IPython.utils import PyColorize, ulinecache
from IPython.utils import coloransi, py3compat
-from IPython.core.excolors import exception_colors
-from IPython.testing.skipdoctest import skip_doctest
-
+from IPython.core.excolors import exception_colors
+from IPython.testing.skipdoctest import skip_doctest
+
-prompt = 'ipdb> '
+prompt = 'ipdb> '
-#We have to check this directly from sys.argv, config struct not yet available
+#We have to check this directly from sys.argv, config struct not yet available
from pdb import Pdb as OldPdb
-
-# Allow the set_trace code to operate outside of an ipython instance, even if
-# it does so with some limitations. The rest of this support is implemented in
-# the Tracer constructor.
+
+# Allow the set_trace code to operate outside of an ipython instance, even if
+# it does so with some limitations. The rest of this support is implemented in
+# the Tracer constructor.
def make_arrow(pad):
"""generate the leading arrow in front of traceback or debugger"""
@@ -58,555 +58,555 @@ def make_arrow(pad):
return ''
-def BdbQuit_excepthook(et, ev, tb, excepthook=None):
- """Exception hook which handles `BdbQuit` exceptions.
-
- All other exceptions are processed using the `excepthook`
- parameter.
- """
+def BdbQuit_excepthook(et, ev, tb, excepthook=None):
+ """Exception hook which handles `BdbQuit` exceptions.
+
+ All other exceptions are processed using the `excepthook`
+ parameter.
+ """
warnings.warn("`BdbQuit_excepthook` is deprecated since version 5.1",
DeprecationWarning, stacklevel=2)
- if et==bdb.BdbQuit:
- print('Exiting Debugger.')
- elif excepthook is not None:
- excepthook(et, ev, tb)
- else:
- # Backwards compatibility. Raise deprecation warning?
- BdbQuit_excepthook.excepthook_ori(et,ev,tb)
-
-
-def BdbQuit_IPython_excepthook(self,et,ev,tb,tb_offset=None):
+ if et==bdb.BdbQuit:
+ print('Exiting Debugger.')
+ elif excepthook is not None:
+ excepthook(et, ev, tb)
+ else:
+ # Backwards compatibility. Raise deprecation warning?
+ BdbQuit_excepthook.excepthook_ori(et,ev,tb)
+
+
+def BdbQuit_IPython_excepthook(self,et,ev,tb,tb_offset=None):
warnings.warn(
"`BdbQuit_IPython_excepthook` is deprecated since version 5.1",
DeprecationWarning, stacklevel=2)
- print('Exiting Debugger.')
-
-
-class Tracer(object):
+ print('Exiting Debugger.')
+
+
+class Tracer(object):
"""
DEPRECATED
-
+
Class for local debugging, similar to pdb.set_trace.
- Instances of this class, when called, behave like pdb.set_trace, but
- providing IPython's enhanced capabilities.
-
- This is implemented as a class which must be initialized in your own code
- and not as a standalone function because we need to detect at runtime
- whether IPython is already active or not. That detection is done in the
- constructor, ensuring that this code plays nicely with a running IPython,
- while functioning acceptably (though with limitations) if outside of it.
- """
-
- @skip_doctest
- def __init__(self, colors=None):
+ Instances of this class, when called, behave like pdb.set_trace, but
+    provide IPython's enhanced capabilities.
+
+ This is implemented as a class which must be initialized in your own code
+ and not as a standalone function because we need to detect at runtime
+ whether IPython is already active or not. That detection is done in the
+ constructor, ensuring that this code plays nicely with a running IPython,
+ while functioning acceptably (though with limitations) if outside of it.
+ """
+
+ @skip_doctest
+ def __init__(self, colors=None):
"""
DEPRECATED
-
+
Create a local debugger instance.
- Parameters
- ----------
-
- colors : str, optional
- The name of the color scheme to use, it must be one of IPython's
- valid color schemes. If not given, the function will default to
- the current IPython scheme when running inside IPython, and to
- 'NoColor' otherwise.
-
- Examples
- --------
- ::
-
- from IPython.core.debugger import Tracer; debug_here = Tracer()
-
- Later in your code::
-
- debug_here() # -> will open up the debugger at that point.
-
- Once the debugger activates, you can use all of its regular commands to
- step through code, set breakpoints, etc. See the pdb documentation
- from the Python standard library for usage details.
- """
+ Parameters
+ ----------
+
+ colors : str, optional
+ The name of the color scheme to use, it must be one of IPython's
+ valid color schemes. If not given, the function will default to
+ the current IPython scheme when running inside IPython, and to
+ 'NoColor' otherwise.
+
+ Examples
+ --------
+ ::
+
+ from IPython.core.debugger import Tracer; debug_here = Tracer()
+
+ Later in your code::
+
+ debug_here() # -> will open up the debugger at that point.
+
+ Once the debugger activates, you can use all of its regular commands to
+ step through code, set breakpoints, etc. See the pdb documentation
+ from the Python standard library for usage details.
+ """
warnings.warn("`Tracer` is deprecated since version 5.1, directly use "
"`IPython.core.debugger.Pdb.set_trace()`",
DeprecationWarning, stacklevel=2)
-
- ip = get_ipython()
- if ip is None:
- # Outside of ipython, we set our own exception hook manually
- sys.excepthook = functools.partial(BdbQuit_excepthook,
- excepthook=sys.excepthook)
- def_colors = 'NoColor'
- else:
- # In ipython, we use its custom exception handler mechanism
- def_colors = ip.colors
- ip.set_custom_exc((bdb.BdbQuit,), BdbQuit_IPython_excepthook)
-
- if colors is None:
- colors = def_colors
-
- # The stdlib debugger internally uses a modified repr from the `repr`
- # module, that limits the length of printed strings to a hardcoded
- # limit of 30 characters. That much trimming is too aggressive, let's
- # at least raise that limit to 80 chars, which should be enough for
- # most interactive uses.
- try:
- try:
- from reprlib import aRepr # Py 3
- except ImportError:
- from repr import aRepr # Py 2
- aRepr.maxstring = 80
- except:
- # This is only a user-facing convenience, so any error we encounter
- # here can be warned about but can be otherwise ignored. These
- # printouts will tell us about problems if this API changes
- import traceback
- traceback.print_exc()
-
- self.debugger = Pdb(colors)
-
- def __call__(self):
- """Starts an interactive debugger at the point where called.
-
- This is similar to the pdb.set_trace() function from the std lib, but
- using IPython's enhanced debugger."""
-
- self.debugger.set_trace(sys._getframe().f_back)
-
-
-def decorate_fn_with_doc(new_fn, old_fn, additional_text=""):
- """Make new_fn have old_fn's doc string. This is particularly useful
- for the ``do_...`` commands that hook into the help system.
- Adapted from from a comp.lang.python posting
- by Duncan Booth."""
- def wrapper(*args, **kw):
- return new_fn(*args, **kw)
- if old_fn.__doc__:
- wrapper.__doc__ = old_fn.__doc__ + additional_text
- return wrapper
-
-
-def _file_lines(fname):
- """Return the contents of a named file as a list of lines.
-
- This function never raises an IOError exception: if the file can't be
- read, it simply returns an empty list."""
-
- try:
- outfile = open(fname)
- except IOError:
- return []
- else:
- out = outfile.readlines()
- outfile.close()
- return out
-
-
+
+ ip = get_ipython()
+ if ip is None:
+ # Outside of ipython, we set our own exception hook manually
+ sys.excepthook = functools.partial(BdbQuit_excepthook,
+ excepthook=sys.excepthook)
+ def_colors = 'NoColor'
+ else:
+ # In ipython, we use its custom exception handler mechanism
+ def_colors = ip.colors
+ ip.set_custom_exc((bdb.BdbQuit,), BdbQuit_IPython_excepthook)
+
+ if colors is None:
+ colors = def_colors
+
+ # The stdlib debugger internally uses a modified repr from the `repr`
+ # module, that limits the length of printed strings to a hardcoded
+ # limit of 30 characters. That much trimming is too aggressive, let's
+ # at least raise that limit to 80 chars, which should be enough for
+ # most interactive uses.
+ try:
+ try:
+ from reprlib import aRepr # Py 3
+ except ImportError:
+ from repr import aRepr # Py 2
+ aRepr.maxstring = 80
+ except:
+ # This is only a user-facing convenience, so any error we encounter
+ # here can be warned about but can be otherwise ignored. These
+ # printouts will tell us about problems if this API changes
+ import traceback
+ traceback.print_exc()
+
+ self.debugger = Pdb(colors)
+
+ def __call__(self):
+ """Starts an interactive debugger at the point where called.
+
+ This is similar to the pdb.set_trace() function from the std lib, but
+ using IPython's enhanced debugger."""
+
+ self.debugger.set_trace(sys._getframe().f_back)
+
+
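
Tracer is kept only for backwards compatibility; the DeprecationWarning above points at calling the debugger directly, and the Pdb docstring below names IPython.terminal.debugger.set_trace() as the prompt_toolkit-based standalone entry point. The replacement the warning recommends, in sketch form (compute is an illustrative function)::

    from IPython.core.debugger import Pdb

    def compute(x):
        y = x * 2
        Pdb().set_trace()   # drop into the IPython-flavoured pdb right here
        return y + 1

    compute(3)
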
+def decorate_fn_with_doc(new_fn, old_fn, additional_text=""):
+ """Make new_fn have old_fn's doc string. This is particularly useful
+ for the ``do_...`` commands that hook into the help system.
+    Adapted from a comp.lang.python posting
+ by Duncan Booth."""
+ def wrapper(*args, **kw):
+ return new_fn(*args, **kw)
+ if old_fn.__doc__:
+ wrapper.__doc__ = old_fn.__doc__ + additional_text
+ return wrapper
+
+
+def _file_lines(fname):
+ """Return the contents of a named file as a list of lines.
+
+ This function never raises an IOError exception: if the file can't be
+ read, it simply returns an empty list."""
+
+ try:
+ outfile = open(fname)
+ except IOError:
+ return []
+ else:
+ out = outfile.readlines()
+ outfile.close()
+ return out
+
+
class Pdb(OldPdb):
"""Modified Pdb class, does not load readline.
-
+
for a standalone version that uses prompt_toolkit, see
`IPython.terminal.debugger.TerminalPdb` and
`IPython.terminal.debugger.set_trace()`
"""
def __init__(self, color_scheme=None, completekey=None,
- stdin=None, stdout=None, context=5):
-
- # Parent constructor:
- try:
+ stdin=None, stdout=None, context=5):
+
+ # Parent constructor:
+ try:
self.context = int(context)
- if self.context <= 0:
- raise ValueError("Context must be a positive integer")
- except (TypeError, ValueError):
- raise ValueError("Context must be a positive integer")
-
+ if self.context <= 0:
+ raise ValueError("Context must be a positive integer")
+ except (TypeError, ValueError):
+ raise ValueError("Context must be a positive integer")
+
OldPdb.__init__(self, completekey, stdin, stdout)
-
- # IPython changes...
- self.shell = get_ipython()
-
- if self.shell is None:
+
+ # IPython changes...
+ self.shell = get_ipython()
+
+ if self.shell is None:
save_main = sys.modules['__main__']
- # No IPython instance running, we must create one
- from IPython.terminal.interactiveshell import \
- TerminalInteractiveShell
- self.shell = TerminalInteractiveShell.instance()
+ # No IPython instance running, we must create one
+ from IPython.terminal.interactiveshell import \
+ TerminalInteractiveShell
+ self.shell = TerminalInteractiveShell.instance()
# needed by any code which calls __import__("__main__") after
# the debugger was entered. See also #9941.
sys.modules['__main__'] = save_main
-
+
if color_scheme is not None:
warnings.warn(
"The `color_scheme` argument is deprecated since version 5.1",
DeprecationWarning)
else:
color_scheme = self.shell.colors
-
- self.aliases = {}
-
- # Create color table: we copy the default one from the traceback
- # module and add a few attributes needed for debugging
- self.color_scheme_table = exception_colors()
-
- # shorthands
- C = coloransi.TermColors
- cst = self.color_scheme_table
-
- cst['NoColor'].colors.prompt = C.NoColor
- cst['NoColor'].colors.breakpoint_enabled = C.NoColor
- cst['NoColor'].colors.breakpoint_disabled = C.NoColor
-
- cst['Linux'].colors.prompt = C.Green
- cst['Linux'].colors.breakpoint_enabled = C.LightRed
- cst['Linux'].colors.breakpoint_disabled = C.Red
-
- cst['LightBG'].colors.prompt = C.Blue
- cst['LightBG'].colors.breakpoint_enabled = C.LightRed
- cst['LightBG'].colors.breakpoint_disabled = C.Red
-
+
+ self.aliases = {}
+
+ # Create color table: we copy the default one from the traceback
+ # module and add a few attributes needed for debugging
+ self.color_scheme_table = exception_colors()
+
+ # shorthands
+ C = coloransi.TermColors
+ cst = self.color_scheme_table
+
+ cst['NoColor'].colors.prompt = C.NoColor
+ cst['NoColor'].colors.breakpoint_enabled = C.NoColor
+ cst['NoColor'].colors.breakpoint_disabled = C.NoColor
+
+ cst['Linux'].colors.prompt = C.Green
+ cst['Linux'].colors.breakpoint_enabled = C.LightRed
+ cst['Linux'].colors.breakpoint_disabled = C.Red
+
+ cst['LightBG'].colors.prompt = C.Blue
+ cst['LightBG'].colors.breakpoint_enabled = C.LightRed
+ cst['LightBG'].colors.breakpoint_disabled = C.Red
+
cst['Neutral'].colors.prompt = C.Blue
cst['Neutral'].colors.breakpoint_enabled = C.LightRed
cst['Neutral'].colors.breakpoint_disabled = C.Red
- self.set_colors(color_scheme)
-
- # Add a python parser so we can syntax highlight source while
- # debugging.
- self.parser = PyColorize.Parser()
-
+ self.set_colors(color_scheme)
+
+ # Add a python parser so we can syntax highlight source while
+ # debugging.
+ self.parser = PyColorize.Parser()
+
# Set the prompt - the default prompt is '(Pdb)'
self.prompt = prompt
-
- def set_colors(self, scheme):
- """Shorthand access to the color table scheme selector method."""
- self.color_scheme_table.set_active_scheme(scheme)
-
- def interaction(self, frame, traceback):
+
+ def set_colors(self, scheme):
+ """Shorthand access to the color table scheme selector method."""
+ self.color_scheme_table.set_active_scheme(scheme)
+
+ def interaction(self, frame, traceback):
try:
OldPdb.interaction(self, frame, traceback)
except KeyboardInterrupt:
sys.stdout.write('\n' + self.shell.get_exception_only())
-
- def new_do_up(self, arg):
- OldPdb.do_up(self, arg)
- do_u = do_up = decorate_fn_with_doc(new_do_up, OldPdb.do_up)
-
- def new_do_down(self, arg):
- OldPdb.do_down(self, arg)
-
- do_d = do_down = decorate_fn_with_doc(new_do_down, OldPdb.do_down)
-
- def new_do_frame(self, arg):
- OldPdb.do_frame(self, arg)
-
- def new_do_quit(self, arg):
-
- if hasattr(self, 'old_all_completions'):
- self.shell.Completer.all_completions=self.old_all_completions
-
- return OldPdb.do_quit(self, arg)
-
- do_q = do_quit = decorate_fn_with_doc(new_do_quit, OldPdb.do_quit)
-
- def new_do_restart(self, arg):
- """Restart command. In the context of ipython this is exactly the same
- thing as 'quit'."""
- self.msg("Restart doesn't make sense here. Using 'quit' instead.")
- return self.do_quit(arg)
-
- def print_stack_trace(self, context=None):
- if context is None:
- context = self.context
- try:
- context=int(context)
- if context <= 0:
- raise ValueError("Context must be a positive integer")
- except (TypeError, ValueError):
- raise ValueError("Context must be a positive integer")
- try:
- for frame_lineno in self.stack:
- self.print_stack_entry(frame_lineno, context=context)
- except KeyboardInterrupt:
- pass
-
+
+ def new_do_up(self, arg):
+ OldPdb.do_up(self, arg)
+ do_u = do_up = decorate_fn_with_doc(new_do_up, OldPdb.do_up)
+
+ def new_do_down(self, arg):
+ OldPdb.do_down(self, arg)
+
+ do_d = do_down = decorate_fn_with_doc(new_do_down, OldPdb.do_down)
+
+ def new_do_frame(self, arg):
+ OldPdb.do_frame(self, arg)
+
+ def new_do_quit(self, arg):
+
+ if hasattr(self, 'old_all_completions'):
+ self.shell.Completer.all_completions=self.old_all_completions
+
+ return OldPdb.do_quit(self, arg)
+
+ do_q = do_quit = decorate_fn_with_doc(new_do_quit, OldPdb.do_quit)
+
+ def new_do_restart(self, arg):
+ """Restart command. In the context of ipython this is exactly the same
+ thing as 'quit'."""
+ self.msg("Restart doesn't make sense here. Using 'quit' instead.")
+ return self.do_quit(arg)
+
+ def print_stack_trace(self, context=None):
+ if context is None:
+ context = self.context
+ try:
+ context=int(context)
+ if context <= 0:
+ raise ValueError("Context must be a positive integer")
+ except (TypeError, ValueError):
+ raise ValueError("Context must be a positive integer")
+ try:
+ for frame_lineno in self.stack:
+ self.print_stack_entry(frame_lineno, context=context)
+ except KeyboardInterrupt:
+ pass
+
def print_stack_entry(self,frame_lineno, prompt_prefix='\n-> ',
- context=None):
- if context is None:
- context = self.context
- try:
- context=int(context)
- if context <= 0:
- raise ValueError("Context must be a positive integer")
- except (TypeError, ValueError):
- raise ValueError("Context must be a positive integer")
+ context=None):
+ if context is None:
+ context = self.context
+ try:
+ context=int(context)
+ if context <= 0:
+ raise ValueError("Context must be a positive integer")
+ except (TypeError, ValueError):
+ raise ValueError("Context must be a positive integer")
print(self.format_stack_entry(frame_lineno, '', context))
-
- # vds: >>
- frame, lineno = frame_lineno
- filename = frame.f_code.co_filename
- self.shell.hooks.synchronize_with_editor(filename, lineno, 0)
- # vds: <<
-
- def format_stack_entry(self, frame_lineno, lprefix=': ', context=None):
- if context is None:
- context = self.context
- try:
- context=int(context)
- if context <= 0:
- print("Context must be a positive integer")
- except (TypeError, ValueError):
- print("Context must be a positive integer")
- try:
- import reprlib # Py 3
- except ImportError:
- import repr as reprlib # Py 2
-
- ret = []
-
- Colors = self.color_scheme_table.active_colors
- ColorsNormal = Colors.Normal
- tpl_link = u'%s%%s%s' % (Colors.filenameEm, ColorsNormal)
- tpl_call = u'%s%%s%s%%s%s' % (Colors.vName, Colors.valEm, ColorsNormal)
- tpl_line = u'%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal)
- tpl_line_em = u'%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line,
- ColorsNormal)
-
- frame, lineno = frame_lineno
-
- return_value = ''
- if '__return__' in frame.f_locals:
- rv = frame.f_locals['__return__']
- #return_value += '->'
- return_value += reprlib.repr(rv) + '\n'
- ret.append(return_value)
-
- #s = filename + '(' + `lineno` + ')'
- filename = self.canonic(frame.f_code.co_filename)
- link = tpl_link % py3compat.cast_unicode(filename)
-
- if frame.f_code.co_name:
- func = frame.f_code.co_name
- else:
- func = "<lambda>"
-
- call = ''
- if func != '?':
- if '__args__' in frame.f_locals:
- args = reprlib.repr(frame.f_locals['__args__'])
- else:
- args = '()'
- call = tpl_call % (func, args)
-
- # The level info should be generated in the same format pdb uses, to
- # avoid breaking the pdbtrack functionality of python-mode in *emacs.
- if frame is self.curframe:
- ret.append('> ')
- else:
- ret.append(' ')
- ret.append(u'%s(%s)%s\n' % (link,lineno,call))
-
- start = lineno - 1 - context//2
- lines = ulinecache.getlines(filename)
- start = min(start, len(lines) - context)
- start = max(start, 0)
- lines = lines[start : start + context]
-
- for i,line in enumerate(lines):
- show_arrow = (start + 1 + i == lineno)
- linetpl = (frame is self.curframe or show_arrow) \
- and tpl_line_em \
- or tpl_line
- ret.append(self.__format_line(linetpl, filename,
- start + 1 + i, line,
- arrow = show_arrow) )
- return ''.join(ret)
-
- def __format_line(self, tpl_line, filename, lineno, line, arrow = False):
- bp_mark = ""
- bp_mark_color = ""
-
- scheme = self.color_scheme_table.active_scheme_name
- new_line, err = self.parser.format2(line, 'str', scheme)
- if not err: line = new_line
-
- bp = None
- if lineno in self.get_file_breaks(filename):
- bps = self.get_breaks(filename, lineno)
- bp = bps[-1]
-
- if bp:
- Colors = self.color_scheme_table.active_colors
- bp_mark = str(bp.number)
- bp_mark_color = Colors.breakpoint_enabled
- if not bp.enabled:
- bp_mark_color = Colors.breakpoint_disabled
-
- numbers_width = 7
- if arrow:
- # This is the line with the error
- pad = numbers_width - len(str(lineno)) - len(bp_mark)
+
+ # vds: >>
+ frame, lineno = frame_lineno
+ filename = frame.f_code.co_filename
+ self.shell.hooks.synchronize_with_editor(filename, lineno, 0)
+ # vds: <<
+
+ def format_stack_entry(self, frame_lineno, lprefix=': ', context=None):
+ if context is None:
+ context = self.context
+ try:
+ context=int(context)
+ if context <= 0:
+ print("Context must be a positive integer")
+ except (TypeError, ValueError):
+ print("Context must be a positive integer")
+ try:
+ import reprlib # Py 3
+ except ImportError:
+ import repr as reprlib # Py 2
+
+ ret = []
+
+ Colors = self.color_scheme_table.active_colors
+ ColorsNormal = Colors.Normal
+ tpl_link = u'%s%%s%s' % (Colors.filenameEm, ColorsNormal)
+ tpl_call = u'%s%%s%s%%s%s' % (Colors.vName, Colors.valEm, ColorsNormal)
+ tpl_line = u'%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal)
+ tpl_line_em = u'%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line,
+ ColorsNormal)
+
+ frame, lineno = frame_lineno
+
+ return_value = ''
+ if '__return__' in frame.f_locals:
+ rv = frame.f_locals['__return__']
+ #return_value += '->'
+ return_value += reprlib.repr(rv) + '\n'
+ ret.append(return_value)
+
+ #s = filename + '(' + `lineno` + ')'
+ filename = self.canonic(frame.f_code.co_filename)
+ link = tpl_link % py3compat.cast_unicode(filename)
+
+ if frame.f_code.co_name:
+ func = frame.f_code.co_name
+ else:
+ func = "<lambda>"
+
+ call = ''
+ if func != '?':
+ if '__args__' in frame.f_locals:
+ args = reprlib.repr(frame.f_locals['__args__'])
+ else:
+ args = '()'
+ call = tpl_call % (func, args)
+
+ # The level info should be generated in the same format pdb uses, to
+ # avoid breaking the pdbtrack functionality of python-mode in *emacs.
+ if frame is self.curframe:
+ ret.append('> ')
+ else:
+ ret.append(' ')
+ ret.append(u'%s(%s)%s\n' % (link,lineno,call))
+
+ start = lineno - 1 - context//2
+ lines = ulinecache.getlines(filename)
+ start = min(start, len(lines) - context)
+ start = max(start, 0)
+ lines = lines[start : start + context]
+
+ for i,line in enumerate(lines):
+ show_arrow = (start + 1 + i == lineno)
+ linetpl = (frame is self.curframe or show_arrow) \
+ and tpl_line_em \
+ or tpl_line
+ ret.append(self.__format_line(linetpl, filename,
+ start + 1 + i, line,
+ arrow = show_arrow) )
+ return ''.join(ret)
+
+ def __format_line(self, tpl_line, filename, lineno, line, arrow = False):
+ bp_mark = ""
+ bp_mark_color = ""
+
+ scheme = self.color_scheme_table.active_scheme_name
+ new_line, err = self.parser.format2(line, 'str', scheme)
+ if not err: line = new_line
+
+ bp = None
+ if lineno in self.get_file_breaks(filename):
+ bps = self.get_breaks(filename, lineno)
+ bp = bps[-1]
+
+ if bp:
+ Colors = self.color_scheme_table.active_colors
+ bp_mark = str(bp.number)
+ bp_mark_color = Colors.breakpoint_enabled
+ if not bp.enabled:
+ bp_mark_color = Colors.breakpoint_disabled
+
+ numbers_width = 7
+ if arrow:
+ # This is the line with the error
+ pad = numbers_width - len(str(lineno)) - len(bp_mark)
num = '%s%s' % (make_arrow(pad), str(lineno))
- else:
- num = '%*s' % (numbers_width - len(bp_mark), str(lineno))
-
+ else:
+ num = '%*s' % (numbers_width - len(bp_mark), str(lineno))
+
return tpl_line % (bp_mark_color + bp_mark, num, line)
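
The colour templates used by format_stack_entry and __format_line are built in two passes: the first % substitution bakes the colour escapes into the template (each %% survives it as a literal %, so the %s slots remain), and the second substitution, performed per line in __format_line, fills in the breakpoint mark, the line number and the source text. The same trick with plain marker strings standing in for the colour table::

    from __future__ import print_function

    # Stage 1: bake in "colour" markers; '%%s' survives as a '%s' slot for stage 2.
    LINENO_COLOR, NORMAL = '<num>', '<norm>'
    tpl_line = '%%s%s%%s %s%%s' % (LINENO_COLOR, NORMAL)
    print(tpl_line)                       # '%s<num>%s <norm>%s' -- three slots remain

    # Stage 2: what __format_line effectively does for every displayed source line.
    bp_mark, num, line = '1', '  42', 'x = f(y)\n'
    print(tpl_line % (bp_mark, num, line), end='')
    # -> 1<num>  42 <norm>x = f(y)
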
-
-
- def print_list_lines(self, filename, first, last):
- """The printing (as opposed to the parsing part of a 'list'
- command."""
- try:
- Colors = self.color_scheme_table.active_colors
- ColorsNormal = Colors.Normal
- tpl_line = '%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal)
- tpl_line_em = '%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line, ColorsNormal)
- src = []
- if filename == "<string>" and hasattr(self, "_exec_filename"):
- filename = self._exec_filename
-
- for lineno in range(first, last+1):
- line = ulinecache.getline(filename, lineno)
- if not line:
- break
-
- if lineno == self.curframe.f_lineno:
- line = self.__format_line(tpl_line_em, filename, lineno, line, arrow = True)
- else:
- line = self.__format_line(tpl_line, filename, lineno, line, arrow = False)
-
- src.append(line)
- self.lineno = lineno
-
+
+
+ def print_list_lines(self, filename, first, last):
+ """The printing (as opposed to the parsing part of a 'list'
+ command."""
+ try:
+ Colors = self.color_scheme_table.active_colors
+ ColorsNormal = Colors.Normal
+ tpl_line = '%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal)
+ tpl_line_em = '%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line, ColorsNormal)
+ src = []
+ if filename == "<string>" and hasattr(self, "_exec_filename"):
+ filename = self._exec_filename
+
+ for lineno in range(first, last+1):
+ line = ulinecache.getline(filename, lineno)
+ if not line:
+ break
+
+ if lineno == self.curframe.f_lineno:
+ line = self.__format_line(tpl_line_em, filename, lineno, line, arrow = True)
+ else:
+ line = self.__format_line(tpl_line, filename, lineno, line, arrow = False)
+
+ src.append(line)
+ self.lineno = lineno
+
print(''.join(src))
-
- except KeyboardInterrupt:
- pass
-
- def do_list(self, arg):
+
+ except KeyboardInterrupt:
+ pass
+
+ def do_list(self, arg):
"""Print lines of code from the current stack frame
"""
- self.lastcmd = 'list'
- last = None
- if arg:
- try:
- x = eval(arg, {}, {})
- if type(x) == type(()):
- first, last = x
- first = int(first)
- last = int(last)
- if last < first:
- # Assume it's a count
- last = first + last
- else:
- first = max(1, int(x) - 5)
- except:
- print('*** Error in argument:', repr(arg))
- return
- elif self.lineno is None:
- first = max(1, self.curframe.f_lineno - 5)
- else:
- first = self.lineno + 1
- if last is None:
- last = first + 10
- self.print_list_lines(self.curframe.f_code.co_filename, first, last)
-
- # vds: >>
- lineno = first
- filename = self.curframe.f_code.co_filename
- self.shell.hooks.synchronize_with_editor(filename, lineno, 0)
- # vds: <<
-
- do_l = do_list
-
- def getsourcelines(self, obj):
- lines, lineno = inspect.findsource(obj)
- if inspect.isframe(obj) and obj.f_globals is obj.f_locals:
- # must be a module frame: do not try to cut a block out of it
- return lines, 1
- elif inspect.ismodule(obj):
- return lines, 1
- return inspect.getblock(lines[lineno:]), lineno+1
-
- def do_longlist(self, arg):
+ self.lastcmd = 'list'
+ last = None
+ if arg:
+ try:
+ x = eval(arg, {}, {})
+ if type(x) == type(()):
+ first, last = x
+ first = int(first)
+ last = int(last)
+ if last < first:
+ # Assume it's a count
+ last = first + last
+ else:
+ first = max(1, int(x) - 5)
+ except:
+ print('*** Error in argument:', repr(arg))
+ return
+ elif self.lineno is None:
+ first = max(1, self.curframe.f_lineno - 5)
+ else:
+ first = self.lineno + 1
+ if last is None:
+ last = first + 10
+ self.print_list_lines(self.curframe.f_code.co_filename, first, last)
+
+ # vds: >>
+ lineno = first
+ filename = self.curframe.f_code.co_filename
+ self.shell.hooks.synchronize_with_editor(filename, lineno, 0)
+ # vds: <<
+
+ do_l = do_list
+
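
do_list accepts either a single line number (giving a window of roughly ten lines around it) or a 'first,last' pair, where a second value smaller than the first is treated as a count rather than an end line. That argument handling, pulled out as a hypothetical parse_list_arg helper and ignoring the 'continue from the previous listing' branch::

    def parse_list_arg(arg, current_lineno):
        """Return (first, last) for a pdb-style 'list' argument, as do_list computes them."""
        last = None
        if arg:
            x = eval(arg, {}, {})          # do_list evaluates its argument the same way
            if isinstance(x, tuple):
                first, last = int(x[0]), int(x[1])
                if last < first:
                    last = first + last    # a count, not an absolute end line
            else:
                first = max(1, int(x) - 5)
        else:
            first = max(1, current_lineno - 5)
        if last is None:
            last = first + 10
        return first, last

    print(parse_list_arg('100', 1))      # (95, 105)
    print(parse_list_arg('100, 5', 1))   # (100, 105)
    print(parse_list_arg('', 42))        # (37, 47)
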
+ def getsourcelines(self, obj):
+ lines, lineno = inspect.findsource(obj)
+ if inspect.isframe(obj) and obj.f_globals is obj.f_locals:
+ # must be a module frame: do not try to cut a block out of it
+ return lines, 1
+ elif inspect.ismodule(obj):
+ return lines, 1
+ return inspect.getblock(lines[lineno:]), lineno+1
+
+ def do_longlist(self, arg):
"""Print lines of code from the current stack frame.
Shows more lines than 'list' does.
"""
- self.lastcmd = 'longlist'
- try:
- lines, lineno = self.getsourcelines(self.curframe)
- except OSError as err:
- self.error(err)
- return
- last = lineno + len(lines)
- self.print_list_lines(self.curframe.f_code.co_filename, lineno, last)
- do_ll = do_longlist
-
- def do_pdef(self, arg):
- """Print the call signature for any callable object.
-
- The debugger interface to %pdef"""
- namespaces = [('Locals', self.curframe.f_locals),
- ('Globals', self.curframe.f_globals)]
- self.shell.find_line_magic('pdef')(arg, namespaces=namespaces)
-
- def do_pdoc(self, arg):
- """Print the docstring for an object.
-
- The debugger interface to %pdoc."""
- namespaces = [('Locals', self.curframe.f_locals),
- ('Globals', self.curframe.f_globals)]
- self.shell.find_line_magic('pdoc')(arg, namespaces=namespaces)
-
- def do_pfile(self, arg):
- """Print (or run through pager) the file where an object is defined.
-
- The debugger interface to %pfile.
- """
- namespaces = [('Locals', self.curframe.f_locals),
- ('Globals', self.curframe.f_globals)]
- self.shell.find_line_magic('pfile')(arg, namespaces=namespaces)
-
- def do_pinfo(self, arg):
- """Provide detailed information about an object.
-
- The debugger interface to %pinfo, i.e., obj?."""
- namespaces = [('Locals', self.curframe.f_locals),
- ('Globals', self.curframe.f_globals)]
- self.shell.find_line_magic('pinfo')(arg, namespaces=namespaces)
-
- def do_pinfo2(self, arg):
- """Provide extra detailed information about an object.
-
- The debugger interface to %pinfo2, i.e., obj??."""
- namespaces = [('Locals', self.curframe.f_locals),
- ('Globals', self.curframe.f_globals)]
- self.shell.find_line_magic('pinfo2')(arg, namespaces=namespaces)
-
- def do_psource(self, arg):
- """Print (or run through pager) the source code for an object."""
- namespaces = [('Locals', self.curframe.f_locals),
- ('Globals', self.curframe.f_globals)]
- self.shell.find_line_magic('psource')(arg, namespaces=namespaces)
-
- if sys.version_info > (3, ):
- def do_where(self, arg):
- """w(here)
- Print a stack trace, with the most recent frame at the bottom.
- An arrow indicates the "current frame", which determines the
- context of most commands. 'bt' is an alias for this command.
-
- Take a number as argument as an (optional) number of context line to
- print"""
- if arg:
- context = int(arg)
- self.print_stack_trace(context)
- else:
- self.print_stack_trace()
-
- do_w = do_where
+ self.lastcmd = 'longlist'
+ try:
+ lines, lineno = self.getsourcelines(self.curframe)
+ except OSError as err:
+ self.error(err)
+ return
+ last = lineno + len(lines)
+ self.print_list_lines(self.curframe.f_code.co_filename, lineno, last)
+ do_ll = do_longlist
+
+ def do_pdef(self, arg):
+ """Print the call signature for any callable object.
+
+ The debugger interface to %pdef"""
+ namespaces = [('Locals', self.curframe.f_locals),
+ ('Globals', self.curframe.f_globals)]
+ self.shell.find_line_magic('pdef')(arg, namespaces=namespaces)
+
+ def do_pdoc(self, arg):
+ """Print the docstring for an object.
+
+ The debugger interface to %pdoc."""
+ namespaces = [('Locals', self.curframe.f_locals),
+ ('Globals', self.curframe.f_globals)]
+ self.shell.find_line_magic('pdoc')(arg, namespaces=namespaces)
+
+ def do_pfile(self, arg):
+ """Print (or run through pager) the file where an object is defined.
+
+ The debugger interface to %pfile.
+ """
+ namespaces = [('Locals', self.curframe.f_locals),
+ ('Globals', self.curframe.f_globals)]
+ self.shell.find_line_magic('pfile')(arg, namespaces=namespaces)
+
+ def do_pinfo(self, arg):
+ """Provide detailed information about an object.
+
+ The debugger interface to %pinfo, i.e., obj?."""
+ namespaces = [('Locals', self.curframe.f_locals),
+ ('Globals', self.curframe.f_globals)]
+ self.shell.find_line_magic('pinfo')(arg, namespaces=namespaces)
+
+ def do_pinfo2(self, arg):
+ """Provide extra detailed information about an object.
+
+ The debugger interface to %pinfo2, i.e., obj??."""
+ namespaces = [('Locals', self.curframe.f_locals),
+ ('Globals', self.curframe.f_globals)]
+ self.shell.find_line_magic('pinfo2')(arg, namespaces=namespaces)
+
+ def do_psource(self, arg):
+ """Print (or run through pager) the source code for an object."""
+ namespaces = [('Locals', self.curframe.f_locals),
+ ('Globals', self.curframe.f_globals)]
+ self.shell.find_line_magic('psource')(arg, namespaces=namespaces)
+
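The do_p* commands above all hand the frame's locals and globals, in that order, to the corresponding %p* line magic. A standalone sketch of that lookup order (resolve_in_frame is an illustrative helper, not IPython API):

import sys

def resolve_in_frame(name, frame):
    """Look a name up the way the p* commands do: locals first, then globals."""
    for label, ns in (('Locals', frame.f_locals), ('Globals', frame.f_globals)):
        if name in ns:
            return label, ns[name]
    raise NameError(name)

def demo():
    x = 3
    here = sys._getframe()
    print(resolve_in_frame('x', here))      # ('Locals', 3)
    print(resolve_in_frame('demo', here))   # ('Globals', <function demo ...>)

demo()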
+ if sys.version_info > (3, ):
+ def do_where(self, arg):
+ """w(here)
+ Print a stack trace, with the most recent frame at the bottom.
+ An arrow indicates the "current frame", which determines the
+ context of most commands. 'bt' is an alias for this command.
+
+ Takes a number as an (optional) argument: the number of context
+ lines to print."""
+ if arg:
+ context = int(arg)
+ self.print_stack_trace(context)
+ else:
+ self.print_stack_trace()
+
+ do_w = do_where
def set_trace(frame=None):
diff --git a/contrib/python/ipython/py2/IPython/core/display.py b/contrib/python/ipython/py2/IPython/core/display.py
index 2c38b32933..5c82a57b31 100644
--- a/contrib/python/ipython/py2/IPython/core/display.py
+++ b/contrib/python/ipython/py2/IPython/core/display.py
@@ -1,124 +1,124 @@
-# -*- coding: utf-8 -*-
-"""Top-level display functions for displaying object in different formats."""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import print_function
-
+# -*- coding: utf-8 -*-
+"""Top-level display functions for displaying object in different formats."""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import print_function
+
try:
from base64 import encodebytes as base64_encode
except ImportError:
from base64 import encodestring as base64_encode
from binascii import b2a_hex, hexlify
-import json
-import mimetypes
-import os
-import struct
+import json
+import mimetypes
+import os
+import struct
import sys
-import warnings
-
-from IPython.utils.py3compat import (string_types, cast_bytes_py2, cast_unicode,
- unicode_type)
-from IPython.testing.skipdoctest import skip_doctest
-
-__all__ = ['display', 'display_pretty', 'display_html', 'display_markdown',
-'display_svg', 'display_png', 'display_jpeg', 'display_latex', 'display_json',
-'display_javascript', 'display_pdf', 'DisplayObject', 'TextDisplayObject',
+import warnings
+
+from IPython.utils.py3compat import (string_types, cast_bytes_py2, cast_unicode,
+ unicode_type)
+from IPython.testing.skipdoctest import skip_doctest
+
+__all__ = ['display', 'display_pretty', 'display_html', 'display_markdown',
+'display_svg', 'display_png', 'display_jpeg', 'display_latex', 'display_json',
+'display_javascript', 'display_pdf', 'DisplayObject', 'TextDisplayObject',
'Pretty', 'HTML', 'Markdown', 'Math', 'Latex', 'SVG', 'ProgressBar', 'JSON', 'Javascript',
-'Image', 'clear_output', 'set_matplotlib_formats', 'set_matplotlib_close',
+'Image', 'clear_output', 'set_matplotlib_formats', 'set_matplotlib_close',
'publish_display_data', 'update_display', 'DisplayHandle']
-
-#-----------------------------------------------------------------------------
-# utility functions
-#-----------------------------------------------------------------------------
-
-def _safe_exists(path):
- """Check path, but don't let exceptions raise"""
- try:
- return os.path.exists(path)
- except Exception:
- return False
-
-def _merge(d1, d2):
- """Like update, but merges sub-dicts instead of clobbering at the top level.
-
- Updates d1 in-place
- """
-
- if not isinstance(d2, dict) or not isinstance(d1, dict):
- return d2
- for key, value in d2.items():
- d1[key] = _merge(d1.get(key), value)
- return d1
-
-def _display_mimetype(mimetype, objs, raw=False, metadata=None):
- """internal implementation of all display_foo methods
-
- Parameters
- ----------
- mimetype : str
- The mimetype to be published (e.g. 'image/png')
- objs : tuple of objects
- The Python objects to display, or if raw=True raw text data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- if metadata:
- metadata = {mimetype: metadata}
- if raw:
- # turn list of pngdata into list of { 'image/png': pngdata }
- objs = [ {mimetype: obj} for obj in objs ]
- display(*objs, raw=raw, metadata=metadata, include=[mimetype])
-
-#-----------------------------------------------------------------------------
-# Main functions
-#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# utility functions
+#-----------------------------------------------------------------------------
+
+def _safe_exists(path):
+ """Check path, but don't let exceptions raise"""
+ try:
+ return os.path.exists(path)
+ except Exception:
+ return False
+
+def _merge(d1, d2):
+ """Like update, but merges sub-dicts instead of clobbering at the top level.
+
+ Updates d1 in-place
+ """
+
+ if not isinstance(d2, dict) or not isinstance(d1, dict):
+ return d2
+ for key, value in d2.items():
+ d1[key] = _merge(d1.get(key), value)
+ return d1
+
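A quick standalone illustration of what _merge does, using a local copy of the helper so the snippet runs on its own: sub-dicts are merged key by key instead of being replaced wholesale.

def _merge(d1, d2):
    # local copy of the helper above, for a self-contained example
    if not isinstance(d2, dict) or not isinstance(d1, dict):
        return d2
    for key, value in d2.items():
        d1[key] = _merge(d1.get(key), value)
    return d1

d1 = {'image/png': {'width': 100}, 'text/plain': {}}
_merge(d1, {'image/png': {'height': 50}})
print(d1)  # {'image/png': {'width': 100, 'height': 50}, 'text/plain': {}}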
+def _display_mimetype(mimetype, objs, raw=False, metadata=None):
+ """internal implementation of all display_foo methods
+
+ Parameters
+ ----------
+ mimetype : str
+ The mimetype to be published (e.g. 'image/png')
+ objs : tuple of objects
+ The Python objects to display, or if raw=True raw text data to
+ display.
+ raw : bool
+ Are the data objects raw data or Python objects that need to be
+ formatted before display? [default: False]
+ metadata : dict (optional)
+ Metadata to be associated with the specific mimetype output.
+ """
+ if metadata:
+ metadata = {mimetype: metadata}
+ if raw:
+ # turn list of pngdata into list of { 'image/png': pngdata }
+ objs = [ {mimetype: obj} for obj in objs ]
+ display(*objs, raw=raw, metadata=metadata, include=[mimetype])
+
+#-----------------------------------------------------------------------------
+# Main functions
+#-----------------------------------------------------------------------------
# use * to indicate transient is keyword-only
def publish_display_data(data, metadata=None, source=None, **kwargs):
- """Publish data and metadata to all frontends.
-
- See the ``display_data`` message in the messaging documentation for
- more details about this message type.
-
- The following MIME types are currently implemented:
-
- * text/plain
- * text/html
- * text/markdown
- * text/latex
- * application/json
- * application/javascript
- * image/png
- * image/jpeg
- * image/svg+xml
-
- Parameters
- ----------
- data : dict
- A dictionary having keys that are valid MIME types (like
- 'text/plain' or 'image/svg+xml') and values that are the data for
- that MIME type. The data itself must be a JSON'able data
- structure. Minimally all data should have the 'text/plain' data,
- which can be displayed by all frontends. If more than the plain
- text is given, it is up to the frontend to decide which
- representation to use.
- metadata : dict
- A dictionary for metadata related to the data. This can contain
- arbitrary key, value pairs that frontends can use to interpret
- the data. mime-type keys matching those in data can be used
- to specify metadata about particular representations.
- source : str, deprecated
- Unused.
+ """Publish data and metadata to all frontends.
+
+ See the ``display_data`` message in the messaging documentation for
+ more details about this message type.
+
+ The following MIME types are currently implemented:
+
+ * text/plain
+ * text/html
+ * text/markdown
+ * text/latex
+ * application/json
+ * application/javascript
+ * image/png
+ * image/jpeg
+ * image/svg+xml
+
+ Parameters
+ ----------
+ data : dict
+ A dictionary having keys that are valid MIME types (like
+ 'text/plain' or 'image/svg+xml') and values that are the data for
+ that MIME type. The data itself must be a JSON'able data
+ structure. Minimally all data should have the 'text/plain' data,
+ which can be displayed by all frontends. If more than the plain
+ text is given, it is up to the frontend to decide which
+ representation to use.
+ metadata : dict
+ A dictionary for metadata related to the data. This can contain
+ arbitrary key, value pairs that frontends can use to interpret
+ the data. mime-type keys matching those in data can be used
+ to specify metadata about particular representations.
+ source : str, deprecated
+ Unused.
transient : dict, keyword-only
A dictionary of transient data, such as display_id.
- """
- from IPython.core.interactiveshell import InteractiveShell
+ """
+ from IPython.core.interactiveshell import InteractiveShell
display_pub = InteractiveShell.instance().display_pub
@@ -127,45 +127,45 @@ def publish_display_data(data, metadata=None, source=None, **kwargs):
# TODO: We could check for ipykernel version and provide a detailed upgrade message.
display_pub.publish(
- data=data,
- metadata=metadata,
+ data=data,
+ metadata=metadata,
**kwargs
- )
-
+ )
+
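A usage sketch for publish_display_data: a dict keyed by MIME type plus optional per-mimetype metadata. The HTML payload and metadata key are illustrative, and the call only produces rich output inside a running IPython/Jupyter session.

from IPython.display import publish_display_data

publish_display_data(
    data={
        'text/plain': 'a fallback every frontend can render',
        'text/html': '<b>a richer representation</b>',   # illustrative payload
    },
    metadata={'text/html': {'isolated': False}},          # illustrative metadata
)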
def _new_id():
"""Generate a new random text id with urandom"""
return b2a_hex(os.urandom(16)).decode('ascii')
-def display(*objs, **kwargs):
- """Display a Python object in all frontends.
-
- By default all representations will be computed and sent to the frontends.
- Frontends can decide which representation is used and how.
-
+def display(*objs, **kwargs):
+ """Display a Python object in all frontends.
+
+ By default all representations will be computed and sent to the frontends.
+ Frontends can decide which representation is used and how.
+
In terminal IPython this will be similar to using :func:`print`; for use in richer
frontends, see the Jupyter notebook examples with rich display logic.
- Parameters
- ----------
- objs : tuple of objects
- The Python objects to display.
- raw : bool, optional
- Are the objects to be displayed already mimetype-keyed dicts of raw display data,
- or Python objects that need to be formatted before display? [default: False]
+ Parameters
+ ----------
+ objs : tuple of objects
+ The Python objects to display.
+ raw : bool, optional
+ Are the objects to be displayed already mimetype-keyed dicts of raw display data,
+ or Python objects that need to be formatted before display? [default: False]
include : list, tuple or set, optional
- A list of format type strings (MIME types) to include in the
- format data dict. If this is set *only* the format types included
- in this list will be computed.
+ A list of format type strings (MIME types) to include in the
+ format data dict. If this is set *only* the format types included
+ in this list will be computed.
exclude : list, tuple or set, optional
- A list of format type strings (MIME types) to exclude in the format
- data dict. If this is set all format types will be computed,
- except for those included in this argument.
- metadata : dict, optional
- A dictionary of metadata to associate with the output.
- mime-type keys in this dictionary will be associated with the individual
- representation formats, if they exist.
+ A list of format type strings (MIME types) to exclude in the format
+ data dict. If this is set all format types will be computed,
+ except for those included in this argument.
+ metadata : dict, optional
+ A dictionary of metadata to associate with the output.
+ mime-type keys in this dictionary will be associated with the individual
+ representation formats, if they exist.
transient : dict, optional
A dictionary of transient data to associate with the output.
Data in this dict should not be persisted to files (e.g. notebooks).
@@ -276,8 +276,8 @@ def display(*objs, **kwargs):
from IPython.display import display
- """
- from IPython.core.interactiveshell import InteractiveShell
+ """
+ from IPython.core.interactiveshell import InteractiveShell
if not InteractiveShell.initialized():
# Directly print objects.
@@ -300,26 +300,26 @@ def display(*objs, **kwargs):
raise TypeError('display_id required for update_display')
if transient:
kwargs['transient'] = transient
-
- if not raw:
- format = InteractiveShell.instance().display_formatter.format
-
- for obj in objs:
- if raw:
+
+ if not raw:
+ format = InteractiveShell.instance().display_formatter.format
+
+ for obj in objs:
+ if raw:
publish_display_data(data=obj, metadata=metadata, **kwargs)
- else:
- format_dict, md_dict = format(obj, include=include, exclude=exclude)
- if not format_dict:
- # nothing to display (e.g. _ipython_display_ took over)
- continue
- if metadata:
- # kwarg-specified metadata gets precedence
- _merge(md_dict, metadata)
+ else:
+ format_dict, md_dict = format(obj, include=include, exclude=exclude)
+ if not format_dict:
+ # nothing to display (e.g. _ipython_display_ took over)
+ continue
+ if metadata:
+ # kwarg-specified metadata gets precedence
+ _merge(md_dict, metadata)
publish_display_data(data=format_dict, metadata=md_dict, **kwargs)
if display_id:
return DisplayHandle(display_id)
-
-
+
+
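A usage sketch for display() covering the raw= path and display_id, intended to be run inside a Jupyter frontend; the strings are placeholders.

from IPython.display import display

# raw=True: the objects are already MIME-keyed dicts, no formatting step
display({'text/plain': 'plain fallback',
         'text/markdown': '**already formatted**'}, raw=True)

# display_id=True returns a DisplayHandle that can re-render the same output
handle = display('updatable text', display_id=True)
handle.update('replacement text')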
# use * for keyword-only display_id arg
def update_display(obj, **kwargs):
"""Update an existing display by id
@@ -395,346 +395,346 @@ class DisplayHandle(object):
update_display(obj, display_id=self.display_id, **kwargs)
-def display_pretty(*objs, **kwargs):
- """Display the pretty (default) representation of an object.
-
- Parameters
- ----------
- objs : tuple of objects
- The Python objects to display, or if raw=True raw text data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('text/plain', objs, **kwargs)
-
-
-def display_html(*objs, **kwargs):
- """Display the HTML representation of an object.
-
- Note: If raw=False and the object does not have a HTML
- representation, no HTML will be shown.
-
- Parameters
- ----------
- objs : tuple of objects
- The Python objects to display, or if raw=True raw HTML data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('text/html', objs, **kwargs)
-
-
-def display_markdown(*objs, **kwargs):
- """Displays the Markdown representation of an object.
-
- Parameters
- ----------
- objs : tuple of objects
- The Python objects to display, or if raw=True raw markdown data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
-
- _display_mimetype('text/markdown', objs, **kwargs)
-
-
-def display_svg(*objs, **kwargs):
- """Display the SVG representation of an object.
-
- Parameters
- ----------
- objs : tuple of objects
- The Python objects to display, or if raw=True raw svg data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('image/svg+xml', objs, **kwargs)
-
-
-def display_png(*objs, **kwargs):
- """Display the PNG representation of an object.
-
- Parameters
- ----------
- objs : tuple of objects
- The Python objects to display, or if raw=True raw png data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('image/png', objs, **kwargs)
-
-
-def display_jpeg(*objs, **kwargs):
- """Display the JPEG representation of an object.
-
- Parameters
- ----------
- objs : tuple of objects
- The Python objects to display, or if raw=True raw JPEG data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('image/jpeg', objs, **kwargs)
-
-
-def display_latex(*objs, **kwargs):
- """Display the LaTeX representation of an object.
-
- Parameters
- ----------
- objs : tuple of objects
- The Python objects to display, or if raw=True raw latex data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('text/latex', objs, **kwargs)
-
-
-def display_json(*objs, **kwargs):
- """Display the JSON representation of an object.
-
- Note that not many frontends support displaying JSON.
-
- Parameters
- ----------
- objs : tuple of objects
- The Python objects to display, or if raw=True raw json data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('application/json', objs, **kwargs)
-
-
-def display_javascript(*objs, **kwargs):
- """Display the Javascript representation of an object.
-
- Parameters
- ----------
- objs : tuple of objects
- The Python objects to display, or if raw=True raw javascript data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('application/javascript', objs, **kwargs)
-
-
-def display_pdf(*objs, **kwargs):
- """Display the PDF representation of an object.
-
- Parameters
- ----------
- objs : tuple of objects
- The Python objects to display, or if raw=True raw javascript data to
- display.
- raw : bool
- Are the data objects raw data or Python objects that need to be
- formatted before display? [default: False]
- metadata : dict (optional)
- Metadata to be associated with the specific mimetype output.
- """
- _display_mimetype('application/pdf', objs, **kwargs)
-
-
-#-----------------------------------------------------------------------------
-# Smart classes
-#-----------------------------------------------------------------------------
-
-
-class DisplayObject(object):
- """An object that wraps data to be displayed."""
-
- _read_flags = 'r'
- _show_mem_addr = False
-
- def __init__(self, data=None, url=None, filename=None):
- """Create a display object given raw data.
-
- When this object is returned by an expression or passed to the
- display function, it will result in the data being displayed
- in the frontend. The MIME type of the data should match the
- subclasses used, so the Png subclass should be used for 'image/png'
- data. If the data is a URL, the data will first be downloaded
- and then displayed. If
-
- Parameters
- ----------
- data : unicode, str or bytes
- The raw data or a URL or file to load the data from
- url : unicode
- A URL to download the data from.
- filename : unicode
- Path to a local file to load the data from.
- """
- if data is not None and isinstance(data, string_types):
- if data.startswith('http') and url is None:
- url = data
- filename = None
- data = None
- elif _safe_exists(data) and filename is None:
- url = None
- filename = data
- data = None
-
- self.data = data
- self.url = url
- self.filename = None if filename is None else unicode_type(filename)
-
- self.reload()
- self._check_data()
-
- def __repr__(self):
- if not self._show_mem_addr:
- cls = self.__class__
- r = "<%s.%s object>" % (cls.__module__, cls.__name__)
- else:
- r = super(DisplayObject, self).__repr__()
- return r
-
- def _check_data(self):
- """Override in subclasses if there's something to check."""
- pass
-
- def reload(self):
- """Reload the raw data from file or URL."""
- if self.filename is not None:
- with open(self.filename, self._read_flags) as f:
- self.data = f.read()
- elif self.url is not None:
- try:
- try:
- from urllib.request import urlopen # Py3
- except ImportError:
- from urllib2 import urlopen
- response = urlopen(self.url)
- self.data = response.read()
- # extract encoding from header, if there is one:
- encoding = None
- for sub in response.headers['content-type'].split(';'):
- sub = sub.strip()
- if sub.startswith('charset'):
- encoding = sub.split('=')[-1].strip()
- break
- # decode data, if an encoding was specified
- if encoding:
- self.data = self.data.decode(encoding, 'replace')
- except:
- self.data = None
-
-class TextDisplayObject(DisplayObject):
- """Validate that display data is text"""
- def _check_data(self):
- if self.data is not None and not isinstance(self.data, string_types):
- raise TypeError("%s expects text, not %r" % (self.__class__.__name__, self.data))
-
-class Pretty(TextDisplayObject):
-
+def display_pretty(*objs, **kwargs):
+ """Display the pretty (default) representation of an object.
+
+ Parameters
+ ----------
+ objs : tuple of objects
+ The Python objects to display, or if raw=True raw text data to
+ display.
+ raw : bool
+ Are the data objects raw data or Python objects that need to be
+ formatted before display? [default: False]
+ metadata : dict (optional)
+ Metadata to be associated with the specific mimetype output.
+ """
+ _display_mimetype('text/plain', objs, **kwargs)
+
+
+def display_html(*objs, **kwargs):
+ """Display the HTML representation of an object.
+
+ Note: If raw=False and the object does not have a HTML
+ representation, no HTML will be shown.
+
+ Parameters
+ ----------
+ objs : tuple of objects
+ The Python objects to display, or if raw=True raw HTML data to
+ display.
+ raw : bool
+ Are the data objects raw data or Python objects that need to be
+ formatted before display? [default: False]
+ metadata : dict (optional)
+ Metadata to be associated with the specific mimetype output.
+ """
+ _display_mimetype('text/html', objs, **kwargs)
+
+
+def display_markdown(*objs, **kwargs):
+ """Displays the Markdown representation of an object.
+
+ Parameters
+ ----------
+ objs : tuple of objects
+ The Python objects to display, or if raw=True raw markdown data to
+ display.
+ raw : bool
+ Are the data objects raw data or Python objects that need to be
+ formatted before display? [default: False]
+ metadata : dict (optional)
+ Metadata to be associated with the specific mimetype output.
+ """
+
+ _display_mimetype('text/markdown', objs, **kwargs)
+
+
+def display_svg(*objs, **kwargs):
+ """Display the SVG representation of an object.
+
+ Parameters
+ ----------
+ objs : tuple of objects
+ The Python objects to display, or if raw=True raw svg data to
+ display.
+ raw : bool
+ Are the data objects raw data or Python objects that need to be
+ formatted before display? [default: False]
+ metadata : dict (optional)
+ Metadata to be associated with the specific mimetype output.
+ """
+ _display_mimetype('image/svg+xml', objs, **kwargs)
+
+
+def display_png(*objs, **kwargs):
+ """Display the PNG representation of an object.
+
+ Parameters
+ ----------
+ objs : tuple of objects
+ The Python objects to display, or if raw=True raw png data to
+ display.
+ raw : bool
+ Are the data objects raw data or Python objects that need to be
+ formatted before display? [default: False]
+ metadata : dict (optional)
+ Metadata to be associated with the specific mimetype output.
+ """
+ _display_mimetype('image/png', objs, **kwargs)
+
+
+def display_jpeg(*objs, **kwargs):
+ """Display the JPEG representation of an object.
+
+ Parameters
+ ----------
+ objs : tuple of objects
+ The Python objects to display, or if raw=True raw JPEG data to
+ display.
+ raw : bool
+ Are the data objects raw data or Python objects that need to be
+ formatted before display? [default: False]
+ metadata : dict (optional)
+ Metadata to be associated with the specific mimetype output.
+ """
+ _display_mimetype('image/jpeg', objs, **kwargs)
+
+
+def display_latex(*objs, **kwargs):
+ """Display the LaTeX representation of an object.
+
+ Parameters
+ ----------
+ objs : tuple of objects
+ The Python objects to display, or if raw=True raw latex data to
+ display.
+ raw : bool
+ Are the data objects raw data or Python objects that need to be
+ formatted before display? [default: False]
+ metadata : dict (optional)
+ Metadata to be associated with the specific mimetype output.
+ """
+ _display_mimetype('text/latex', objs, **kwargs)
+
+
+def display_json(*objs, **kwargs):
+ """Display the JSON representation of an object.
+
+ Note that not many frontends support displaying JSON.
+
+ Parameters
+ ----------
+ objs : tuple of objects
+ The Python objects to display, or if raw=True raw json data to
+ display.
+ raw : bool
+ Are the data objects raw data or Python objects that need to be
+ formatted before display? [default: False]
+ metadata : dict (optional)
+ Metadata to be associated with the specific mimetype output.
+ """
+ _display_mimetype('application/json', objs, **kwargs)
+
+
+def display_javascript(*objs, **kwargs):
+ """Display the Javascript representation of an object.
+
+ Parameters
+ ----------
+ objs : tuple of objects
+ The Python objects to display, or if raw=True raw javascript data to
+ display.
+ raw : bool
+ Are the data objects raw data or Python objects that need to be
+ formatted before display? [default: False]
+ metadata : dict (optional)
+ Metadata to be associated with the specific mimetype output.
+ """
+ _display_mimetype('application/javascript', objs, **kwargs)
+
+
+def display_pdf(*objs, **kwargs):
+ """Display the PDF representation of an object.
+
+ Parameters
+ ----------
+ objs : tuple of objects
+ The Python objects to display, or if raw=True raw PDF data to
+ display.
+ raw : bool
+ Are the data objects raw data or Python objects that need to be
+ formatted before display? [default: False]
+ metadata : dict (optional)
+ Metadata to be associated with the specific mimetype output.
+ """
+ _display_mimetype('application/pdf', objs, **kwargs)
+
+
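Each display_* helper above is a thin wrapper over _display_mimetype with a fixed MIME type, as a quick usage sketch shows (run inside an IPython session for rich output):

from IPython.display import display_html, display_json

display_html('<ul><li>rendered as HTML</li></ul>', raw=True)
display_json({'answer': 42}, raw=True)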
+#-----------------------------------------------------------------------------
+# Smart classes
+#-----------------------------------------------------------------------------
+
+
+class DisplayObject(object):
+ """An object that wraps data to be displayed."""
+
+ _read_flags = 'r'
+ _show_mem_addr = False
+
+ def __init__(self, data=None, url=None, filename=None):
+ """Create a display object given raw data.
+
+ When this object is returned by an expression or passed to the
+ display function, it will result in the data being displayed
+ in the frontend. The MIME type of the data should match the
+ subclasses used, so the Png subclass should be used for 'image/png'
+ data. If the data is a URL, the data will first be downloaded
+ and then displayed.
+
+ Parameters
+ ----------
+ data : unicode, str or bytes
+ The raw data or a URL or file to load the data from
+ url : unicode
+ A URL to download the data from.
+ filename : unicode
+ Path to a local file to load the data from.
+ """
+ if data is not None and isinstance(data, string_types):
+ if data.startswith('http') and url is None:
+ url = data
+ filename = None
+ data = None
+ elif _safe_exists(data) and filename is None:
+ url = None
+ filename = data
+ data = None
+
+ self.data = data
+ self.url = url
+ self.filename = None if filename is None else unicode_type(filename)
+
+ self.reload()
+ self._check_data()
+
+ def __repr__(self):
+ if not self._show_mem_addr:
+ cls = self.__class__
+ r = "<%s.%s object>" % (cls.__module__, cls.__name__)
+ else:
+ r = super(DisplayObject, self).__repr__()
+ return r
+
+ def _check_data(self):
+ """Override in subclasses if there's something to check."""
+ pass
+
+ def reload(self):
+ """Reload the raw data from file or URL."""
+ if self.filename is not None:
+ with open(self.filename, self._read_flags) as f:
+ self.data = f.read()
+ elif self.url is not None:
+ try:
+ try:
+ from urllib.request import urlopen # Py3
+ except ImportError:
+ from urllib2 import urlopen
+ response = urlopen(self.url)
+ self.data = response.read()
+ # extract encoding from header, if there is one:
+ encoding = None
+ for sub in response.headers['content-type'].split(';'):
+ sub = sub.strip()
+ if sub.startswith('charset'):
+ encoding = sub.split('=')[-1].strip()
+ break
+ # decode data, if an encoding was specified
+ if encoding:
+ self.data = self.data.decode(encoding, 'replace')
+ except:
+ self.data = None
+
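A standalone sketch of how DisplayObject.__init__ routes a positional string: an http(s) string is promoted to url, an existing path to filename, and anything else stays raw data (route is an illustrative name).

import os

def route(data=None, url=None, filename=None):
    if isinstance(data, str):
        if data.startswith('http') and url is None:
            url, data = data, None
        elif os.path.exists(data) and filename is None:
            filename, data = data, None
    return data, url, filename

print(route('https://example.org/logo.png'))  # (None, 'https://example.org/logo.png', None)
print(route('<b>markup</b>'))                 # ('<b>markup</b>', None, None)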
+class TextDisplayObject(DisplayObject):
+ """Validate that display data is text"""
+ def _check_data(self):
+ if self.data is not None and not isinstance(self.data, string_types):
+ raise TypeError("%s expects text, not %r" % (self.__class__.__name__, self.data))
+
+class Pretty(TextDisplayObject):
+
def _repr_pretty_(self, pp, cycle):
return pp.text(self.data)
-
-
-class HTML(TextDisplayObject):
-
- def _repr_html_(self):
- return self.data
-
- def __html__(self):
- """
- This method exists to inform other HTML-using modules (e.g. Markupsafe,
- htmltag, etc) that this object is HTML and does not need things like
- special characters (<>&) escaped.
- """
- return self._repr_html_()
-
-
-class Markdown(TextDisplayObject):
-
- def _repr_markdown_(self):
- return self.data
-
-
-class Math(TextDisplayObject):
-
- def _repr_latex_(self):
- s = self.data.strip('$')
- return "$$%s$$" % s
-
-
-class Latex(TextDisplayObject):
-
- def _repr_latex_(self):
- return self.data
-
-
-class SVG(DisplayObject):
-
+
+
+class HTML(TextDisplayObject):
+
+ def _repr_html_(self):
+ return self.data
+
+ def __html__(self):
+ """
+ This method exists to inform other HTML-using modules (e.g. Markupsafe,
+ htmltag, etc) that this object is HTML and does not need things like
+ special characters (<>&) escaped.
+ """
+ return self._repr_html_()
+
+
+class Markdown(TextDisplayObject):
+
+ def _repr_markdown_(self):
+ return self.data
+
+
+class Math(TextDisplayObject):
+
+ def _repr_latex_(self):
+ s = self.data.strip('$')
+ return "$$%s$$" % s
+
+
+class Latex(TextDisplayObject):
+
+ def _repr_latex_(self):
+ return self.data
+
+
+class SVG(DisplayObject):
+
_read_flags = 'rb'
- # wrap data in a property, which extracts the <svg> tag, discarding
- # document headers
- _data = None
-
- @property
- def data(self):
- return self._data
-
- @data.setter
- def data(self, svg):
- if svg is None:
- self._data = None
- return
- # parse into dom object
- from xml.dom import minidom
- svg = cast_bytes_py2(svg)
- x = minidom.parseString(svg)
- # get svg tag (should be 1)
- found_svg = x.getElementsByTagName('svg')
- if found_svg:
- svg = found_svg[0].toxml()
- else:
- # fallback on the input, trust the user
- # but this is probably an error.
- pass
- svg = cast_unicode(svg)
- self._data = svg
-
- def _repr_svg_(self):
- return self.data
-
+ # wrap data in a property, which extracts the <svg> tag, discarding
+ # document headers
+ _data = None
+
+ @property
+ def data(self):
+ return self._data
+
+ @data.setter
+ def data(self, svg):
+ if svg is None:
+ self._data = None
+ return
+ # parse into dom object
+ from xml.dom import minidom
+ svg = cast_bytes_py2(svg)
+ x = minidom.parseString(svg)
+ # get svg tag (should be 1)
+ found_svg = x.getElementsByTagName('svg')
+ if found_svg:
+ svg = found_svg[0].toxml()
+ else:
+ # fallback on the input, trust the user
+ # but this is probably an error.
+ pass
+ svg = cast_unicode(svg)
+ self._data = svg
+
+ def _repr_svg_(self):
+ return self.data
+
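A standalone check of what the SVG data setter does: parse the document with minidom and keep only the first <svg> element, discarding any XML prolog or doctype.

from xml.dom import minidom

doc = b'<?xml version="1.0"?><svg xmlns="http://www.w3.org/2000/svg"><rect/></svg>'
found = minidom.parseString(doc).getElementsByTagName('svg')
print(found[0].toxml())  # <svg xmlns="http://www.w3.org/2000/svg"><rect/></svg>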
class ProgressBar(DisplayObject):
"""Progressbar supports displaying a progressbar like element
"""
@@ -751,7 +751,7 @@ class ProgressBar(DisplayObject):
self.html_width = '60ex'
self.text_width = 60
self._display_id = hexlify(os.urandom(8)).decode('ascii')
-
+
def __repr__(self):
fraction = self.progress / self.total
filled = '=' * int(fraction * self.text_width)
@@ -797,367 +797,367 @@ class ProgressBar(DisplayObject):
"""Python 2 compatibility"""
return self.__next__()
-class JSON(DisplayObject):
- """JSON expects a JSON-able dict or list
-
- not an already-serialized JSON string.
-
- Scalar types (None, number, string) are not allowed, only dict or list containers.
- """
- # wrap data in a property, which warns about passing already-serialized JSON
- _data = None
- def _check_data(self):
- if self.data is not None and not isinstance(self.data, (dict, list)):
- raise TypeError("%s expects JSONable dict or list, not %r" % (self.__class__.__name__, self.data))
-
- @property
- def data(self):
- return self._data
-
- @data.setter
- def data(self, data):
- if isinstance(data, string_types):
- warnings.warn("JSON expects JSONable dict or list, not JSON strings")
- data = json.loads(data)
- self._data = data
-
- def _repr_json_(self):
- return self.data
-
-css_t = """$("head").append($("<link/>").attr({
- rel: "stylesheet",
- type: "text/css",
- href: "%s"
-}));
-"""
-
-lib_t1 = """$.getScript("%s", function () {
-"""
-lib_t2 = """});
-"""
-
-class Javascript(TextDisplayObject):
-
- def __init__(self, data=None, url=None, filename=None, lib=None, css=None):
- """Create a Javascript display object given raw data.
-
- When this object is returned by an expression or passed to the
- display function, it will result in the data being displayed
- in the frontend. If the data is a URL, the data will first be
- downloaded and then displayed.
-
- In the Notebook, the containing element will be available as `element`,
- and jQuery will be available. Content appended to `element` will be
- visible in the output area.
-
- Parameters
- ----------
- data : unicode, str or bytes
- The Javascript source code or a URL to download it from.
- url : unicode
- A URL to download the data from.
- filename : unicode
- Path to a local file to load the data from.
- lib : list or str
- A sequence of Javascript library URLs to load asynchronously before
- running the source code. The full URLs of the libraries should
- be given. A single Javascript library URL can also be given as a
- string.
- css: : list or str
- A sequence of css files to load before running the source code.
- The full URLs of the css files should be given. A single css URL
- can also be given as a string.
- """
- if isinstance(lib, string_types):
- lib = [lib]
- elif lib is None:
- lib = []
- if isinstance(css, string_types):
- css = [css]
- elif css is None:
- css = []
- if not isinstance(lib, (list,tuple)):
- raise TypeError('expected sequence, got: %r' % lib)
- if not isinstance(css, (list,tuple)):
- raise TypeError('expected sequence, got: %r' % css)
- self.lib = lib
- self.css = css
- super(Javascript, self).__init__(data=data, url=url, filename=filename)
-
- def _repr_javascript_(self):
- r = ''
- for c in self.css:
- r += css_t % c
- for l in self.lib:
- r += lib_t1 % l
- r += self.data
- r += lib_t2*len(self.lib)
- return r
-
-# constants for identifying png/jpeg data
-_PNG = b'\x89PNG\r\n\x1a\n'
-_JPEG = b'\xff\xd8'
-
-def _pngxy(data):
- """read the (width, height) from a PNG header"""
- ihdr = data.index(b'IHDR')
- # next 8 bytes are width/height
- w4h4 = data[ihdr+4:ihdr+12]
- return struct.unpack('>ii', w4h4)
-
-def _jpegxy(data):
- """read the (width, height) from a JPEG header"""
- # adapted from http://www.64lines.com/jpeg-width-height
-
- idx = 4
- while True:
- block_size = struct.unpack('>H', data[idx:idx+2])[0]
- idx = idx + block_size
- if data[idx:idx+2] == b'\xFF\xC0':
- # found Start of Frame
- iSOF = idx
- break
- else:
- # read another block
- idx += 2
-
- h, w = struct.unpack('>HH', data[iSOF+5:iSOF+9])
- return w, h
-
-class Image(DisplayObject):
-
- _read_flags = 'rb'
- _FMT_JPEG = u'jpeg'
- _FMT_PNG = u'png'
- _ACCEPTABLE_EMBEDDINGS = [_FMT_JPEG, _FMT_PNG]
-
- def __init__(self, data=None, url=None, filename=None, format=None,
- embed=None, width=None, height=None, retina=False,
- unconfined=False, metadata=None):
- """Create a PNG/JPEG image object given raw data.
-
- When this object is returned by an input cell or passed to the
- display function, it will result in the image being displayed
- in the frontend.
-
- Parameters
- ----------
- data : unicode, str or bytes
- The raw image data or a URL or filename to load the data from.
- This always results in embedded image data.
- url : unicode
- A URL to download the data from. If you specify `url=`,
- the image data will not be embedded unless you also specify `embed=True`.
- filename : unicode
- Path to a local file to load the data from.
- Images from a file are always embedded.
- format : unicode
- The format of the image data (png/jpeg/jpg). If a filename or URL is given
- for format will be inferred from the filename extension.
- embed : bool
- Should the image data be embedded using a data URI (True) or be
- loaded using an <img> tag. Set this to True if you want the image
- to be viewable later with no internet connection in the notebook.
-
- Default is `True`, unless the keyword argument `url` is set, then
- default value is `False`.
-
- Note that QtConsole is not able to display images if `embed` is set to `False`
- width : int
+class JSON(DisplayObject):
+ """JSON expects a JSON-able dict or list
+
+ not an already-serialized JSON string.
+
+ Scalar types (None, number, string) are not allowed, only dict or list containers.
+ """
+ # wrap data in a property, which warns about passing already-serialized JSON
+ _data = None
+ def _check_data(self):
+ if self.data is not None and not isinstance(self.data, (dict, list)):
+ raise TypeError("%s expects JSONable dict or list, not %r" % (self.__class__.__name__, self.data))
+
+ @property
+ def data(self):
+ return self._data
+
+ @data.setter
+ def data(self, data):
+ if isinstance(data, string_types):
+ warnings.warn("JSON expects JSONable dict or list, not JSON strings")
+ data = json.loads(data)
+ self._data = data
+
+ def _repr_json_(self):
+ return self.data
+
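A usage sketch for the JSON display class: pass an already-deserialized container; a JSON string triggers the warning above and is loaded for you.

from IPython.display import JSON

ok = JSON({'a': [1, 2, 3]})          # dict or list: accepted as-is
parsed = JSON('{"a": [1, 2, 3]}')    # warns, then json.loads() the string
print(parsed.data)                   # {'a': [1, 2, 3]}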
+css_t = """$("head").append($("<link/>").attr({
+ rel: "stylesheet",
+ type: "text/css",
+ href: "%s"
+}));
+"""
+
+lib_t1 = """$.getScript("%s", function () {
+"""
+lib_t2 = """});
+"""
+
+class Javascript(TextDisplayObject):
+
+ def __init__(self, data=None, url=None, filename=None, lib=None, css=None):
+ """Create a Javascript display object given raw data.
+
+ When this object is returned by an expression or passed to the
+ display function, it will result in the data being displayed
+ in the frontend. If the data is a URL, the data will first be
+ downloaded and then displayed.
+
+ In the Notebook, the containing element will be available as `element`,
+ and jQuery will be available. Content appended to `element` will be
+ visible in the output area.
+
+ Parameters
+ ----------
+ data : unicode, str or bytes
+ The Javascript source code or a URL to download it from.
+ url : unicode
+ A URL to download the data from.
+ filename : unicode
+ Path to a local file to load the data from.
+ lib : list or str
+ A sequence of Javascript library URLs to load asynchronously before
+ running the source code. The full URLs of the libraries should
+ be given. A single Javascript library URL can also be given as a
+ string.
+ css : list or str
+ A sequence of css files to load before running the source code.
+ The full URLs of the css files should be given. A single css URL
+ can also be given as a string.
+ """
+ if isinstance(lib, string_types):
+ lib = [lib]
+ elif lib is None:
+ lib = []
+ if isinstance(css, string_types):
+ css = [css]
+ elif css is None:
+ css = []
+ if not isinstance(lib, (list,tuple)):
+ raise TypeError('expected sequence, got: %r' % lib)
+ if not isinstance(css, (list,tuple)):
+ raise TypeError('expected sequence, got: %r' % css)
+ self.lib = lib
+ self.css = css
+ super(Javascript, self).__init__(data=data, url=url, filename=filename)
+
+ def _repr_javascript_(self):
+ r = ''
+ for c in self.css:
+ r += css_t % c
+ for l in self.lib:
+ r += lib_t1 % l
+ r += self.data
+ r += lib_t2*len(self.lib)
+ return r
+
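A usage sketch for Javascript with an external library: each lib URL is wrapped in a $.getScript() call before the source runs; the CDN URL is illustrative.

from IPython.display import Javascript

js = Javascript("element.text('hi');",
                lib='https://cdn.example.org/lib.js')  # illustrative URL
print(js._repr_javascript_())  # $.getScript("...") wrapper around the source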
+# constants for identifying png/jpeg data
+_PNG = b'\x89PNG\r\n\x1a\n'
+_JPEG = b'\xff\xd8'
+
+def _pngxy(data):
+ """read the (width, height) from a PNG header"""
+ ihdr = data.index(b'IHDR')
+ # next 8 bytes are width/height
+ w4h4 = data[ihdr+4:ihdr+12]
+ return struct.unpack('>ii', w4h4)
+
+def _jpegxy(data):
+ """read the (width, height) from a JPEG header"""
+ # adapted from http://www.64lines.com/jpeg-width-height
+
+ idx = 4
+ while True:
+ block_size = struct.unpack('>H', data[idx:idx+2])[0]
+ idx = idx + block_size
+ if data[idx:idx+2] == b'\xFF\xC0':
+ # found Start of Frame
+ iSOF = idx
+ break
+ else:
+ # read another block
+ idx += 2
+
+ h, w = struct.unpack('>HH', data[iSOF+5:iSOF+9])
+ return w, h
+
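A standalone check of the PNG header parsing: the eight bytes after b'IHDR' hold the big-endian width and height (the helper is copied from above so the snippet runs on its own).

import struct

def _pngxy(data):  # copy of the helper above
    ihdr = data.index(b'IHDR')
    return struct.unpack('>ii', data[ihdr + 4:ihdr + 12])

fake_png = b'\x89PNG\r\n\x1a\n' + b'\x00\x00\x00\rIHDR' + struct.pack('>ii', 640, 480)
print(_pngxy(fake_png))  # (640, 480)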
+class Image(DisplayObject):
+
+ _read_flags = 'rb'
+ _FMT_JPEG = u'jpeg'
+ _FMT_PNG = u'png'
+ _ACCEPTABLE_EMBEDDINGS = [_FMT_JPEG, _FMT_PNG]
+
+ def __init__(self, data=None, url=None, filename=None, format=None,
+ embed=None, width=None, height=None, retina=False,
+ unconfined=False, metadata=None):
+ """Create a PNG/JPEG image object given raw data.
+
+ When this object is returned by an input cell or passed to the
+ display function, it will result in the image being displayed
+ in the frontend.
+
+ Parameters
+ ----------
+ data : unicode, str or bytes
+ The raw image data or a URL or filename to load the data from.
+ This always results in embedded image data.
+ url : unicode
+ A URL to download the data from. If you specify `url=`,
+ the image data will not be embedded unless you also specify `embed=True`.
+ filename : unicode
+ Path to a local file to load the data from.
+ Images from a file are always embedded.
+ format : unicode
+ The format of the image data (png/jpeg/jpg). If a filename or URL is given,
+ the format will be inferred from the filename extension.
+ embed : bool
+ Should the image data be embedded using a data URI (True) or be
+ loaded using an <img> tag. Set this to True if you want the image
+ to be viewable later with no internet connection in the notebook.
+
+ Default is `True`, unless the keyword argument `url` is set, then
+ default value is `False`.
+
+ Note that QtConsole is not able to display images if `embed` is set to `False`
+ width : int
Width in pixels to which to constrain the image in html
- height : int
+ height : int
Height in pixels to which to constrain the image in html
- retina : bool
- Automatically set the width and height to half of the measured
- width and height.
- This only works for embedded images because it reads the width/height
- from image data.
- For non-embedded images, you can just set the desired display width
- and height directly.
- unconfined: bool
- Set unconfined=True to disable max-width confinement of the image.
- metadata: dict
- Specify extra metadata to attach to the image.
-
- Examples
- --------
- # embedded image data, works in qtconsole and notebook
- # when passed positionally, the first arg can be any of raw image data,
- # a URL, or a filename from which to load image data.
- # The result is always embedding image data for inline images.
- Image('http://www.google.fr/images/srpr/logo3w.png')
- Image('/path/to/image.jpg')
- Image(b'RAW_PNG_DATA...')
-
- # Specifying Image(url=...) does not embed the image data,
- # it only generates `<img>` tag with a link to the source.
- # This will not work in the qtconsole or offline.
- Image(url='http://www.google.fr/images/srpr/logo3w.png')
-
- """
- if filename is not None:
- ext = self._find_ext(filename)
- elif url is not None:
- ext = self._find_ext(url)
- elif data is None:
- raise ValueError("No image data found. Expecting filename, url, or data.")
- elif isinstance(data, string_types) and (
- data.startswith('http') or _safe_exists(data)
- ):
- ext = self._find_ext(data)
- else:
- ext = None
-
- if format is None:
- if ext is not None:
- if ext == u'jpg' or ext == u'jpeg':
- format = self._FMT_JPEG
+ retina : bool
+ Automatically set the width and height to half of the measured
+ width and height.
+ This only works for embedded images because it reads the width/height
+ from image data.
+ For non-embedded images, you can just set the desired display width
+ and height directly.
+ unconfined : bool
+ Set unconfined=True to disable max-width confinement of the image.
+ metadata : dict
+ Specify extra metadata to attach to the image.
+
+ Examples
+ --------
+ # embedded image data, works in qtconsole and notebook
+ # when passed positionally, the first arg can be any of raw image data,
+ # a URL, or a filename from which to load image data.
+ # The result is always embedding image data for inline images.
+ Image('http://www.google.fr/images/srpr/logo3w.png')
+ Image('/path/to/image.jpg')
+ Image(b'RAW_PNG_DATA...')
+
+ # Specifying Image(url=...) does not embed the image data,
+ # it only generates `<img>` tag with a link to the source.
+ # This will not work in the qtconsole or offline.
+ Image(url='http://www.google.fr/images/srpr/logo3w.png')
+
+ """
+ if filename is not None:
+ ext = self._find_ext(filename)
+ elif url is not None:
+ ext = self._find_ext(url)
+ elif data is None:
+ raise ValueError("No image data found. Expecting filename, url, or data.")
+ elif isinstance(data, string_types) and (
+ data.startswith('http') or _safe_exists(data)
+ ):
+ ext = self._find_ext(data)
+ else:
+ ext = None
+
+ if format is None:
+ if ext is not None:
+ if ext == u'jpg' or ext == u'jpeg':
+ format = self._FMT_JPEG
elif ext == u'png':
- format = self._FMT_PNG
- else:
- format = ext.lower()
- elif isinstance(data, bytes):
- # infer image type from image data header,
- # only if format has not been specified.
- if data[:2] == _JPEG:
- format = self._FMT_JPEG
-
- # failed to detect format, default png
- if format is None:
- format = 'png'
-
- if format.lower() == 'jpg':
- # jpg->jpeg
- format = self._FMT_JPEG
-
- self.format = unicode_type(format).lower()
- self.embed = embed if embed is not None else (url is None)
-
- if self.embed and self.format not in self._ACCEPTABLE_EMBEDDINGS:
- raise ValueError("Cannot embed the '%s' image format" % (self.format))
- self.width = width
- self.height = height
- self.retina = retina
- self.unconfined = unconfined
- self.metadata = metadata
- super(Image, self).__init__(data=data, url=url, filename=filename)
-
- if retina:
- self._retina_shape()
-
- def _retina_shape(self):
- """load pixel-doubled width and height from image data"""
- if not self.embed:
- return
- if self.format == 'png':
- w, h = _pngxy(self.data)
- elif self.format == 'jpeg':
- w, h = _jpegxy(self.data)
- else:
- # retina only supports png
- return
- self.width = w // 2
- self.height = h // 2
-
- def reload(self):
- """Reload the raw data from file or URL."""
- if self.embed:
- super(Image,self).reload()
- if self.retina:
- self._retina_shape()
-
- def _repr_html_(self):
- if not self.embed:
- width = height = klass = ''
- if self.width:
- width = ' width="%d"' % self.width
- if self.height:
- height = ' height="%d"' % self.height
- if self.unconfined:
- klass = ' class="unconfined"'
- return u'<img src="{url}"{width}{height}{klass}/>'.format(
- url=self.url,
- width=width,
- height=height,
- klass=klass,
- )
-
- def _data_and_metadata(self):
- """shortcut for returning metadata with shape information, if defined"""
- md = {}
- if self.width:
- md['width'] = self.width
- if self.height:
- md['height'] = self.height
- if self.unconfined:
- md['unconfined'] = self.unconfined
- if self.metadata:
- md.update(self.metadata)
- if md:
- return self.data, md
- else:
- return self.data
-
- def _repr_png_(self):
- if self.embed and self.format == u'png':
- return self._data_and_metadata()
-
- def _repr_jpeg_(self):
- if self.embed and (self.format == u'jpeg' or self.format == u'jpg'):
- return self._data_and_metadata()
-
- def _find_ext(self, s):
- return unicode_type(s.split('.')[-1].lower())
-
-class Video(DisplayObject):
-
+ format = self._FMT_PNG
+ else:
+ format = ext.lower()
+ elif isinstance(data, bytes):
+ # infer image type from image data header,
+ # only if format has not been specified.
+ if data[:2] == _JPEG:
+ format = self._FMT_JPEG
+
+ # failed to detect format, default png
+ if format is None:
+ format = 'png'
+
+ if format.lower() == 'jpg':
+ # jpg->jpeg
+ format = self._FMT_JPEG
+
+ self.format = unicode_type(format).lower()
+ self.embed = embed if embed is not None else (url is None)
+
+ if self.embed and self.format not in self._ACCEPTABLE_EMBEDDINGS:
+ raise ValueError("Cannot embed the '%s' image format" % (self.format))
+ self.width = width
+ self.height = height
+ self.retina = retina
+ self.unconfined = unconfined
+ self.metadata = metadata
+ super(Image, self).__init__(data=data, url=url, filename=filename)
+
+ if retina:
+ self._retina_shape()
+
+ def _retina_shape(self):
+ """load pixel-doubled width and height from image data"""
+ if not self.embed:
+ return
+ if self.format == 'png':
+ w, h = _pngxy(self.data)
+ elif self.format == 'jpeg':
+ w, h = _jpegxy(self.data)
+ else:
+ # retina only supports png
+ return
+ self.width = w // 2
+ self.height = h // 2
+
+ def reload(self):
+ """Reload the raw data from file or URL."""
+ if self.embed:
+ super(Image,self).reload()
+ if self.retina:
+ self._retina_shape()
+
+ def _repr_html_(self):
+ if not self.embed:
+ width = height = klass = ''
+ if self.width:
+ width = ' width="%d"' % self.width
+ if self.height:
+ height = ' height="%d"' % self.height
+ if self.unconfined:
+ klass = ' class="unconfined"'
+ return u'<img src="{url}"{width}{height}{klass}/>'.format(
+ url=self.url,
+ width=width,
+ height=height,
+ klass=klass,
+ )
+
+ def _data_and_metadata(self):
+ """shortcut for returning metadata with shape information, if defined"""
+ md = {}
+ if self.width:
+ md['width'] = self.width
+ if self.height:
+ md['height'] = self.height
+ if self.unconfined:
+ md['unconfined'] = self.unconfined
+ if self.metadata:
+ md.update(self.metadata)
+ if md:
+ return self.data, md
+ else:
+ return self.data
+
+ def _repr_png_(self):
+ if self.embed and self.format == u'png':
+ return self._data_and_metadata()
+
+ def _repr_jpeg_(self):
+ if self.embed and (self.format == u'jpeg' or self.format == u'jpg'):
+ return self._data_and_metadata()
+
+ def _find_ext(self, s):
+ return unicode_type(s.split('.')[-1].lower())
+
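A usage sketch contrasting the url= path with embedding: with url= (and the resulting default embed=False) no data is downloaded and _repr_html_ emits a plain <img> tag; the URL below is illustrative.

from IPython.display import Image

linked = Image(url='https://example.org/logo.png', width=200)  # not embedded
print(linked._repr_html_())  # <img src="https://example.org/logo.png" width="200"/>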
+class Video(DisplayObject):
+
def __init__(self, data=None, url=None, filename=None, embed=False, mimetype=None):
- """Create a video object given raw data or an URL.
-
- When this object is returned by an input cell or passed to the
- display function, it will result in the video being displayed
- in the frontend.
-
- Parameters
- ----------
- data : unicode, str or bytes
+ """Create a video object given raw data or an URL.
+
+ When this object is returned by an input cell or passed to the
+ display function, it will result in the video being displayed
+ in the frontend.
+
+ Parameters
+ ----------
+ data : unicode, str or bytes
The raw video data or a URL or filename to load the data from.
Raw data will require passing `embed=True`.
- url : unicode
+ url : unicode
A URL for the video. If you specify `url=`,
the image data will not be embedded.
- filename : unicode
+ filename : unicode
Path to a local file containing the video.
Will be interpreted as a local URL unless `embed=True`.
- embed : bool
+ embed : bool
Should the video be embedded using a data URI (True) or be
loaded using a <video> tag (False).
-
+
Since videos are large, embedding them should be avoided, if possible.
You must confirm embedding as your intention by passing `embed=True`.
-
+
Local files can be displayed with URLs without embedding the content, via::
Video('./video.mp4')
- mimetype: unicode
+ mimetype: unicode
Specify the mimetype for embedded videos.
Default will be guessed from file extension, if available.
- Examples
- --------
+ Examples
+ --------
- Video('https://archive.org/download/Sita_Sings_the_Blues/Sita_Sings_the_Blues_small.mp4')
- Video('path/to/video.mp4')
+ Video('https://archive.org/download/Sita_Sings_the_Blues/Sita_Sings_the_Blues_small.mp4')
+ Video('path/to/video.mp4')
Video('path/to/video.mp4', embed=True)
Video(b'raw-videodata', embed=True)
- """
+ """
if url is None and isinstance(data, string_types) and data.startswith(('http:', 'https:')):
- url = data
- data = None
- elif os.path.exists(data):
- filename = data
- data = None
+ url = data
+ data = None
+ elif os.path.exists(data):
+ filename = data
+ data = None
if data and not embed:
msg = ''.join([
@@ -1166,30 +1166,30 @@ class Video(DisplayObject):
"Consider passing Video(url='...')",
])
raise ValueError(msg)
-
- self.mimetype = mimetype
+
+ self.mimetype = mimetype
self.embed = embed
- super(Video, self).__init__(data=data, url=url, filename=filename)
-
- def _repr_html_(self):
- # External URLs and potentially local files are not embedded into the
- # notebook output.
- if not self.embed:
- url = self.url if self.url is not None else self.filename
- output = """<video src="{0}" controls>
- Your browser does not support the <code>video</code> element.
- </video>""".format(url)
- return output
+ super(Video, self).__init__(data=data, url=url, filename=filename)
+
+ def _repr_html_(self):
+ # External URLs and potentially local files are not embedded into the
+ # notebook output.
+ if not self.embed:
+ url = self.url if self.url is not None else self.filename
+ output = """<video src="{0}" controls>
+ Your browser does not support the <code>video</code> element.
+ </video>""".format(url)
+ return output
# Embedded videos are base64-encoded.
mimetype = self.mimetype
- if self.filename is not None:
+ if self.filename is not None:
if not mimetype:
mimetype, _ = mimetypes.guess_type(self.filename)
with open(self.filename, 'rb') as f:
video = f.read()
- else:
+ else:
video = self.data
if isinstance(video, unicode_type):
# unicode input is already b64-encoded
@@ -1197,94 +1197,94 @@ class Video(DisplayObject):
else:
b64_video = base64_encode(video).decode('ascii').rstrip()
- output = """<video controls>
- <source src="data:{0};base64,{1}" type="{0}">
- Your browser does not support the video tag.
+ output = """<video controls>
+ <source src="data:{0};base64,{1}" type="{0}">
+ Your browser does not support the video tag.
</video>""".format(mimetype, b64_video)
- return output
-
- def reload(self):
- # TODO
- pass
-
- def _repr_png_(self):
- # TODO
- pass
- def _repr_jpeg_(self):
- # TODO
- pass
-
-def clear_output(wait=False):
- """Clear the output of the current cell receiving output.
-
- Parameters
- ----------
- wait : bool [default: false]
- Wait to clear the output until new output is available to replace it."""
- from IPython.core.interactiveshell import InteractiveShell
- if InteractiveShell.initialized():
- InteractiveShell.instance().display_pub.clear_output(wait)
- else:
+ return output
+
+ def reload(self):
+ # TODO
+ pass
+
+ def _repr_png_(self):
+ # TODO
+ pass
+ def _repr_jpeg_(self):
+ # TODO
+ pass
+
+def clear_output(wait=False):
+ """Clear the output of the current cell receiving output.
+
+ Parameters
+ ----------
+ wait : bool [default: false]
+ Wait to clear the output until new output is available to replace it."""
+ from IPython.core.interactiveshell import InteractiveShell
+ if InteractiveShell.initialized():
+ InteractiveShell.instance().display_pub.clear_output(wait)
+ else:
print('\033[2K\r', end='')
sys.stdout.flush()
print('\033[2K\r', end='')
sys.stderr.flush()
-
-
-@skip_doctest
-def set_matplotlib_formats(*formats, **kwargs):
- """Select figure formats for the inline backend. Optionally pass quality for JPEG.
-
- For example, this enables PNG and JPEG output with a JPEG quality of 90%::
-
- In [1]: set_matplotlib_formats('png', 'jpeg', quality=90)
-
- To set this in your config files use the following::
-
- c.InlineBackend.figure_formats = {'png', 'jpeg'}
- c.InlineBackend.print_figure_kwargs.update({'quality' : 90})
-
- Parameters
- ----------
- *formats : strs
- One or more figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'.
- **kwargs :
- Keyword args will be relayed to ``figure.canvas.print_figure``.
- """
- from IPython.core.interactiveshell import InteractiveShell
- from IPython.core.pylabtools import select_figure_formats
- # build kwargs, starting with InlineBackend config
- kw = {}
- from ipykernel.pylab.config import InlineBackend
- cfg = InlineBackend.instance()
- kw.update(cfg.print_figure_kwargs)
- kw.update(**kwargs)
- shell = InteractiveShell.instance()
- select_figure_formats(shell, formats, **kw)
-
-@skip_doctest
-def set_matplotlib_close(close=True):
- """Set whether the inline backend closes all figures automatically or not.
-
- By default, the inline backend used in the IPython Notebook will close all
- matplotlib figures automatically after each cell is run. This means that
- plots in different cells won't interfere. Sometimes, you may want to make
- a plot in one cell and then refine it in later cells. This can be accomplished
- by::
-
- In [1]: set_matplotlib_close(False)
-
- To set this in your config files use the following::
-
- c.InlineBackend.close_figures = False
-
- Parameters
- ----------
- close : bool
- Should all matplotlib figures be automatically closed after each cell is
- run?
- """
- from ipykernel.pylab.config import InlineBackend
- cfg = InlineBackend.instance()
- cfg.close_figures = close
-
+
+
+@skip_doctest
+def set_matplotlib_formats(*formats, **kwargs):
+ """Select figure formats for the inline backend. Optionally pass quality for JPEG.
+
+ For example, this enables PNG and JPEG output with a JPEG quality of 90%::
+
+ In [1]: set_matplotlib_formats('png', 'jpeg', quality=90)
+
+ To set this in your config files use the following::
+
+ c.InlineBackend.figure_formats = {'png', 'jpeg'}
+ c.InlineBackend.print_figure_kwargs.update({'quality' : 90})
+
+ Parameters
+ ----------
+ *formats : strs
+ One or more figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'.
+ **kwargs :
+ Keyword args will be relayed to ``figure.canvas.print_figure``.
+ """
+ from IPython.core.interactiveshell import InteractiveShell
+ from IPython.core.pylabtools import select_figure_formats
+ # build kwargs, starting with InlineBackend config
+ kw = {}
+ from ipykernel.pylab.config import InlineBackend
+ cfg = InlineBackend.instance()
+ kw.update(cfg.print_figure_kwargs)
+ kw.update(**kwargs)
+ shell = InteractiveShell.instance()
+ select_figure_formats(shell, formats, **kw)
+
+@skip_doctest
+def set_matplotlib_close(close=True):
+ """Set whether the inline backend closes all figures automatically or not.
+
+ By default, the inline backend used in the IPython Notebook will close all
+ matplotlib figures automatically after each cell is run. This means that
+ plots in different cells won't interfere. Sometimes, you may want to make
+ a plot in one cell and then refine it in later cells. This can be accomplished
+ by::
+
+ In [1]: set_matplotlib_close(False)
+
+ To set this in your config files use the following::
+
+ c.InlineBackend.close_figures = False
+
+ Parameters
+ ----------
+ close : bool
+ Should all matplotlib figures be automatically closed after each cell is
+ run?
+ """
+ from ipykernel.pylab.config import InlineBackend
+ cfg = InlineBackend.instance()
+ cfg.close_figures = close
+
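For reference, a minimal usage sketch of the display helpers added above (assumes a running IPython/Jupyter kernel with the inline matplotlib backend installed; the paths are placeholders):

    from IPython.display import Video, clear_output, set_matplotlib_formats

    Video('path/to/video.mp4')               # referenced by URL/path, not embedded
    Video('path/to/video.mp4', embed=True)   # embedded as a base64 data URI

    set_matplotlib_formats('png', 'retina')  # inline matplotlib figure formats
    clear_output(wait=True)                  # clear the cell, waiting for new output
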
diff --git a/contrib/python/ipython/py2/IPython/core/display_trap.py b/contrib/python/ipython/py2/IPython/core/display_trap.py
index 7a48a5e119..9931dfe2df 100644
--- a/contrib/python/ipython/py2/IPython/core/display_trap.py
+++ b/contrib/python/ipython/py2/IPython/core/display_trap.py
@@ -1,70 +1,70 @@
-# encoding: utf-8
-"""
-A context manager for handling sys.displayhook.
-
-Authors:
-
-* Robert Kern
-* Brian Granger
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import sys
-
-from traitlets.config.configurable import Configurable
-from traitlets import Any
-
-#-----------------------------------------------------------------------------
-# Classes and functions
-#-----------------------------------------------------------------------------
-
-
-class DisplayTrap(Configurable):
- """Object to manage sys.displayhook.
-
- This came from IPython.core.kernel.display_hook, but is simplified
- (no callbacks or formatters) until more of the core is refactored.
- """
-
- hook = Any()
-
- def __init__(self, hook=None):
- super(DisplayTrap, self).__init__(hook=hook, config=None)
- self.old_hook = None
- # We define this to track if a single BuiltinTrap is nested.
- # Only turn off the trap when the outermost call to __exit__ is made.
- self._nested_level = 0
-
- def __enter__(self):
- if self._nested_level == 0:
- self.set()
- self._nested_level += 1
- return self
-
- def __exit__(self, type, value, traceback):
- if self._nested_level == 1:
- self.unset()
- self._nested_level -= 1
- # Returning False will cause exceptions to propagate
- return False
-
- def set(self):
- """Set the hook."""
- if sys.displayhook is not self.hook:
- self.old_hook = sys.displayhook
- sys.displayhook = self.hook
-
- def unset(self):
- """Unset the hook."""
- sys.displayhook = self.old_hook
-
+# encoding: utf-8
+"""
+A context manager for handling sys.displayhook.
+
+Authors:
+
+* Robert Kern
+* Brian Granger
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import sys
+
+from traitlets.config.configurable import Configurable
+from traitlets import Any
+
+#-----------------------------------------------------------------------------
+# Classes and functions
+#-----------------------------------------------------------------------------
+
+
+class DisplayTrap(Configurable):
+ """Object to manage sys.displayhook.
+
+ This came from IPython.core.kernel.display_hook, but is simplified
+ (no callbacks or formatters) until more of the core is refactored.
+ """
+
+ hook = Any()
+
+ def __init__(self, hook=None):
+ super(DisplayTrap, self).__init__(hook=hook, config=None)
+ self.old_hook = None
+        # We define this to track if a single DisplayTrap is nested.
+ # Only turn off the trap when the outermost call to __exit__ is made.
+ self._nested_level = 0
+
+ def __enter__(self):
+ if self._nested_level == 0:
+ self.set()
+ self._nested_level += 1
+ return self
+
+ def __exit__(self, type, value, traceback):
+ if self._nested_level == 1:
+ self.unset()
+ self._nested_level -= 1
+ # Returning False will cause exceptions to propagate
+ return False
+
+ def set(self):
+ """Set the hook."""
+ if sys.displayhook is not self.hook:
+ self.old_hook = sys.displayhook
+ sys.displayhook = self.hook
+
+ def unset(self):
+ """Unset the hook."""
+ sys.displayhook = self.old_hook
+
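For reference, a sketch of using DisplayTrap to install a custom displayhook temporarily (my_hook is a made-up example, not IPython API):

    import sys
    from IPython.core.display_trap import DisplayTrap

    def my_hook(value):
        if value is not None:
            sys.stdout.write('captured: %r\n' % (value,))

    with DisplayTrap(hook=my_hook):
        sys.displayhook(42)        # routed to my_hook while the trap is active
    # the previous sys.displayhook is restored when the outermost block exits
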
diff --git a/contrib/python/ipython/py2/IPython/core/displayhook.py b/contrib/python/ipython/py2/IPython/core/displayhook.py
index e2a6b0547d..cce7c83d16 100644
--- a/contrib/python/ipython/py2/IPython/core/displayhook.py
+++ b/contrib/python/ipython/py2/IPython/core/displayhook.py
@@ -1,298 +1,298 @@
-# -*- coding: utf-8 -*-
-"""Displayhook for IPython.
-
-This defines a callable class that IPython uses for `sys.displayhook`.
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import print_function
-
-import sys
-import io as _io
-import tokenize
-
-from traitlets.config.configurable import Configurable
-from IPython.utils.py3compat import builtin_mod, cast_unicode_py2
-from traitlets import Instance, Float
+# -*- coding: utf-8 -*-
+"""Displayhook for IPython.
+
+This defines a callable class that IPython uses for `sys.displayhook`.
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import print_function
+
+import sys
+import io as _io
+import tokenize
+
+from traitlets.config.configurable import Configurable
+from IPython.utils.py3compat import builtin_mod, cast_unicode_py2
+from traitlets import Instance, Float
from warnings import warn
-
-# TODO: Move the various attributes (cache_size, [others now moved]). Some
-# of these are also attributes of InteractiveShell. They should be on ONE object
-# only and the other objects should ask that one object for their values.
-
-class DisplayHook(Configurable):
- """The custom IPython displayhook to replace sys.displayhook.
-
- This class does many things, but the basic idea is that it is a callable
- that gets called anytime user code returns a value.
- """
-
- shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
- allow_none=True)
- exec_result = Instance('IPython.core.interactiveshell.ExecutionResult',
- allow_none=True)
- cull_fraction = Float(0.2)
-
- def __init__(self, shell=None, cache_size=1000, **kwargs):
- super(DisplayHook, self).__init__(shell=shell, **kwargs)
- cache_size_min = 3
- if cache_size <= 0:
- self.do_full_cache = 0
- cache_size = 0
- elif cache_size < cache_size_min:
- self.do_full_cache = 0
- cache_size = 0
- warn('caching was disabled (min value for cache size is %s).' %
+
+# TODO: Move the various attributes (cache_size, [others now moved]). Some
+# of these are also attributes of InteractiveShell. They should be on ONE object
+# only and the other objects should ask that one object for their values.
+
+class DisplayHook(Configurable):
+ """The custom IPython displayhook to replace sys.displayhook.
+
+ This class does many things, but the basic idea is that it is a callable
+ that gets called anytime user code returns a value.
+ """
+
+ shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
+ allow_none=True)
+ exec_result = Instance('IPython.core.interactiveshell.ExecutionResult',
+ allow_none=True)
+ cull_fraction = Float(0.2)
+
+ def __init__(self, shell=None, cache_size=1000, **kwargs):
+ super(DisplayHook, self).__init__(shell=shell, **kwargs)
+ cache_size_min = 3
+ if cache_size <= 0:
+ self.do_full_cache = 0
+ cache_size = 0
+ elif cache_size < cache_size_min:
+ self.do_full_cache = 0
+ cache_size = 0
+ warn('caching was disabled (min value for cache size is %s).' %
cache_size_min,stacklevel=3)
- else:
- self.do_full_cache = 1
-
- self.cache_size = cache_size
-
- # we need a reference to the user-level namespace
- self.shell = shell
-
- self._,self.__,self.___ = '','',''
-
- # these are deliberately global:
- to_user_ns = {'_':self._,'__':self.__,'___':self.___}
- self.shell.user_ns.update(to_user_ns)
-
- @property
- def prompt_count(self):
- return self.shell.execution_count
-
- #-------------------------------------------------------------------------
- # Methods used in __call__. Override these methods to modify the behavior
- # of the displayhook.
- #-------------------------------------------------------------------------
-
- def check_for_underscore(self):
- """Check if the user has set the '_' variable by hand."""
- # If something injected a '_' variable in __builtin__, delete
- # ipython's automatic one so we don't clobber that. gettext() in
- # particular uses _, so we need to stay away from it.
- if '_' in builtin_mod.__dict__:
- try:
- del self.shell.user_ns['_']
- except KeyError:
- pass
-
- def quiet(self):
- """Should we silence the display hook because of ';'?"""
- # do not print output if input ends in ';'
-
- try:
- cell = cast_unicode_py2(self.shell.history_manager.input_hist_parsed[-1])
- except IndexError:
- # some uses of ipshellembed may fail here
- return False
-
- sio = _io.StringIO(cell)
- tokens = list(tokenize.generate_tokens(sio.readline))
-
- for token in reversed(tokens):
+ else:
+ self.do_full_cache = 1
+
+ self.cache_size = cache_size
+
+ # we need a reference to the user-level namespace
+ self.shell = shell
+
+ self._,self.__,self.___ = '','',''
+
+ # these are deliberately global:
+ to_user_ns = {'_':self._,'__':self.__,'___':self.___}
+ self.shell.user_ns.update(to_user_ns)
+
+ @property
+ def prompt_count(self):
+ return self.shell.execution_count
+
+ #-------------------------------------------------------------------------
+ # Methods used in __call__. Override these methods to modify the behavior
+ # of the displayhook.
+ #-------------------------------------------------------------------------
+
+ def check_for_underscore(self):
+ """Check if the user has set the '_' variable by hand."""
+ # If something injected a '_' variable in __builtin__, delete
+ # ipython's automatic one so we don't clobber that. gettext() in
+ # particular uses _, so we need to stay away from it.
+ if '_' in builtin_mod.__dict__:
+ try:
+ del self.shell.user_ns['_']
+ except KeyError:
+ pass
+
+ def quiet(self):
+ """Should we silence the display hook because of ';'?"""
+ # do not print output if input ends in ';'
+
+ try:
+ cell = cast_unicode_py2(self.shell.history_manager.input_hist_parsed[-1])
+ except IndexError:
+ # some uses of ipshellembed may fail here
+ return False
+
+ sio = _io.StringIO(cell)
+ tokens = list(tokenize.generate_tokens(sio.readline))
+
+ for token in reversed(tokens):
if token[0] in (tokenize.ENDMARKER, tokenize.NL, tokenize.NEWLINE, tokenize.COMMENT):
- continue
- if (token[0] == tokenize.OP) and (token[1] == ';'):
- return True
- else:
- return False
-
- def start_displayhook(self):
- """Start the displayhook, initializing resources."""
- pass
-
- def write_output_prompt(self):
- """Write the output prompt.
-
- The default implementation simply writes the prompt to
+ continue
+ if (token[0] == tokenize.OP) and (token[1] == ';'):
+ return True
+ else:
+ return False
+
+ def start_displayhook(self):
+ """Start the displayhook, initializing resources."""
+ pass
+
+ def write_output_prompt(self):
+ """Write the output prompt.
+
+ The default implementation simply writes the prompt to
``sys.stdout``.
- """
- # Use write, not print which adds an extra space.
+ """
+ # Use write, not print which adds an extra space.
sys.stdout.write(self.shell.separate_out)
outprompt = 'Out[{}]: '.format(self.shell.execution_count)
- if self.do_full_cache:
+ if self.do_full_cache:
sys.stdout.write(outprompt)
-
- def compute_format_data(self, result):
- """Compute format data of the object to be displayed.
-
- The format data is a generalization of the :func:`repr` of an object.
- In the default implementation the format data is a :class:`dict` of
- key value pair where the keys are valid MIME types and the values
- are JSON'able data structure containing the raw data for that MIME
- type. It is up to frontends to determine pick a MIME to to use and
- display that data in an appropriate manner.
-
- This method only computes the format data for the object and should
- NOT actually print or write that to a stream.
-
- Parameters
- ----------
- result : object
- The Python object passed to the display hook, whose format will be
- computed.
-
- Returns
- -------
- (format_dict, md_dict) : dict
- format_dict is a :class:`dict` whose keys are valid MIME types and values are
- JSON'able raw data for that MIME type. It is recommended that
- all return values of this should always include the "text/plain"
- MIME type representation of the object.
- md_dict is a :class:`dict` with the same MIME type keys
- of metadata associated with each output.
-
- """
- return self.shell.display_formatter.format(result)
-
+
+ def compute_format_data(self, result):
+ """Compute format data of the object to be displayed.
+
+ The format data is a generalization of the :func:`repr` of an object.
+ In the default implementation the format data is a :class:`dict` of
+ key value pair where the keys are valid MIME types and the values
+ are JSON'able data structure containing the raw data for that MIME
+ type. It is up to frontends to determine pick a MIME to to use and
+ display that data in an appropriate manner.
+
+ This method only computes the format data for the object and should
+ NOT actually print or write that to a stream.
+
+ Parameters
+ ----------
+ result : object
+ The Python object passed to the display hook, whose format will be
+ computed.
+
+ Returns
+ -------
+ (format_dict, md_dict) : dict
+ format_dict is a :class:`dict` whose keys are valid MIME types and values are
+ JSON'able raw data for that MIME type. It is recommended that
+ all return values of this should always include the "text/plain"
+ MIME type representation of the object.
+ md_dict is a :class:`dict` with the same MIME type keys
+ of metadata associated with each output.
+
+ """
+ return self.shell.display_formatter.format(result)
+
# This can be set to True by the write_output_prompt method in a subclass
prompt_end_newline = False
- def write_format_data(self, format_dict, md_dict=None):
- """Write the format data dict to the frontend.
-
- This default version of this method simply writes the plain text
+ def write_format_data(self, format_dict, md_dict=None):
+ """Write the format data dict to the frontend.
+
+ This default version of this method simply writes the plain text
representation of the object to ``sys.stdout``. Subclasses should
- override this method to send the entire `format_dict` to the
- frontends.
-
- Parameters
- ----------
- format_dict : dict
- The format dict for the object passed to `sys.displayhook`.
- md_dict : dict (optional)
- The metadata dict to be associated with the display data.
- """
- if 'text/plain' not in format_dict:
- # nothing to do
- return
- # We want to print because we want to always make sure we have a
- # newline, even if all the prompt separators are ''. This is the
- # standard IPython behavior.
- result_repr = format_dict['text/plain']
- if '\n' in result_repr:
- # So that multi-line strings line up with the left column of
- # the screen, instead of having the output prompt mess up
- # their first line.
- # We use the prompt template instead of the expanded prompt
- # because the expansion may add ANSI escapes that will interfere
- # with our ability to determine whether or not we should add
- # a newline.
+ override this method to send the entire `format_dict` to the
+ frontends.
+
+ Parameters
+ ----------
+ format_dict : dict
+ The format dict for the object passed to `sys.displayhook`.
+ md_dict : dict (optional)
+ The metadata dict to be associated with the display data.
+ """
+ if 'text/plain' not in format_dict:
+ # nothing to do
+ return
+ # We want to print because we want to always make sure we have a
+ # newline, even if all the prompt separators are ''. This is the
+ # standard IPython behavior.
+ result_repr = format_dict['text/plain']
+ if '\n' in result_repr:
+ # So that multi-line strings line up with the left column of
+ # the screen, instead of having the output prompt mess up
+ # their first line.
+ # We use the prompt template instead of the expanded prompt
+ # because the expansion may add ANSI escapes that will interfere
+ # with our ability to determine whether or not we should add
+ # a newline.
if not self.prompt_end_newline:
- # But avoid extraneous empty lines.
- result_repr = '\n' + result_repr
-
+ # But avoid extraneous empty lines.
+ result_repr = '\n' + result_repr
+
print(result_repr)
-
- def update_user_ns(self, result):
- """Update user_ns with various things like _, __, _1, etc."""
-
- # Avoid recursive reference when displaying _oh/Out
- if result is not self.shell.user_ns['_oh']:
- if len(self.shell.user_ns['_oh']) >= self.cache_size and self.do_full_cache:
- self.cull_cache()
- # Don't overwrite '_' and friends if '_' is in __builtin__ (otherwise
- # we cause buggy behavior for things like gettext).
-
- if '_' not in builtin_mod.__dict__:
- self.___ = self.__
- self.__ = self._
- self._ = result
- self.shell.push({'_':self._,
- '__':self.__,
- '___':self.___}, interactive=False)
-
- # hackish access to top-level namespace to create _1,_2... dynamically
- to_main = {}
- if self.do_full_cache:
- new_result = '_'+repr(self.prompt_count)
- to_main[new_result] = result
- self.shell.push(to_main, interactive=False)
- self.shell.user_ns['_oh'][self.prompt_count] = result
-
- def fill_exec_result(self, result):
- if self.exec_result is not None:
- self.exec_result.result = result
-
- def log_output(self, format_dict):
- """Log the output."""
- if 'text/plain' not in format_dict:
- # nothing to do
- return
- if self.shell.logger.log_output:
- self.shell.logger.log_write(format_dict['text/plain'], 'output')
- self.shell.history_manager.output_hist_reprs[self.prompt_count] = \
- format_dict['text/plain']
-
- def finish_displayhook(self):
- """Finish up all displayhook activities."""
+
+ def update_user_ns(self, result):
+ """Update user_ns with various things like _, __, _1, etc."""
+
+ # Avoid recursive reference when displaying _oh/Out
+ if result is not self.shell.user_ns['_oh']:
+ if len(self.shell.user_ns['_oh']) >= self.cache_size and self.do_full_cache:
+ self.cull_cache()
+ # Don't overwrite '_' and friends if '_' is in __builtin__ (otherwise
+ # we cause buggy behavior for things like gettext).
+
+ if '_' not in builtin_mod.__dict__:
+ self.___ = self.__
+ self.__ = self._
+ self._ = result
+ self.shell.push({'_':self._,
+ '__':self.__,
+ '___':self.___}, interactive=False)
+
+ # hackish access to top-level namespace to create _1,_2... dynamically
+ to_main = {}
+ if self.do_full_cache:
+ new_result = '_'+repr(self.prompt_count)
+ to_main[new_result] = result
+ self.shell.push(to_main, interactive=False)
+ self.shell.user_ns['_oh'][self.prompt_count] = result
+
+ def fill_exec_result(self, result):
+ if self.exec_result is not None:
+ self.exec_result.result = result
+
+ def log_output(self, format_dict):
+ """Log the output."""
+ if 'text/plain' not in format_dict:
+ # nothing to do
+ return
+ if self.shell.logger.log_output:
+ self.shell.logger.log_write(format_dict['text/plain'], 'output')
+ self.shell.history_manager.output_hist_reprs[self.prompt_count] = \
+ format_dict['text/plain']
+
+ def finish_displayhook(self):
+ """Finish up all displayhook activities."""
sys.stdout.write(self.shell.separate_out2)
sys.stdout.flush()
-
- def __call__(self, result=None):
- """Printing with history cache management.
-
- This is invoked everytime the interpreter needs to print, and is
- activated by setting the variable sys.displayhook to it.
- """
- self.check_for_underscore()
- if result is not None and not self.quiet():
- self.start_displayhook()
- self.write_output_prompt()
- format_dict, md_dict = self.compute_format_data(result)
- self.update_user_ns(result)
- self.fill_exec_result(result)
- if format_dict:
- self.write_format_data(format_dict, md_dict)
- self.log_output(format_dict)
- self.finish_displayhook()
-
- def cull_cache(self):
- """Output cache is full, cull the oldest entries"""
- oh = self.shell.user_ns.get('_oh', {})
- sz = len(oh)
- cull_count = max(int(sz * self.cull_fraction), 2)
- warn('Output cache limit (currently {sz} entries) hit.\n'
- 'Flushing oldest {cull_count} entries.'.format(sz=sz, cull_count=cull_count))
-
- for i, n in enumerate(sorted(oh)):
- if i >= cull_count:
- break
- self.shell.user_ns.pop('_%i' % n, None)
- oh.pop(n, None)
-
-
- def flush(self):
- if not self.do_full_cache:
- raise ValueError("You shouldn't have reached the cache flush "
- "if full caching is not enabled!")
- # delete auto-generated vars from global namespace
-
- for n in range(1,self.prompt_count + 1):
- key = '_'+repr(n)
- try:
- del self.shell.user_ns[key]
- except: pass
- # In some embedded circumstances, the user_ns doesn't have the
- # '_oh' key set up.
- oh = self.shell.user_ns.get('_oh', None)
- if oh is not None:
- oh.clear()
-
- # Release our own references to objects:
- self._, self.__, self.___ = '', '', ''
-
- if '_' not in builtin_mod.__dict__:
- self.shell.user_ns.update({'_':None,'__':None, '___':None})
- import gc
- # TODO: Is this really needed?
- # IronPython blocks here forever
- if sys.platform != "cli":
- gc.collect()
+
+ def __call__(self, result=None):
+ """Printing with history cache management.
+
+        This is invoked every time the interpreter needs to print, and is
+ activated by setting the variable sys.displayhook to it.
+ """
+ self.check_for_underscore()
+ if result is not None and not self.quiet():
+ self.start_displayhook()
+ self.write_output_prompt()
+ format_dict, md_dict = self.compute_format_data(result)
+ self.update_user_ns(result)
+ self.fill_exec_result(result)
+ if format_dict:
+ self.write_format_data(format_dict, md_dict)
+ self.log_output(format_dict)
+ self.finish_displayhook()
+
+ def cull_cache(self):
+ """Output cache is full, cull the oldest entries"""
+ oh = self.shell.user_ns.get('_oh', {})
+ sz = len(oh)
+ cull_count = max(int(sz * self.cull_fraction), 2)
+ warn('Output cache limit (currently {sz} entries) hit.\n'
+ 'Flushing oldest {cull_count} entries.'.format(sz=sz, cull_count=cull_count))
+
+ for i, n in enumerate(sorted(oh)):
+ if i >= cull_count:
+ break
+ self.shell.user_ns.pop('_%i' % n, None)
+ oh.pop(n, None)
+
+
+ def flush(self):
+ if not self.do_full_cache:
+ raise ValueError("You shouldn't have reached the cache flush "
+ "if full caching is not enabled!")
+ # delete auto-generated vars from global namespace
+
+ for n in range(1,self.prompt_count + 1):
+ key = '_'+repr(n)
+ try:
+ del self.shell.user_ns[key]
+ except: pass
+ # In some embedded circumstances, the user_ns doesn't have the
+ # '_oh' key set up.
+ oh = self.shell.user_ns.get('_oh', None)
+ if oh is not None:
+ oh.clear()
+
+ # Release our own references to objects:
+ self._, self.__, self.___ = '', '', ''
+
+ if '_' not in builtin_mod.__dict__:
+ self.shell.user_ns.update({'_':None,'__':None, '___':None})
+ import gc
+ # TODO: Is this really needed?
+ # IronPython blocks here forever
+ if sys.platform != "cli":
+ gc.collect()
class CapturingDisplayHook(object):
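For reference, the semicolon suppression implemented in quiet() above can be illustrated standalone (ends_with_semicolon is a made-up helper, not IPython API):

    import io
    import tokenize

    def ends_with_semicolon(cell):
        # Mirrors DisplayHook.quiet(): inspect the last meaningful token.
        tokens = list(tokenize.generate_tokens(io.StringIO(cell).readline))
        for tok in reversed(tokens):
            if tok[0] in (tokenize.ENDMARKER, tokenize.NL, tokenize.NEWLINE, tokenize.COMMENT):
                continue
            return tok[0] == tokenize.OP and tok[1] == ';'
        return False

    print(ends_with_semicolon(u'1 + 1;\n'))  # True  -> Out[n] is suppressed
    print(ends_with_semicolon(u'1 + 1\n'))   # False -> Out[n] is shown
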
diff --git a/contrib/python/ipython/py2/IPython/core/displaypub.py b/contrib/python/ipython/py2/IPython/core/displaypub.py
index b417aab40f..82a859ae15 100644
--- a/contrib/python/ipython/py2/IPython/core/displaypub.py
+++ b/contrib/python/ipython/py2/IPython/core/displaypub.py
@@ -1,95 +1,95 @@
-"""An interface for publishing rich data to frontends.
-
-There are two components of the display system:
-
-* Display formatters, which take a Python object and compute the
- representation of the object in various formats (text, HTML, SVG, etc.).
-* The display publisher that is used to send the representation data to the
- various frontends.
-
-This module defines the logic display publishing. The display publisher uses
-the ``display_data`` message type that is defined in the IPython messaging
-spec.
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import print_function
-
+"""An interface for publishing rich data to frontends.
+
+There are two components of the display system:
+
+* Display formatters, which take a Python object and compute the
+ representation of the object in various formats (text, HTML, SVG, etc.).
+* The display publisher that is used to send the representation data to the
+ various frontends.
+
+This module defines the logic for display publishing. The display publisher uses
+the ``display_data`` message type that is defined in the IPython messaging
+spec.
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import print_function
+
import sys
-from traitlets.config.configurable import Configurable
-from traitlets import List
-
-# This used to be defined here - it is imported for backwards compatibility
-from .display import publish_display_data
-
-#-----------------------------------------------------------------------------
-# Main payload class
-#-----------------------------------------------------------------------------
-
-class DisplayPublisher(Configurable):
- """A traited class that publishes display data to frontends.
-
- Instances of this class are created by the main IPython object and should
- be accessed there.
- """
-
- def _validate_data(self, data, metadata=None):
- """Validate the display data.
-
- Parameters
- ----------
- data : dict
- The formata data dictionary.
- metadata : dict
- Any metadata for the data.
- """
-
- if not isinstance(data, dict):
- raise TypeError('data must be a dict, got: %r' % data)
- if metadata is not None:
- if not isinstance(metadata, dict):
- raise TypeError('metadata must be a dict, got: %r' % data)
-
+from traitlets.config.configurable import Configurable
+from traitlets import List
+
+# This used to be defined here - it is imported for backwards compatibility
+from .display import publish_display_data
+
+#-----------------------------------------------------------------------------
+# Main payload class
+#-----------------------------------------------------------------------------
+
+class DisplayPublisher(Configurable):
+ """A traited class that publishes display data to frontends.
+
+ Instances of this class are created by the main IPython object and should
+ be accessed there.
+ """
+
+ def _validate_data(self, data, metadata=None):
+ """Validate the display data.
+
+ Parameters
+ ----------
+ data : dict
+            The format data dictionary.
+ metadata : dict
+ Any metadata for the data.
+ """
+
+ if not isinstance(data, dict):
+ raise TypeError('data must be a dict, got: %r' % data)
+ if metadata is not None:
+ if not isinstance(metadata, dict):
+                raise TypeError('metadata must be a dict, got: %r' % metadata)
+
# use * to indicate transient, update are keyword-only
def publish(self, data, metadata=None, source=None, **kwargs):
- """Publish data and metadata to all frontends.
-
- See the ``display_data`` message in the messaging documentation for
- more details about this message type.
-
- The following MIME types are currently implemented:
-
- * text/plain
- * text/html
- * text/markdown
- * text/latex
- * application/json
- * application/javascript
- * image/png
- * image/jpeg
- * image/svg+xml
-
- Parameters
- ----------
- data : dict
- A dictionary having keys that are valid MIME types (like
- 'text/plain' or 'image/svg+xml') and values that are the data for
- that MIME type. The data itself must be a JSON'able data
- structure. Minimally all data should have the 'text/plain' data,
- which can be displayed by all frontends. If more than the plain
- text is given, it is up to the frontend to decide which
- representation to use.
- metadata : dict
- A dictionary for metadata related to the data. This can contain
- arbitrary key, value pairs that frontends can use to interpret
- the data. Metadata specific to each mime-type can be specified
- in the metadata dict with the same mime-type keys as
- the data itself.
- source : str, deprecated
- Unused.
+ """Publish data and metadata to all frontends.
+
+ See the ``display_data`` message in the messaging documentation for
+ more details about this message type.
+
+ The following MIME types are currently implemented:
+
+ * text/plain
+ * text/html
+ * text/markdown
+ * text/latex
+ * application/json
+ * application/javascript
+ * image/png
+ * image/jpeg
+ * image/svg+xml
+
+ Parameters
+ ----------
+ data : dict
+ A dictionary having keys that are valid MIME types (like
+ 'text/plain' or 'image/svg+xml') and values that are the data for
+ that MIME type. The data itself must be a JSON'able data
+ structure. Minimally all data should have the 'text/plain' data,
+ which can be displayed by all frontends. If more than the plain
+ text is given, it is up to the frontend to decide which
+ representation to use.
+ metadata : dict
+ A dictionary for metadata related to the data. This can contain
+ arbitrary key, value pairs that frontends can use to interpret
+ the data. Metadata specific to each mime-type can be specified
+ in the metadata dict with the same mime-type keys as
+ the data itself.
+ source : str, deprecated
+ Unused.
transient: dict, keyword-only
A dictionary for transient data.
Data in this dictionary should not be persisted as part of saving this output.
@@ -97,8 +97,8 @@ class DisplayPublisher(Configurable):
update: bool, keyword-only, default: False
If True, only update existing outputs with the same display_id,
rather than creating a new output.
- """
-
+ """
+
# These are kwargs only on Python 3, not used there.
# For consistency and avoid code divergence we leave them here to
# simplify potential backport
@@ -106,21 +106,21 @@ class DisplayPublisher(Configurable):
update = kwargs.pop('update', False)
# The default is to simply write the plain text data using sys.stdout.
- if 'text/plain' in data:
+ if 'text/plain' in data:
print(data['text/plain'])
-
- def clear_output(self, wait=False):
- """Clear the output of the cell receiving output."""
+
+ def clear_output(self, wait=False):
+ """Clear the output of the cell receiving output."""
print('\033[2K\r', end='')
sys.stdout.flush()
print('\033[2K\r', end='')
sys.stderr.flush()
-
-
-class CapturingDisplayPublisher(DisplayPublisher):
- """A DisplayPublisher that stores"""
- outputs = List()
-
+
+
+class CapturingDisplayPublisher(DisplayPublisher):
+ """A DisplayPublisher that stores"""
+ outputs = List()
+
def publish(self, data, metadata=None, source=None, **kwargs):
# These are kwargs only on Python 3, not used there.
@@ -132,8 +132,8 @@ class CapturingDisplayPublisher(DisplayPublisher):
self.outputs.append({'data':data, 'metadata':metadata,
'transient':transient, 'update':update})
- def clear_output(self, wait=False):
- super(CapturingDisplayPublisher, self).clear_output(wait)
+ def clear_output(self, wait=False):
+ super(CapturingDisplayPublisher, self).clear_output(wait)
- # empty the list, *do not* reassign a new list
+ # empty the list, *do not* reassign a new list
self.outputs.clear()
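For reference, a minimal sketch of publishing a MIME bundle through this interface (run inside IPython; the values shown are placeholders, and publish_display_data is re-exported here for backwards compatibility):

    from IPython.core.displaypub import publish_display_data

    # Keys are MIME types, values must be JSON'able; 'text/plain' should always be present.
    publish_display_data(
        data={'text/plain': u'x = 3',
              'text/html': u'<b>x = 3</b>'},
        metadata={},
    )
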
diff --git a/contrib/python/ipython/py2/IPython/core/error.py b/contrib/python/ipython/py2/IPython/core/error.py
index 66d67a6ba6..684cbc8da6 100644
--- a/contrib/python/ipython/py2/IPython/core/error.py
+++ b/contrib/python/ipython/py2/IPython/core/error.py
@@ -1,60 +1,60 @@
-# encoding: utf-8
-"""
-Global exception classes for IPython.core.
-
-Authors:
-
-* Brian Granger
-* Fernando Perez
-* Min Ragan-Kelley
-
-Notes
------
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Exception classes
-#-----------------------------------------------------------------------------
-
-class IPythonCoreError(Exception):
- pass
-
-
-class TryNext(IPythonCoreError):
- """Try next hook exception.
-
- Raise this in your hook function to indicate that the next hook handler
- should be used to handle the operation.
- """
-
-class UsageError(IPythonCoreError):
- """Error in magic function arguments, etc.
-
- Something that probably won't warrant a full traceback, but should
- nevertheless interrupt a macro / batch file.
- """
-
-class StdinNotImplementedError(IPythonCoreError, NotImplementedError):
- """raw_input was requested in a context where it is not supported
-
- For use in IPython kernels, where only some frontends may support
- stdin requests.
- """
-
-class InputRejected(Exception):
- """Input rejected by ast transformer.
-
- Raise this in your NodeTransformer to indicate that InteractiveShell should
- not execute the supplied input.
- """
+# encoding: utf-8
+"""
+Global exception classes for IPython.core.
+
+Authors:
+
+* Brian Granger
+* Fernando Perez
+* Min Ragan-Kelley
+
+Notes
+-----
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Exception classes
+#-----------------------------------------------------------------------------
+
+class IPythonCoreError(Exception):
+ pass
+
+
+class TryNext(IPythonCoreError):
+ """Try next hook exception.
+
+ Raise this in your hook function to indicate that the next hook handler
+ should be used to handle the operation.
+ """
+
+class UsageError(IPythonCoreError):
+ """Error in magic function arguments, etc.
+
+ Something that probably won't warrant a full traceback, but should
+ nevertheless interrupt a macro / batch file.
+ """
+
+class StdinNotImplementedError(IPythonCoreError, NotImplementedError):
+ """raw_input was requested in a context where it is not supported
+
+ For use in IPython kernels, where only some frontends may support
+ stdin requests.
+ """
+
+class InputRejected(Exception):
+ """Input rejected by ast transformer.
+
+ Raise this in your NodeTransformer to indicate that InteractiveShell should
+ not execute the supplied input.
+ """
diff --git a/contrib/python/ipython/py2/IPython/core/events.py b/contrib/python/ipython/py2/IPython/core/events.py
index dc289ee9a1..bfd09fec6a 100644
--- a/contrib/python/ipython/py2/IPython/core/events.py
+++ b/contrib/python/ipython/py2/IPython/core/events.py
@@ -1,131 +1,131 @@
-"""Infrastructure for registering and firing callbacks on application events.
-
-Unlike :mod:`IPython.core.hooks`, which lets end users set single functions to
-be called at specific times, or a collection of alternative methods to try,
-callbacks are designed to be used by extension authors. A number of callbacks
-can be registered for the same event without needing to be aware of one another.
-
-The functions defined in this module are no-ops indicating the names of available
-events and the arguments which will be passed to them.
-
-.. note::
-
- This API is experimental in IPython 2.0, and may be revised in future versions.
-"""
-from __future__ import print_function
-
-class EventManager(object):
- """Manage a collection of events and a sequence of callbacks for each.
-
- This is attached to :class:`~IPython.core.interactiveshell.InteractiveShell`
- instances as an ``events`` attribute.
-
- .. note::
-
- This API is experimental in IPython 2.0, and may be revised in future versions.
- """
- def __init__(self, shell, available_events):
- """Initialise the :class:`CallbackManager`.
-
- Parameters
- ----------
- shell
- The :class:`~IPython.core.interactiveshell.InteractiveShell` instance
- available_callbacks
- An iterable of names for callback events.
- """
- self.shell = shell
- self.callbacks = {n:[] for n in available_events}
-
- def register(self, event, function):
- """Register a new event callback
-
- Parameters
- ----------
- event : str
- The event for which to register this callback.
- function : callable
- A function to be called on the given event. It should take the same
- parameters as the appropriate callback prototype.
-
- Raises
- ------
- TypeError
- If ``function`` is not callable.
- KeyError
- If ``event`` is not one of the known events.
- """
- if not callable(function):
- raise TypeError('Need a callable, got %r' % function)
- self.callbacks[event].append(function)
-
- def unregister(self, event, function):
- """Remove a callback from the given event."""
- self.callbacks[event].remove(function)
-
- def trigger(self, event, *args, **kwargs):
- """Call callbacks for ``event``.
-
- Any additional arguments are passed to all callbacks registered for this
- event. Exceptions raised by callbacks are caught, and a message printed.
- """
+"""Infrastructure for registering and firing callbacks on application events.
+
+Unlike :mod:`IPython.core.hooks`, which lets end users set single functions to
+be called at specific times, or a collection of alternative methods to try,
+callbacks are designed to be used by extension authors. A number of callbacks
+can be registered for the same event without needing to be aware of one another.
+
+The functions defined in this module are no-ops indicating the names of available
+events and the arguments which will be passed to them.
+
+.. note::
+
+ This API is experimental in IPython 2.0, and may be revised in future versions.
+"""
+from __future__ import print_function
+
+class EventManager(object):
+ """Manage a collection of events and a sequence of callbacks for each.
+
+ This is attached to :class:`~IPython.core.interactiveshell.InteractiveShell`
+ instances as an ``events`` attribute.
+
+ .. note::
+
+ This API is experimental in IPython 2.0, and may be revised in future versions.
+ """
+ def __init__(self, shell, available_events):
+ """Initialise the :class:`CallbackManager`.
+
+ Parameters
+ ----------
+ shell
+ The :class:`~IPython.core.interactiveshell.InteractiveShell` instance
+        available_events
+ An iterable of names for callback events.
+ """
+ self.shell = shell
+ self.callbacks = {n:[] for n in available_events}
+
+ def register(self, event, function):
+ """Register a new event callback
+
+ Parameters
+ ----------
+ event : str
+ The event for which to register this callback.
+ function : callable
+ A function to be called on the given event. It should take the same
+ parameters as the appropriate callback prototype.
+
+ Raises
+ ------
+ TypeError
+ If ``function`` is not callable.
+ KeyError
+ If ``event`` is not one of the known events.
+ """
+ if not callable(function):
+ raise TypeError('Need a callable, got %r' % function)
+ self.callbacks[event].append(function)
+
+ def unregister(self, event, function):
+ """Remove a callback from the given event."""
+ self.callbacks[event].remove(function)
+
+ def trigger(self, event, *args, **kwargs):
+ """Call callbacks for ``event``.
+
+ Any additional arguments are passed to all callbacks registered for this
+ event. Exceptions raised by callbacks are caught, and a message printed.
+ """
for func in self.callbacks[event][:]:
- try:
- func(*args, **kwargs)
- except Exception:
- print("Error in callback {} (for {}):".format(func, event))
- self.shell.showtraceback()
-
-# event_name -> prototype mapping
-available_events = {}
-
-def _define_event(callback_proto):
- available_events[callback_proto.__name__] = callback_proto
- return callback_proto
-
-# ------------------------------------------------------------------------------
-# Callback prototypes
-#
-# No-op functions which describe the names of available events and the
-# signatures of callbacks for those events.
-# ------------------------------------------------------------------------------
-
-@_define_event
-def pre_execute():
- """Fires before code is executed in response to user/frontend action.
-
- This includes comm and widget messages and silent execution, as well as user
- code cells."""
- pass
-
-@_define_event
-def pre_run_cell():
- """Fires before user-entered code runs."""
- pass
-
-@_define_event
-def post_execute():
- """Fires after code is executed in response to user/frontend action.
-
- This includes comm and widget messages and silent execution, as well as user
- code cells."""
- pass
-
-@_define_event
-def post_run_cell():
- """Fires after user-entered code runs."""
- pass
-
-@_define_event
-def shell_initialized(ip):
- """Fires after initialisation of :class:`~IPython.core.interactiveshell.InteractiveShell`.
-
- This is before extensions and startup scripts are loaded, so it can only be
- set by subclassing.
-
- Parameters
- ----------
- ip : :class:`~IPython.core.interactiveshell.InteractiveShell`
- The newly initialised shell.
- """
- pass
+ try:
+ func(*args, **kwargs)
+ except Exception:
+ print("Error in callback {} (for {}):".format(func, event))
+ self.shell.showtraceback()
+
+# event_name -> prototype mapping
+available_events = {}
+
+def _define_event(callback_proto):
+ available_events[callback_proto.__name__] = callback_proto
+ return callback_proto
+
+# ------------------------------------------------------------------------------
+# Callback prototypes
+#
+# No-op functions which describe the names of available events and the
+# signatures of callbacks for those events.
+# ------------------------------------------------------------------------------
+
+@_define_event
+def pre_execute():
+ """Fires before code is executed in response to user/frontend action.
+
+ This includes comm and widget messages and silent execution, as well as user
+ code cells."""
+ pass
+
+@_define_event
+def pre_run_cell():
+ """Fires before user-entered code runs."""
+ pass
+
+@_define_event
+def post_execute():
+ """Fires after code is executed in response to user/frontend action.
+
+ This includes comm and widget messages and silent execution, as well as user
+ code cells."""
+ pass
+
+@_define_event
+def post_run_cell():
+ """Fires after user-entered code runs."""
+ pass
+
+@_define_event
+def shell_initialized(ip):
+ """Fires after initialisation of :class:`~IPython.core.interactiveshell.InteractiveShell`.
+
+ This is before extensions and startup scripts are loaded, so it can only be
+ set by subclassing.
+
+ Parameters
+ ----------
+ ip : :class:`~IPython.core.interactiveshell.InteractiveShell`
+ The newly initialised shell.
+ """
+ pass
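For reference, a sketch of registering a callback with the EventManager (run inside an IPython session; the callback body is a placeholder, and in this version the pre_run_cell prototype takes no arguments):

    def pre_run_cell():
        print("about to run a cell")

    ip = get_ipython()                  # available inside IPython
    ip.events.register('pre_run_cell', pre_run_cell)
    # ... later ...
    ip.events.unregister('pre_run_cell', pre_run_cell)
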
diff --git a/contrib/python/ipython/py2/IPython/core/excolors.py b/contrib/python/ipython/py2/IPython/core/excolors.py
index 7546b4b263..487bde18c8 100644
--- a/contrib/python/ipython/py2/IPython/core/excolors.py
+++ b/contrib/python/ipython/py2/IPython/core/excolors.py
@@ -1,133 +1,133 @@
-# -*- coding: utf-8 -*-
-"""
-Color schemes for exception handling code in IPython.
-"""
-
+# -*- coding: utf-8 -*-
+"""
+Color schemes for exception handling code in IPython.
+"""
+
import os
-import warnings
-
-#*****************************************************************************
-# Copyright (C) 2005-2006 Fernando Perez <fperez@colorado.edu>
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#*****************************************************************************
-
-from IPython.utils.coloransi import ColorSchemeTable, TermColors, ColorScheme
-
-def exception_colors():
- """Return a color table with fields for exception reporting.
-
- The table is an instance of ColorSchemeTable with schemes added for
+import warnings
+
+#*****************************************************************************
+# Copyright (C) 2005-2006 Fernando Perez <fperez@colorado.edu>
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#*****************************************************************************
+
+from IPython.utils.coloransi import ColorSchemeTable, TermColors, ColorScheme
+
+def exception_colors():
+ """Return a color table with fields for exception reporting.
+
+ The table is an instance of ColorSchemeTable with schemes added for
'Neutral', 'Linux', 'LightBG' and 'NoColor' and fields for exception handling filled
- in.
-
- Examples:
-
- >>> ec = exception_colors()
- >>> ec.active_scheme_name
- ''
- >>> print(ec.active_colors)
- None
-
- Now we activate a color scheme:
- >>> ec.set_active_scheme('NoColor')
- >>> ec.active_scheme_name
- 'NoColor'
- >>> sorted(ec.active_colors.keys())
- ['Normal', 'caret', 'em', 'excName', 'filename', 'filenameEm', 'line',
- 'lineno', 'linenoEm', 'name', 'nameEm', 'normalEm', 'topline', 'vName',
- 'val', 'valEm']
- """
-
- ex_colors = ColorSchemeTable()
-
- # Populate it with color schemes
- C = TermColors # shorthand and local lookup
- ex_colors.add_scheme(ColorScheme(
- 'NoColor',
- # The color to be used for the top line
- topline = C.NoColor,
-
- # The colors to be used in the traceback
- filename = C.NoColor,
- lineno = C.NoColor,
- name = C.NoColor,
- vName = C.NoColor,
- val = C.NoColor,
- em = C.NoColor,
-
- # Emphasized colors for the last frame of the traceback
- normalEm = C.NoColor,
- filenameEm = C.NoColor,
- linenoEm = C.NoColor,
- nameEm = C.NoColor,
- valEm = C.NoColor,
-
- # Colors for printing the exception
- excName = C.NoColor,
- line = C.NoColor,
- caret = C.NoColor,
- Normal = C.NoColor
- ))
-
- # make some schemes as instances so we can copy them for modification easily
- ex_colors.add_scheme(ColorScheme(
- 'Linux',
- # The color to be used for the top line
- topline = C.LightRed,
-
- # The colors to be used in the traceback
- filename = C.Green,
- lineno = C.Green,
- name = C.Purple,
- vName = C.Cyan,
- val = C.Green,
- em = C.LightCyan,
-
- # Emphasized colors for the last frame of the traceback
- normalEm = C.LightCyan,
- filenameEm = C.LightGreen,
- linenoEm = C.LightGreen,
- nameEm = C.LightPurple,
- valEm = C.LightBlue,
-
- # Colors for printing the exception
- excName = C.LightRed,
- line = C.Yellow,
- caret = C.White,
- Normal = C.Normal
- ))
-
- # For light backgrounds, swap dark/light colors
- ex_colors.add_scheme(ColorScheme(
- 'LightBG',
- # The color to be used for the top line
- topline = C.Red,
-
- # The colors to be used in the traceback
- filename = C.LightGreen,
- lineno = C.LightGreen,
- name = C.LightPurple,
- vName = C.Cyan,
- val = C.LightGreen,
- em = C.Cyan,
-
- # Emphasized colors for the last frame of the traceback
- normalEm = C.Cyan,
- filenameEm = C.Green,
- linenoEm = C.Green,
- nameEm = C.Purple,
- valEm = C.Blue,
-
- # Colors for printing the exception
- excName = C.Red,
- #line = C.Brown, # brown often is displayed as yellow
- line = C.Red,
- caret = C.Normal,
- Normal = C.Normal,
- ))
-
+ in.
+
+ Examples:
+
+ >>> ec = exception_colors()
+ >>> ec.active_scheme_name
+ ''
+ >>> print(ec.active_colors)
+ None
+
+ Now we activate a color scheme:
+ >>> ec.set_active_scheme('NoColor')
+ >>> ec.active_scheme_name
+ 'NoColor'
+ >>> sorted(ec.active_colors.keys())
+ ['Normal', 'caret', 'em', 'excName', 'filename', 'filenameEm', 'line',
+ 'lineno', 'linenoEm', 'name', 'nameEm', 'normalEm', 'topline', 'vName',
+ 'val', 'valEm']
+ """
+
+ ex_colors = ColorSchemeTable()
+
+ # Populate it with color schemes
+ C = TermColors # shorthand and local lookup
+ ex_colors.add_scheme(ColorScheme(
+ 'NoColor',
+ # The color to be used for the top line
+ topline = C.NoColor,
+
+ # The colors to be used in the traceback
+ filename = C.NoColor,
+ lineno = C.NoColor,
+ name = C.NoColor,
+ vName = C.NoColor,
+ val = C.NoColor,
+ em = C.NoColor,
+
+ # Emphasized colors for the last frame of the traceback
+ normalEm = C.NoColor,
+ filenameEm = C.NoColor,
+ linenoEm = C.NoColor,
+ nameEm = C.NoColor,
+ valEm = C.NoColor,
+
+ # Colors for printing the exception
+ excName = C.NoColor,
+ line = C.NoColor,
+ caret = C.NoColor,
+ Normal = C.NoColor
+ ))
+
+ # make some schemes as instances so we can copy them for modification easily
+ ex_colors.add_scheme(ColorScheme(
+ 'Linux',
+ # The color to be used for the top line
+ topline = C.LightRed,
+
+ # The colors to be used in the traceback
+ filename = C.Green,
+ lineno = C.Green,
+ name = C.Purple,
+ vName = C.Cyan,
+ val = C.Green,
+ em = C.LightCyan,
+
+ # Emphasized colors for the last frame of the traceback
+ normalEm = C.LightCyan,
+ filenameEm = C.LightGreen,
+ linenoEm = C.LightGreen,
+ nameEm = C.LightPurple,
+ valEm = C.LightBlue,
+
+ # Colors for printing the exception
+ excName = C.LightRed,
+ line = C.Yellow,
+ caret = C.White,
+ Normal = C.Normal
+ ))
+
+ # For light backgrounds, swap dark/light colors
+ ex_colors.add_scheme(ColorScheme(
+ 'LightBG',
+ # The color to be used for the top line
+ topline = C.Red,
+
+ # The colors to be used in the traceback
+ filename = C.LightGreen,
+ lineno = C.LightGreen,
+ name = C.LightPurple,
+ vName = C.Cyan,
+ val = C.LightGreen,
+ em = C.Cyan,
+
+ # Emphasized colors for the last frame of the traceback
+ normalEm = C.Cyan,
+ filenameEm = C.Green,
+ linenoEm = C.Green,
+ nameEm = C.Purple,
+ valEm = C.Blue,
+
+ # Colors for printing the exception
+ excName = C.Red,
+ #line = C.Brown, # brown often is displayed as yellow
+ line = C.Red,
+ caret = C.Normal,
+ Normal = C.Normal,
+ ))
+
ex_colors.add_scheme(ColorScheme(
'Neutral',
# The color to be used for the top line
@@ -163,22 +163,22 @@ def exception_colors():
if os.name == "nt":
ex_colors.add_scheme(ex_colors['Linux'].copy('Neutral'))
- return ex_colors
-
-class Deprec(object):
-
- def __init__(self, wrapped_obj):
- self.wrapped=wrapped_obj
-
- def __getattr__(self, name):
- val = getattr(self.wrapped, name)
+ return ex_colors
+
+class Deprec(object):
+
+ def __init__(self, wrapped_obj):
+ self.wrapped=wrapped_obj
+
+ def __getattr__(self, name):
+ val = getattr(self.wrapped, name)
warnings.warn("Using ExceptionColors global is deprecated and will be removed in IPython 6.0",
DeprecationWarning, stacklevel=2)
-        # using getattr after warnings breaks ipydoctest in a weird way on 3.5
- return val
-
-# For backwards compatibility, keep around a single global object. Note that
-# this should NOT be used, the factory function should be used instead, since
-# these objects are stateful and it's very easy to get strange bugs if any code
-# modifies the module-level object's state.
-ExceptionColors = Deprec(exception_colors())
+        # using getattr after warnings breaks ipydoctest in a weird way on 3.5
+ return val
+
+# For backwards compatibility, keep around a single global object. Note that
+# this should NOT be used, the factory function should be used instead, since
+# these objects are stateful and it's very easy to get strange bugs if any code
+# modifies the module-level object's state.
+ExceptionColors = Deprec(exception_colors())
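As a usage sketch for the factory above (grounded only in the doctest and the schemes added in this diff; nothing here is new API), activating a scheme and reading one of its fields back looks like this::

    from IPython.core.excolors import exception_colors

    ec = exception_colors()
    ec.set_active_scheme('Linux')       # 'NoColor', 'LightBG' and 'Neutral' are also defined above
    colors = ec.active_colors           # dict of escape-code strings, keys as in the doctest
    print(colors['excName'] + 'ZeroDivisionError' + colors['Normal'])

Because every call returns a fresh ColorSchemeTable, this avoids the shared-state problem that the deprecated module-level ExceptionColors object warns about below.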
diff --git a/contrib/python/ipython/py2/IPython/core/extensions.py b/contrib/python/ipython/py2/IPython/core/extensions.py
index e20b485e97..58855466f1 100644
--- a/contrib/python/ipython/py2/IPython/core/extensions.py
+++ b/contrib/python/ipython/py2/IPython/core/extensions.py
@@ -1,173 +1,173 @@
-# encoding: utf-8
-"""A class for managing IPython extensions."""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import os
-from shutil import copyfile
-import sys
-
-from traitlets.config.configurable import Configurable
-from IPython.utils.path import ensure_dir_exists
-from traitlets import Instance
-
-try:
- from importlib import reload
-except ImportError :
- ## deprecated since 3.4
- from imp import reload
-
-#-----------------------------------------------------------------------------
-# Main class
-#-----------------------------------------------------------------------------
-
-class ExtensionManager(Configurable):
- """A class to manage IPython extensions.
-
- An IPython extension is an importable Python module that has
- a function with the signature::
-
- def load_ipython_extension(ipython):
- # Do things with ipython
-
- This function is called after your extension is imported and the
- currently active :class:`InteractiveShell` instance is passed as
- the only argument. You can do anything you want with IPython at
- that point, including defining new magic and aliases, adding new
- components, etc.
+# encoding: utf-8
+"""A class for managing IPython extensions."""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import os
+from shutil import copyfile
+import sys
+
+from traitlets.config.configurable import Configurable
+from IPython.utils.path import ensure_dir_exists
+from traitlets import Instance
+
+try:
+ from importlib import reload
+except ImportError :
+ ## deprecated since 3.4
+ from imp import reload
+
+#-----------------------------------------------------------------------------
+# Main class
+#-----------------------------------------------------------------------------
+
+class ExtensionManager(Configurable):
+ """A class to manage IPython extensions.
+
+ An IPython extension is an importable Python module that has
+ a function with the signature::
+
+ def load_ipython_extension(ipython):
+ # Do things with ipython
+
+ This function is called after your extension is imported and the
+ currently active :class:`InteractiveShell` instance is passed as
+ the only argument. You can do anything you want with IPython at
+ that point, including defining new magic and aliases, adding new
+ components, etc.
- You can also optionally define an :func:`unload_ipython_extension(ipython)`
- function, which will be called if the user unloads or reloads the extension.
- The extension manager will only call :func:`load_ipython_extension` again
- if the extension is reloaded.
-
- You can put your extension modules anywhere you want, as long as
- they can be imported by Python's standard import mechanism. However,
- to make it easy to write extensions, you can also put your extensions
- in ``os.path.join(self.ipython_dir, 'extensions')``. This directory
- is added to ``sys.path`` automatically.
- """
-
+ You can also optionally define an :func:`unload_ipython_extension(ipython)`
+ function, which will be called if the user unloads or reloads the extension.
+ The extension manager will only call :func:`load_ipython_extension` again
+ if the extension is reloaded.
+
+ You can put your extension modules anywhere you want, as long as
+ they can be imported by Python's standard import mechanism. However,
+ to make it easy to write extensions, you can also put your extensions
+ in ``os.path.join(self.ipython_dir, 'extensions')``. This directory
+ is added to ``sys.path`` automatically.
+ """
+
shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
-
- def __init__(self, shell=None, **kwargs):
- super(ExtensionManager, self).__init__(shell=shell, **kwargs)
+
+ def __init__(self, shell=None, **kwargs):
+ super(ExtensionManager, self).__init__(shell=shell, **kwargs)
self.shell.observe(
self._on_ipython_dir_changed, names=('ipython_dir',)
- )
- self.loaded = set()
-
- @property
- def ipython_extension_dir(self):
- return os.path.join(self.shell.ipython_dir, u'extensions')
-
+ )
+ self.loaded = set()
+
+ @property
+ def ipython_extension_dir(self):
+ return os.path.join(self.shell.ipython_dir, u'extensions')
+
def _on_ipython_dir_changed(self, change):
- ensure_dir_exists(self.ipython_extension_dir)
-
- def load_extension(self, module_str):
- """Load an IPython extension by its module name.
-
- Returns the string "already loaded" if the extension is already loaded,
- "no load function" if the module doesn't have a load_ipython_extension
- function, or None if it succeeded.
- """
- if module_str in self.loaded:
- return "already loaded"
+ ensure_dir_exists(self.ipython_extension_dir)
+
+ def load_extension(self, module_str):
+ """Load an IPython extension by its module name.
+
+ Returns the string "already loaded" if the extension is already loaded,
+ "no load function" if the module doesn't have a load_ipython_extension
+ function, or None if it succeeded.
+ """
+ if module_str in self.loaded:
+ return "already loaded"
- with self.shell.builtin_trap:
- if module_str not in sys.modules:
+ with self.shell.builtin_trap:
+ if module_str not in sys.modules:
try:
sys.modules[module_str] = __import__('IPython.extensions.' + module_str)
except ImportError:
- __import__(module_str)
- mod = sys.modules[module_str]
- if self._call_load_ipython_extension(mod):
- self.loaded.add(module_str)
- else:
- return "no load function"
-
- def unload_extension(self, module_str):
- """Unload an IPython extension by its module name.
-
- This function looks up the extension's name in ``sys.modules`` and
- simply calls ``mod.unload_ipython_extension(self)``.
+ __import__(module_str)
+ mod = sys.modules[module_str]
+ if self._call_load_ipython_extension(mod):
+ self.loaded.add(module_str)
+ else:
+ return "no load function"
+
+ def unload_extension(self, module_str):
+ """Unload an IPython extension by its module name.
+
+ This function looks up the extension's name in ``sys.modules`` and
+ simply calls ``mod.unload_ipython_extension(self)``.
- Returns the string "no unload function" if the extension doesn't define
- a function to unload itself, "not loaded" if the extension isn't loaded,
- otherwise None.
- """
- if module_str not in self.loaded:
- return "not loaded"
+ Returns the string "no unload function" if the extension doesn't define
+ a function to unload itself, "not loaded" if the extension isn't loaded,
+ otherwise None.
+ """
+ if module_str not in self.loaded:
+ return "not loaded"
- if module_str in sys.modules:
- mod = sys.modules[module_str]
- if self._call_unload_ipython_extension(mod):
- self.loaded.discard(module_str)
- else:
- return "no unload function"
-
- def reload_extension(self, module_str):
- """Reload an IPython extension by calling reload.
-
- If the module has not been loaded before,
- :meth:`InteractiveShell.load_extension` is called. Otherwise
- :func:`reload` is called and then the :func:`load_ipython_extension`
-        function of the module, if it exists, is called.
- """
- from IPython.utils.syspathcontext import prepended_to_syspath
-
- if (module_str in self.loaded) and (module_str in sys.modules):
- self.unload_extension(module_str)
- mod = sys.modules[module_str]
- with prepended_to_syspath(self.ipython_extension_dir):
- reload(mod)
- if self._call_load_ipython_extension(mod):
- self.loaded.add(module_str)
- else:
- self.load_extension(module_str)
-
- def _call_load_ipython_extension(self, mod):
- if hasattr(mod, 'load_ipython_extension'):
- mod.load_ipython_extension(self.shell)
- return True
-
- def _call_unload_ipython_extension(self, mod):
- if hasattr(mod, 'unload_ipython_extension'):
- mod.unload_ipython_extension(self.shell)
- return True
-
- def install_extension(self, url, filename=None):
+ if module_str in sys.modules:
+ mod = sys.modules[module_str]
+ if self._call_unload_ipython_extension(mod):
+ self.loaded.discard(module_str)
+ else:
+ return "no unload function"
+
+ def reload_extension(self, module_str):
+ """Reload an IPython extension by calling reload.
+
+ If the module has not been loaded before,
+ :meth:`InteractiveShell.load_extension` is called. Otherwise
+ :func:`reload` is called and then the :func:`load_ipython_extension`
+        function of the module, if it exists, is called.
+ """
+ from IPython.utils.syspathcontext import prepended_to_syspath
+
+ if (module_str in self.loaded) and (module_str in sys.modules):
+ self.unload_extension(module_str)
+ mod = sys.modules[module_str]
+ with prepended_to_syspath(self.ipython_extension_dir):
+ reload(mod)
+ if self._call_load_ipython_extension(mod):
+ self.loaded.add(module_str)
+ else:
+ self.load_extension(module_str)
+
+ def _call_load_ipython_extension(self, mod):
+ if hasattr(mod, 'load_ipython_extension'):
+ mod.load_ipython_extension(self.shell)
+ return True
+
+ def _call_unload_ipython_extension(self, mod):
+ if hasattr(mod, 'unload_ipython_extension'):
+ mod.unload_ipython_extension(self.shell)
+ return True
+
+ def install_extension(self, url, filename=None):
"""Download and install an IPython extension.
-
- If filename is given, the file will be so named (inside the extension
- directory). Otherwise, the name from the URL will be used. The file must
- have a .py or .zip extension; otherwise, a ValueError will be raised.
-
- Returns the full path to the installed file.
- """
- # Ensure the extension directory exists
- ensure_dir_exists(self.ipython_extension_dir)
-
- if os.path.isfile(url):
- src_filename = os.path.basename(url)
- copy = copyfile
- else:
- # Deferred imports
- try:
- from urllib.parse import urlparse # Py3
- from urllib.request import urlretrieve
- except ImportError:
- from urlparse import urlparse
- from urllib import urlretrieve
- src_filename = urlparse(url).path.split('/')[-1]
- copy = urlretrieve
-
- if filename is None:
- filename = src_filename
- if os.path.splitext(filename)[1] not in ('.py', '.zip'):
- raise ValueError("The file must have a .py or .zip extension", filename)
-
- filename = os.path.join(self.ipython_extension_dir, filename)
- copy(url, filename)
- return filename
+
+ If filename is given, the file will be so named (inside the extension
+ directory). Otherwise, the name from the URL will be used. The file must
+ have a .py or .zip extension; otherwise, a ValueError will be raised.
+
+ Returns the full path to the installed file.
+ """
+ # Ensure the extension directory exists
+ ensure_dir_exists(self.ipython_extension_dir)
+
+ if os.path.isfile(url):
+ src_filename = os.path.basename(url)
+ copy = copyfile
+ else:
+ # Deferred imports
+ try:
+ from urllib.parse import urlparse # Py3
+ from urllib.request import urlretrieve
+ except ImportError:
+ from urlparse import urlparse
+ from urllib import urlretrieve
+ src_filename = urlparse(url).path.split('/')[-1]
+ copy = urlretrieve
+
+ if filename is None:
+ filename = src_filename
+ if os.path.splitext(filename)[1] not in ('.py', '.zip'):
+ raise ValueError("The file must have a .py or .zip extension", filename)
+
+ filename = os.path.join(self.ipython_extension_dir, filename)
+ copy(url, filename)
+ return filename
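The ExtensionManager docstring above fully specifies the extension contract, so a minimal extension is just an importable module exposing the two hook functions it names. A sketch, with a hypothetical module name ``myext`` and payload::

    # myext.py -- anywhere on sys.path, or in <ipython_dir>/extensions
    def load_ipython_extension(ipython):
        # `ipython` is the active InteractiveShell instance
        ipython.push({'answer': 42})   # e.g. inject a variable into the user namespace

    def unload_ipython_extension(ipython):
        # optional: called when the user unloads or reloads the extension
        pass

It is then loaded through the manager shown in this diff, either programmatically with ``get_ipython().extension_manager.load_extension('myext')`` or interactively via the ``%load_ext myext`` magic.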
diff --git a/contrib/python/ipython/py2/IPython/core/formatters.py b/contrib/python/ipython/py2/IPython/core/formatters.py
index 31ed25a4b0..d990619f27 100644
--- a/contrib/python/ipython/py2/IPython/core/formatters.py
+++ b/contrib/python/ipython/py2/IPython/core/formatters.py
@@ -1,137 +1,137 @@
-# -*- coding: utf-8 -*-
-"""Display formatters.
-
-Inheritance diagram:
-
-.. inheritance-diagram:: IPython.core.formatters
- :parts: 3
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import abc
-import json
-import sys
-import traceback
-import warnings
-
-from decorator import decorator
-
-from traitlets.config.configurable import Configurable
-from IPython.core.getipython import get_ipython
-from IPython.utils.sentinel import Sentinel
+# -*- coding: utf-8 -*-
+"""Display formatters.
+
+Inheritance diagram:
+
+.. inheritance-diagram:: IPython.core.formatters
+ :parts: 3
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import abc
+import json
+import sys
+import traceback
+import warnings
+
+from decorator import decorator
+
+from traitlets.config.configurable import Configurable
+from IPython.core.getipython import get_ipython
+from IPython.utils.sentinel import Sentinel
from IPython.utils.dir2 import get_real_method
-from IPython.lib import pretty
-from traitlets import (
- Bool, Dict, Integer, Unicode, CUnicode, ObjectName, List,
- ForwardDeclaredInstance,
+from IPython.lib import pretty
+from traitlets import (
+ Bool, Dict, Integer, Unicode, CUnicode, ObjectName, List,
+ ForwardDeclaredInstance,
default, observe,
-)
-from IPython.utils.py3compat import (
- with_metaclass, string_types, unicode_type,
-)
-
-
-class DisplayFormatter(Configurable):
-
+)
+from IPython.utils.py3compat import (
+ with_metaclass, string_types, unicode_type,
+)
+
+
+class DisplayFormatter(Configurable):
+
active_types = List(Unicode(),
- help="""List of currently active mime-types to display.
- You can use this to set a white-list for formats to display.
-
- Most users will not need to change this value.
+ help="""List of currently active mime-types to display.
+ You can use this to set a white-list for formats to display.
+
+ Most users will not need to change this value.
""").tag(config=True)
@default('active_types')
- def _active_types_default(self):
- return self.format_types
+ def _active_types_default(self):
+ return self.format_types
@observe('active_types')
def _active_types_changed(self, change):
- for key, formatter in self.formatters.items():
+ for key, formatter in self.formatters.items():
if key in change['new']:
- formatter.enabled = True
- else:
- formatter.enabled = False
+ formatter.enabled = True
+ else:
+ formatter.enabled = False
- ipython_display_formatter = ForwardDeclaredInstance('FormatterABC')
+ ipython_display_formatter = ForwardDeclaredInstance('FormatterABC')
@default('ipython_display_formatter')
def _default_formatter(self):
- return IPythonDisplayFormatter(parent=self)
+ return IPythonDisplayFormatter(parent=self)
mimebundle_formatter = ForwardDeclaredInstance('FormatterABC')
@default('mimebundle_formatter')
def _default_mime_formatter(self):
return MimeBundleFormatter(parent=self)
-    # A dict of formatters whose keys are format types (MIME types) and whose
-    # values are instances of BaseFormatter subclasses.
-    formatters = Dict()
+    # A dict of formatters whose keys are format types (MIME types) and whose
+    # values are instances of BaseFormatter subclasses.
+ formatters = Dict()
@default('formatters')
- def _formatters_default(self):
- """Activate the default formatters."""
- formatter_classes = [
- PlainTextFormatter,
- HTMLFormatter,
- MarkdownFormatter,
- SVGFormatter,
- PNGFormatter,
- PDFFormatter,
- JPEGFormatter,
- LatexFormatter,
- JSONFormatter,
- JavascriptFormatter
- ]
- d = {}
- for cls in formatter_classes:
- f = cls(parent=self)
- d[f.format_type] = f
- return d
-
- def format(self, obj, include=None, exclude=None):
- """Return a format data dict for an object.
-
- By default all format types will be computed.
-
+ def _formatters_default(self):
+ """Activate the default formatters."""
+ formatter_classes = [
+ PlainTextFormatter,
+ HTMLFormatter,
+ MarkdownFormatter,
+ SVGFormatter,
+ PNGFormatter,
+ PDFFormatter,
+ JPEGFormatter,
+ LatexFormatter,
+ JSONFormatter,
+ JavascriptFormatter
+ ]
+ d = {}
+ for cls in formatter_classes:
+ f = cls(parent=self)
+ d[f.format_type] = f
+ return d
+
+ def format(self, obj, include=None, exclude=None):
+ """Return a format data dict for an object.
+
+ By default all format types will be computed.
+
The following MIME types are usually implemented:
-
- * text/plain
- * text/html
- * text/markdown
- * text/latex
- * application/json
- * application/javascript
- * application/pdf
- * image/png
- * image/jpeg
- * image/svg+xml
-
- Parameters
- ----------
- obj : object
- The Python object whose format data will be computed.
+
+ * text/plain
+ * text/html
+ * text/markdown
+ * text/latex
+ * application/json
+ * application/javascript
+ * application/pdf
+ * image/png
+ * image/jpeg
+ * image/svg+xml
+
+ Parameters
+ ----------
+ obj : object
+ The Python object whose format data will be computed.
include : list, tuple or set; optional
- A list of format type strings (MIME types) to include in the
- format data dict. If this is set *only* the format types included
- in this list will be computed.
+ A list of format type strings (MIME types) to include in the
+ format data dict. If this is set *only* the format types included
+ in this list will be computed.
exclude : list, tuple or set; optional
-            A list of format type strings (MIME types) to exclude from the
-            format data dict. If this is set, all format types will be
-            computed except for those included in this argument.
+            A list of format type strings (MIME types) to exclude from the
+            format data dict. If this is set, all format types will be
+            computed except for those included in this argument.
Mimetypes present in exclude will take precedence over the ones in include
-
- Returns
- -------
- (format_dict, metadata_dict) : tuple of two dicts
-
- format_dict is a dictionary of key/value pairs, one of each format that was
- generated for the object. The keys are the format types, which
-            will usually be MIME type strings, and the values are JSON'able
-            data structures containing the raw data for the representation in
- that format.
-
- metadata_dict is a dictionary of metadata about each mime-type output.
- Its keys will be a strict subset of the keys in format_dict.
+
+ Returns
+ -------
+ (format_dict, metadata_dict) : tuple of two dicts
+
+ format_dict is a dictionary of key/value pairs, one of each format that was
+ generated for the object. The keys are the format types, which
+            will usually be MIME type strings, and the values are JSON'able
+            data structures containing the raw data for the representation in
+ that format.
+
+ metadata_dict is a dictionary of metadata about each mime-type output.
+ Its keys will be a strict subset of the keys in format_dict.
Notes
-----
@@ -141,13 +141,13 @@ class DisplayFormatter(Configurable):
precedence and the corresponding `_repr_*_` for this mimetype will
not be called.
- """
- format_dict = {}
- md_dict = {}
-
- if self.ipython_display_formatter(obj):
- # object handled itself, don't proceed
- return {}, {}
+ """
+ format_dict = {}
+ md_dict = {}
+
+ if self.ipython_display_formatter(obj):
+ # object handled itself, don't proceed
+ return {}, {}
format_dict, md_dict = self.mimebundle_formatter(obj, include=include, exclude=exclude)
@@ -159,733 +159,733 @@ class DisplayFormatter(Configurable):
format_dict = {k:v for k,v in format_dict.items() if k not in exclude}
md_dict = {k:v for k,v in md_dict.items() if k not in exclude}
- for format_type, formatter in self.formatters.items():
+ for format_type, formatter in self.formatters.items():
if format_type in format_dict:
# already got it from mimebundle, don't render again
continue
- if include and format_type not in include:
- continue
- if exclude and format_type in exclude:
- continue
-
- md = None
- try:
- data = formatter(obj)
- except:
- # FIXME: log the exception
- raise
-
- # formatters can return raw data or (data, metadata)
- if isinstance(data, tuple) and len(data) == 2:
- data, md = data
-
- if data is not None:
- format_dict[format_type] = data
- if md is not None:
- md_dict[format_type] = md
- return format_dict, md_dict
-
- @property
- def format_types(self):
- """Return the format types (MIME types) of the active formatters."""
- return list(self.formatters.keys())
-
-
-#-----------------------------------------------------------------------------
-# Formatters for specific format types (text, html, svg, etc.)
-#-----------------------------------------------------------------------------
-
-
-def _safe_repr(obj):
- """Try to return a repr of an object
-
- always returns a string, at least.
- """
- try:
- return repr(obj)
- except Exception as e:
- return "un-repr-able object (%r)" % e
-
-
-class FormatterWarning(UserWarning):
- """Warning class for errors in formatters"""
-
-@decorator
-def catch_format_error(method, self, *args, **kwargs):
- """show traceback on failed format call"""
- try:
- r = method(self, *args, **kwargs)
- except NotImplementedError:
- # don't warn on NotImplementedErrors
+ if include and format_type not in include:
+ continue
+ if exclude and format_type in exclude:
+ continue
+
+ md = None
+ try:
+ data = formatter(obj)
+ except:
+ # FIXME: log the exception
+ raise
+
+ # formatters can return raw data or (data, metadata)
+ if isinstance(data, tuple) and len(data) == 2:
+ data, md = data
+
+ if data is not None:
+ format_dict[format_type] = data
+ if md is not None:
+ md_dict[format_type] = md
+ return format_dict, md_dict
+
+ @property
+ def format_types(self):
+ """Return the format types (MIME types) of the active formatters."""
+ return list(self.formatters.keys())
+
+
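To make the format() contract above concrete, here is a hedged sketch of calling it through the active shell inside a running IPython session (the output values are indicative, not exact)::

    fmt = get_ipython().display_formatter        # the shell's DisplayFormatter
    data, metadata = fmt.format({'a': 1})
    # data     -> {'text/plain': "{'a': 1}"}     only formatters that produced output appear
    # metadata -> {}                             keys are a strict subset of data's keys

    # restricting computation to selected MIME types, per the include/exclude parameters:
    data, _ = fmt.format({'a': 1}, include={'text/plain'})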
+#-----------------------------------------------------------------------------
+# Formatters for specific format types (text, html, svg, etc.)
+#-----------------------------------------------------------------------------
+
+
+def _safe_repr(obj):
+ """Try to return a repr of an object
+
+ always returns a string, at least.
+ """
+ try:
+ return repr(obj)
+ except Exception as e:
+ return "un-repr-able object (%r)" % e
+
+
+class FormatterWarning(UserWarning):
+ """Warning class for errors in formatters"""
+
+@decorator
+def catch_format_error(method, self, *args, **kwargs):
+ """show traceback on failed format call"""
+ try:
+ r = method(self, *args, **kwargs)
+ except NotImplementedError:
+ # don't warn on NotImplementedErrors
return self._check_return(None, args[0])
- except Exception:
- exc_info = sys.exc_info()
- ip = get_ipython()
- if ip is not None:
- ip.showtraceback(exc_info)
- else:
- traceback.print_exception(*exc_info)
+ except Exception:
+ exc_info = sys.exc_info()
+ ip = get_ipython()
+ if ip is not None:
+ ip.showtraceback(exc_info)
+ else:
+ traceback.print_exception(*exc_info)
return self._check_return(None, args[0])
- return self._check_return(r, args[0])
-
-
-class FormatterABC(with_metaclass(abc.ABCMeta, object)):
- """ Abstract base class for Formatters.
-
- A formatter is a callable class that is responsible for computing the
- raw format data for a particular format type (MIME type). For example,
- an HTML formatter would have a format type of `text/html` and would return
- the HTML representation of the object when called.
- """
-
- # The format type of the data returned, usually a MIME type.
- format_type = 'text/plain'
-
- # Is the formatter enabled...
- enabled = True
-
- @abc.abstractmethod
- def __call__(self, obj):
- """Return a JSON'able representation of the object.
-
- If the object cannot be formatted by this formatter,
- warn and return None.
- """
- return repr(obj)
-
-
-def _mod_name_key(typ):
- """Return a (__module__, __name__) tuple for a type.
-
- Used as key in Formatter.deferred_printers.
- """
- module = getattr(typ, '__module__', None)
- name = getattr(typ, '__name__', None)
- return (module, name)
-
-
-def _get_type(obj):
- """Return the type of an instance (old and new-style)"""
- return getattr(obj, '__class__', None) or type(obj)
-
-
-_raise_key_error = Sentinel('_raise_key_error', __name__,
-"""
-Special value to raise a KeyError
-
-Raise KeyError in `BaseFormatter.pop` if passed as the default value to `pop`
-""")
-
-
-class BaseFormatter(Configurable):
- """A base formatter class that is configurable.
-
- This formatter should usually be used as the base class of all formatters.
- It is a traited :class:`Configurable` class and includes an extensible
- API for users to determine how their objects are formatted. The following
-    logic is used to find a function to format a given object.
-
-    1. The object is introspected to see if it has a method with the name
-    :attr:`print_method`. If it does, the object is passed to that method
-    for formatting.
-    2. If no print method is found, three internal dictionaries are consulted
-    to find a print method: :attr:`singleton_printers`, :attr:`type_printers`
- and :attr:`deferred_printers`.
-
- Users should use these dictionaries to register functions that will be
- used to compute the format data for their objects (if those objects don't
- have the special print methods). The easiest way of using these
- dictionaries is through the :meth:`for_type` and :meth:`for_type_by_name`
- methods.
-
- If no function/callable is found to compute the format data, ``None`` is
- returned and this format type is not used.
- """
-
- format_type = Unicode('text/plain')
- _return_type = string_types
-
+ return self._check_return(r, args[0])
+
+
+class FormatterABC(with_metaclass(abc.ABCMeta, object)):
+ """ Abstract base class for Formatters.
+
+ A formatter is a callable class that is responsible for computing the
+ raw format data for a particular format type (MIME type). For example,
+ an HTML formatter would have a format type of `text/html` and would return
+ the HTML representation of the object when called.
+ """
+
+ # The format type of the data returned, usually a MIME type.
+ format_type = 'text/plain'
+
+ # Is the formatter enabled...
+ enabled = True
+
+ @abc.abstractmethod
+ def __call__(self, obj):
+ """Return a JSON'able representation of the object.
+
+ If the object cannot be formatted by this formatter,
+ warn and return None.
+ """
+ return repr(obj)
+
+
+def _mod_name_key(typ):
+ """Return a (__module__, __name__) tuple for a type.
+
+ Used as key in Formatter.deferred_printers.
+ """
+ module = getattr(typ, '__module__', None)
+ name = getattr(typ, '__name__', None)
+ return (module, name)
+
+
+def _get_type(obj):
+ """Return the type of an instance (old and new-style)"""
+ return getattr(obj, '__class__', None) or type(obj)
+
+
+_raise_key_error = Sentinel('_raise_key_error', __name__,
+"""
+Special value to raise a KeyError
+
+Raise KeyError in `BaseFormatter.pop` if passed as the default value to `pop`
+""")
+
+
+class BaseFormatter(Configurable):
+ """A base formatter class that is configurable.
+
+ This formatter should usually be used as the base class of all formatters.
+ It is a traited :class:`Configurable` class and includes an extensible
+ API for users to determine how their objects are formatted. The following
+    logic is used to find a function to format a given object.
+
+    1. The object is introspected to see if it has a method with the name
+    :attr:`print_method`. If it does, the object is passed to that method
+    for formatting.
+    2. If no print method is found, three internal dictionaries are consulted
+    to find a print method: :attr:`singleton_printers`, :attr:`type_printers`
+ and :attr:`deferred_printers`.
+
+ Users should use these dictionaries to register functions that will be
+ used to compute the format data for their objects (if those objects don't
+ have the special print methods). The easiest way of using these
+ dictionaries is through the :meth:`for_type` and :meth:`for_type_by_name`
+ methods.
+
+ If no function/callable is found to compute the format data, ``None`` is
+ returned and this format type is not used.
+ """
+
+ format_type = Unicode('text/plain')
+ _return_type = string_types
+
enabled = Bool(True).tag(config=True)
-
- print_method = ObjectName('__repr__')
-
- # The singleton printers.
- # Maps the IDs of the builtin singleton objects to the format functions.
+
+ print_method = ObjectName('__repr__')
+
+ # The singleton printers.
+ # Maps the IDs of the builtin singleton objects to the format functions.
singleton_printers = Dict().tag(config=True)
-
- # The type-specific printers.
- # Map type objects to the format functions.
+
+ # The type-specific printers.
+ # Map type objects to the format functions.
type_printers = Dict().tag(config=True)
-
- # The deferred-import type-specific printers.
- # Map (modulename, classname) pairs to the format functions.
+
+ # The deferred-import type-specific printers.
+ # Map (modulename, classname) pairs to the format functions.
deferred_printers = Dict().tag(config=True)
-
- @catch_format_error
- def __call__(self, obj):
- """Compute the format for an object."""
- if self.enabled:
- # lookup registered printer
- try:
- printer = self.lookup(obj)
- except KeyError:
- pass
- else:
- return printer(obj)
- # Finally look for special method names
+
+ @catch_format_error
+ def __call__(self, obj):
+ """Compute the format for an object."""
+ if self.enabled:
+ # lookup registered printer
+ try:
+ printer = self.lookup(obj)
+ except KeyError:
+ pass
+ else:
+ return printer(obj)
+ # Finally look for special method names
method = get_real_method(obj, self.print_method)
- if method is not None:
- return method()
- return None
- else:
- return None
-
- def __contains__(self, typ):
- """map in to lookup_by_type"""
- try:
- self.lookup_by_type(typ)
- except KeyError:
- return False
- else:
- return True
-
- def _check_return(self, r, obj):
- """Check that a return value is appropriate
-
- Return the value if so, None otherwise, warning if invalid.
- """
- if r is None or isinstance(r, self._return_type) or \
- (isinstance(r, tuple) and r and isinstance(r[0], self._return_type)):
- return r
- else:
- warnings.warn(
- "%s formatter returned invalid type %s (expected %s) for object: %s" % \
- (self.format_type, type(r), self._return_type, _safe_repr(obj)),
- FormatterWarning
- )
-
- def lookup(self, obj):
- """Look up the formatter for a given instance.
-
- Parameters
- ----------
- obj : object instance
-
- Returns
- -------
- f : callable
- The registered formatting callable for the type.
-
- Raises
- ------
- KeyError if the type has not been registered.
- """
- # look for singleton first
- obj_id = id(obj)
- if obj_id in self.singleton_printers:
- return self.singleton_printers[obj_id]
- # then lookup by type
- return self.lookup_by_type(_get_type(obj))
-
- def lookup_by_type(self, typ):
- """Look up the registered formatter for a type.
-
- Parameters
- ----------
- typ : type or '__module__.__name__' string for a type
-
- Returns
- -------
- f : callable
- The registered formatting callable for the type.
-
- Raises
- ------
- KeyError if the type has not been registered.
- """
- if isinstance(typ, string_types):
- typ_key = tuple(typ.rsplit('.',1))
- if typ_key not in self.deferred_printers:
- # We may have it cached in the type map. We will have to
- # iterate over all of the types to check.
- for cls in self.type_printers:
- if _mod_name_key(cls) == typ_key:
- return self.type_printers[cls]
- else:
- return self.deferred_printers[typ_key]
- else:
- for cls in pretty._get_mro(typ):
- if cls in self.type_printers or self._in_deferred_types(cls):
- return self.type_printers[cls]
-
- # If we have reached here, the lookup failed.
- raise KeyError("No registered printer for {0!r}".format(typ))
-
- def for_type(self, typ, func=None):
- """Add a format function for a given type.
-
- Parameters
- -----------
- typ : type or '__module__.__name__' string for a type
- The class of the object that will be formatted using `func`.
- func : callable
- A callable for computing the format data.
- `func` will be called with the object to be formatted,
- and will return the raw data in this formatter's format.
- Subclasses may use a different call signature for the
- `func` argument.
-
- If `func` is None or not specified, there will be no change,
- only returning the current value.
-
- Returns
- -------
- oldfunc : callable
- The currently registered callable.
- If you are registering a new formatter,
- this will be the previous value (to enable restoring later).
- """
- # if string given, interpret as 'pkg.module.class_name'
- if isinstance(typ, string_types):
- type_module, type_name = typ.rsplit('.', 1)
- return self.for_type_by_name(type_module, type_name, func)
-
- try:
- oldfunc = self.lookup_by_type(typ)
- except KeyError:
- oldfunc = None
-
- if func is not None:
- self.type_printers[typ] = func
-
- return oldfunc
-
- def for_type_by_name(self, type_module, type_name, func=None):
- """Add a format function for a type specified by the full dotted
- module and name of the type, rather than the type of the object.
-
- Parameters
- ----------
- type_module : str
- The full dotted name of the module the type is defined in, like
- ``numpy``.
- type_name : str
- The name of the type (the class name), like ``dtype``
- func : callable
- A callable for computing the format data.
- `func` will be called with the object to be formatted,
- and will return the raw data in this formatter's format.
- Subclasses may use a different call signature for the
- `func` argument.
-
- If `func` is None or unspecified, there will be no change,
- only returning the current value.
-
- Returns
- -------
- oldfunc : callable
- The currently registered callable.
- If you are registering a new formatter,
- this will be the previous value (to enable restoring later).
- """
- key = (type_module, type_name)
-
- try:
- oldfunc = self.lookup_by_type("%s.%s" % key)
- except KeyError:
- oldfunc = None
-
- if func is not None:
- self.deferred_printers[key] = func
- return oldfunc
-
- def pop(self, typ, default=_raise_key_error):
- """Pop a formatter for the given type.
-
- Parameters
- ----------
- typ : type or '__module__.__name__' string for a type
- default : object
- value to be returned if no formatter is registered for typ.
-
- Returns
- -------
- obj : object
- The last registered object for the type.
-
- Raises
- ------
- KeyError if the type is not registered and default is not specified.
- """
-
- if isinstance(typ, string_types):
- typ_key = tuple(typ.rsplit('.',1))
- if typ_key not in self.deferred_printers:
- # We may have it cached in the type map. We will have to
- # iterate over all of the types to check.
- for cls in self.type_printers:
- if _mod_name_key(cls) == typ_key:
- old = self.type_printers.pop(cls)
- break
- else:
- old = default
- else:
- old = self.deferred_printers.pop(typ_key)
- else:
- if typ in self.type_printers:
- old = self.type_printers.pop(typ)
- else:
- old = self.deferred_printers.pop(_mod_name_key(typ), default)
- if old is _raise_key_error:
- raise KeyError("No registered value for {0!r}".format(typ))
- return old
-
- def _in_deferred_types(self, cls):
- """
- Check if the given class is specified in the deferred type registry.
-
- Successful matches will be moved to the regular type registry for future use.
- """
- mod = getattr(cls, '__module__', None)
- name = getattr(cls, '__name__', None)
- key = (mod, name)
- if key in self.deferred_printers:
- # Move the printer over to the regular registry.
- printer = self.deferred_printers.pop(key)
- self.type_printers[cls] = printer
- return True
- return False
-
-
-class PlainTextFormatter(BaseFormatter):
- """The default pretty-printer.
-
- This uses :mod:`IPython.lib.pretty` to compute the format data of
- the object. If the object cannot be pretty printed, :func:`repr` is used.
- See the documentation of :mod:`IPython.lib.pretty` for details on
- how to write pretty printers. Here is a simple example::
-
- def dtype_pprinter(obj, p, cycle):
- if cycle:
- return p.text('dtype(...)')
- if hasattr(obj, 'fields'):
- if obj.fields is None:
- p.text(repr(obj))
- else:
- p.begin_group(7, 'dtype([')
- for i, field in enumerate(obj.descr):
- if i > 0:
- p.text(',')
- p.breakable()
- p.pretty(field)
- p.end_group(7, '])')
- """
-
- # The format type of data returned.
- format_type = Unicode('text/plain')
-
-    # This subclass ignores this attribute as it always needs to return
- # something.
+ if method is not None:
+ return method()
+ return None
+ else:
+ return None
+
+ def __contains__(self, typ):
+ """map in to lookup_by_type"""
+ try:
+ self.lookup_by_type(typ)
+ except KeyError:
+ return False
+ else:
+ return True
+
+ def _check_return(self, r, obj):
+ """Check that a return value is appropriate
+
+ Return the value if so, None otherwise, warning if invalid.
+ """
+ if r is None or isinstance(r, self._return_type) or \
+ (isinstance(r, tuple) and r and isinstance(r[0], self._return_type)):
+ return r
+ else:
+ warnings.warn(
+ "%s formatter returned invalid type %s (expected %s) for object: %s" % \
+ (self.format_type, type(r), self._return_type, _safe_repr(obj)),
+ FormatterWarning
+ )
+
+ def lookup(self, obj):
+ """Look up the formatter for a given instance.
+
+ Parameters
+ ----------
+ obj : object instance
+
+ Returns
+ -------
+ f : callable
+ The registered formatting callable for the type.
+
+ Raises
+ ------
+ KeyError if the type has not been registered.
+ """
+ # look for singleton first
+ obj_id = id(obj)
+ if obj_id in self.singleton_printers:
+ return self.singleton_printers[obj_id]
+ # then lookup by type
+ return self.lookup_by_type(_get_type(obj))
+
+ def lookup_by_type(self, typ):
+ """Look up the registered formatter for a type.
+
+ Parameters
+ ----------
+ typ : type or '__module__.__name__' string for a type
+
+ Returns
+ -------
+ f : callable
+ The registered formatting callable for the type.
+
+ Raises
+ ------
+ KeyError if the type has not been registered.
+ """
+ if isinstance(typ, string_types):
+ typ_key = tuple(typ.rsplit('.',1))
+ if typ_key not in self.deferred_printers:
+ # We may have it cached in the type map. We will have to
+ # iterate over all of the types to check.
+ for cls in self.type_printers:
+ if _mod_name_key(cls) == typ_key:
+ return self.type_printers[cls]
+ else:
+ return self.deferred_printers[typ_key]
+ else:
+ for cls in pretty._get_mro(typ):
+ if cls in self.type_printers or self._in_deferred_types(cls):
+ return self.type_printers[cls]
+
+ # If we have reached here, the lookup failed.
+ raise KeyError("No registered printer for {0!r}".format(typ))
+
+ def for_type(self, typ, func=None):
+ """Add a format function for a given type.
+
+ Parameters
+ -----------
+ typ : type or '__module__.__name__' string for a type
+ The class of the object that will be formatted using `func`.
+ func : callable
+ A callable for computing the format data.
+ `func` will be called with the object to be formatted,
+ and will return the raw data in this formatter's format.
+ Subclasses may use a different call signature for the
+ `func` argument.
+
+ If `func` is None or not specified, there will be no change,
+ only returning the current value.
+
+ Returns
+ -------
+ oldfunc : callable
+ The currently registered callable.
+ If you are registering a new formatter,
+ this will be the previous value (to enable restoring later).
+ """
+ # if string given, interpret as 'pkg.module.class_name'
+ if isinstance(typ, string_types):
+ type_module, type_name = typ.rsplit('.', 1)
+ return self.for_type_by_name(type_module, type_name, func)
+
+ try:
+ oldfunc = self.lookup_by_type(typ)
+ except KeyError:
+ oldfunc = None
+
+ if func is not None:
+ self.type_printers[typ] = func
+
+ return oldfunc
+
+ def for_type_by_name(self, type_module, type_name, func=None):
+ """Add a format function for a type specified by the full dotted
+ module and name of the type, rather than the type of the object.
+
+ Parameters
+ ----------
+ type_module : str
+ The full dotted name of the module the type is defined in, like
+ ``numpy``.
+ type_name : str
+ The name of the type (the class name), like ``dtype``
+ func : callable
+ A callable for computing the format data.
+ `func` will be called with the object to be formatted,
+ and will return the raw data in this formatter's format.
+ Subclasses may use a different call signature for the
+ `func` argument.
+
+ If `func` is None or unspecified, there will be no change,
+ only returning the current value.
+
+ Returns
+ -------
+ oldfunc : callable
+ The currently registered callable.
+ If you are registering a new formatter,
+ this will be the previous value (to enable restoring later).
+ """
+ key = (type_module, type_name)
+
+ try:
+ oldfunc = self.lookup_by_type("%s.%s" % key)
+ except KeyError:
+ oldfunc = None
+
+ if func is not None:
+ self.deferred_printers[key] = func
+ return oldfunc
+
+ def pop(self, typ, default=_raise_key_error):
+ """Pop a formatter for the given type.
+
+ Parameters
+ ----------
+ typ : type or '__module__.__name__' string for a type
+ default : object
+ value to be returned if no formatter is registered for typ.
+
+ Returns
+ -------
+ obj : object
+ The last registered object for the type.
+
+ Raises
+ ------
+ KeyError if the type is not registered and default is not specified.
+ """
+
+ if isinstance(typ, string_types):
+ typ_key = tuple(typ.rsplit('.',1))
+ if typ_key not in self.deferred_printers:
+ # We may have it cached in the type map. We will have to
+ # iterate over all of the types to check.
+ for cls in self.type_printers:
+ if _mod_name_key(cls) == typ_key:
+ old = self.type_printers.pop(cls)
+ break
+ else:
+ old = default
+ else:
+ old = self.deferred_printers.pop(typ_key)
+ else:
+ if typ in self.type_printers:
+ old = self.type_printers.pop(typ)
+ else:
+ old = self.deferred_printers.pop(_mod_name_key(typ), default)
+ if old is _raise_key_error:
+ raise KeyError("No registered value for {0!r}".format(typ))
+ return old
+
+ def _in_deferred_types(self, cls):
+ """
+ Check if the given class is specified in the deferred type registry.
+
+ Successful matches will be moved to the regular type registry for future use.
+ """
+ mod = getattr(cls, '__module__', None)
+ name = getattr(cls, '__name__', None)
+ key = (mod, name)
+ if key in self.deferred_printers:
+ # Move the printer over to the regular registry.
+ printer = self.deferred_printers.pop(key)
+ self.type_printers[cls] = printer
+ return True
+ return False
+
+
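Since BaseFormatter's for_type/for_type_by_name registries are the documented extension point, here is a hedged sketch of registering a custom printer on the HTML formatter of a running shell (the ``int_html`` helper is made up for illustration)::

    def int_html(n):
        # returns raw data in this formatter's format, as described above
        return '<span style="color: green">%d</span>' % n

    html_formatter = get_ipython().display_formatter.formatters['text/html']
    old = html_formatter.for_type(int, int_html)   # returns the previously registered callable, if any

Calling ``html_formatter(3)`` afterwards goes through lookup()/lookup_by_type() exactly as described in the docstrings above.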
+class PlainTextFormatter(BaseFormatter):
+ """The default pretty-printer.
+
+ This uses :mod:`IPython.lib.pretty` to compute the format data of
+ the object. If the object cannot be pretty printed, :func:`repr` is used.
+ See the documentation of :mod:`IPython.lib.pretty` for details on
+ how to write pretty printers. Here is a simple example::
+
+ def dtype_pprinter(obj, p, cycle):
+ if cycle:
+ return p.text('dtype(...)')
+ if hasattr(obj, 'fields'):
+ if obj.fields is None:
+ p.text(repr(obj))
+ else:
+ p.begin_group(7, 'dtype([')
+ for i, field in enumerate(obj.descr):
+ if i > 0:
+ p.text(',')
+ p.breakable()
+ p.pretty(field)
+ p.end_group(7, '])')
+ """
+
+ # The format type of data returned.
+ format_type = Unicode('text/plain')
+
+    # This subclass ignores this attribute as it always needs to return
+ # something.
enabled = Bool(True).tag(config=False)
-
+
max_seq_length = Integer(pretty.MAX_SEQ_LENGTH,
- help="""Truncate large collections (lists, dicts, tuples, sets) to this size.
-
- Set to 0 to disable truncation.
- """
+ help="""Truncate large collections (lists, dicts, tuples, sets) to this size.
+
+ Set to 0 to disable truncation.
+ """
).tag(config=True)
-
- # Look for a _repr_pretty_ methods to use for pretty printing.
- print_method = ObjectName('_repr_pretty_')
-
- # Whether to pretty-print or not.
+
+ # Look for a _repr_pretty_ methods to use for pretty printing.
+ print_method = ObjectName('_repr_pretty_')
+
+ # Whether to pretty-print or not.
pprint = Bool(True).tag(config=True)
-
- # Whether to be verbose or not.
+
+ # Whether to be verbose or not.
verbose = Bool(False).tag(config=True)
-
- # The maximum width.
+
+ # The maximum width.
max_width = Integer(79).tag(config=True)
-
- # The newline character.
+
+ # The newline character.
newline = Unicode('\n').tag(config=True)
-
- # format-string for pprinting floats
- float_format = Unicode('%r')
- # setter for float precision, either int or direct format-string
+
+ # format-string for pprinting floats
+ float_format = Unicode('%r')
+ # setter for float precision, either int or direct format-string
float_precision = CUnicode('').tag(config=True)
-
+
@observe('float_precision')
def _float_precision_changed(self, change):
- """float_precision changed, set float_format accordingly.
-
- float_precision can be set by int or str.
- This will set float_format, after interpreting input.
- If numpy has been imported, numpy print precision will also be set.
-
- integer `n` sets format to '%.nf', otherwise, format set directly.
-
- An empty string returns to defaults (repr for float, 8 for numpy).
-
- This parameter can be set via the '%precision' magic.
- """
-
+ """float_precision changed, set float_format accordingly.
+
+ float_precision can be set by int or str.
+ This will set float_format, after interpreting input.
+ If numpy has been imported, numpy print precision will also be set.
+
+ integer `n` sets format to '%.nf', otherwise, format set directly.
+
+ An empty string returns to defaults (repr for float, 8 for numpy).
+
+ This parameter can be set via the '%precision' magic.
+ """
+
new = change['new']
- if '%' in new:
- # got explicit format string
- fmt = new
- try:
- fmt%3.14159
- except Exception:
- raise ValueError("Precision must be int or format string, not %r"%new)
- elif new:
- # otherwise, should be an int
- try:
- i = int(new)
- assert i >= 0
- except ValueError:
- raise ValueError("Precision must be int or format string, not %r"%new)
- except AssertionError:
- raise ValueError("int precision must be non-negative, not %r"%i)
-
- fmt = '%%.%if'%i
- if 'numpy' in sys.modules:
- # set numpy precision if it has been imported
- import numpy
- numpy.set_printoptions(precision=i)
- else:
- # default back to repr
- fmt = '%r'
- if 'numpy' in sys.modules:
- import numpy
- # numpy default is 8
- numpy.set_printoptions(precision=8)
- self.float_format = fmt
-
- # Use the default pretty printers from IPython.lib.pretty.
+ if '%' in new:
+ # got explicit format string
+ fmt = new
+ try:
+ fmt%3.14159
+ except Exception:
+ raise ValueError("Precision must be int or format string, not %r"%new)
+ elif new:
+ # otherwise, should be an int
+ try:
+ i = int(new)
+ assert i >= 0
+ except ValueError:
+ raise ValueError("Precision must be int or format string, not %r"%new)
+ except AssertionError:
+ raise ValueError("int precision must be non-negative, not %r"%i)
+
+ fmt = '%%.%if'%i
+ if 'numpy' in sys.modules:
+ # set numpy precision if it has been imported
+ import numpy
+ numpy.set_printoptions(precision=i)
+ else:
+ # default back to repr
+ fmt = '%r'
+ if 'numpy' in sys.modules:
+ import numpy
+ # numpy default is 8
+ numpy.set_printoptions(precision=8)
+ self.float_format = fmt
+
+ # Use the default pretty printers from IPython.lib.pretty.
@default('singleton_printers')
- def _singleton_printers_default(self):
- return pretty._singleton_pprinters.copy()
-
+ def _singleton_printers_default(self):
+ return pretty._singleton_pprinters.copy()
+
@default('type_printers')
- def _type_printers_default(self):
- d = pretty._type_pprinters.copy()
- d[float] = lambda obj,p,cycle: p.text(self.float_format%obj)
- return d
-
+ def _type_printers_default(self):
+ d = pretty._type_pprinters.copy()
+ d[float] = lambda obj,p,cycle: p.text(self.float_format%obj)
+ return d
+
@default('deferred_printers')
- def _deferred_printers_default(self):
- return pretty._deferred_type_pprinters.copy()
-
- #### FormatterABC interface ####
-
- @catch_format_error
- def __call__(self, obj):
- """Compute the pretty representation of the object."""
- if not self.pprint:
- return repr(obj)
- else:
- # handle str and unicode on Python 2
- # io.StringIO only accepts unicode,
- # cStringIO doesn't handle unicode on py2,
- # StringIO allows str, unicode but only ascii str
- stream = pretty.CUnicodeIO()
- printer = pretty.RepresentationPrinter(stream, self.verbose,
- self.max_width, self.newline,
- max_seq_length=self.max_seq_length,
- singleton_pprinters=self.singleton_printers,
- type_pprinters=self.type_printers,
- deferred_pprinters=self.deferred_printers)
- printer.pretty(obj)
- printer.flush()
- return stream.getvalue()
-
-
-class HTMLFormatter(BaseFormatter):
- """An HTML formatter.
-
- To define the callables that compute the HTML representation of your
- objects, define a :meth:`_repr_html_` method or use the :meth:`for_type`
- or :meth:`for_type_by_name` methods to register functions that handle
- this.
-
- The return value of this formatter should be a valid HTML snippet that
- could be injected into an existing DOM. It should *not* include the
-    ``<html>`` or ``<body>`` tags.
- """
- format_type = Unicode('text/html')
-
- print_method = ObjectName('_repr_html_')
-
-
-class MarkdownFormatter(BaseFormatter):
- """A Markdown formatter.
-
- To define the callables that compute the Markdown representation of your
- objects, define a :meth:`_repr_markdown_` method or use the :meth:`for_type`
- or :meth:`for_type_by_name` methods to register functions that handle
- this.
-
-    The return value of this formatter should be valid Markdown.
- """
- format_type = Unicode('text/markdown')
-
- print_method = ObjectName('_repr_markdown_')
-
-class SVGFormatter(BaseFormatter):
- """An SVG formatter.
-
- To define the callables that compute the SVG representation of your
- objects, define a :meth:`_repr_svg_` method or use the :meth:`for_type`
- or :meth:`for_type_by_name` methods to register functions that handle
- this.
-
- The return value of this formatter should be valid SVG enclosed in
-    ``<svg>`` tags, that could be injected into an existing DOM. It should
-    *not* include the ``<html>`` or ``<body>`` tags.
- """
- format_type = Unicode('image/svg+xml')
-
- print_method = ObjectName('_repr_svg_')
-
-
-class PNGFormatter(BaseFormatter):
- """A PNG formatter.
-
- To define the callables that compute the PNG representation of your
- objects, define a :meth:`_repr_png_` method or use the :meth:`for_type`
- or :meth:`for_type_by_name` methods to register functions that handle
- this.
-
- The return value of this formatter should be raw PNG data, *not*
- base64 encoded.
- """
- format_type = Unicode('image/png')
-
- print_method = ObjectName('_repr_png_')
-
- _return_type = (bytes, unicode_type)
-
-
-class JPEGFormatter(BaseFormatter):
- """A JPEG formatter.
-
- To define the callables that compute the JPEG representation of your
- objects, define a :meth:`_repr_jpeg_` method or use the :meth:`for_type`
- or :meth:`for_type_by_name` methods to register functions that handle
- this.
-
- The return value of this formatter should be raw JPEG data, *not*
- base64 encoded.
- """
- format_type = Unicode('image/jpeg')
-
- print_method = ObjectName('_repr_jpeg_')
-
- _return_type = (bytes, unicode_type)
-
-
-class LatexFormatter(BaseFormatter):
- """A LaTeX formatter.
-
- To define the callables that compute the LaTeX representation of your
- objects, define a :meth:`_repr_latex_` method or use the :meth:`for_type`
- or :meth:`for_type_by_name` methods to register functions that handle
- this.
-
- The return value of this formatter should be a valid LaTeX equation,
- enclosed in either ```$```, ```$$``` or another LaTeX equation
- environment.
- """
- format_type = Unicode('text/latex')
-
- print_method = ObjectName('_repr_latex_')
-
-
-class JSONFormatter(BaseFormatter):
- """A JSON string formatter.
-
- To define the callables that compute the JSONable representation of
- your objects, define a :meth:`_repr_json_` method or use the :meth:`for_type`
- or :meth:`for_type_by_name` methods to register functions that handle
- this.
-
- The return value of this formatter should be a JSONable list or dict.
- JSON scalars (None, number, string) are not allowed, only dict or list containers.
- """
- format_type = Unicode('application/json')
- _return_type = (list, dict)
-
- print_method = ObjectName('_repr_json_')
-
- def _check_return(self, r, obj):
- """Check that a return value is appropriate
-
- Return the value if so, None otherwise, warning if invalid.
- """
- if r is None:
- return
- md = None
- if isinstance(r, tuple):
- # unpack data, metadata tuple for type checking on first element
- r, md = r
-
- # handle deprecated JSON-as-string form from IPython < 3
- if isinstance(r, string_types):
- warnings.warn("JSON expects JSONable list/dict containers, not JSON strings",
- FormatterWarning)
- r = json.loads(r)
-
- if md is not None:
- # put the tuple back together
- r = (r, md)
- return super(JSONFormatter, self)._check_return(r, obj)
-
-
-class JavascriptFormatter(BaseFormatter):
- """A Javascript formatter.
-
- To define the callables that compute the Javascript representation of
- your objects, define a :meth:`_repr_javascript_` method or use the
- :meth:`for_type` or :meth:`for_type_by_name` methods to register functions
- that handle this.
-
- The return value of this formatter should be valid Javascript code and
- should *not* be enclosed in ```<script>``` tags.
- """
- format_type = Unicode('application/javascript')
-
- print_method = ObjectName('_repr_javascript_')
-
-
-class PDFFormatter(BaseFormatter):
- """A PDF formatter.
-
- To define the callables that compute the PDF representation of your
- objects, define a :meth:`_repr_pdf_` method or use the :meth:`for_type`
- or :meth:`for_type_by_name` methods to register functions that handle
- this.
-
- The return value of this formatter should be raw PDF data, *not*
- base64 encoded.
- """
- format_type = Unicode('application/pdf')
-
- print_method = ObjectName('_repr_pdf_')
-
- _return_type = (bytes, unicode_type)
-
-class IPythonDisplayFormatter(BaseFormatter):
+ def _deferred_printers_default(self):
+ return pretty._deferred_type_pprinters.copy()
+
+ #### FormatterABC interface ####
+
+ @catch_format_error
+ def __call__(self, obj):
+ """Compute the pretty representation of the object."""
+ if not self.pprint:
+ return repr(obj)
+ else:
+ # handle str and unicode on Python 2
+ # io.StringIO only accepts unicode,
+ # cStringIO doesn't handle unicode on py2,
+ # StringIO allows str, unicode but only ascii str
+ stream = pretty.CUnicodeIO()
+ printer = pretty.RepresentationPrinter(stream, self.verbose,
+ self.max_width, self.newline,
+ max_seq_length=self.max_seq_length,
+ singleton_pprinters=self.singleton_printers,
+ type_pprinters=self.type_printers,
+ deferred_pprinters=self.deferred_printers)
+ printer.pretty(obj)
+ printer.flush()
+ return stream.getvalue()
+
+
+class HTMLFormatter(BaseFormatter):
+ """An HTML formatter.
+
+ To define the callables that compute the HTML representation of your
+ objects, define a :meth:`_repr_html_` method or use the :meth:`for_type`
+ or :meth:`for_type_by_name` methods to register functions that handle
+ this.
+
+ The return value of this formatter should be a valid HTML snippet that
+ could be injected into an existing DOM. It should *not* include the
+    ``<html>`` or ``<body>`` tags.
+ """
+ format_type = Unicode('text/html')
+
+ print_method = ObjectName('_repr_html_')
+
+
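HTMLFormatter above and the per-MIME formatter classes that follow all use the same ``_repr_*_`` convention, so a single hedged example covers them; the ``Money`` class is hypothetical and only the method name comes from the docstrings::

    class Money(object):
        def __init__(self, amount):
            self.amount = amount

        def _repr_html_(self):
            # picked up by HTMLFormatter (print_method = '_repr_html_')
            return '<b>$%.2f</b>' % self.amount

In a front-end that renders ``text/html``, displaying a ``Money`` instance as the result of a cell will use this HTML; plain terminals fall back to ``text/plain``.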
+class MarkdownFormatter(BaseFormatter):
+ """A Markdown formatter.
+
+ To define the callables that compute the Markdown representation of your
+ objects, define a :meth:`_repr_markdown_` method or use the :meth:`for_type`
+ or :meth:`for_type_by_name` methods to register functions that handle
+ this.
+
+    The return value of this formatter should be valid Markdown.
+ """
+ format_type = Unicode('text/markdown')
+
+ print_method = ObjectName('_repr_markdown_')
+
+class SVGFormatter(BaseFormatter):
+ """An SVG formatter.
+
+ To define the callables that compute the SVG representation of your
+ objects, define a :meth:`_repr_svg_` method or use the :meth:`for_type`
+ or :meth:`for_type_by_name` methods to register functions that handle
+ this.
+
+ The return value of this formatter should be valid SVG enclosed in
+    ``<svg>`` tags that could be injected into an existing DOM. It should
+    *not* include the ``<html>`` or ``<body>`` tags.
+ """
+ format_type = Unicode('image/svg+xml')
+
+ print_method = ObjectName('_repr_svg_')
+
+
+class PNGFormatter(BaseFormatter):
+ """A PNG formatter.
+
+ To define the callables that compute the PNG representation of your
+ objects, define a :meth:`_repr_png_` method or use the :meth:`for_type`
+ or :meth:`for_type_by_name` methods to register functions that handle
+ this.
+
+ The return value of this formatter should be raw PNG data, *not*
+ base64 encoded.
+ """
+ format_type = Unicode('image/png')
+
+ print_method = ObjectName('_repr_png_')
+
+ _return_type = (bytes, unicode_type)
+
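A minimal sketch of the bytes-returning protocol (hypothetical class; the PNG payload is whatever bytes the caller already has)::

    class RenderedChart(object):
        """Hypothetical wrapper around an already-rendered PNG image."""
        def __init__(self, png_bytes):
            self._png = png_bytes

        def _repr_png_(self):
            # raw PNG bytes, not base64 -- the display machinery handles
            # encoding for transport itself
            return self._png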
+
+class JPEGFormatter(BaseFormatter):
+ """A JPEG formatter.
+
+ To define the callables that compute the JPEG representation of your
+ objects, define a :meth:`_repr_jpeg_` method or use the :meth:`for_type`
+ or :meth:`for_type_by_name` methods to register functions that handle
+ this.
+
+ The return value of this formatter should be raw JPEG data, *not*
+ base64 encoded.
+ """
+ format_type = Unicode('image/jpeg')
+
+ print_method = ObjectName('_repr_jpeg_')
+
+ _return_type = (bytes, unicode_type)
+
+
+class LatexFormatter(BaseFormatter):
+ """A LaTeX formatter.
+
+ To define the callables that compute the LaTeX representation of your
+ objects, define a :meth:`_repr_latex_` method or use the :meth:`for_type`
+ or :meth:`for_type_by_name` methods to register functions that handle
+ this.
+
+ The return value of this formatter should be a valid LaTeX equation,
+    enclosed in either ``$``, ``$$``, or another LaTeX equation
+ environment.
+ """
+ format_type = Unicode('text/latex')
+
+ print_method = ObjectName('_repr_latex_')
+
+
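For example (a sketch with a hypothetical class), a LaTeX representation is just a string wrapped in an equation delimiter::

    class Ratio(object):
        """Hypothetical value displayed as a LaTeX fraction."""
        def __init__(self, num, den):
            self.num, self.den = num, den

        def _repr_latex_(self):
            return r"$\frac{%d}{%d}$" % (self.num, self.den)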
+class JSONFormatter(BaseFormatter):
+ """A JSON string formatter.
+
+ To define the callables that compute the JSONable representation of
+ your objects, define a :meth:`_repr_json_` method or use the :meth:`for_type`
+ or :meth:`for_type_by_name` methods to register functions that handle
+ this.
+
+ The return value of this formatter should be a JSONable list or dict.
+ JSON scalars (None, number, string) are not allowed, only dict or list containers.
+ """
+ format_type = Unicode('application/json')
+ _return_type = (list, dict)
+
+ print_method = ObjectName('_repr_json_')
+
+ def _check_return(self, r, obj):
+ """Check that a return value is appropriate
+
+ Return the value if so, None otherwise, warning if invalid.
+ """
+ if r is None:
+ return
+ md = None
+ if isinstance(r, tuple):
+ # unpack data, metadata tuple for type checking on first element
+ r, md = r
+
+ # handle deprecated JSON-as-string form from IPython < 3
+ if isinstance(r, string_types):
+ warnings.warn("JSON expects JSONable list/dict containers, not JSON strings",
+ FormatterWarning)
+ r = json.loads(r)
+
+ if md is not None:
+ # put the tuple back together
+ r = (r, md)
+ return super(JSONFormatter, self)._check_return(r, obj)
+
+
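A sketch of the JSON protocol described above (hypothetical class), showing both the container requirement and the optional ``(data, metadata)`` tuple::

    class Point(object):
        """Hypothetical object with a JSONable representation."""
        def __init__(self, x, y):
            self.x, self.y = x, y

        def _repr_json_(self):
            # return a dict/list container, optionally with metadata;
            # returning a JSON string is deprecated and triggers a
            # FormatterWarning
            return {"x": self.x, "y": self.y}, {"expanded": False}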
+class JavascriptFormatter(BaseFormatter):
+ """A Javascript formatter.
+
+ To define the callables that compute the Javascript representation of
+ your objects, define a :meth:`_repr_javascript_` method or use the
+ :meth:`for_type` or :meth:`for_type_by_name` methods to register functions
+ that handle this.
+
+ The return value of this formatter should be valid Javascript code and
+ should *not* be enclosed in ```<script>``` tags.
+ """
+ format_type = Unicode('application/javascript')
+
+ print_method = ObjectName('_repr_javascript_')
+
+
+class PDFFormatter(BaseFormatter):
+ """A PDF formatter.
+
+ To define the callables that compute the PDF representation of your
+ objects, define a :meth:`_repr_pdf_` method or use the :meth:`for_type`
+ or :meth:`for_type_by_name` methods to register functions that handle
+ this.
+
+ The return value of this formatter should be raw PDF data, *not*
+ base64 encoded.
+ """
+ format_type = Unicode('application/pdf')
+
+ print_method = ObjectName('_repr_pdf_')
+
+ _return_type = (bytes, unicode_type)
+
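The ``for_type`` / ``for_type_by_name`` route mentioned in the docstrings above registers a formatter for a type you cannot modify. A sketch, assuming it runs inside an IPython session where ``get_ipython()`` is available::

    ip = get_ipython()
    html_formatter = ip.display_formatter.formatters['text/html']

    # register by dotted module/type name so nothing is imported eagerly
    html_formatter.for_type_by_name(
        'decimal', 'Decimal', lambda d: '<code>%s</code>' % d)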
+class IPythonDisplayFormatter(BaseFormatter):
"""An escape-hatch Formatter for objects that know how to display themselves.
-
- To define the callables that compute the representation of your
- objects, define a :meth:`_ipython_display_` method or use the :meth:`for_type`
- or :meth:`for_type_by_name` methods to register functions that handle
- this. Unlike mime-type displays, this method should not return anything,
- instead calling any appropriate display methods itself.
-
- This display formatter has highest priority.
- If it fires, no other display formatter will be called.
+
+ To define the callables that compute the representation of your
+ objects, define a :meth:`_ipython_display_` method or use the :meth:`for_type`
+ or :meth:`for_type_by_name` methods to register functions that handle
+ this. Unlike mime-type displays, this method should not return anything,
+ instead calling any appropriate display methods itself.
+
+ This display formatter has highest priority.
+ If it fires, no other display formatter will be called.
Prior to IPython 6.1, `_ipython_display_` was the only way to display custom mime-types
without registering a new Formatter.
@@ -893,29 +893,29 @@ class IPythonDisplayFormatter(BaseFormatter):
IPython 6.1 introduces `_repr_mimebundle_` for displaying custom mime-types,
so `_ipython_display_` should only be used for objects that require unusual
display patterns, such as multiple display calls.
- """
- print_method = ObjectName('_ipython_display_')
- _return_type = (type(None), bool)
-
- @catch_format_error
- def __call__(self, obj):
- """Compute the format for an object."""
- if self.enabled:
- # lookup registered printer
- try:
- printer = self.lookup(obj)
- except KeyError:
- pass
- else:
- printer(obj)
- return True
- # Finally look for special method names
+ """
+ print_method = ObjectName('_ipython_display_')
+ _return_type = (type(None), bool)
+
+ @catch_format_error
+ def __call__(self, obj):
+ """Compute the format for an object."""
+ if self.enabled:
+ # lookup registered printer
+ try:
+ printer = self.lookup(obj)
+ except KeyError:
+ pass
+ else:
+ printer(obj)
+ return True
+ # Finally look for special method names
method = get_real_method(obj, self.print_method)
- if method is not None:
- method()
- return True
-
-
+ if method is not None:
+ method()
+ return True
+
+
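A sketch of the escape hatch (hypothetical class): ``_ipython_display_`` returns nothing and performs the display calls itself, which suits objects that need several outputs at once::

    from IPython.display import display, HTML, Markdown

    class Report(object):
        """Hypothetical object that emits several outputs when shown."""
        def __init__(self, title):
            self.title = title

        def _ipython_display_(self):
            display(Markdown("### %s" % self.title))
            display(HTML("<hr/>"))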
class MimeBundleFormatter(BaseFormatter):
"""A Formatter for arbitrary mime-types.
@@ -973,65 +973,65 @@ class MimeBundleFormatter(BaseFormatter):
return None
-FormatterABC.register(BaseFormatter)
-FormatterABC.register(PlainTextFormatter)
-FormatterABC.register(HTMLFormatter)
-FormatterABC.register(MarkdownFormatter)
-FormatterABC.register(SVGFormatter)
-FormatterABC.register(PNGFormatter)
-FormatterABC.register(PDFFormatter)
-FormatterABC.register(JPEGFormatter)
-FormatterABC.register(LatexFormatter)
-FormatterABC.register(JSONFormatter)
-FormatterABC.register(JavascriptFormatter)
-FormatterABC.register(IPythonDisplayFormatter)
+FormatterABC.register(BaseFormatter)
+FormatterABC.register(PlainTextFormatter)
+FormatterABC.register(HTMLFormatter)
+FormatterABC.register(MarkdownFormatter)
+FormatterABC.register(SVGFormatter)
+FormatterABC.register(PNGFormatter)
+FormatterABC.register(PDFFormatter)
+FormatterABC.register(JPEGFormatter)
+FormatterABC.register(LatexFormatter)
+FormatterABC.register(JSONFormatter)
+FormatterABC.register(JavascriptFormatter)
+FormatterABC.register(IPythonDisplayFormatter)
FormatterABC.register(MimeBundleFormatter)
-
-
-def format_display_data(obj, include=None, exclude=None):
- """Return a format data dict for an object.
-
- By default all format types will be computed.
-
- The following MIME types are currently implemented:
-
- * text/plain
- * text/html
- * text/markdown
- * text/latex
- * application/json
- * application/javascript
- * application/pdf
- * image/png
- * image/jpeg
- * image/svg+xml
-
- Parameters
- ----------
- obj : object
- The Python object whose format data will be computed.
-
- Returns
- -------
- format_dict : dict
-        A dictionary of key/value pairs, one for each format that was
-        generated for the object. The keys are the format types, which
-        will usually be MIME type strings, and the values are JSON'able
-        data structures containing the raw data for the representation in
-        that format.
- include : list or tuple, optional
- A list of format type strings (MIME types) to include in the
- format data dict. If this is set *only* the format types included
- in this list will be computed.
- exclude : list or tuple, optional
-        A list of format type strings (MIME types) to exclude from the format
- data dict. If this is set all format types will be computed,
- except for those included in this argument.
- """
- from IPython.core.interactiveshell import InteractiveShell
-
+
+
+def format_display_data(obj, include=None, exclude=None):
+ """Return a format data dict for an object.
+
+ By default all format types will be computed.
+
+ The following MIME types are currently implemented:
+
+ * text/plain
+ * text/html
+ * text/markdown
+ * text/latex
+ * application/json
+ * application/javascript
+ * application/pdf
+ * image/png
+ * image/jpeg
+ * image/svg+xml
+
+ Parameters
+ ----------
+ obj : object
+ The Python object whose format data will be computed.
+
+ Returns
+ -------
+ format_dict : dict
+        A dictionary of key/value pairs, one for each format that was
+        generated for the object. The keys are the format types, which
+        will usually be MIME type strings, and the values are JSON'able
+        data structures containing the raw data for the representation in
+        that format.
+ include : list or tuple, optional
+ A list of format type strings (MIME types) to include in the
+ format data dict. If this is set *only* the format types included
+ in this list will be computed.
+ exclude : list or tuple, optional
+        A list of format type strings (MIME types) to exclude from the format
+ data dict. If this is set all format types will be computed,
+ except for those included in this argument.
+ """
+ from IPython.core.interactiveshell import InteractiveShell
+
return InteractiveShell.instance().display_formatter.format(
- obj,
- include,
- exclude
- )
+ obj,
+ include,
+ exclude
+ )
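As a usage sketch (only meaningful inside a running IPython session; the formatted literal is arbitrary), the call delegates to the shell's display formatter, which hands back the data dict together with a metadata dict::

    from IPython.core.formatters import format_display_data

    data, metadata = format_display_data(3.14, include=['text/plain'])
    # data     -> {'text/plain': '3.14'}
    # metadata -> {}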
diff --git a/contrib/python/ipython/py2/IPython/core/getipython.py b/contrib/python/ipython/py2/IPython/core/getipython.py
index 9a127418ad..e6d8a4c91d 100644
--- a/contrib/python/ipython/py2/IPython/core/getipython.py
+++ b/contrib/python/ipython/py2/IPython/core/getipython.py
@@ -1,24 +1,24 @@
-# encoding: utf-8
-"""Simple function to call to get the current InteractiveShell instance
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2013 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Classes and functions
-#-----------------------------------------------------------------------------
-
-
-def get_ipython():
- """Get the global InteractiveShell instance.
-
- Returns None if no InteractiveShell instance is registered.
- """
- from IPython.core.interactiveshell import InteractiveShell
- if InteractiveShell.initialized():
- return InteractiveShell.instance()
+# encoding: utf-8
+"""Simple function to call to get the current InteractiveShell instance
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2013 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Classes and functions
+#-----------------------------------------------------------------------------
+
+
+def get_ipython():
+ """Get the global InteractiveShell instance.
+
+ Returns None if no InteractiveShell instance is registered.
+ """
+ from IPython.core.interactiveshell import InteractiveShell
+ if InteractiveShell.initialized():
+ return InteractiveShell.instance()
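A sketch of the defensive pattern this function enables in library code (the magic invoked below is only an example)::

    from IPython import get_ipython

    ip = get_ipython()
    if ip is not None:
        ip.run_line_magic('who', '')   # we are inside IPython
    else:
        print("not running under IPython; fall back to plain behaviour")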
diff --git a/contrib/python/ipython/py2/IPython/core/history.py b/contrib/python/ipython/py2/IPython/core/history.py
index df5965695b..2e7fdbc845 100644
--- a/contrib/python/ipython/py2/IPython/core/history.py
+++ b/contrib/python/ipython/py2/IPython/core/history.py
@@ -1,92 +1,92 @@
-""" History related magics and functionality """
-
+""" History related magics and functionality """
+
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
-from __future__ import print_function
-
-import atexit
-import datetime
-import os
-import re
-try:
- import sqlite3
-except ImportError:
- try:
- from pysqlite2 import dbapi2 as sqlite3
- except ImportError:
- sqlite3 = None
-import threading
-
+from __future__ import print_function
+
+import atexit
+import datetime
+import os
+import re
+try:
+ import sqlite3
+except ImportError:
+ try:
+ from pysqlite2 import dbapi2 as sqlite3
+ except ImportError:
+ sqlite3 = None
+import threading
+
from traitlets.config.configurable import LoggingConfigurable
-from decorator import decorator
-from IPython.utils.decorators import undoc
+from decorator import decorator
+from IPython.utils.decorators import undoc
from IPython.paths import locate_profile
-from IPython.utils import py3compat
-from traitlets import (
- Any, Bool, Dict, Instance, Integer, List, Unicode, TraitError,
+from IPython.utils import py3compat
+from traitlets import (
+ Any, Bool, Dict, Instance, Integer, List, Unicode, TraitError,
default, observe,
-)
+)
from warnings import warn
-
-#-----------------------------------------------------------------------------
-# Classes and functions
-#-----------------------------------------------------------------------------
-
-@undoc
-class DummyDB(object):
- """Dummy DB that will act as a black hole for history.
-
- Only used in the absence of sqlite"""
- def execute(*args, **kwargs):
- return []
-
- def commit(self, *args, **kwargs):
- pass
-
- def __enter__(self, *args, **kwargs):
- pass
-
- def __exit__(self, *args, **kwargs):
- pass
-
-
-@decorator
-def needs_sqlite(f, self, *a, **kw):
- """Decorator: return an empty list in the absence of sqlite."""
- if sqlite3 is None or not self.enabled:
- return []
- else:
- return f(self, *a, **kw)
-
-
-if sqlite3 is not None:
- DatabaseError = sqlite3.DatabaseError
- OperationalError = sqlite3.OperationalError
-else:
- @undoc
- class DatabaseError(Exception):
- "Dummy exception when sqlite could not be imported. Should never occur."
-
- @undoc
- class OperationalError(Exception):
- "Dummy exception when sqlite could not be imported. Should never occur."
-
+
+#-----------------------------------------------------------------------------
+# Classes and functions
+#-----------------------------------------------------------------------------
+
+@undoc
+class DummyDB(object):
+ """Dummy DB that will act as a black hole for history.
+
+ Only used in the absence of sqlite"""
+ def execute(*args, **kwargs):
+ return []
+
+ def commit(self, *args, **kwargs):
+ pass
+
+ def __enter__(self, *args, **kwargs):
+ pass
+
+ def __exit__(self, *args, **kwargs):
+ pass
+
+
+@decorator
+def needs_sqlite(f, self, *a, **kw):
+ """Decorator: return an empty list in the absence of sqlite."""
+ if sqlite3 is None or not self.enabled:
+ return []
+ else:
+ return f(self, *a, **kw)
+
+
+if sqlite3 is not None:
+ DatabaseError = sqlite3.DatabaseError
+ OperationalError = sqlite3.OperationalError
+else:
+ @undoc
+ class DatabaseError(Exception):
+ "Dummy exception when sqlite could not be imported. Should never occur."
+
+ @undoc
+ class OperationalError(Exception):
+ "Dummy exception when sqlite could not be imported. Should never occur."
+
# use 16kB as threshold for whether a corrupt history db should be saved
# that should be at least 100 entries or so
_SAVE_DB_SIZE = 16384
-@decorator
-def catch_corrupt_db(f, self, *a, **kw):
- """A decorator which wraps HistoryAccessor method calls to catch errors from
- a corrupt SQLite database, move the old database out of the way, and create
- a new one.
+@decorator
+def catch_corrupt_db(f, self, *a, **kw):
+ """A decorator which wraps HistoryAccessor method calls to catch errors from
+ a corrupt SQLite database, move the old database out of the way, and create
+ a new one.
We avoid clobbering larger databases because this may be triggered due to filesystem issues,
not just a corrupt file.
- """
- try:
- return f(self, *a, **kw)
+ """
+ try:
+ return f(self, *a, **kw)
except (DatabaseError, OperationalError) as e:
self._corrupt_db_counter += 1
self.log.error("Failed to open SQLite history %s (%s).", self.hist_file, e)
@@ -114,799 +114,799 @@ def catch_corrupt_db(f, self, *a, **kw):
newpath = base + '-corrupt' + ext
os.rename(self.hist_file, newpath)
self.log.error("History file was moved to %s and a new file created.", newpath)
- self.init_db()
- return []
- else:
+ self.init_db()
+ return []
+ else:
# Failed with :memory:, something serious is wrong
- raise
-
+ raise
+
class HistoryAccessorBase(LoggingConfigurable):
- """An abstract class for History Accessors """
-
- def get_tail(self, n=10, raw=True, output=False, include_latest=False):
- raise NotImplementedError
-
- def search(self, pattern="*", raw=True, search_raw=True,
- output=False, n=None, unique=False):
- raise NotImplementedError
-
- def get_range(self, session, start=1, stop=None, raw=True,output=False):
- raise NotImplementedError
-
- def get_range_by_str(self, rangestr, raw=True, output=False):
- raise NotImplementedError
-
-
-class HistoryAccessor(HistoryAccessorBase):
- """Access the history database without adding to it.
-
- This is intended for use by standalone history tools. IPython shells use
- HistoryManager, below, which is a subclass of this."""
-
+ """An abstract class for History Accessors """
+
+ def get_tail(self, n=10, raw=True, output=False, include_latest=False):
+ raise NotImplementedError
+
+ def search(self, pattern="*", raw=True, search_raw=True,
+ output=False, n=None, unique=False):
+ raise NotImplementedError
+
+ def get_range(self, session, start=1, stop=None, raw=True,output=False):
+ raise NotImplementedError
+
+ def get_range_by_str(self, rangestr, raw=True, output=False):
+ raise NotImplementedError
+
+
+class HistoryAccessor(HistoryAccessorBase):
+ """Access the history database without adding to it.
+
+ This is intended for use by standalone history tools. IPython shells use
+ HistoryManager, below, which is a subclass of this."""
+
# counter for init_db retries, so we don't keep trying over and over
_corrupt_db_counter = 0
# after two failures, fallback on :memory:
_corrupt_db_limit = 2
- # String holding the path to the history file
+ # String holding the path to the history file
hist_file = Unicode(
- help="""Path to file to use for SQLite history database.
-
- By default, IPython will put the history database in the IPython
- profile directory. If you would rather share one history among
- profiles, you can set this value in each, so that they are consistent.
-
- Due to an issue with fcntl, SQLite is known to misbehave on some NFS
- mounts. If you see IPython hanging, try setting this to something on a
- local disk, e.g::
-
- ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite
+ help="""Path to file to use for SQLite history database.
+
+ By default, IPython will put the history database in the IPython
+ profile directory. If you would rather share one history among
+ profiles, you can set this value in each, so that they are consistent.
+
+ Due to an issue with fcntl, SQLite is known to misbehave on some NFS
+ mounts. If you see IPython hanging, try setting this to something on a
+ local disk, e.g::
+
+ ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite
you can also use the specific value `:memory:` (including the colon
         at both ends but not the backticks) to avoid creating a history file.
-
+
""").tag(config=True)
-
+
enabled = Bool(True,
- help="""enable the SQLite history
-
- set enabled=False to disable the SQLite history,
- in which case there will be no stored history, no SQLite connection,
- and no background saving thread. This may be necessary in some
- threaded environments where IPython is embedded.
- """
+ help="""enable the SQLite history
+
+ set enabled=False to disable the SQLite history,
+ in which case there will be no stored history, no SQLite connection,
+ and no background saving thread. This may be necessary in some
+ threaded environments where IPython is embedded.
+ """
).tag(config=True)
-
+
connection_options = Dict(
- help="""Options for configuring the SQLite connection
-
- These options are passed as keyword args to sqlite3.connect
-        when establishing database connections.
- """
+ help="""Options for configuring the SQLite connection
+
+ These options are passed as keyword args to sqlite3.connect
+        when establishing database connections.
+ """
).tag(config=True)
-
- # The SQLite database
- db = Any()
+
+ # The SQLite database
+ db = Any()
@observe('db')
def _db_changed(self, change):
- """validate the db, since it can be an Instance of two different types"""
+ """validate the db, since it can be an Instance of two different types"""
new = change['new']
- connection_types = (DummyDB,)
- if sqlite3 is not None:
- connection_types = (DummyDB, sqlite3.Connection)
- if not isinstance(new, connection_types):
- msg = "%s.db must be sqlite3 Connection or DummyDB, not %r" % \
- (self.__class__.__name__, new)
- raise TraitError(msg)
-
- def __init__(self, profile='default', hist_file=u'', **traits):
- """Create a new history accessor.
-
- Parameters
- ----------
- profile : str
- The name of the profile from which to open history.
- hist_file : str
- Path to an SQLite history database stored by IPython. If specified,
- hist_file overrides profile.
- config : :class:`~traitlets.config.loader.Config`
- Config object. hist_file can also be set through this.
- """
- # We need a pointer back to the shell for various tasks.
- super(HistoryAccessor, self).__init__(**traits)
- # defer setting hist_file from kwarg until after init,
- # otherwise the default kwarg value would clobber any value
- # set by config
- if hist_file:
- self.hist_file = hist_file
-
- if self.hist_file == u'':
- # No one has set the hist_file, yet.
- self.hist_file = self._get_hist_file_name(profile)
-
- if sqlite3 is None and self.enabled:
- warn("IPython History requires SQLite, your history will not be saved")
- self.enabled = False
-
- self.init_db()
-
- def _get_hist_file_name(self, profile='default'):
- """Find the history file for the given profile name.
-
- This is overridden by the HistoryManager subclass, to use the shell's
- active profile.
-
- Parameters
- ----------
- profile : str
- The name of a profile which has a history file.
- """
- return os.path.join(locate_profile(profile), 'history.sqlite')
-
- @catch_corrupt_db
- def init_db(self):
- """Connect to the database, and create tables if necessary."""
- if not self.enabled:
- self.db = DummyDB()
- return
-
- # use detect_types so that timestamps return datetime objects
- kwargs = dict(detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
- kwargs.update(self.connection_options)
- self.db = sqlite3.connect(self.hist_file, **kwargs)
- self.db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer
- primary key autoincrement, start timestamp,
- end timestamp, num_cmds integer, remark text)""")
- self.db.execute("""CREATE TABLE IF NOT EXISTS history
- (session integer, line integer, source text, source_raw text,
- PRIMARY KEY (session, line))""")
- # Output history is optional, but ensure the table's there so it can be
- # enabled later.
- self.db.execute("""CREATE TABLE IF NOT EXISTS output_history
- (session integer, line integer, output text,
- PRIMARY KEY (session, line))""")
- self.db.commit()
+ connection_types = (DummyDB,)
+ if sqlite3 is not None:
+ connection_types = (DummyDB, sqlite3.Connection)
+ if not isinstance(new, connection_types):
+ msg = "%s.db must be sqlite3 Connection or DummyDB, not %r" % \
+ (self.__class__.__name__, new)
+ raise TraitError(msg)
+
+ def __init__(self, profile='default', hist_file=u'', **traits):
+ """Create a new history accessor.
+
+ Parameters
+ ----------
+ profile : str
+ The name of the profile from which to open history.
+ hist_file : str
+ Path to an SQLite history database stored by IPython. If specified,
+ hist_file overrides profile.
+ config : :class:`~traitlets.config.loader.Config`
+ Config object. hist_file can also be set through this.
+ """
+ # We need a pointer back to the shell for various tasks.
+ super(HistoryAccessor, self).__init__(**traits)
+ # defer setting hist_file from kwarg until after init,
+ # otherwise the default kwarg value would clobber any value
+ # set by config
+ if hist_file:
+ self.hist_file = hist_file
+
+ if self.hist_file == u'':
+ # No one has set the hist_file, yet.
+ self.hist_file = self._get_hist_file_name(profile)
+
+ if sqlite3 is None and self.enabled:
+ warn("IPython History requires SQLite, your history will not be saved")
+ self.enabled = False
+
+ self.init_db()
+
+ def _get_hist_file_name(self, profile='default'):
+ """Find the history file for the given profile name.
+
+ This is overridden by the HistoryManager subclass, to use the shell's
+ active profile.
+
+ Parameters
+ ----------
+ profile : str
+ The name of a profile which has a history file.
+ """
+ return os.path.join(locate_profile(profile), 'history.sqlite')
+
+ @catch_corrupt_db
+ def init_db(self):
+ """Connect to the database, and create tables if necessary."""
+ if not self.enabled:
+ self.db = DummyDB()
+ return
+
+ # use detect_types so that timestamps return datetime objects
+ kwargs = dict(detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
+ kwargs.update(self.connection_options)
+ self.db = sqlite3.connect(self.hist_file, **kwargs)
+ self.db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer
+ primary key autoincrement, start timestamp,
+ end timestamp, num_cmds integer, remark text)""")
+ self.db.execute("""CREATE TABLE IF NOT EXISTS history
+ (session integer, line integer, source text, source_raw text,
+ PRIMARY KEY (session, line))""")
+ # Output history is optional, but ensure the table's there so it can be
+ # enabled later.
+ self.db.execute("""CREATE TABLE IF NOT EXISTS output_history
+ (session integer, line integer, output text,
+ PRIMARY KEY (session, line))""")
+ self.db.commit()
# success! reset corrupt db count
self._corrupt_db_counter = 0
-
- def writeout_cache(self):
- """Overridden by HistoryManager to dump the cache before certain
- database lookups."""
- pass
-
- ## -------------------------------
- ## Methods for retrieving history:
- ## -------------------------------
- def _run_sql(self, sql, params, raw=True, output=False):
- """Prepares and runs an SQL query for the history database.
-
- Parameters
- ----------
- sql : str
- Any filtering expressions to go after SELECT ... FROM ...
- params : tuple
- Parameters passed to the SQL query (to replace "?")
- raw, output : bool
- See :meth:`get_range`
-
- Returns
- -------
- Tuples as :meth:`get_range`
- """
- toget = 'source_raw' if raw else 'source'
- sqlfrom = "history"
- if output:
- sqlfrom = "history LEFT JOIN output_history USING (session, line)"
- toget = "history.%s, output_history.output" % toget
- cur = self.db.execute("SELECT session, line, %s FROM %s " %\
- (toget, sqlfrom) + sql, params)
- if output: # Regroup into 3-tuples, and parse JSON
+
+ def writeout_cache(self):
+ """Overridden by HistoryManager to dump the cache before certain
+ database lookups."""
+ pass
+
+ ## -------------------------------
+ ## Methods for retrieving history:
+ ## -------------------------------
+ def _run_sql(self, sql, params, raw=True, output=False):
+ """Prepares and runs an SQL query for the history database.
+
+ Parameters
+ ----------
+ sql : str
+ Any filtering expressions to go after SELECT ... FROM ...
+ params : tuple
+ Parameters passed to the SQL query (to replace "?")
+ raw, output : bool
+ See :meth:`get_range`
+
+ Returns
+ -------
+ Tuples as :meth:`get_range`
+ """
+ toget = 'source_raw' if raw else 'source'
+ sqlfrom = "history"
+ if output:
+ sqlfrom = "history LEFT JOIN output_history USING (session, line)"
+ toget = "history.%s, output_history.output" % toget
+ cur = self.db.execute("SELECT session, line, %s FROM %s " %\
+ (toget, sqlfrom) + sql, params)
+ if output: # Regroup into 3-tuples, and parse JSON
return ((ses, lin, (py3compat.cast_unicode_py2(inp), py3compat.cast_unicode_py2(out)))
for ses, lin, inp, out in cur)
- return cur
-
- @needs_sqlite
- @catch_corrupt_db
- def get_session_info(self, session):
- """Get info about a session.
-
- Parameters
- ----------
-
- session : int
- Session number to retrieve.
-
- Returns
- -------
-
- session_id : int
- Session ID number
- start : datetime
- Timestamp for the start of the session.
- end : datetime
- Timestamp for the end of the session, or None if IPython crashed.
- num_cmds : int
- Number of commands run, or None if IPython crashed.
- remark : unicode
- A manually set description.
- """
- query = "SELECT * from sessions where session == ?"
- return self.db.execute(query, (session,)).fetchone()
-
- @catch_corrupt_db
- def get_last_session_id(self):
- """Get the last session ID currently in the database.
-
- Within IPython, this should be the same as the value stored in
- :attr:`HistoryManager.session_number`.
- """
- for record in self.get_tail(n=1, include_latest=True):
- return record[0]
-
- @catch_corrupt_db
- def get_tail(self, n=10, raw=True, output=False, include_latest=False):
- """Get the last n lines from the history database.
-
- Parameters
- ----------
- n : int
- The number of lines to get
- raw, output : bool
- See :meth:`get_range`
- include_latest : bool
- If False (default), n+1 lines are fetched, and the latest one
-            is discarded. This is intended for use where the function is
-            called by a user command, whose own entry should not be returned.
-
- Returns
- -------
- Tuples as :meth:`get_range`
- """
- self.writeout_cache()
- if not include_latest:
- n += 1
- cur = self._run_sql("ORDER BY session DESC, line DESC LIMIT ?",
- (n,), raw=raw, output=output)
- if not include_latest:
- return reversed(list(cur)[1:])
- return reversed(list(cur))
-
- @catch_corrupt_db
- def search(self, pattern="*", raw=True, search_raw=True,
- output=False, n=None, unique=False):
- """Search the database using unix glob-style matching (wildcards
- * and ?).
-
- Parameters
- ----------
- pattern : str
- The wildcarded pattern to match when searching
- search_raw : bool
- If True, search the raw input, otherwise, the parsed input
- raw, output : bool
- See :meth:`get_range`
- n : None or int
- If an integer is given, it defines the limit of
- returned entries.
- unique : bool
- When it is true, return only unique entries.
-
- Returns
- -------
- Tuples as :meth:`get_range`
- """
- tosearch = "source_raw" if search_raw else "source"
- if output:
- tosearch = "history." + tosearch
- self.writeout_cache()
- sqlform = "WHERE %s GLOB ?" % tosearch
- params = (pattern,)
- if unique:
- sqlform += ' GROUP BY {0}'.format(tosearch)
- if n is not None:
- sqlform += " ORDER BY session DESC, line DESC LIMIT ?"
- params += (n,)
- elif unique:
- sqlform += " ORDER BY session, line"
- cur = self._run_sql(sqlform, params, raw=raw, output=output)
- if n is not None:
- return reversed(list(cur))
- return cur
-
- @catch_corrupt_db
- def get_range(self, session, start=1, stop=None, raw=True,output=False):
- """Retrieve input by session.
-
- Parameters
- ----------
- session : int
- Session number to retrieve.
- start : int
- First line to retrieve.
- stop : int
- End of line range (excluded from output itself). If None, retrieve
- to the end of the session.
- raw : bool
- If True, return untranslated input
- output : bool
- If True, attempt to include output. This will be 'real' Python
- objects for the current session, or text reprs from previous
- sessions if db_log_output was enabled at the time. Where no output
- is found, None is used.
-
- Returns
- -------
- entries
- An iterator over the desired lines. Each line is a 3-tuple, either
- (session, line, input) if output is False, or
- (session, line, (input, output)) if output is True.
- """
- if stop:
- lineclause = "line >= ? AND line < ?"
- params = (session, start, stop)
- else:
- lineclause = "line>=?"
- params = (session, start)
-
- return self._run_sql("WHERE session==? AND %s" % lineclause,
- params, raw=raw, output=output)
-
- def get_range_by_str(self, rangestr, raw=True, output=False):
- """Get lines of history from a string of ranges, as used by magic
- commands %hist, %save, %macro, etc.
-
- Parameters
- ----------
- rangestr : str
- A string specifying ranges, e.g. "5 ~2/1-4". See
- :func:`magic_history` for full details.
- raw, output : bool
- As :meth:`get_range`
-
- Returns
- -------
- Tuples as :meth:`get_range`
- """
- for sess, s, e in extract_hist_ranges(rangestr):
- for line in self.get_range(sess, s, e, raw=raw, output=output):
- yield line
-
-
-class HistoryManager(HistoryAccessor):
- """A class to organize all history-related functionality in one place.
- """
- # Public interface
-
- # An instance of the IPython shell we are attached to
- shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
- allow_none=True)
- # Lists to hold processed and raw history. These start with a blank entry
- # so that we can index them starting from 1
- input_hist_parsed = List([""])
- input_hist_raw = List([""])
- # A list of directories visited during session
- dir_hist = List()
+ return cur
+
+ @needs_sqlite
+ @catch_corrupt_db
+ def get_session_info(self, session):
+ """Get info about a session.
+
+ Parameters
+ ----------
+
+ session : int
+ Session number to retrieve.
+
+ Returns
+ -------
+
+ session_id : int
+ Session ID number
+ start : datetime
+ Timestamp for the start of the session.
+ end : datetime
+ Timestamp for the end of the session, or None if IPython crashed.
+ num_cmds : int
+ Number of commands run, or None if IPython crashed.
+ remark : unicode
+ A manually set description.
+ """
+ query = "SELECT * from sessions where session == ?"
+ return self.db.execute(query, (session,)).fetchone()
+
+ @catch_corrupt_db
+ def get_last_session_id(self):
+ """Get the last session ID currently in the database.
+
+ Within IPython, this should be the same as the value stored in
+ :attr:`HistoryManager.session_number`.
+ """
+ for record in self.get_tail(n=1, include_latest=True):
+ return record[0]
+
+ @catch_corrupt_db
+ def get_tail(self, n=10, raw=True, output=False, include_latest=False):
+ """Get the last n lines from the history database.
+
+ Parameters
+ ----------
+ n : int
+ The number of lines to get
+ raw, output : bool
+ See :meth:`get_range`
+ include_latest : bool
+ If False (default), n+1 lines are fetched, and the latest one
+            is discarded. This is intended for use where the function is
+            called by a user command, whose own entry should not be returned.
+
+ Returns
+ -------
+ Tuples as :meth:`get_range`
+ """
+ self.writeout_cache()
+ if not include_latest:
+ n += 1
+ cur = self._run_sql("ORDER BY session DESC, line DESC LIMIT ?",
+ (n,), raw=raw, output=output)
+ if not include_latest:
+ return reversed(list(cur)[1:])
+ return reversed(list(cur))
+
+ @catch_corrupt_db
+ def search(self, pattern="*", raw=True, search_raw=True,
+ output=False, n=None, unique=False):
+ """Search the database using unix glob-style matching (wildcards
+ * and ?).
+
+ Parameters
+ ----------
+ pattern : str
+ The wildcarded pattern to match when searching
+ search_raw : bool
+ If True, search the raw input, otherwise, the parsed input
+ raw, output : bool
+ See :meth:`get_range`
+ n : None or int
+ If an integer is given, it defines the limit of
+ returned entries.
+ unique : bool
+ When it is true, return only unique entries.
+
+ Returns
+ -------
+ Tuples as :meth:`get_range`
+ """
+ tosearch = "source_raw" if search_raw else "source"
+ if output:
+ tosearch = "history." + tosearch
+ self.writeout_cache()
+ sqlform = "WHERE %s GLOB ?" % tosearch
+ params = (pattern,)
+ if unique:
+ sqlform += ' GROUP BY {0}'.format(tosearch)
+ if n is not None:
+ sqlform += " ORDER BY session DESC, line DESC LIMIT ?"
+ params += (n,)
+ elif unique:
+ sqlform += " ORDER BY session, line"
+ cur = self._run_sql(sqlform, params, raw=raw, output=output)
+ if n is not None:
+ return reversed(list(cur))
+ return cur
+
+ @catch_corrupt_db
+ def get_range(self, session, start=1, stop=None, raw=True,output=False):
+ """Retrieve input by session.
+
+ Parameters
+ ----------
+ session : int
+ Session number to retrieve.
+ start : int
+ First line to retrieve.
+ stop : int
+ End of line range (excluded from output itself). If None, retrieve
+ to the end of the session.
+ raw : bool
+ If True, return untranslated input
+ output : bool
+ If True, attempt to include output. This will be 'real' Python
+ objects for the current session, or text reprs from previous
+ sessions if db_log_output was enabled at the time. Where no output
+ is found, None is used.
+
+ Returns
+ -------
+ entries
+ An iterator over the desired lines. Each line is a 3-tuple, either
+ (session, line, input) if output is False, or
+ (session, line, (input, output)) if output is True.
+ """
+ if stop:
+ lineclause = "line >= ? AND line < ?"
+ params = (session, start, stop)
+ else:
+ lineclause = "line>=?"
+ params = (session, start)
+
+ return self._run_sql("WHERE session==? AND %s" % lineclause,
+ params, raw=raw, output=output)
+
+ def get_range_by_str(self, rangestr, raw=True, output=False):
+ """Get lines of history from a string of ranges, as used by magic
+ commands %hist, %save, %macro, etc.
+
+ Parameters
+ ----------
+ rangestr : str
+ A string specifying ranges, e.g. "5 ~2/1-4". See
+ :func:`magic_history` for full details.
+ raw, output : bool
+ As :meth:`get_range`
+
+ Returns
+ -------
+ Tuples as :meth:`get_range`
+ """
+ for sess, s, e in extract_hist_ranges(rangestr):
+ for line in self.get_range(sess, s, e, raw=raw, output=output):
+ yield line
+
+
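A standalone usage sketch of ``HistoryAccessor`` (profile name and search pattern are examples), matching the "standalone history tools" case named in the docstring::

    from IPython.core.history import HistoryAccessor

    hist = HistoryAccessor(profile='default')
    for session, line, source in hist.get_tail(5, include_latest=True):
        print("%d/%d: %s" % (session, line, source))

    for session, line, source in hist.search("*import*", n=3):
        print(source)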
+class HistoryManager(HistoryAccessor):
+ """A class to organize all history-related functionality in one place.
+ """
+ # Public interface
+
+ # An instance of the IPython shell we are attached to
+ shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
+ allow_none=True)
+ # Lists to hold processed and raw history. These start with a blank entry
+ # so that we can index them starting from 1
+ input_hist_parsed = List([""])
+ input_hist_raw = List([""])
+ # A list of directories visited during session
+ dir_hist = List()
@default('dir_hist')
- def _dir_hist_default(self):
- try:
- return [py3compat.getcwd()]
- except OSError:
- return []
-
- # A dict of output history, keyed with ints from the shell's
- # execution count.
- output_hist = Dict()
- # The text/plain repr of outputs.
- output_hist_reprs = Dict()
-
- # The number of the current session in the history database
- session_number = Integer()
-
+ def _dir_hist_default(self):
+ try:
+ return [py3compat.getcwd()]
+ except OSError:
+ return []
+
+ # A dict of output history, keyed with ints from the shell's
+ # execution count.
+ output_hist = Dict()
+ # The text/plain repr of outputs.
+ output_hist_reprs = Dict()
+
+ # The number of the current session in the history database
+ session_number = Integer()
+
db_log_output = Bool(False,
- help="Should the history database include output? (default: no)"
+ help="Should the history database include output? (default: no)"
).tag(config=True)
db_cache_size = Integer(0,
- help="Write to database every x commands (higher values save disk access & power).\n"
- "Values of 1 or less effectively disable caching."
+ help="Write to database every x commands (higher values save disk access & power).\n"
+ "Values of 1 or less effectively disable caching."
).tag(config=True)
- # The input and output caches
- db_input_cache = List()
- db_output_cache = List()
-
- # History saving in separate thread
- save_thread = Instance('IPython.core.history.HistorySavingThread',
- allow_none=True)
- try: # Event is a function returning an instance of _Event...
- save_flag = Instance(threading._Event, allow_none=True)
- except AttributeError: # ...until Python 3.3, when it's a class.
- save_flag = Instance(threading.Event, allow_none=True)
-
- # Private interface
- # Variables used to store the three last inputs from the user. On each new
- # history update, we populate the user's namespace with these, shifted as
- # necessary.
- _i00 = Unicode(u'')
- _i = Unicode(u'')
- _ii = Unicode(u'')
- _iii = Unicode(u'')
-
- # A regex matching all forms of the exit command, so that we don't store
- # them in the history (it's annoying to rewind the first entry and land on
- # an exit call).
- _exit_re = re.compile(r"(exit|quit)(\s*\(.*\))?$")
-
- def __init__(self, shell=None, config=None, **traits):
- """Create a new history manager associated with a shell instance.
- """
- # We need a pointer back to the shell for various tasks.
- super(HistoryManager, self).__init__(shell=shell, config=config,
- **traits)
- self.save_flag = threading.Event()
- self.db_input_cache_lock = threading.Lock()
- self.db_output_cache_lock = threading.Lock()
-
- try:
- self.new_session()
- except OperationalError:
- self.log.error("Failed to create history session in %s. History will not be saved.",
- self.hist_file, exc_info=True)
- self.hist_file = ':memory:'
-
- if self.enabled and self.hist_file != ':memory:':
- self.save_thread = HistorySavingThread(self)
- self.save_thread.start()
-
- def _get_hist_file_name(self, profile=None):
- """Get default history file name based on the Shell's profile.
-
- The profile parameter is ignored, but must exist for compatibility with
- the parent class."""
- profile_dir = self.shell.profile_dir.location
- return os.path.join(profile_dir, 'history.sqlite')
-
- @needs_sqlite
- def new_session(self, conn=None):
- """Get a new session number."""
- if conn is None:
- conn = self.db
-
- with conn:
- cur = conn.execute("""INSERT INTO sessions VALUES (NULL, ?, NULL,
- NULL, "") """, (datetime.datetime.now(),))
- self.session_number = cur.lastrowid
-
- def end_session(self):
- """Close the database session, filling in the end time and line count."""
- self.writeout_cache()
- with self.db:
- self.db.execute("""UPDATE sessions SET end=?, num_cmds=? WHERE
- session==?""", (datetime.datetime.now(),
- len(self.input_hist_parsed)-1, self.session_number))
- self.session_number = 0
-
- def name_session(self, name):
- """Give the current session a name in the history database."""
- with self.db:
- self.db.execute("UPDATE sessions SET remark=? WHERE session==?",
- (name, self.session_number))
-
- def reset(self, new_session=True):
- """Clear the session history, releasing all object references, and
- optionally open a new session."""
- self.output_hist.clear()
- # The directory history can't be completely empty
- self.dir_hist[:] = [py3compat.getcwd()]
-
- if new_session:
- if self.session_number:
- self.end_session()
- self.input_hist_parsed[:] = [""]
- self.input_hist_raw[:] = [""]
- self.new_session()
-
- # ------------------------------
- # Methods for retrieving history
- # ------------------------------
- def get_session_info(self, session=0):
- """Get info about a session.
-
- Parameters
- ----------
-
- session : int
- Session number to retrieve. The current session is 0, and negative
- numbers count back from current session, so -1 is the previous session.
-
- Returns
- -------
-
- session_id : int
- Session ID number
- start : datetime
- Timestamp for the start of the session.
- end : datetime
- Timestamp for the end of the session, or None if IPython crashed.
- num_cmds : int
- Number of commands run, or None if IPython crashed.
- remark : unicode
- A manually set description.
- """
- if session <= 0:
- session += self.session_number
-
- return super(HistoryManager, self).get_session_info(session=session)
-
- def _get_range_session(self, start=1, stop=None, raw=True, output=False):
- """Get input and output history from the current session. Called by
- get_range, and takes similar parameters."""
- input_hist = self.input_hist_raw if raw else self.input_hist_parsed
-
- n = len(input_hist)
- if start < 0:
- start += n
- if not stop or (stop > n):
- stop = n
- elif stop < 0:
- stop += n
-
- for i in range(start, stop):
- if output:
- line = (input_hist[i], self.output_hist_reprs.get(i))
- else:
- line = input_hist[i]
- yield (0, i, line)
-
- def get_range(self, session=0, start=1, stop=None, raw=True,output=False):
- """Retrieve input by session.
-
- Parameters
- ----------
- session : int
- Session number to retrieve. The current session is 0, and negative
- numbers count back from current session, so -1 is previous session.
- start : int
- First line to retrieve.
- stop : int
- End of line range (excluded from output itself). If None, retrieve
- to the end of the session.
- raw : bool
- If True, return untranslated input
- output : bool
- If True, attempt to include output. This will be 'real' Python
- objects for the current session, or text reprs from previous
- sessions if db_log_output was enabled at the time. Where no output
- is found, None is used.
-
- Returns
- -------
- entries
- An iterator over the desired lines. Each line is a 3-tuple, either
- (session, line, input) if output is False, or
- (session, line, (input, output)) if output is True.
- """
- if session <= 0:
- session += self.session_number
- if session==self.session_number: # Current session
- return self._get_range_session(start, stop, raw, output)
- return super(HistoryManager, self).get_range(session, start, stop, raw,
- output)
-
- ## ----------------------------
- ## Methods for storing history:
- ## ----------------------------
- def store_inputs(self, line_num, source, source_raw=None):
- """Store source and raw input in history and create input cache
- variables ``_i*``.
-
- Parameters
- ----------
- line_num : int
- The prompt number of this input.
-
- source : str
- Python input.
-
- source_raw : str, optional
- If given, this is the raw input without any IPython transformations
- applied to it. If not given, ``source`` is used.
- """
- if source_raw is None:
- source_raw = source
- source = source.rstrip('\n')
- source_raw = source_raw.rstrip('\n')
-
- # do not store exit/quit commands
- if self._exit_re.match(source_raw.strip()):
- return
-
- self.input_hist_parsed.append(source)
- self.input_hist_raw.append(source_raw)
-
- with self.db_input_cache_lock:
- self.db_input_cache.append((line_num, source, source_raw))
- # Trigger to flush cache and write to DB.
- if len(self.db_input_cache) >= self.db_cache_size:
- self.save_flag.set()
-
- # update the auto _i variables
- self._iii = self._ii
- self._ii = self._i
- self._i = self._i00
- self._i00 = source_raw
-
- # hackish access to user namespace to create _i1,_i2... dynamically
- new_i = '_i%s' % line_num
- to_main = {'_i': self._i,
- '_ii': self._ii,
- '_iii': self._iii,
- new_i : self._i00 }
-
- if self.shell is not None:
- self.shell.push(to_main, interactive=False)
-
- def store_output(self, line_num):
- """If database output logging is enabled, this saves all the
- outputs from the indicated prompt number to the database. It's
- called by run_cell after code has been executed.
-
- Parameters
- ----------
- line_num : int
- The line number from which to save outputs
- """
- if (not self.db_log_output) or (line_num not in self.output_hist_reprs):
- return
- output = self.output_hist_reprs[line_num]
-
- with self.db_output_cache_lock:
- self.db_output_cache.append((line_num, output))
- if self.db_cache_size <= 1:
- self.save_flag.set()
-
- def _writeout_input_cache(self, conn):
- with conn:
- for line in self.db_input_cache:
- conn.execute("INSERT INTO history VALUES (?, ?, ?, ?)",
- (self.session_number,)+line)
-
- def _writeout_output_cache(self, conn):
- with conn:
- for line in self.db_output_cache:
- conn.execute("INSERT INTO output_history VALUES (?, ?, ?)",
- (self.session_number,)+line)
-
- @needs_sqlite
- def writeout_cache(self, conn=None):
- """Write any entries in the cache to the database."""
- if conn is None:
- conn = self.db
-
- with self.db_input_cache_lock:
- try:
- self._writeout_input_cache(conn)
- except sqlite3.IntegrityError:
- self.new_session(conn)
- print("ERROR! Session/line number was not unique in",
- "database. History logging moved to new session",
- self.session_number)
- try:
- # Try writing to the new session. If this fails, don't
- # recurse
- self._writeout_input_cache(conn)
- except sqlite3.IntegrityError:
- pass
- finally:
- self.db_input_cache = []
-
- with self.db_output_cache_lock:
- try:
- self._writeout_output_cache(conn)
- except sqlite3.IntegrityError:
- print("!! Session/line number for output was not unique",
- "in database. Output will not be stored.")
- finally:
- self.db_output_cache = []
-
-
-class HistorySavingThread(threading.Thread):
- """This thread takes care of writing history to the database, so that
- the UI isn't held up while that happens.
-
- It waits for the HistoryManager's save_flag to be set, then writes out
- the history cache. The main thread is responsible for setting the flag when
- the cache size reaches a defined threshold."""
- daemon = True
- stop_now = False
- enabled = True
- def __init__(self, history_manager):
- super(HistorySavingThread, self).__init__(name="IPythonHistorySavingThread")
- self.history_manager = history_manager
- self.enabled = history_manager.enabled
- atexit.register(self.stop)
-
- @needs_sqlite
- def run(self):
- # We need a separate db connection per thread:
- try:
- self.db = sqlite3.connect(self.history_manager.hist_file,
- **self.history_manager.connection_options
- )
- while True:
- self.history_manager.save_flag.wait()
- if self.stop_now:
- self.db.close()
- return
- self.history_manager.save_flag.clear()
- self.history_manager.writeout_cache(self.db)
- except Exception as e:
- print(("The history saving thread hit an unexpected error (%s)."
- "History will not be written to the database.") % repr(e))
-
- def stop(self):
- """This can be called from the main thread to safely stop this thread.
-
- Note that it does not attempt to write out remaining history before
- exiting. That should be done by calling the HistoryManager's
- end_session method."""
- self.stop_now = True
- self.history_manager.save_flag.set()
- self.join()
-
-
-# To match, e.g. ~5/8-~2/3
-range_re = re.compile(r"""
-((?P<startsess>~?\d+)/)?
-(?P<start>\d+)?
-((?P<sep>[\-:])
- ((?P<endsess>~?\d+)/)?
- (?P<end>\d+))?
-$""", re.VERBOSE)
-
-
-def extract_hist_ranges(ranges_str):
- """Turn a string of history ranges into 3-tuples of (session, start, stop).
-
- Examples
- --------
- >>> list(extract_hist_ranges("~8/5-~7/4 2"))
- [(-8, 5, None), (-7, 1, 5), (0, 2, 3)]
- """
- for range_str in ranges_str.split():
- rmatch = range_re.match(range_str)
- if not rmatch:
- continue
- start = rmatch.group("start")
- if start:
- start = int(start)
- end = rmatch.group("end")
- # If no end specified, get (a, a + 1)
- end = int(end) if end else start + 1
- else: # start not specified
- if not rmatch.group('startsess'): # no startsess
- continue
- start = 1
- end = None # provide the entire session hist
-
- if rmatch.group("sep") == "-": # 1-3 == 1:4 --> [1, 2, 3]
- end += 1
- startsess = rmatch.group("startsess") or "0"
- endsess = rmatch.group("endsess") or startsess
- startsess = int(startsess.replace("~","-"))
- endsess = int(endsess.replace("~","-"))
- assert endsess >= startsess, "start session must be earlier than end session"
-
- if endsess == startsess:
- yield (startsess, start, end)
- continue
- # Multiple sessions in one range:
- yield (startsess, start, None)
- for sess in range(startsess+1, endsess):
- yield (sess, 1, None)
- yield (endsess, 1, end)
-
-
-def _format_lineno(session, line):
- """Helper function to format line numbers properly."""
- if session == 0:
- return str(line)
- return "%s#%s" % (session, line)
+ # The input and output caches
+ db_input_cache = List()
+ db_output_cache = List()
+
+ # History saving in separate thread
+ save_thread = Instance('IPython.core.history.HistorySavingThread',
+ allow_none=True)
+ try: # Event is a function returning an instance of _Event...
+ save_flag = Instance(threading._Event, allow_none=True)
+ except AttributeError: # ...until Python 3.3, when it's a class.
+ save_flag = Instance(threading.Event, allow_none=True)
+
+ # Private interface
+ # Variables used to store the three last inputs from the user. On each new
+ # history update, we populate the user's namespace with these, shifted as
+ # necessary.
+ _i00 = Unicode(u'')
+ _i = Unicode(u'')
+ _ii = Unicode(u'')
+ _iii = Unicode(u'')
+
+ # A regex matching all forms of the exit command, so that we don't store
+ # them in the history (it's annoying to rewind the first entry and land on
+ # an exit call).
+ _exit_re = re.compile(r"(exit|quit)(\s*\(.*\))?$")
+
+ def __init__(self, shell=None, config=None, **traits):
+ """Create a new history manager associated with a shell instance.
+ """
+ # We need a pointer back to the shell for various tasks.
+ super(HistoryManager, self).__init__(shell=shell, config=config,
+ **traits)
+ self.save_flag = threading.Event()
+ self.db_input_cache_lock = threading.Lock()
+ self.db_output_cache_lock = threading.Lock()
+
+ try:
+ self.new_session()
+ except OperationalError:
+ self.log.error("Failed to create history session in %s. History will not be saved.",
+ self.hist_file, exc_info=True)
+ self.hist_file = ':memory:'
+
+ if self.enabled and self.hist_file != ':memory:':
+ self.save_thread = HistorySavingThread(self)
+ self.save_thread.start()
+
+ def _get_hist_file_name(self, profile=None):
+ """Get default history file name based on the Shell's profile.
+
+ The profile parameter is ignored, but must exist for compatibility with
+ the parent class."""
+ profile_dir = self.shell.profile_dir.location
+ return os.path.join(profile_dir, 'history.sqlite')
+
+ @needs_sqlite
+ def new_session(self, conn=None):
+ """Get a new session number."""
+ if conn is None:
+ conn = self.db
+
+ with conn:
+ cur = conn.execute("""INSERT INTO sessions VALUES (NULL, ?, NULL,
+ NULL, "") """, (datetime.datetime.now(),))
+ self.session_number = cur.lastrowid
+
+ def end_session(self):
+ """Close the database session, filling in the end time and line count."""
+ self.writeout_cache()
+ with self.db:
+ self.db.execute("""UPDATE sessions SET end=?, num_cmds=? WHERE
+ session==?""", (datetime.datetime.now(),
+ len(self.input_hist_parsed)-1, self.session_number))
+ self.session_number = 0
+
+ def name_session(self, name):
+ """Give the current session a name in the history database."""
+ with self.db:
+ self.db.execute("UPDATE sessions SET remark=? WHERE session==?",
+ (name, self.session_number))
+
+ def reset(self, new_session=True):
+ """Clear the session history, releasing all object references, and
+ optionally open a new session."""
+ self.output_hist.clear()
+ # The directory history can't be completely empty
+ self.dir_hist[:] = [py3compat.getcwd()]
+
+ if new_session:
+ if self.session_number:
+ self.end_session()
+ self.input_hist_parsed[:] = [""]
+ self.input_hist_raw[:] = [""]
+ self.new_session()
+
+ # ------------------------------
+ # Methods for retrieving history
+ # ------------------------------
+ def get_session_info(self, session=0):
+ """Get info about a session.
+
+ Parameters
+ ----------
+
+ session : int
+ Session number to retrieve. The current session is 0, and negative
+ numbers count back from current session, so -1 is the previous session.
+
+ Returns
+ -------
+
+ session_id : int
+ Session ID number
+ start : datetime
+ Timestamp for the start of the session.
+ end : datetime
+ Timestamp for the end of the session, or None if IPython crashed.
+ num_cmds : int
+ Number of commands run, or None if IPython crashed.
+ remark : unicode
+ A manually set description.
+ """
+ if session <= 0:
+ session += self.session_number
+
+ return super(HistoryManager, self).get_session_info(session=session)
+
+ def _get_range_session(self, start=1, stop=None, raw=True, output=False):
+ """Get input and output history from the current session. Called by
+ get_range, and takes similar parameters."""
+ input_hist = self.input_hist_raw if raw else self.input_hist_parsed
+
+ n = len(input_hist)
+ if start < 0:
+ start += n
+ if not stop or (stop > n):
+ stop = n
+ elif stop < 0:
+ stop += n
+
+ for i in range(start, stop):
+ if output:
+ line = (input_hist[i], self.output_hist_reprs.get(i))
+ else:
+ line = input_hist[i]
+ yield (0, i, line)
+
+ def get_range(self, session=0, start=1, stop=None, raw=True,output=False):
+ """Retrieve input by session.
+
+ Parameters
+ ----------
+ session : int
+ Session number to retrieve. The current session is 0, and negative
+ numbers count back from current session, so -1 is previous session.
+ start : int
+ First line to retrieve.
+ stop : int
+ End of line range (excluded from output itself). If None, retrieve
+ to the end of the session.
+ raw : bool
+ If True, return untranslated input
+ output : bool
+ If True, attempt to include output. This will be 'real' Python
+ objects for the current session, or text reprs from previous
+ sessions if db_log_output was enabled at the time. Where no output
+ is found, None is used.
+
+ Returns
+ -------
+ entries
+ An iterator over the desired lines. Each line is a 3-tuple, either
+ (session, line, input) if output is False, or
+ (session, line, (input, output)) if output is True.
+ """
+ if session <= 0:
+ session += self.session_number
+ if session==self.session_number: # Current session
+ return self._get_range_session(start, stop, raw, output)
+ return super(HistoryManager, self).get_range(session, start, stop, raw,
+ output)
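
For illustration, a minimal usage sketch of the retrieval API above, assuming an interactive IPython session where get_ipython() is available (and that the previous session ran with db_log_output enabled; otherwise the output slot is simply None):

    ip = get_ipython()
    hm = ip.history_manager

    # Raw inputs of the current session (session 0), without outputs.
    for session, line, source in hm.get_range(session=0, start=1, raw=True):
        print("%s: %s" % (line, source))

    # Inputs plus stored output reprs from the previous session (-1).
    for session, line, (source, output) in hm.get_range(session=-1, output=True):
        print("%s -> %r" % (source, output))
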
+
+ ## ----------------------------
+ ## Methods for storing history:
+ ## ----------------------------
+ def store_inputs(self, line_num, source, source_raw=None):
+ """Store source and raw input in history and create input cache
+ variables ``_i*``.
+
+ Parameters
+ ----------
+ line_num : int
+ The prompt number of this input.
+
+ source : str
+ Python input.
+
+ source_raw : str, optional
+ If given, this is the raw input without any IPython transformations
+ applied to it. If not given, ``source`` is used.
+ """
+ if source_raw is None:
+ source_raw = source
+ source = source.rstrip('\n')
+ source_raw = source_raw.rstrip('\n')
+
+ # do not store exit/quit commands
+ if self._exit_re.match(source_raw.strip()):
+ return
+
+ self.input_hist_parsed.append(source)
+ self.input_hist_raw.append(source_raw)
+
+ with self.db_input_cache_lock:
+ self.db_input_cache.append((line_num, source, source_raw))
+ # Trigger to flush cache and write to DB.
+ if len(self.db_input_cache) >= self.db_cache_size:
+ self.save_flag.set()
+
+ # update the auto _i variables
+ self._iii = self._ii
+ self._ii = self._i
+ self._i = self._i00
+ self._i00 = source_raw
+
+ # hackish access to user namespace to create _i1,_i2... dynamically
+ new_i = '_i%s' % line_num
+ to_main = {'_i': self._i,
+ '_ii': self._ii,
+ '_iii': self._iii,
+ new_i : self._i00 }
+
+ if self.shell is not None:
+ self.shell.push(to_main, interactive=False)
+
+ def store_output(self, line_num):
+ """If database output logging is enabled, this saves all the
+ outputs from the indicated prompt number to the database. It's
+ called by run_cell after code has been executed.
+
+ Parameters
+ ----------
+ line_num : int
+ The line number from which to save outputs
+ """
+ if (not self.db_log_output) or (line_num not in self.output_hist_reprs):
+ return
+ output = self.output_hist_reprs[line_num]
+
+ with self.db_output_cache_lock:
+ self.db_output_cache.append((line_num, output))
+ if self.db_cache_size <= 1:
+ self.save_flag.set()
+
+ def _writeout_input_cache(self, conn):
+ with conn:
+ for line in self.db_input_cache:
+ conn.execute("INSERT INTO history VALUES (?, ?, ?, ?)",
+ (self.session_number,)+line)
+
+ def _writeout_output_cache(self, conn):
+ with conn:
+ for line in self.db_output_cache:
+ conn.execute("INSERT INTO output_history VALUES (?, ?, ?)",
+ (self.session_number,)+line)
+
+ @needs_sqlite
+ def writeout_cache(self, conn=None):
+ """Write any entries in the cache to the database."""
+ if conn is None:
+ conn = self.db
+
+ with self.db_input_cache_lock:
+ try:
+ self._writeout_input_cache(conn)
+ except sqlite3.IntegrityError:
+ self.new_session(conn)
+ print("ERROR! Session/line number was not unique in",
+ "database. History logging moved to new session",
+ self.session_number)
+ try:
+ # Try writing to the new session. If this fails, don't
+ # recurse
+ self._writeout_input_cache(conn)
+ except sqlite3.IntegrityError:
+ pass
+ finally:
+ self.db_input_cache = []
+
+ with self.db_output_cache_lock:
+ try:
+ self._writeout_output_cache(conn)
+ except sqlite3.IntegrityError:
+ print("!! Session/line number for output was not unique",
+ "in database. Output will not be stored.")
+ finally:
+ self.db_output_cache = []
+
+
+class HistorySavingThread(threading.Thread):
+ """This thread takes care of writing history to the database, so that
+ the UI isn't held up while that happens.
+
+ It waits for the HistoryManager's save_flag to be set, then writes out
+ the history cache. The main thread is responsible for setting the flag when
+ the cache size reaches a defined threshold."""
+ daemon = True
+ stop_now = False
+ enabled = True
+ def __init__(self, history_manager):
+ super(HistorySavingThread, self).__init__(name="IPythonHistorySavingThread")
+ self.history_manager = history_manager
+ self.enabled = history_manager.enabled
+ atexit.register(self.stop)
+
+ @needs_sqlite
+ def run(self):
+ # We need a separate db connection per thread:
+ try:
+ self.db = sqlite3.connect(self.history_manager.hist_file,
+ **self.history_manager.connection_options
+ )
+ while True:
+ self.history_manager.save_flag.wait()
+ if self.stop_now:
+ self.db.close()
+ return
+ self.history_manager.save_flag.clear()
+ self.history_manager.writeout_cache(self.db)
+ except Exception as e:
+ print(("The history saving thread hit an unexpected error (%s)."
+ "History will not be written to the database.") % repr(e))
+
+ def stop(self):
+ """This can be called from the main thread to safely stop this thread.
+
+ Note that it does not attempt to write out remaining history before
+ exiting. That should be done by calling the HistoryManager's
+ end_session method."""
+ self.stop_now = True
+ self.history_manager.save_flag.set()
+ self.join()
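
HistorySavingThread is a small producer/consumer arrangement: store_inputs() and store_output() fill the caches and set save_flag once the cache is full, and this thread wakes up, clears the flag, and flushes via writeout_cache(). A stripped-down standalone sketch of the same pattern (illustrative only; the names here are invented):

    import threading
    import time

    class CacheWriter(threading.Thread):
        """Background writer: wait on an Event, then flush (sketch only)."""
        daemon = True

        def __init__(self, flush, flag):
            super(CacheWriter, self).__init__(name="CacheWriter")
            self.flush = flush
            self.flag = flag
            self.stop_now = False

        def run(self):
            while True:
                self.flag.wait()        # sleep until the producer signals
                if self.stop_now:
                    return
                self.flag.clear()
                self.flush()            # write out whatever has accumulated

        def stop(self):
            # Called from the main thread; wake the worker so it can exit.
            self.stop_now = True
            self.flag.set()
            self.join()

    def flush_cache():
        print("cache flushed")          # stands in for writeout_cache(self.db)

    flag = threading.Event()
    writer = CacheWriter(flush_cache, flag)
    writer.start()
    flag.set()                          # producer side: cache hit its threshold
    time.sleep(0.1)                     # give the worker a moment to flush once
    writer.stop()
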
+
+
+# To match, e.g. ~5/8-~2/3
+range_re = re.compile(r"""
+((?P<startsess>~?\d+)/)?
+(?P<start>\d+)?
+((?P<sep>[\-:])
+ ((?P<endsess>~?\d+)/)?
+ (?P<end>\d+))?
+$""", re.VERBOSE)
+
+
+def extract_hist_ranges(ranges_str):
+ """Turn a string of history ranges into 3-tuples of (session, start, stop).
+
+ Examples
+ --------
+ >>> list(extract_hist_ranges("~8/5-~7/4 2"))
+ [(-8, 5, None), (-7, 1, 5), (0, 2, 3)]
+ """
+ for range_str in ranges_str.split():
+ rmatch = range_re.match(range_str)
+ if not rmatch:
+ continue
+ start = rmatch.group("start")
+ if start:
+ start = int(start)
+ end = rmatch.group("end")
+ # If no end specified, get (a, a + 1)
+ end = int(end) if end else start + 1
+ else: # start not specified
+ if not rmatch.group('startsess'): # no startsess
+ continue
+ start = 1
+ end = None # provide the entire session hist
+
+ if rmatch.group("sep") == "-": # 1-3 == 1:4 --> [1, 2, 3]
+ end += 1
+ startsess = rmatch.group("startsess") or "0"
+ endsess = rmatch.group("endsess") or startsess
+ startsess = int(startsess.replace("~","-"))
+ endsess = int(endsess.replace("~","-"))
+ assert endsess >= startsess, "start session must be earlier than end session"
+
+ if endsess == startsess:
+ yield (startsess, start, end)
+ continue
+ # Multiple sessions in one range:
+ yield (startsess, start, None)
+ for sess in range(startsess+1, endsess):
+ yield (sess, 1, None)
+ yield (endsess, 1, end)
+
+
+def _format_lineno(session, line):
+ """Helper function to format line numbers properly."""
+ if session == 0:
+ return str(line)
+ return "%s#%s" % (session, line)
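
For reference, the range grammar above resolves as follows; an illustrative sketch, assuming IPython.core.history is importable:

    from IPython.core.history import extract_hist_ranges, _format_lineno

    # "3-5" is inclusive, so it covers lines 3..5 of the current session (0).
    print(list(extract_hist_ranges("3-5")))      # [(0, 3, 6)]

    # "~1/7" is line 7 of the previous session.
    print(list(extract_hist_ranges("~1/7")))     # [(-1, 7, 8)]

    # Bare numbers refer to the current session; otherwise "session#line".
    print(_format_lineno(0, 12))                 # 12
    print(_format_lineno(3, 4))                  # 3#4
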
diff --git a/contrib/python/ipython/py2/IPython/core/historyapp.py b/contrib/python/ipython/py2/IPython/core/historyapp.py
index b693cbc0d8..d51426d2ca 100644
--- a/contrib/python/ipython/py2/IPython/core/historyapp.py
+++ b/contrib/python/ipython/py2/IPython/core/historyapp.py
@@ -1,162 +1,162 @@
-# encoding: utf-8
-"""
-An application for managing IPython history.
-
-To be invoked as the `ipython history` subcommand.
-"""
-from __future__ import print_function
-
-import os
-import sqlite3
-
-from traitlets.config.application import Application
-from IPython.core.application import BaseIPythonApplication
-from traitlets import Bool, Int, Dict
-from IPython.utils.io import ask_yes_no
-
-trim_hist_help = """Trim the IPython history database to the last 1000 entries.
-
-This actually copies the last 1000 entries to a new database, and then replaces
-the old file with the new. Use the `--keep=` argument to specify a number
-other than 1000.
-"""
-
-clear_hist_help = """Clear the IPython history database, deleting all entries.
-
-Because this is a destructive operation, IPython will prompt the user if they
-really want to do this. Passing a `-f` flag will force clearing without a
-prompt.
-
-This is a handy alias to `ipython history trim --keep=0`
-"""
-
-
-class HistoryTrim(BaseIPythonApplication):
- description = trim_hist_help
-
+# encoding: utf-8
+"""
+An application for managing IPython history.
+
+To be invoked as the `ipython history` subcommand.
+"""
+from __future__ import print_function
+
+import os
+import sqlite3
+
+from traitlets.config.application import Application
+from IPython.core.application import BaseIPythonApplication
+from traitlets import Bool, Int, Dict
+from IPython.utils.io import ask_yes_no
+
+trim_hist_help = """Trim the IPython history database to the last 1000 entries.
+
+This actually copies the last 1000 entries to a new database, and then replaces
+the old file with the new. Use the `--keep=` argument to specify a number
+other than 1000.
+"""
+
+clear_hist_help = """Clear the IPython history database, deleting all entries.
+
+Because this is a destructive operation, IPython will prompt the user if they
+really want to do this. Passing a `-f` flag will force clearing without a
+prompt.
+
+This is a handy alias to `ipython history trim --keep=0`
+"""
+
+
+class HistoryTrim(BaseIPythonApplication):
+ description = trim_hist_help
+
backup = Bool(False,
help="Keep the old history file as history.sqlite.<N>"
).tag(config=True)
-
+
keep = Int(1000,
help="Number of recent lines to keep in the database."
).tag(config=True)
-
- flags = Dict(dict(
- backup = ({'HistoryTrim' : {'backup' : True}},
+
+ flags = Dict(dict(
+ backup = ({'HistoryTrim' : {'backup' : True}},
backup.help
- )
- ))
-
- aliases=Dict(dict(
- keep = 'HistoryTrim.keep'
- ))
-
- def start(self):
- profile_dir = self.profile_dir.location
- hist_file = os.path.join(profile_dir, 'history.sqlite')
- con = sqlite3.connect(hist_file)
-
- # Grab the recent history from the current database.
- inputs = list(con.execute('SELECT session, line, source, source_raw FROM '
- 'history ORDER BY session DESC, line DESC LIMIT ?', (self.keep+1,)))
- if len(inputs) <= self.keep:
- print("There are already at most %d entries in the history database." % self.keep)
- print("Not doing anything. Use --keep= argument to keep fewer entries")
- return
-
- print("Trimming history to the most recent %d entries." % self.keep)
-
- inputs.pop() # Remove the extra element we got to check the length.
- inputs.reverse()
- if inputs:
- first_session = inputs[0][0]
- outputs = list(con.execute('SELECT session, line, output FROM '
- 'output_history WHERE session >= ?', (first_session,)))
- sessions = list(con.execute('SELECT session, start, end, num_cmds, remark FROM '
- 'sessions WHERE session >= ?', (first_session,)))
- con.close()
-
- # Create the new history database.
- new_hist_file = os.path.join(profile_dir, 'history.sqlite.new')
- i = 0
- while os.path.exists(new_hist_file):
- # Make sure we don't interfere with an existing file.
- i += 1
- new_hist_file = os.path.join(profile_dir, 'history.sqlite.new'+str(i))
- new_db = sqlite3.connect(new_hist_file)
- new_db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer
- primary key autoincrement, start timestamp,
- end timestamp, num_cmds integer, remark text)""")
- new_db.execute("""CREATE TABLE IF NOT EXISTS history
- (session integer, line integer, source text, source_raw text,
- PRIMARY KEY (session, line))""")
- new_db.execute("""CREATE TABLE IF NOT EXISTS output_history
- (session integer, line integer, output text,
- PRIMARY KEY (session, line))""")
- new_db.commit()
-
-
- if inputs:
- with new_db:
- # Add the recent history into the new database.
- new_db.executemany('insert into sessions values (?,?,?,?,?)', sessions)
- new_db.executemany('insert into history values (?,?,?,?)', inputs)
- new_db.executemany('insert into output_history values (?,?,?)', outputs)
- new_db.close()
-
- if self.backup:
- i = 1
- backup_hist_file = os.path.join(profile_dir, 'history.sqlite.old.%d' % i)
- while os.path.exists(backup_hist_file):
- i += 1
- backup_hist_file = os.path.join(profile_dir, 'history.sqlite.old.%d' % i)
- os.rename(hist_file, backup_hist_file)
- print("Backed up longer history file to", backup_hist_file)
- else:
- os.remove(hist_file)
-
- os.rename(new_hist_file, hist_file)
-
-class HistoryClear(HistoryTrim):
- description = clear_hist_help
+ )
+ ))
+
+ aliases=Dict(dict(
+ keep = 'HistoryTrim.keep'
+ ))
+
+ def start(self):
+ profile_dir = self.profile_dir.location
+ hist_file = os.path.join(profile_dir, 'history.sqlite')
+ con = sqlite3.connect(hist_file)
+
+ # Grab the recent history from the current database.
+ inputs = list(con.execute('SELECT session, line, source, source_raw FROM '
+ 'history ORDER BY session DESC, line DESC LIMIT ?', (self.keep+1,)))
+ if len(inputs) <= self.keep:
+ print("There are already at most %d entries in the history database." % self.keep)
+ print("Not doing anything. Use --keep= argument to keep fewer entries")
+ return
+
+ print("Trimming history to the most recent %d entries." % self.keep)
+
+ inputs.pop() # Remove the extra element we got to check the length.
+ inputs.reverse()
+ if inputs:
+ first_session = inputs[0][0]
+ outputs = list(con.execute('SELECT session, line, output FROM '
+ 'output_history WHERE session >= ?', (first_session,)))
+ sessions = list(con.execute('SELECT session, start, end, num_cmds, remark FROM '
+ 'sessions WHERE session >= ?', (first_session,)))
+ con.close()
+
+ # Create the new history database.
+ new_hist_file = os.path.join(profile_dir, 'history.sqlite.new')
+ i = 0
+ while os.path.exists(new_hist_file):
+ # Make sure we don't interfere with an existing file.
+ i += 1
+ new_hist_file = os.path.join(profile_dir, 'history.sqlite.new'+str(i))
+ new_db = sqlite3.connect(new_hist_file)
+ new_db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer
+ primary key autoincrement, start timestamp,
+ end timestamp, num_cmds integer, remark text)""")
+ new_db.execute("""CREATE TABLE IF NOT EXISTS history
+ (session integer, line integer, source text, source_raw text,
+ PRIMARY KEY (session, line))""")
+ new_db.execute("""CREATE TABLE IF NOT EXISTS output_history
+ (session integer, line integer, output text,
+ PRIMARY KEY (session, line))""")
+ new_db.commit()
+
+
+ if inputs:
+ with new_db:
+ # Add the recent history into the new database.
+ new_db.executemany('insert into sessions values (?,?,?,?,?)', sessions)
+ new_db.executemany('insert into history values (?,?,?,?)', inputs)
+ new_db.executemany('insert into output_history values (?,?,?)', outputs)
+ new_db.close()
+
+ if self.backup:
+ i = 1
+ backup_hist_file = os.path.join(profile_dir, 'history.sqlite.old.%d' % i)
+ while os.path.exists(backup_hist_file):
+ i += 1
+ backup_hist_file = os.path.join(profile_dir, 'history.sqlite.old.%d' % i)
+ os.rename(hist_file, backup_hist_file)
+ print("Backed up longer history file to", backup_hist_file)
+ else:
+ os.remove(hist_file)
+
+ os.rename(new_hist_file, hist_file)
+
+class HistoryClear(HistoryTrim):
+ description = clear_hist_help
keep = Int(0,
- help="Number of recent lines to keep in the database.")
-
+ help="Number of recent lines to keep in the database.")
+
force = Bool(False,
help="Don't prompt user for confirmation"
).tag(config=True)
-
- flags = Dict(dict(
- force = ({'HistoryClear' : {'force' : True}},
+
+ flags = Dict(dict(
+ force = ({'HistoryClear' : {'force' : True}},
force.help),
- f = ({'HistoryTrim' : {'force' : True}},
+ f = ({'HistoryTrim' : {'force' : True}},
force.help
- )
- ))
- aliases = Dict()
-
- def start(self):
- if self.force or ask_yes_no("Really delete all ipython history? ",
- default="no", interrupt="no"):
- HistoryTrim.start(self)
-
-class HistoryApp(Application):
- name = u'ipython-history'
- description = "Manage the IPython history database."
-
- subcommands = Dict(dict(
- trim = (HistoryTrim, HistoryTrim.description.splitlines()[0]),
- clear = (HistoryClear, HistoryClear.description.splitlines()[0]),
- ))
-
- def start(self):
- if self.subapp is None:
- print("No subcommand specified. Must specify one of: %s" % \
- (self.subcommands.keys()))
- print()
- self.print_description()
- self.print_subcommands()
- self.exit(1)
- else:
- return self.subapp.start()
+ )
+ ))
+ aliases = Dict()
+
+ def start(self):
+ if self.force or ask_yes_no("Really delete all ipython history? ",
+ default="no", interrupt="no"):
+ HistoryTrim.start(self)
+
+class HistoryApp(Application):
+ name = u'ipython-history'
+ description = "Manage the IPython history database."
+
+ subcommands = Dict(dict(
+ trim = (HistoryTrim, HistoryTrim.description.splitlines()[0]),
+ clear = (HistoryClear, HistoryClear.description.splitlines()[0]),
+ ))
+
+ def start(self):
+ if self.subapp is None:
+ print("No subcommand specified. Must specify one of: %s" % \
+ (self.subcommands.keys()))
+ print()
+ self.print_description()
+ self.print_subcommands()
+ self.exit(1)
+ else:
+ return self.subapp.start()
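
Both subcommands are normally invoked as `ipython history trim` and `ipython history clear`. The trim can also be driven programmatically; a rough sketch, assuming a default IPython profile exists (note that it really rewrites that profile's history.sqlite):

    from IPython.core.historyapp import HistoryTrim

    app = HistoryTrim()
    app.initialize(['--keep=500', '--backup'])  # keep the 500 newest inputs, back up the old file
    app.start()
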
diff --git a/contrib/python/ipython/py2/IPython/core/hooks.py b/contrib/python/ipython/py2/IPython/core/hooks.py
index 374ccb4b0f..e6fc84087f 100644
--- a/contrib/python/ipython/py2/IPython/core/hooks.py
+++ b/contrib/python/ipython/py2/IPython/core/hooks.py
@@ -1,98 +1,98 @@
-"""Hooks for IPython.
-
-In Python, it is possible to overwrite any method of any object if you really
-want to. But IPython exposes a few 'hooks', methods which are *designed* to
-be overwritten by users for customization purposes. This module defines the
-default versions of all such hooks, which get used by IPython if not
-overridden by the user.
-
-Hooks are simple functions, but they should be declared with ``self`` as their
-first argument, because when activated they are registered into IPython as
-instance methods. The self argument will be the IPython running instance
-itself, so hooks have full access to the entire IPython object.
-
-If you wish to define a new hook and activate it, you can make an :doc:`extension
-</config/extensions/index>` or a :ref:`startup script <startup_files>`. For
-example, you could use a startup file like this::
-
- import os
-
- def calljed(self,filename, linenum):
- "My editor hook calls the jed editor directly."
- print "Calling my own editor, jed ..."
- if os.system('jed +%d %s' % (linenum,filename)) != 0:
- raise TryNext()
-
- def load_ipython_extension(ip):
- ip.set_hook('editor', calljed)
-
-"""
-
-#*****************************************************************************
-# Copyright (C) 2005 Fernando Perez. <fperez@colorado.edu>
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#*****************************************************************************
-
-import os
-import subprocess
+"""Hooks for IPython.
+
+In Python, it is possible to overwrite any method of any object if you really
+want to. But IPython exposes a few 'hooks', methods which are *designed* to
+be overwritten by users for customization purposes. This module defines the
+default versions of all such hooks, which get used by IPython if not
+overridden by the user.
+
+Hooks are simple functions, but they should be declared with ``self`` as their
+first argument, because when activated they are registered into IPython as
+instance methods. The self argument will be the IPython running instance
+itself, so hooks have full access to the entire IPython object.
+
+If you wish to define a new hook and activate it, you can make an :doc:`extension
+</config/extensions/index>` or a :ref:`startup script <startup_files>`. For
+example, you could use a startup file like this::
+
+ import os
+
+ def calljed(self,filename, linenum):
+ "My editor hook calls the jed editor directly."
+ print "Calling my own editor, jed ..."
+ if os.system('jed +%d %s' % (linenum,filename)) != 0:
+ raise TryNext()
+
+ def load_ipython_extension(ip):
+ ip.set_hook('editor', calljed)
+
+"""
+
+#*****************************************************************************
+# Copyright (C) 2005 Fernando Perez. <fperez@colorado.edu>
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#*****************************************************************************
+
+import os
+import subprocess
import warnings
-import sys
-
-from IPython.core.error import TryNext
-
-# List here all the default hooks. For now it's just the editor functions
-# but over time we'll move here all the public API for user-accessible things.
-
-__all__ = ['editor', 'fix_error_editor', 'synchronize_with_editor',
- 'shutdown_hook', 'late_startup_hook',
- 'show_in_pager','pre_prompt_hook',
- 'pre_run_code_hook', 'clipboard_get']
-
-deprecated = {'pre_run_code_hook': "a callback for the 'pre_execute' or 'pre_run_cell' event",
- 'late_startup_hook': "a callback for the 'shell_initialized' event",
- 'shutdown_hook': "the atexit module",
- }
-
-def editor(self, filename, linenum=None, wait=True):
- """Open the default editor at the given filename and linenumber.
-
- This is IPython's default editor hook, you can use it as an example to
- write your own modified one. To set your own editor function as the
- new editor hook, call ip.set_hook('editor',yourfunc)."""
-
- # IPython configures a default editor at startup by reading $EDITOR from
- # the environment, and falling back on vi (unix) or notepad (win32).
- editor = self.editor
-
- # marker for at which line to open the file (for existing objects)
- if linenum is None or editor=='notepad':
- linemark = ''
- else:
- linemark = '+%d' % int(linenum)
-
- # Enclose in quotes if necessary and legal
- if ' ' in editor and os.path.isfile(editor) and editor[0] != '"':
- editor = '"%s"' % editor
-
- # Call the actual editor
- proc = subprocess.Popen('%s %s %s' % (editor, linemark, filename),
- shell=True)
- if wait and proc.wait() != 0:
- raise TryNext()
-
-import tempfile
-def fix_error_editor(self,filename,linenum,column,msg):
+import sys
+
+from IPython.core.error import TryNext
+
+# List here all the default hooks. For now it's just the editor functions
+# but over time we'll move here all the public API for user-accessible things.
+
+__all__ = ['editor', 'fix_error_editor', 'synchronize_with_editor',
+ 'shutdown_hook', 'late_startup_hook',
+ 'show_in_pager','pre_prompt_hook',
+ 'pre_run_code_hook', 'clipboard_get']
+
+deprecated = {'pre_run_code_hook': "a callback for the 'pre_execute' or 'pre_run_cell' event",
+ 'late_startup_hook': "a callback for the 'shell_initialized' event",
+ 'shutdown_hook': "the atexit module",
+ }
+
+def editor(self, filename, linenum=None, wait=True):
+ """Open the default editor at the given filename and linenumber.
+
+ This is IPython's default editor hook, you can use it as an example to
+ write your own modified one. To set your own editor function as the
+ new editor hook, call ip.set_hook('editor',yourfunc)."""
+
+ # IPython configures a default editor at startup by reading $EDITOR from
+ # the environment, and falling back on vi (unix) or notepad (win32).
+ editor = self.editor
+
+ # marker for at which line to open the file (for existing objects)
+ if linenum is None or editor=='notepad':
+ linemark = ''
+ else:
+ linemark = '+%d' % int(linenum)
+
+ # Enclose in quotes if necessary and legal
+ if ' ' in editor and os.path.isfile(editor) and editor[0] != '"':
+ editor = '"%s"' % editor
+
+ # Call the actual editor
+ proc = subprocess.Popen('%s %s %s' % (editor, linemark, filename),
+ shell=True)
+ if wait and proc.wait() != 0:
+ raise TryNext()
+
+import tempfile
+def fix_error_editor(self,filename,linenum,column,msg):
"""DEPRECATED
Open the editor at the given filename, linenumber, column and
- show an error message. This is used for correcting syntax errors.
- The current implementation only has special support for the VIM editor,
- and falls back on the 'editor' hook if VIM is not used.
-
+ show an error message. This is used for correcting syntax errors.
+ The current implementation only has special support for the VIM editor,
+ and falls back on the 'editor' hook if VIM is not used.
+
Call ip.set_hook('fix_error_editor',yourfunc) to use your own function,
- """
+ """
warnings.warn("""
`fix_error_editor` is pending deprecation as of IPython 5.0 and will be removed
@@ -102,125 +102,125 @@ happend to use this function and still need it please make your voice heard on
the mailing list ipython-dev@python.org , or on the GitHub Issue tracker:
https://github.com/ipython/ipython/issues/9649 """, UserWarning)
- def vim_quickfix_file():
- t = tempfile.NamedTemporaryFile()
- t.write('%s:%d:%d:%s\n' % (filename,linenum,column,msg))
- t.flush()
- return t
- if os.path.basename(self.editor) != 'vim':
- self.hooks.editor(filename,linenum)
- return
- t = vim_quickfix_file()
- try:
- if os.system('vim --cmd "set errorformat=%f:%l:%c:%m" -q ' + t.name):
- raise TryNext()
- finally:
- t.close()
-
-
-def synchronize_with_editor(self, filename, linenum, column):
- pass
-
-
-class CommandChainDispatcher:
- """ Dispatch calls to a chain of commands until some func can handle it
-
- Usage: instantiate, execute "add" to add commands (with optional
- priority), execute normally via f() calling mechanism.
-
- """
- def __init__(self,commands=None):
- if commands is None:
- self.chain = []
- else:
- self.chain = commands
-
-
- def __call__(self,*args, **kw):
- """ Command chain is called just like normal func.
-
- This will call all funcs in chain with the same args as were given to
- this function, and return the result of first func that didn't raise
- TryNext"""
- last_exc = TryNext()
- for prio,cmd in self.chain:
- #print "prio",prio,"cmd",cmd #dbg
- try:
- return cmd(*args, **kw)
- except TryNext as exc:
- last_exc = exc
- # if no function will accept it, raise TryNext up to the caller
- raise last_exc
-
- def __str__(self):
- return str(self.chain)
-
- def add(self, func, priority=0):
- """ Add a func to the cmd chain with given priority """
- self.chain.append((priority, func))
- self.chain.sort(key=lambda x: x[0])
-
- def __iter__(self):
- """ Return all objects in chain.
-
- Handy if the objects are not callable.
- """
- return iter(self.chain)
-
-
-def shutdown_hook(self):
- """ default shutdown hook
-
-    Typically, shutdown hooks should raise TryNext so all shutdown ops are done
- """
-
- #print "default shutdown hook ok" # dbg
- return
-
-
-def late_startup_hook(self):
- """ Executed after ipython has been constructed and configured
-
- """
- #print "default startup hook ok" # dbg
-
-
-def show_in_pager(self, data, start, screen_lines):
- """ Run a string through pager """
- # raising TryNext here will use the default paging functionality
- raise TryNext
-
-
-def pre_prompt_hook(self):
- """ Run before displaying the next prompt
-
- Use this e.g. to display output from asynchronous operations (in order
- to not mess up text entry)
- """
-
- return None
-
-
-def pre_run_code_hook(self):
- """ Executed before running the (prefiltered) code in IPython """
- return None
-
-
-def clipboard_get(self):
- """ Get text from the clipboard.
- """
- from IPython.lib.clipboard import (
- osx_clipboard_get, tkinter_clipboard_get,
- win32_clipboard_get
- )
- if sys.platform == 'win32':
- chain = [win32_clipboard_get, tkinter_clipboard_get]
- elif sys.platform == 'darwin':
- chain = [osx_clipboard_get, tkinter_clipboard_get]
- else:
- chain = [tkinter_clipboard_get]
- dispatcher = CommandChainDispatcher()
- for func in chain:
- dispatcher.add(func)
- text = dispatcher()
- return text
+ def vim_quickfix_file():
+ t = tempfile.NamedTemporaryFile()
+ t.write('%s:%d:%d:%s\n' % (filename,linenum,column,msg))
+ t.flush()
+ return t
+ if os.path.basename(self.editor) != 'vim':
+ self.hooks.editor(filename,linenum)
+ return
+ t = vim_quickfix_file()
+ try:
+ if os.system('vim --cmd "set errorformat=%f:%l:%c:%m" -q ' + t.name):
+ raise TryNext()
+ finally:
+ t.close()
+
+
+def synchronize_with_editor(self, filename, linenum, column):
+ pass
+
+
+class CommandChainDispatcher:
+ """ Dispatch calls to a chain of commands until some func can handle it
+
+ Usage: instantiate, execute "add" to add commands (with optional
+ priority), execute normally via f() calling mechanism.
+
+ """
+ def __init__(self,commands=None):
+ if commands is None:
+ self.chain = []
+ else:
+ self.chain = commands
+
+
+ def __call__(self,*args, **kw):
+ """ Command chain is called just like normal func.
+
+ This will call all funcs in chain with the same args as were given to
+ this function, and return the result of first func that didn't raise
+ TryNext"""
+ last_exc = TryNext()
+ for prio,cmd in self.chain:
+ #print "prio",prio,"cmd",cmd #dbg
+ try:
+ return cmd(*args, **kw)
+ except TryNext as exc:
+ last_exc = exc
+ # if no function will accept it, raise TryNext up to the caller
+ raise last_exc
+
+ def __str__(self):
+ return str(self.chain)
+
+ def add(self, func, priority=0):
+ """ Add a func to the cmd chain with given priority """
+ self.chain.append((priority, func))
+ self.chain.sort(key=lambda x: x[0])
+
+ def __iter__(self):
+ """ Return all objects in chain.
+
+ Handy if the objects are not callable.
+ """
+ return iter(self.chain)
+
+
+def shutdown_hook(self):
+ """ default shutdown hook
+
+    Typically, shutdown hooks should raise TryNext so all shutdown ops are done
+ """
+
+ #print "default shutdown hook ok" # dbg
+ return
+
+
+def late_startup_hook(self):
+ """ Executed after ipython has been constructed and configured
+
+ """
+ #print "default startup hook ok" # dbg
+
+
+def show_in_pager(self, data, start, screen_lines):
+ """ Run a string through pager """
+ # raising TryNext here will use the default paging functionality
+ raise TryNext
+
+
+def pre_prompt_hook(self):
+ """ Run before displaying the next prompt
+
+ Use this e.g. to display output from asynchronous operations (in order
+ to not mess up text entry)
+ """
+
+ return None
+
+
+def pre_run_code_hook(self):
+ """ Executed before running the (prefiltered) code in IPython """
+ return None
+
+
+def clipboard_get(self):
+ """ Get text from the clipboard.
+ """
+ from IPython.lib.clipboard import (
+ osx_clipboard_get, tkinter_clipboard_get,
+ win32_clipboard_get
+ )
+ if sys.platform == 'win32':
+ chain = [win32_clipboard_get, tkinter_clipboard_get]
+ elif sys.platform == 'darwin':
+ chain = [osx_clipboard_get, tkinter_clipboard_get]
+ else:
+ chain = [tkinter_clipboard_get]
+ dispatcher = CommandChainDispatcher()
+ for func in chain:
+ dispatcher.add(func)
+ text = dispatcher()
+ return text
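
clipboard_get shows the intended dispatch pattern: build a CommandChainDispatcher, add backends, and let TryNext cascade to the next candidate. A minimal sketch with made-up handlers, assuming IPython is importable:

    from IPython.core.error import TryNext
    from IPython.core.hooks import CommandChainDispatcher

    def primary(text):
        raise TryNext()             # pretend this backend is unavailable

    def fallback(text):
        return text.upper()

    dispatcher = CommandChainDispatcher()
    dispatcher.add(primary, priority=0)     # lower priority number runs first
    dispatcher.add(fallback, priority=10)
    print(dispatcher("ok"))                 # "OK", produced by the fallback
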
diff --git a/contrib/python/ipython/py2/IPython/core/inputsplitter.py b/contrib/python/ipython/py2/IPython/core/inputsplitter.py
index 2c7125f88d..ac14747d69 100644
--- a/contrib/python/ipython/py2/IPython/core/inputsplitter.py
+++ b/contrib/python/ipython/py2/IPython/core/inputsplitter.py
@@ -1,681 +1,681 @@
-"""Input handling and transformation machinery.
-
-The first class in this module, :class:`InputSplitter`, is designed to tell when
-input from a line-oriented frontend is complete and should be executed, and when
-the user should be prompted for another line of code instead. The name 'input
-splitter' is largely for historical reasons.
-
-A companion, :class:`IPythonInputSplitter`, provides the same functionality but
-with full support for the extended IPython syntax (magics, system calls, etc).
-The code to actually do these transformations is in :mod:`IPython.core.inputtransformer`.
-:class:`IPythonInputSplitter` feeds the raw code to the transformers in order
-and stores the results.
-
-For more details, see the class docstrings below.
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-import ast
-import codeop
-import re
-import sys
-import warnings
-
-from IPython.utils.py3compat import cast_unicode
-from IPython.core.inputtransformer import (leading_indent,
- classic_prompt,
- ipy_prompt,
- strip_encoding_cookie,
- cellmagic,
- assemble_logical_lines,
- help_end,
- escaped_commands,
- assign_from_magic,
- assign_from_system,
- assemble_python_lines,
- )
-
-# These are available in this module for backwards compatibility.
-from IPython.core.inputtransformer import (ESC_SHELL, ESC_SH_CAP, ESC_HELP,
- ESC_HELP2, ESC_MAGIC, ESC_MAGIC2,
- ESC_QUOTE, ESC_QUOTE2, ESC_PAREN, ESC_SEQUENCES)
-
-#-----------------------------------------------------------------------------
-# Utilities
-#-----------------------------------------------------------------------------
-
-# FIXME: These are general-purpose utilities that later can be moved to the
-# general ward. Kept here for now because we're being very strict about test
-# coverage with this code, and this lets us ensure that we keep 100% coverage
-# while developing.
-
-# compiled regexps for autoindent management
-dedent_re = re.compile('|'.join([
- r'^\s+raise(\s.*)?$', # raise statement (+ space + other stuff, maybe)
- r'^\s+raise\([^\)]*\).*$', # wacky raise with immediate open paren
- r'^\s+return(\s.*)?$', # normal return (+ space + other stuff, maybe)
- r'^\s+return\([^\)]*\).*$', # wacky return with immediate open paren
- r'^\s+pass\s*$', # pass (optionally followed by trailing spaces)
- r'^\s+break\s*$', # break (optionally followed by trailing spaces)
- r'^\s+continue\s*$', # continue (optionally followed by trailing spaces)
-]))
-ini_spaces_re = re.compile(r'^([ \t\r\f\v]+)')
-
-# regexp to match pure comment lines so we don't accidentally insert 'if 1:'
-# before pure comments
-comment_line_re = re.compile('^\s*\#')
-
-
-def num_ini_spaces(s):
- """Return the number of initial spaces in a string.
-
- Note that tabs are counted as a single space. For now, we do *not* support
- mixing of tabs and spaces in the user's input.
-
- Parameters
- ----------
- s : string
-
- Returns
- -------
- n : int
- """
-
- ini_spaces = ini_spaces_re.match(s)
- if ini_spaces:
- return ini_spaces.end()
- else:
- return 0
-
-def last_blank(src):
- """Determine if the input source ends in a blank.
-
- A blank is either a newline or a line consisting of whitespace.
-
- Parameters
- ----------
- src : string
- A single or multiline string.
- """
- if not src: return False
- ll = src.splitlines()[-1]
- return (ll == '') or ll.isspace()
-
-
-last_two_blanks_re = re.compile(r'\n\s*\n\s*$', re.MULTILINE)
-last_two_blanks_re2 = re.compile(r'.+\n\s*\n\s+$', re.MULTILINE)
-
-def last_two_blanks(src):
- """Determine if the input source ends in two blanks.
-
- A blank is either a newline or a line consisting of whitespace.
-
- Parameters
- ----------
- src : string
- A single or multiline string.
- """
- if not src: return False
- # The logic here is tricky: I couldn't get a regexp to work and pass all
- # the tests, so I took a different approach: split the source by lines,
- # grab the last two and prepend '###\n' as a stand-in for whatever was in
- # the body before the last two lines. Then, with that structure, it's
- # possible to analyze with two regexps. Not the most elegant solution, but
- # it works. If anyone tries to change this logic, make sure to validate
- # the whole test suite first!
- new_src = '\n'.join(['###\n'] + src.splitlines()[-2:])
- return (bool(last_two_blanks_re.match(new_src)) or
- bool(last_two_blanks_re2.match(new_src)) )
-
-
-def remove_comments(src):
- """Remove all comments from input source.
-
- Note: comments are NOT recognized inside of strings!
-
- Parameters
- ----------
- src : string
- A single or multiline input string.
-
- Returns
- -------
- String with all Python comments removed.
- """
-
- return re.sub('#.*', '', src)
-
-
-def get_input_encoding():
- """Return the default standard input encoding.
-
- If sys.stdin has no encoding, 'ascii' is returned."""
- # There are strange environments for which sys.stdin.encoding is None. We
- # ensure that a valid encoding is returned.
- encoding = getattr(sys.stdin, 'encoding', None)
- if encoding is None:
- encoding = 'ascii'
- return encoding
-
-#-----------------------------------------------------------------------------
-# Classes and functions for normal Python syntax handling
-#-----------------------------------------------------------------------------
-
-class InputSplitter(object):
- r"""An object that can accumulate lines of Python source before execution.
-
- This object is designed to be fed python source line-by-line, using
- :meth:`push`. It will return on each push whether the currently pushed
- code could be executed already. In addition, it provides a method called
- :meth:`push_accepts_more` that can be used to query whether more input
- can be pushed into a single interactive block.
-
- This is a simple example of how an interactive terminal-based client can use
- this tool::
-
- isp = InputSplitter()
- while isp.push_accepts_more():
- indent = ' '*isp.indent_spaces
- prompt = '>>> ' + indent
- line = indent + raw_input(prompt)
- isp.push(line)
- print 'Input source was:\n', isp.source_reset(),
- """
- # Number of spaces of indentation computed from input that has been pushed
-    # so far. This is the attribute callers should query to get the current
- # indentation level, in order to provide auto-indent facilities.
- indent_spaces = 0
- # String, indicating the default input encoding. It is computed by default
- # at initialization time via get_input_encoding(), but it can be reset by a
- # client with specific knowledge of the encoding.
- encoding = ''
- # String where the current full source input is stored, properly encoded.
- # Reading this attribute is the normal way of querying the currently pushed
- # source code, that has been properly encoded.
- source = ''
- # Code object corresponding to the current source. It is automatically
- # synced to the source, so it can be queried at any time to obtain the code
- # object; it will be None if the source doesn't compile to valid Python.
- code = None
-
- # Private attributes
-
- # List with lines of input accumulated so far
- _buffer = None
- # Command compiler
- _compile = None
- # Mark when input has changed indentation all the way back to flush-left
- _full_dedent = False
- # Boolean indicating whether the current block is complete
- _is_complete = None
- # Boolean indicating whether the current block has an unrecoverable syntax error
- _is_invalid = False
-
- def __init__(self):
- """Create a new InputSplitter instance.
- """
- self._buffer = []
- self._compile = codeop.CommandCompiler()
- self.encoding = get_input_encoding()
-
- def reset(self):
- """Reset the input buffer and associated state."""
- self.indent_spaces = 0
- self._buffer[:] = []
- self.source = ''
- self.code = None
- self._is_complete = False
- self._is_invalid = False
- self._full_dedent = False
-
- def source_reset(self):
- """Return the input source and perform a full reset.
- """
- out = self.source
- self.reset()
- return out
-
- def check_complete(self, source):
- """Return whether a block of code is ready to execute, or should be continued
-
- This is a non-stateful API, and will reset the state of this InputSplitter.
-
- Parameters
- ----------
- source : string
- Python input code, which can be multiline.
-
- Returns
- -------
- status : str
- One of 'complete', 'incomplete', or 'invalid' if source is not a
- prefix of valid code.
- indent_spaces : int or None
- The number of spaces by which to indent the next line of code. If
- status is not 'incomplete', this is None.
- """
- self.reset()
- try:
- self.push(source)
- except SyntaxError:
- # Transformers in IPythonInputSplitter can raise SyntaxError,
- # which push() will not catch.
- return 'invalid', None
- else:
- if self._is_invalid:
- return 'invalid', None
- elif self.push_accepts_more():
- return 'incomplete', self.indent_spaces
- else:
- return 'complete', None
- finally:
- self.reset()
-
- def push(self, lines):
- """Push one or more lines of input.
-
- This stores the given lines and returns a status code indicating
- whether the code forms a complete Python block or not.
-
- Any exceptions generated in compilation are swallowed, but if an
- exception was produced, the method returns True.
-
- Parameters
- ----------
- lines : string
- One or more lines of Python input.
-
- Returns
- -------
- is_complete : boolean
- True if the current input source (the result of the current input
- plus prior inputs) forms a complete Python execution block. Note that
- this value is also stored as a private attribute (``_is_complete``), so it
- can be queried at any time.
- """
- self._store(lines)
- source = self.source
-
- # Before calling _compile(), reset the code object to None so that if an
- # exception is raised in compilation, we don't mislead by having
- # inconsistent code/source attributes.
- self.code, self._is_complete = None, None
- self._is_invalid = False
-
- # Honor termination lines properly
- if source.endswith('\\\n'):
- return False
-
- self._update_indent(lines)
- try:
- with warnings.catch_warnings():
- warnings.simplefilter('error', SyntaxWarning)
- self.code = self._compile(source, symbol="exec")
- # Invalid syntax can produce any of a number of different errors from
- # inside the compiler, so we have to catch them all. Syntax errors
- # immediately produce a 'ready' block, so the invalid Python can be
- # sent to the kernel for evaluation with possible ipython
- # special-syntax conversion.
- except (SyntaxError, OverflowError, ValueError, TypeError,
- MemoryError, SyntaxWarning):
- self._is_complete = True
- self._is_invalid = True
- else:
- # Compilation didn't produce any exceptions (though it may not have
- # given a complete code object)
- self._is_complete = self.code is not None
-
- return self._is_complete
-
- def push_accepts_more(self):
- """Return whether a block of interactive input can accept more input.
-
- This method is meant to be used by line-oriented frontends, who need to
- guess whether a block is complete or not based solely on prior and
- current input lines. The InputSplitter considers it has a complete
- interactive block and will not accept more input when either:
-
- * A SyntaxError is raised
-
- * The code is complete and consists of a single line or a single
- non-compound statement
-
- * The code is complete and has a blank line at the end
-
- If the current input produces a syntax error, this method immediately
- returns False but does *not* raise the syntax error exception, as
- typically clients will want to send invalid syntax to an execution
- backend which might convert the invalid syntax into valid Python via
- one of the dynamic IPython mechanisms.
- """
-
- # With incomplete input, unconditionally accept more
- # A syntax error also sets _is_complete to True - see push()
- if not self._is_complete:
- #print("Not complete") # debug
- return True
-
- # The user can make any (complete) input execute by leaving a blank line
- last_line = self.source.splitlines()[-1]
- if (not last_line) or last_line.isspace():
- #print("Blank line") # debug
- return False
-
- # If there's just a single line or AST node, and we're flush left, as is
- # the case after a simple statement such as 'a=1', we want to execute it
- # straight away.
- if self.indent_spaces==0:
- if len(self.source.splitlines()) <= 1:
- return False
-
- try:
- code_ast = ast.parse(u''.join(self._buffer))
- except Exception:
- #print("Can't parse AST") # debug
- return False
- else:
- if len(code_ast.body) == 1 and \
- not hasattr(code_ast.body[0], 'body'):
- #print("Simple statement") # debug
- return False
-
- # General fallback - accept more code
- return True
-
- #------------------------------------------------------------------------
- # Private interface
- #------------------------------------------------------------------------
-
- def _find_indent(self, line):
- """Compute the new indentation level for a single line.
-
- Parameters
- ----------
- line : str
- A single new line of non-whitespace, non-comment Python input.
-
- Returns
- -------
- indent_spaces : int
- New value for the indent level (it may be equal to self.indent_spaces
- if indentation doesn't change.
-
- full_dedent : boolean
- Whether the new line causes a full flush-left dedent.
- """
- indent_spaces = self.indent_spaces
- full_dedent = self._full_dedent
-
- inisp = num_ini_spaces(line)
- if inisp < indent_spaces:
- indent_spaces = inisp
- if indent_spaces <= 0:
- #print 'Full dedent in text',self.source # dbg
- full_dedent = True
-
- if line.rstrip()[-1] == ':':
- indent_spaces += 4
- elif dedent_re.match(line):
- indent_spaces -= 4
- if indent_spaces <= 0:
- full_dedent = True
-
- # Safety
- if indent_spaces < 0:
- indent_spaces = 0
- #print 'safety' # dbg
-
- return indent_spaces, full_dedent
-
- def _update_indent(self, lines):
- for line in remove_comments(lines).splitlines():
- if line and not line.isspace():
- self.indent_spaces, self._full_dedent = self._find_indent(line)
-
- def _store(self, lines, buffer=None, store='source'):
- """Store one or more lines of input.
-
- If input lines are not newline-terminated, a newline is automatically
- appended."""
-
- if buffer is None:
- buffer = self._buffer
-
- if lines.endswith('\n'):
- buffer.append(lines)
- else:
- buffer.append(lines+'\n')
- setattr(self, store, self._set_source(buffer))
-
- def _set_source(self, buffer):
- return u''.join(buffer)
-
-
-class IPythonInputSplitter(InputSplitter):
- """An input splitter that recognizes all of IPython's special syntax."""
-
- # String with raw, untransformed input.
- source_raw = ''
-
- # Flag to track when a transformer has stored input that it hasn't given
- # back yet.
- transformer_accumulating = False
-
- # Flag to track when assemble_python_lines has stored input that it hasn't
- # given back yet.
- within_python_line = False
-
- # Private attributes
-
- # List with lines of raw input accumulated so far.
- _buffer_raw = None
-
- def __init__(self, line_input_checker=True, physical_line_transforms=None,
- logical_line_transforms=None, python_line_transforms=None):
- super(IPythonInputSplitter, self).__init__()
- self._buffer_raw = []
- self._validate = True
-
- if physical_line_transforms is not None:
- self.physical_line_transforms = physical_line_transforms
- else:
- self.physical_line_transforms = [
- leading_indent(),
- classic_prompt(),
- ipy_prompt(),
- cellmagic(end_on_blank_line=line_input_checker),
- strip_encoding_cookie(),
- ]
-
- self.assemble_logical_lines = assemble_logical_lines()
- if logical_line_transforms is not None:
- self.logical_line_transforms = logical_line_transforms
- else:
- self.logical_line_transforms = [
- help_end(),
- escaped_commands(),
- assign_from_magic(),
- assign_from_system(),
- ]
-
- self.assemble_python_lines = assemble_python_lines()
- if python_line_transforms is not None:
- self.python_line_transforms = python_line_transforms
- else:
- # We don't use any of these at present
- self.python_line_transforms = []
-
- @property
- def transforms(self):
- "Quick access to all transformers."
- return self.physical_line_transforms + \
- [self.assemble_logical_lines] + self.logical_line_transforms + \
- [self.assemble_python_lines] + self.python_line_transforms
-
- @property
- def transforms_in_use(self):
- """Transformers, excluding logical line transformers if we're in a
- Python line."""
- t = self.physical_line_transforms[:]
- if not self.within_python_line:
- t += [self.assemble_logical_lines] + self.logical_line_transforms
- return t + [self.assemble_python_lines] + self.python_line_transforms
-
- def reset(self):
- """Reset the input buffer and associated state."""
- super(IPythonInputSplitter, self).reset()
- self._buffer_raw[:] = []
- self.source_raw = ''
- self.transformer_accumulating = False
- self.within_python_line = False
-
- for t in self.transforms:
- try:
- t.reset()
- except SyntaxError:
- # Nothing that calls reset() expects to handle transformer
- # errors
- pass
-
- def flush_transformers(self):
- def _flush(transform, outs):
- """yield transformed lines
-
- always strings, never None
-
- transform: the current transform
- outs: an iterable of previously transformed inputs.
- Each may be multiline, which will be passed
- one line at a time to transform.
- """
- for out in outs:
- for line in out.splitlines():
- # push one line at a time
- tmp = transform.push(line)
- if tmp is not None:
- yield tmp
-
- # reset the transform
- tmp = transform.reset()
- if tmp is not None:
- yield tmp
-
- out = []
- for t in self.transforms_in_use:
- out = _flush(t, out)
-
- out = list(out)
- if out:
- self._store('\n'.join(out))
-
- def raw_reset(self):
- """Return raw input only and perform a full reset.
- """
- out = self.source_raw
- self.reset()
- return out
-
- def source_reset(self):
- try:
- self.flush_transformers()
- return self.source
- finally:
- self.reset()
-
- def push_accepts_more(self):
- if self.transformer_accumulating:
- return True
- else:
- return super(IPythonInputSplitter, self).push_accepts_more()
-
- def transform_cell(self, cell):
- """Process and translate a cell of input.
- """
- self.reset()
- try:
- self.push(cell)
- self.flush_transformers()
- return self.source
- finally:
- self.reset()
-
- def push(self, lines):
- """Push one or more lines of IPython input.
-
- This stores the given lines and returns a status code indicating
- whether the code forms a complete Python block or not, after processing
- all input lines for special IPython syntax.
-
- Any exceptions generated in compilation are swallowed, but if an
- exception was produced, the method returns True.
-
- Parameters
- ----------
- lines : string
- One or more lines of Python input.
-
- Returns
- -------
- is_complete : boolean
- True if the current input source (the result of the current input
- plus prior inputs) forms a complete Python execution block. Note that
- this value is also stored as a private attribute (_is_complete), so it
- can be queried at any time.
- """
-
- # We must ensure all input is pure unicode
- lines = cast_unicode(lines, self.encoding)
- # ''.splitlines() --> [], but we need to push the empty line to transformers
- lines_list = lines.splitlines()
- if not lines_list:
- lines_list = ['']
-
- # Store raw source before applying any transformations to it. Note
- # that this must be done *after* the reset() call that would otherwise
- # flush the buffer.
- self._store(lines, self._buffer_raw, 'source_raw')
-
- for line in lines_list:
- out = self.push_line(line)
-
- return out
-
- def push_line(self, line):
- buf = self._buffer
-
- def _accumulating(dbg):
- #print(dbg)
- self.transformer_accumulating = True
- return False
-
- for transformer in self.physical_line_transforms:
- line = transformer.push(line)
- if line is None:
- return _accumulating(transformer)
-
- if not self.within_python_line:
- line = self.assemble_logical_lines.push(line)
- if line is None:
- return _accumulating('acc logical line')
-
- for transformer in self.logical_line_transforms:
- line = transformer.push(line)
- if line is None:
- return _accumulating(transformer)
-
- line = self.assemble_python_lines.push(line)
- if line is None:
- self.within_python_line = True
- return _accumulating('acc python line')
- else:
- self.within_python_line = False
-
- for transformer in self.python_line_transforms:
- line = transformer.push(line)
- if line is None:
- return _accumulating(transformer)
-
- #print("transformers clear") #debug
- self.transformer_accumulating = False
- return super(IPythonInputSplitter, self).push(line)
+"""Input handling and transformation machinery.
+
+The first class in this module, :class:`InputSplitter`, is designed to tell when
+input from a line-oriented frontend is complete and should be executed, and when
+the user should be prompted for another line of code instead. The name 'input
+splitter' is largely for historical reasons.
+
+A companion, :class:`IPythonInputSplitter`, provides the same functionality but
+with full support for the extended IPython syntax (magics, system calls, etc).
+The code to actually do these transformations is in :mod:`IPython.core.inputtransformer`.
+:class:`IPythonInputSplitter` feeds the raw code to the transformers in order
+and stores the results.
+
+For more details, see the class docstrings below.
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+import ast
+import codeop
+import re
+import sys
+import warnings
+
+from IPython.utils.py3compat import cast_unicode
+from IPython.core.inputtransformer import (leading_indent,
+ classic_prompt,
+ ipy_prompt,
+ strip_encoding_cookie,
+ cellmagic,
+ assemble_logical_lines,
+ help_end,
+ escaped_commands,
+ assign_from_magic,
+ assign_from_system,
+ assemble_python_lines,
+ )
+
+# These are available in this module for backwards compatibility.
+from IPython.core.inputtransformer import (ESC_SHELL, ESC_SH_CAP, ESC_HELP,
+ ESC_HELP2, ESC_MAGIC, ESC_MAGIC2,
+ ESC_QUOTE, ESC_QUOTE2, ESC_PAREN, ESC_SEQUENCES)
+
+#-----------------------------------------------------------------------------
+# Utilities
+#-----------------------------------------------------------------------------
+
+# FIXME: These are general-purpose utilities that later can be moved to the
+# general ward. Kept here for now because we're being very strict about test
+# coverage with this code, and this lets us ensure that we keep 100% coverage
+# while developing.
+
+# compiled regexps for autoindent management
+dedent_re = re.compile('|'.join([
+ r'^\s+raise(\s.*)?$', # raise statement (+ space + other stuff, maybe)
+ r'^\s+raise\([^\)]*\).*$', # wacky raise with immediate open paren
+ r'^\s+return(\s.*)?$', # normal return (+ space + other stuff, maybe)
+ r'^\s+return\([^\)]*\).*$', # wacky return with immediate open paren
+ r'^\s+pass\s*$', # pass (optionally followed by trailing spaces)
+ r'^\s+break\s*$', # break (optionally followed by trailing spaces)
+ r'^\s+continue\s*$', # continue (optionally followed by trailing spaces)
+]))
+ini_spaces_re = re.compile(r'^([ \t\r\f\v]+)')
+
+# regexp to match pure comment lines so we don't accidentally insert 'if 1:'
+# before pure comments
+comment_line_re = re.compile('^\s*\#')
+
+
+def num_ini_spaces(s):
+ """Return the number of initial spaces in a string.
+
+ Note that tabs are counted as a single space. For now, we do *not* support
+ mixing of tabs and spaces in the user's input.
+
+ Parameters
+ ----------
+ s : string
+
+ Returns
+ -------
+ n : int
+ """
+
+ ini_spaces = ini_spaces_re.match(s)
+ if ini_spaces:
+ return ini_spaces.end()
+ else:
+ return 0
+
+def last_blank(src):
+ """Determine if the input source ends in a blank.
+
+ A blank is either a newline or a line consisting of whitespace.
+
+ Parameters
+ ----------
+ src : string
+ A single or multiline string.
+ """
+ if not src: return False
+ ll = src.splitlines()[-1]
+ return (ll == '') or ll.isspace()
+
+
+last_two_blanks_re = re.compile(r'\n\s*\n\s*$', re.MULTILINE)
+last_two_blanks_re2 = re.compile(r'.+\n\s*\n\s+$', re.MULTILINE)
+
+def last_two_blanks(src):
+ """Determine if the input source ends in two blanks.
+
+ A blank is either a newline or a line consisting of whitespace.
+
+ Parameters
+ ----------
+ src : string
+ A single or multiline string.
+ """
+ if not src: return False
+ # The logic here is tricky: I couldn't get a regexp to work and pass all
+ # the tests, so I took a different approach: split the source by lines,
+ # grab the last two and prepend '###\n' as a stand-in for whatever was in
+ # the body before the last two lines. Then, with that structure, it's
+ # possible to analyze with two regexps. Not the most elegant solution, but
+ # it works. If anyone tries to change this logic, make sure to validate
+ # the whole test suite first!
+ new_src = '\n'.join(['###\n'] + src.splitlines()[-2:])
+ return (bool(last_two_blanks_re.match(new_src)) or
+ bool(last_two_blanks_re2.match(new_src)) )
+
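An illustrative aside, not part of the patch: the expected behaviour of the helper above, given the splitlines-based logic it implements.

    from IPython.core.inputsplitter import last_two_blanks

    print(last_two_blanks('a = 1\n'))      # False
    print(last_two_blanks('a = 1\n\n'))    # False - only one trailing blank line
    print(last_two_blanks('a = 1\n\n\n'))  # True  - the source ends in two blank lines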
+
+def remove_comments(src):
+ """Remove all comments from input source.
+
+ Note: comments are NOT recognized inside of strings!
+
+ Parameters
+ ----------
+ src : string
+ A single or multiline input string.
+
+ Returns
+ -------
+ String with all Python comments removed.
+ """
+
+ return re.sub('#.*', '', src)
+
+
+def get_input_encoding():
+ """Return the default standard input encoding.
+
+ If sys.stdin has no encoding, 'ascii' is returned."""
+ # There are strange environments for which sys.stdin.encoding is None. We
+ # ensure that a valid encoding is returned.
+ encoding = getattr(sys.stdin, 'encoding', None)
+ if encoding is None:
+ encoding = 'ascii'
+ return encoding
+
+#-----------------------------------------------------------------------------
+# Classes and functions for normal Python syntax handling
+#-----------------------------------------------------------------------------
+
+class InputSplitter(object):
+ r"""An object that can accumulate lines of Python source before execution.
+
+ This object is designed to be fed python source line-by-line, using
+ :meth:`push`. It will return on each push whether the currently pushed
+ code could be executed already. In addition, it provides a method called
+ :meth:`push_accepts_more` that can be used to query whether more input
+ can be pushed into a single interactive block.
+
+ This is a simple example of how an interactive terminal-based client can use
+ this tool::
+
+ isp = InputSplitter()
+ while isp.push_accepts_more():
+ indent = ' '*isp.indent_spaces
+ prompt = '>>> ' + indent
+ line = indent + raw_input(prompt)
+ isp.push(line)
+ print 'Input source was:\n', isp.source_reset(),
+ """
+ # Number of spaces of indentation computed from input that has been pushed
+ # so far. This is the attribute callers should query to get the current
+ # indentation level, in order to provide auto-indent facilities.
+ indent_spaces = 0
+ # String, indicating the default input encoding. It is computed by default
+ # at initialization time via get_input_encoding(), but it can be reset by a
+ # client with specific knowledge of the encoding.
+ encoding = ''
+ # String where the current full source input is stored, properly encoded.
+ # Reading this attribute is the normal way of querying the currently pushed
+ # source code.
+ source = ''
+ # Code object corresponding to the current source. It is automatically
+ # synced to the source, so it can be queried at any time to obtain the code
+ # object; it will be None if the source doesn't compile to valid Python.
+ code = None
+
+ # Private attributes
+
+ # List with lines of input accumulated so far
+ _buffer = None
+ # Command compiler
+ _compile = None
+ # Mark when input has changed indentation all the way back to flush-left
+ _full_dedent = False
+ # Boolean indicating whether the current block is complete
+ _is_complete = None
+ # Boolean indicating whether the current block has an unrecoverable syntax error
+ _is_invalid = False
+
+ def __init__(self):
+ """Create a new InputSplitter instance.
+ """
+ self._buffer = []
+ self._compile = codeop.CommandCompiler()
+ self.encoding = get_input_encoding()
+
+ def reset(self):
+ """Reset the input buffer and associated state."""
+ self.indent_spaces = 0
+ self._buffer[:] = []
+ self.source = ''
+ self.code = None
+ self._is_complete = False
+ self._is_invalid = False
+ self._full_dedent = False
+
+ def source_reset(self):
+ """Return the input source and perform a full reset.
+ """
+ out = self.source
+ self.reset()
+ return out
+
+ def check_complete(self, source):
+ """Return whether a block of code is ready to execute, or should be continued
+
+ This is a non-stateful API, and will reset the state of this InputSplitter.
+
+ Parameters
+ ----------
+ source : string
+ Python input code, which can be multiline.
+
+ Returns
+ -------
+ status : str
+ One of 'complete', 'incomplete', or 'invalid' if source is not a
+ prefix of valid code.
+ indent_spaces : int or None
+ The number of spaces by which to indent the next line of code. If
+ status is not 'incomplete', this is None.
+ """
+ self.reset()
+ try:
+ self.push(source)
+ except SyntaxError:
+ # Transformers in IPythonInputSplitter can raise SyntaxError,
+ # which push() will not catch.
+ return 'invalid', None
+ else:
+ if self._is_invalid:
+ return 'invalid', None
+ elif self.push_accepts_more():
+ return 'incomplete', self.indent_spaces
+ else:
+ return 'complete', None
+ finally:
+ self.reset()
+
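An illustrative aside, not part of the patch: roughly how a frontend might call check_complete, with the return values implied by the docstring above noted in comments.

    from IPython.core.inputsplitter import InputSplitter

    isp = InputSplitter()
    print(isp.check_complete('a = 1'))               # ('complete', None)
    print(isp.check_complete('for i in range(3):'))  # ('incomplete', 4)
    print(isp.check_complete('a = )'))               # ('invalid', None)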
+ def push(self, lines):
+ """Push one or more lines of input.
+
+ This stores the given lines and returns a status code indicating
+ whether the code forms a complete Python block or not.
+
+ Any exceptions generated in compilation are swallowed, but if an
+ exception was produced, the method returns True.
+
+ Parameters
+ ----------
+ lines : string
+ One or more lines of Python input.
+
+ Returns
+ -------
+ is_complete : boolean
+ True if the current input source (the result of the current input
+ plus prior inputs) forms a complete Python execution block. Note that
+ this value is also stored as a private attribute (``_is_complete``), so it
+ can be queried at any time.
+ """
+ self._store(lines)
+ source = self.source
+
+ # Before calling _compile(), reset the code object to None so that if an
+ # exception is raised in compilation, we don't mislead by having
+ # inconsistent code/source attributes.
+ self.code, self._is_complete = None, None
+ self._is_invalid = False
+
+ # Honor termination lines properly
+ if source.endswith('\\\n'):
+ return False
+
+ self._update_indent(lines)
+ try:
+ with warnings.catch_warnings():
+ warnings.simplefilter('error', SyntaxWarning)
+ self.code = self._compile(source, symbol="exec")
+ # Invalid syntax can produce any of a number of different errors from
+ # inside the compiler, so we have to catch them all. Syntax errors
+ # immediately produce a 'ready' block, so the invalid Python can be
+ # sent to the kernel for evaluation with possible ipython
+ # special-syntax conversion.
+ except (SyntaxError, OverflowError, ValueError, TypeError,
+ MemoryError, SyntaxWarning):
+ self._is_complete = True
+ self._is_invalid = True
+ else:
+ # Compilation didn't produce any exceptions (though it may not have
+ # given a complete code object)
+ self._is_complete = self.code is not None
+
+ return self._is_complete
+
+ def push_accepts_more(self):
+ """Return whether a block of interactive input can accept more input.
+
+ This method is meant to be used by line-oriented frontends, which need to
+ guess whether a block is complete or not based solely on prior and
+ current input lines. The InputSplitter considers that it has a complete
+ interactive block and will not accept more input when either:
+
+ * A SyntaxError is raised
+
+ * The code is complete and consists of a single line or a single
+ non-compound statement
+
+ * The code is complete and has a blank line at the end
+
+ If the current input produces a syntax error, this method immediately
+ returns False but does *not* raise the syntax error exception, as
+ typically clients will want to send invalid syntax to an execution
+ backend which might convert the invalid syntax into valid Python via
+ one of the dynamic IPython mechanisms.
+ """
+
+ # With incomplete input, unconditionally accept more
+ # A syntax error also sets _is_complete to True - see push()
+ if not self._is_complete:
+ #print("Not complete") # debug
+ return True
+
+ # The user can make any (complete) input execute by leaving a blank line
+ last_line = self.source.splitlines()[-1]
+ if (not last_line) or last_line.isspace():
+ #print("Blank line") # debug
+ return False
+
+ # If there's just a single line or AST node, and we're flush left, as is
+ # the case after a simple statement such as 'a=1', we want to execute it
+ # straight away.
+ if self.indent_spaces==0:
+ if len(self.source.splitlines()) <= 1:
+ return False
+
+ try:
+ code_ast = ast.parse(u''.join(self._buffer))
+ except Exception:
+ #print("Can't parse AST") # debug
+ return False
+ else:
+ if len(code_ast.body) == 1 and \
+ not hasattr(code_ast.body[0], 'body'):
+ #print("Simple statement") # debug
+ return False
+
+ # General fallback - accept more code
+ return True
+
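An illustrative aside, not part of the patch: a sketch of how push, push_accepts_more and indent_spaces interact for a small indented block, with the expected values noted in comments.

    from IPython.core.inputsplitter import InputSplitter

    isp = InputSplitter()
    isp.push('if True:')       # False - the block is not complete yet
    isp.push_accepts_more()    # True
    isp.indent_spaces          # 4 - frontends use this for auto-indent
    isp.push('    x = 1')      # still False without a terminating blank line
    isp.push('')               # True - the blank line completes the block
    isp.push_accepts_more()    # False - ready to execute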
+ #------------------------------------------------------------------------
+ # Private interface
+ #------------------------------------------------------------------------
+
+ def _find_indent(self, line):
+ """Compute the new indentation level for a single line.
+
+ Parameters
+ ----------
+ line : str
+ A single new line of non-whitespace, non-comment Python input.
+
+ Returns
+ -------
+ indent_spaces : int
+ New value for the indent level (it may be equal to self.indent_spaces
+ if indentation doesn't change).
+
+ full_dedent : boolean
+ Whether the new line causes a full flush-left dedent.
+ """
+ indent_spaces = self.indent_spaces
+ full_dedent = self._full_dedent
+
+ inisp = num_ini_spaces(line)
+ if inisp < indent_spaces:
+ indent_spaces = inisp
+ if indent_spaces <= 0:
+ #print 'Full dedent in text',self.source # dbg
+ full_dedent = True
+
+ if line.rstrip()[-1] == ':':
+ indent_spaces += 4
+ elif dedent_re.match(line):
+ indent_spaces -= 4
+ if indent_spaces <= 0:
+ full_dedent = True
+
+ # Safety
+ if indent_spaces < 0:
+ indent_spaces = 0
+ #print 'safety' # dbg
+
+ return indent_spaces, full_dedent
+
+ def _update_indent(self, lines):
+ for line in remove_comments(lines).splitlines():
+ if line and not line.isspace():
+ self.indent_spaces, self._full_dedent = self._find_indent(line)
+
+ def _store(self, lines, buffer=None, store='source'):
+ """Store one or more lines of input.
+
+ If input lines are not newline-terminated, a newline is automatically
+ appended."""
+
+ if buffer is None:
+ buffer = self._buffer
+
+ if lines.endswith('\n'):
+ buffer.append(lines)
+ else:
+ buffer.append(lines+'\n')
+ setattr(self, store, self._set_source(buffer))
+
+ def _set_source(self, buffer):
+ return u''.join(buffer)
+
+
+class IPythonInputSplitter(InputSplitter):
+ """An input splitter that recognizes all of IPython's special syntax."""
+
+ # String with raw, untransformed input.
+ source_raw = ''
+
+ # Flag to track when a transformer has stored input that it hasn't given
+ # back yet.
+ transformer_accumulating = False
+
+ # Flag to track when assemble_python_lines has stored input that it hasn't
+ # given back yet.
+ within_python_line = False
+
+ # Private attributes
+
+ # List with lines of raw input accumulated so far.
+ _buffer_raw = None
+
+ def __init__(self, line_input_checker=True, physical_line_transforms=None,
+ logical_line_transforms=None, python_line_transforms=None):
+ super(IPythonInputSplitter, self).__init__()
+ self._buffer_raw = []
+ self._validate = True
+
+ if physical_line_transforms is not None:
+ self.physical_line_transforms = physical_line_transforms
+ else:
+ self.physical_line_transforms = [
+ leading_indent(),
+ classic_prompt(),
+ ipy_prompt(),
+ cellmagic(end_on_blank_line=line_input_checker),
+ strip_encoding_cookie(),
+ ]
+
+ self.assemble_logical_lines = assemble_logical_lines()
+ if logical_line_transforms is not None:
+ self.logical_line_transforms = logical_line_transforms
+ else:
+ self.logical_line_transforms = [
+ help_end(),
+ escaped_commands(),
+ assign_from_magic(),
+ assign_from_system(),
+ ]
+
+ self.assemble_python_lines = assemble_python_lines()
+ if python_line_transforms is not None:
+ self.python_line_transforms = python_line_transforms
+ else:
+ # We don't use any of these at present
+ self.python_line_transforms = []
+
+ @property
+ def transforms(self):
+ "Quick access to all transformers."
+ return self.physical_line_transforms + \
+ [self.assemble_logical_lines] + self.logical_line_transforms + \
+ [self.assemble_python_lines] + self.python_line_transforms
+
+ @property
+ def transforms_in_use(self):
+ """Transformers, excluding logical line transformers if we're in a
+ Python line."""
+ t = self.physical_line_transforms[:]
+ if not self.within_python_line:
+ t += [self.assemble_logical_lines] + self.logical_line_transforms
+ return t + [self.assemble_python_lines] + self.python_line_transforms
+
+ def reset(self):
+ """Reset the input buffer and associated state."""
+ super(IPythonInputSplitter, self).reset()
+ self._buffer_raw[:] = []
+ self.source_raw = ''
+ self.transformer_accumulating = False
+ self.within_python_line = False
+
+ for t in self.transforms:
+ try:
+ t.reset()
+ except SyntaxError:
+ # Nothing that calls reset() expects to handle transformer
+ # errors
+ pass
+
+ def flush_transformers(self):
+ def _flush(transform, outs):
+ """yield transformed lines
+
+ always strings, never None
+
+ transform: the current transform
+ outs: an iterable of previously transformed inputs.
+ Each may be multiline, which will be passed
+ one line at a time to transform.
+ """
+ for out in outs:
+ for line in out.splitlines():
+ # push one line at a time
+ tmp = transform.push(line)
+ if tmp is not None:
+ yield tmp
+
+ # reset the transform
+ tmp = transform.reset()
+ if tmp is not None:
+ yield tmp
+
+ out = []
+ for t in self.transforms_in_use:
+ out = _flush(t, out)
+
+ out = list(out)
+ if out:
+ self._store('\n'.join(out))
+
+ def raw_reset(self):
+ """Return raw input only and perform a full reset.
+ """
+ out = self.source_raw
+ self.reset()
+ return out
+
+ def source_reset(self):
+ try:
+ self.flush_transformers()
+ return self.source
+ finally:
+ self.reset()
+
+ def push_accepts_more(self):
+ if self.transformer_accumulating:
+ return True
+ else:
+ return super(IPythonInputSplitter, self).push_accepts_more()
+
+ def transform_cell(self, cell):
+ """Process and translate a cell of input.
+ """
+ self.reset()
+ try:
+ self.push(cell)
+ self.flush_transformers()
+ return self.source
+ finally:
+ self.reset()
+
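An illustrative aside, not part of the patch: roughly what transform_cell produces for a few kinds of IPython-specific input; the exact output strings (for example the u'' prefixes and quoting) can vary between versions.

    from IPython.core.inputsplitter import IPythonInputSplitter

    isp = IPythonInputSplitter()
    isp.transform_cell('files = !ls\n')   # -> "files = get_ipython().getoutput(u'ls')\n"
    isp.transform_cell('%time 1 + 1\n')   # -> "get_ipython().magic(u'time 1 + 1')\n"
    isp.transform_cell('%%bash\necho hi\n')
    # -> "get_ipython().run_cell_magic(u'bash', u'', u'echo hi')\n"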
+ def push(self, lines):
+ """Push one or more lines of IPython input.
+
+ This stores the given lines and returns a status code indicating
+ whether the code forms a complete Python block or not, after processing
+ all input lines for special IPython syntax.
+
+ Any exceptions generated in compilation are swallowed, but if an
+ exception was produced, the method returns True.
+
+ Parameters
+ ----------
+ lines : string
+ One or more lines of Python input.
+
+ Returns
+ -------
+ is_complete : boolean
+ True if the current input source (the result of the current input
+ plus prior inputs) forms a complete Python execution block. Note that
+ this value is also stored as a private attribute (_is_complete), so it
+ can be queried at any time.
+ """
+
+ # We must ensure all input is pure unicode
+ lines = cast_unicode(lines, self.encoding)
+ # ''.splitlines() --> [], but we need to push the empty line to transformers
+ lines_list = lines.splitlines()
+ if not lines_list:
+ lines_list = ['']
+
+ # Store raw source before applying any transformations to it. Note
+ # that this must be done *after* the reset() call that would otherwise
+ # flush the buffer.
+ self._store(lines, self._buffer_raw, 'source_raw')
+
+ for line in lines_list:
+ out = self.push_line(line)
+
+ return out
+
+ def push_line(self, line):
+ buf = self._buffer
+
+ def _accumulating(dbg):
+ #print(dbg)
+ self.transformer_accumulating = True
+ return False
+
+ for transformer in self.physical_line_transforms:
+ line = transformer.push(line)
+ if line is None:
+ return _accumulating(transformer)
+
+ if not self.within_python_line:
+ line = self.assemble_logical_lines.push(line)
+ if line is None:
+ return _accumulating('acc logical line')
+
+ for transformer in self.logical_line_transforms:
+ line = transformer.push(line)
+ if line is None:
+ return _accumulating(transformer)
+
+ line = self.assemble_python_lines.push(line)
+ if line is None:
+ self.within_python_line = True
+ return _accumulating('acc python line')
+ else:
+ self.within_python_line = False
+
+ for transformer in self.python_line_transforms:
+ line = transformer.push(line)
+ if line is None:
+ return _accumulating(transformer)
+
+ #print("transformers clear") #debug
+ self.transformer_accumulating = False
+ return super(IPythonInputSplitter, self).push(line)
diff --git a/contrib/python/ipython/py2/IPython/core/inputtransformer.py b/contrib/python/ipython/py2/IPython/core/inputtransformer.py
index a67d93e1a4..3ba49b951d 100644
--- a/contrib/python/ipython/py2/IPython/core/inputtransformer.py
+++ b/contrib/python/ipython/py2/IPython/core/inputtransformer.py
@@ -1,555 +1,555 @@
-"""Input transformer classes to support IPython special syntax.
-
-This includes the machinery to recognise and transform ``%magic`` commands,
-``!system`` commands, ``help?`` querying, prompt stripping, and so forth.
-"""
-import abc
-import functools
-import re
-
-from IPython.core.splitinput import LineInfo
-from IPython.utils import tokenize2
-from IPython.utils.openpy import cookie_comment_re
-from IPython.utils.py3compat import with_metaclass, PY3
-from IPython.utils.tokenize2 import generate_tokens, untokenize, TokenError
-
-if PY3:
- from io import StringIO
-else:
- from StringIO import StringIO
-
-#-----------------------------------------------------------------------------
-# Globals
-#-----------------------------------------------------------------------------
-
-# The escape sequences that define the syntax transformations IPython will
-# apply to user input. These can NOT be just changed here: many regular
-# expressions and other parts of the code may use their hardcoded values, and
-# for all intents and purposes they constitute the 'IPython syntax', so they
-# should be considered fixed.
-
-ESC_SHELL = '!' # Send line to underlying system shell
-ESC_SH_CAP = '!!' # Send line to system shell and capture output
-ESC_HELP = '?' # Find information about object
-ESC_HELP2 = '??' # Find extra-detailed information about object
-ESC_MAGIC = '%' # Call magic function
-ESC_MAGIC2 = '%%' # Call cell-magic function
-ESC_QUOTE = ',' # Split args on whitespace, quote each as string and call
-ESC_QUOTE2 = ';' # Quote all args as a single string, call
-ESC_PAREN = '/' # Call first argument with rest of line as arguments
-
-ESC_SEQUENCES = [ESC_SHELL, ESC_SH_CAP, ESC_HELP ,\
- ESC_HELP2, ESC_MAGIC, ESC_MAGIC2,\
- ESC_QUOTE, ESC_QUOTE2, ESC_PAREN ]
-
-
-class InputTransformer(with_metaclass(abc.ABCMeta, object)):
- """Abstract base class for line-based input transformers."""
-
- @abc.abstractmethod
- def push(self, line):
- """Send a line of input to the transformer, returning the transformed
- input or None if the transformer is waiting for more input.
-
- Must be overridden by subclasses.
-
- Implementations may raise ``SyntaxError`` if the input is invalid. No
- other exceptions may be raised.
- """
- pass
-
- @abc.abstractmethod
- def reset(self):
- """Return, transformed, any lines that the transformer has accumulated,
- and reset its internal state.
-
- Must be overridden by subclasses.
- """
- pass
-
- @classmethod
- def wrap(cls, func):
- """Can be used by subclasses as a decorator, to return a factory that
- will allow instantiation with the decorated object.
- """
- @functools.wraps(func)
- def transformer_factory(**kwargs):
- return cls(func, **kwargs)
-
- return transformer_factory
-
-class StatelessInputTransformer(InputTransformer):
- """Wrapper for a stateless input transformer implemented as a function."""
- def __init__(self, func):
- self.func = func
-
- def __repr__(self):
- return "StatelessInputTransformer(func={0!r})".format(self.func)
-
- def push(self, line):
- """Send a line of input to the transformer, returning the
- transformed input."""
- return self.func(line)
-
- def reset(self):
- """No-op - exists for compatibility."""
- pass
-
-class CoroutineInputTransformer(InputTransformer):
- """Wrapper for an input transformer implemented as a coroutine."""
- def __init__(self, coro, **kwargs):
- # Prime it
- self.coro = coro(**kwargs)
- next(self.coro)
-
- def __repr__(self):
- return "CoroutineInputTransformer(coro={0!r})".format(self.coro)
-
- def push(self, line):
- """Send a line of input to the transformer, returning the
- transformed input or None if the transformer is waiting for more
- input.
- """
- return self.coro.send(line)
-
- def reset(self):
- """Return, transformed, any lines that the transformer has
- accumulated, and reset its internal state.
- """
- return self.coro.send(None)
-
-class TokenInputTransformer(InputTransformer):
- """Wrapper for a token-based input transformer.
-
- func should accept a list of tokens (5-tuples, see tokenize docs), and
- return an iterable which can be passed to tokenize.untokenize().
- """
- def __init__(self, func):
- self.func = func
+"""Input transformer classes to support IPython special syntax.
+
+This includes the machinery to recognise and transform ``%magic`` commands,
+``!system`` commands, ``help?`` querying, prompt stripping, and so forth.
+"""
+import abc
+import functools
+import re
+
+from IPython.core.splitinput import LineInfo
+from IPython.utils import tokenize2
+from IPython.utils.openpy import cookie_comment_re
+from IPython.utils.py3compat import with_metaclass, PY3
+from IPython.utils.tokenize2 import generate_tokens, untokenize, TokenError
+
+if PY3:
+ from io import StringIO
+else:
+ from StringIO import StringIO
+
+#-----------------------------------------------------------------------------
+# Globals
+#-----------------------------------------------------------------------------
+
+# The escape sequences that define the syntax transformations IPython will
+# apply to user input. These can NOT be just changed here: many regular
+# expressions and other parts of the code may use their hardcoded values, and
+# for all intents and purposes they constitute the 'IPython syntax', so they
+# should be considered fixed.
+
+ESC_SHELL = '!' # Send line to underlying system shell
+ESC_SH_CAP = '!!' # Send line to system shell and capture output
+ESC_HELP = '?' # Find information about object
+ESC_HELP2 = '??' # Find extra-detailed information about object
+ESC_MAGIC = '%' # Call magic function
+ESC_MAGIC2 = '%%' # Call cell-magic function
+ESC_QUOTE = ',' # Split args on whitespace, quote each as string and call
+ESC_QUOTE2 = ';' # Quote all args as a single string, call
+ESC_PAREN = '/' # Call first argument with rest of line as arguments
+
+ESC_SEQUENCES = [ESC_SHELL, ESC_SH_CAP, ESC_HELP ,\
+ ESC_HELP2, ESC_MAGIC, ESC_MAGIC2,\
+ ESC_QUOTE, ESC_QUOTE2, ESC_PAREN ]
+
+
+class InputTransformer(with_metaclass(abc.ABCMeta, object)):
+ """Abstract base class for line-based input transformers."""
+
+ @abc.abstractmethod
+ def push(self, line):
+ """Send a line of input to the transformer, returning the transformed
+ input or None if the transformer is waiting for more input.
+
+ Must be overridden by subclasses.
+
+ Implementations may raise ``SyntaxError`` if the input is invalid. No
+ other exceptions may be raised.
+ """
+ pass
+
+ @abc.abstractmethod
+ def reset(self):
+ """Return, transformed, any lines that the transformer has accumulated,
+ and reset its internal state.
+
+ Must be overridden by subclasses.
+ """
+ pass
+
+ @classmethod
+ def wrap(cls, func):
+ """Can be used by subclasses as a decorator, to return a factory that
+ will allow instantiation with the decorated object.
+ """
+ @functools.wraps(func)
+ def transformer_factory(**kwargs):
+ return cls(func, **kwargs)
+
+ return transformer_factory
+
+class StatelessInputTransformer(InputTransformer):
+ """Wrapper for a stateless input transformer implemented as a function."""
+ def __init__(self, func):
+ self.func = func
+
+ def __repr__(self):
+ return "StatelessInputTransformer(func={0!r})".format(self.func)
+
+ def push(self, line):
+ """Send a line of input to the transformer, returning the
+ transformed input."""
+ return self.func(line)
+
+ def reset(self):
+ """No-op - exists for compatibility."""
+ pass
+
+class CoroutineInputTransformer(InputTransformer):
+ """Wrapper for an input transformer implemented as a coroutine."""
+ def __init__(self, coro, **kwargs):
+ # Prime it
+ self.coro = coro(**kwargs)
+ next(self.coro)
+
+ def __repr__(self):
+ return "CoroutineInputTransformer(coro={0!r})".format(self.coro)
+
+ def push(self, line):
+ """Send a line of input to the transformer, returning the
+ transformed input or None if the transformer is waiting for more
+ input.
+ """
+ return self.coro.send(line)
+
+ def reset(self):
+ """Return, transformed, any lines that the transformer has
+ accumulated, and reset its internal state.
+ """
+ return self.coro.send(None)
+
+class TokenInputTransformer(InputTransformer):
+ """Wrapper for a token-based input transformer.
+
+ func should accept a list of tokens (5-tuples, see tokenize docs), and
+ return an iterable which can be passed to tokenize.untokenize().
+ """
+ def __init__(self, func):
+ self.func = func
self.buf = []
- self.reset_tokenizer()
+ self.reset_tokenizer()
- def reset_tokenizer(self):
+ def reset_tokenizer(self):
it = iter(self.buf)
nxt = it.__next__ if PY3 else it.next
self.tokenizer = generate_tokens(nxt)
- def push(self, line):
+ def push(self, line):
self.buf.append(line + '\n')
if all(l.isspace() for l in self.buf):
- return self.reset()
-
- tokens = []
- stop_at_NL = False
- try:
- for intok in self.tokenizer:
- tokens.append(intok)
- t = intok[0]
- if t == tokenize2.NEWLINE or (stop_at_NL and t == tokenize2.NL):
- # Stop before we try to pull a line we don't have yet
- break
- elif t == tokenize2.ERRORTOKEN:
- stop_at_NL = True
- except TokenError:
- # Multi-line statement - stop and try again with the next line
- self.reset_tokenizer()
- return None
-
- return self.output(tokens)
-
- def output(self, tokens):
+ return self.reset()
+
+ tokens = []
+ stop_at_NL = False
+ try:
+ for intok in self.tokenizer:
+ tokens.append(intok)
+ t = intok[0]
+ if t == tokenize2.NEWLINE or (stop_at_NL and t == tokenize2.NL):
+ # Stop before we try to pull a line we don't have yet
+ break
+ elif t == tokenize2.ERRORTOKEN:
+ stop_at_NL = True
+ except TokenError:
+ # Multi-line statement - stop and try again with the next line
+ self.reset_tokenizer()
+ return None
+
+ return self.output(tokens)
+
+ def output(self, tokens):
self.buf[:] = []
- self.reset_tokenizer()
- return untokenize(self.func(tokens)).rstrip('\n')
-
- def reset(self):
+ self.reset_tokenizer()
+ return untokenize(self.func(tokens)).rstrip('\n')
+
+ def reset(self):
l = ''.join(self.buf)
self.buf[:] = []
- self.reset_tokenizer()
- if l:
- return l.rstrip('\n')
-
-class assemble_python_lines(TokenInputTransformer):
- def __init__(self):
- super(assemble_python_lines, self).__init__(None)
-
- def output(self, tokens):
- return self.reset()
-
-@CoroutineInputTransformer.wrap
-def assemble_logical_lines():
- """Join lines following explicit line continuations (\)"""
- line = ''
- while True:
- line = (yield line)
- if not line or line.isspace():
- continue
-
- parts = []
- while line is not None:
- if line.endswith('\\') and (not has_comment(line)):
- parts.append(line[:-1])
- line = (yield None) # Get another line
- else:
- parts.append(line)
- break
-
- # Output
- line = ''.join(parts)
-
-# Utilities
-def _make_help_call(target, esc, lspace, next_input=None):
- """Prepares a pinfo(2)/psearch call from a target name and the escape
- (i.e. ? or ??)"""
- method = 'pinfo2' if esc == '??' \
- else 'psearch' if '*' in target \
- else 'pinfo'
- arg = " ".join([method, target])
- if next_input is None:
- return '%sget_ipython().magic(%r)' % (lspace, arg)
- else:
- return '%sget_ipython().set_next_input(%r);get_ipython().magic(%r)' % \
- (lspace, next_input, arg)
-
-# These define the transformations for the different escape characters.
-def _tr_system(line_info):
- "Translate lines escaped with: !"
- cmd = line_info.line.lstrip().lstrip(ESC_SHELL)
- return '%sget_ipython().system(%r)' % (line_info.pre, cmd)
-
-def _tr_system2(line_info):
- "Translate lines escaped with: !!"
- cmd = line_info.line.lstrip()[2:]
- return '%sget_ipython().getoutput(%r)' % (line_info.pre, cmd)
-
-def _tr_help(line_info):
- "Translate lines escaped with: ?/??"
- # A naked help line should just fire the intro help screen
- if not line_info.line[1:]:
- return 'get_ipython().show_usage()'
-
- return _make_help_call(line_info.ifun, line_info.esc, line_info.pre)
-
-def _tr_magic(line_info):
- "Translate lines escaped with: %"
- tpl = '%sget_ipython().magic(%r)'
- if line_info.line.startswith(ESC_MAGIC2):
- return line_info.line
- cmd = ' '.join([line_info.ifun, line_info.the_rest]).strip()
- return tpl % (line_info.pre, cmd)
-
-def _tr_quote(line_info):
- "Translate lines escaped with: ,"
- return '%s%s("%s")' % (line_info.pre, line_info.ifun,
- '", "'.join(line_info.the_rest.split()) )
-
-def _tr_quote2(line_info):
- "Translate lines escaped with: ;"
- return '%s%s("%s")' % (line_info.pre, line_info.ifun,
- line_info.the_rest)
-
-def _tr_paren(line_info):
- "Translate lines escaped with: /"
- return '%s%s(%s)' % (line_info.pre, line_info.ifun,
- ", ".join(line_info.the_rest.split()))
-
-tr = { ESC_SHELL : _tr_system,
- ESC_SH_CAP : _tr_system2,
- ESC_HELP : _tr_help,
- ESC_HELP2 : _tr_help,
- ESC_MAGIC : _tr_magic,
- ESC_QUOTE : _tr_quote,
- ESC_QUOTE2 : _tr_quote2,
- ESC_PAREN : _tr_paren }
-
-@StatelessInputTransformer.wrap
-def escaped_commands(line):
- """Transform escaped commands - %magic, !system, ?help + various autocalls.
- """
- if not line or line.isspace():
- return line
- lineinf = LineInfo(line)
- if lineinf.esc not in tr:
- return line
-
- return tr[lineinf.esc](lineinf)
-
-_initial_space_re = re.compile(r'\s*')
-
-_help_end_re = re.compile(r"""(%{0,2}
- [a-zA-Z_*][\w*]* # Variable name
- (\.[a-zA-Z_*][\w*]*)* # .etc.etc
- )
- (\?\??)$ # ? or ??
- """,
- re.VERBOSE)
-
-# Extra pseudotokens for multiline strings and data structures
-_MULTILINE_STRING = object()
-_MULTILINE_STRUCTURE = object()
-
-def _line_tokens(line):
- """Helper for has_comment and ends_in_comment_or_string."""
- readline = StringIO(line).readline
- toktypes = set()
- try:
- for t in generate_tokens(readline):
- toktypes.add(t[0])
- except TokenError as e:
- # There are only two cases where a TokenError is raised.
- if 'multi-line string' in e.args[0]:
- toktypes.add(_MULTILINE_STRING)
- else:
- toktypes.add(_MULTILINE_STRUCTURE)
- return toktypes
-
-def has_comment(src):
- """Indicate whether an input line has (i.e. ends in, or is) a comment.
-
- This uses tokenize, so it can distinguish comments from # inside strings.
-
- Parameters
- ----------
- src : string
- A single line input string.
-
- Returns
- -------
- comment : bool
- True if source has a comment.
- """
- return (tokenize2.COMMENT in _line_tokens(src))
-
-def ends_in_comment_or_string(src):
- """Indicates whether or not an input line ends in a comment or within
- a multiline string.
-
- Parameters
- ----------
- src : string
- A single line input string.
-
- Returns
- -------
- comment : bool
- True if source ends in a comment or multiline string.
- """
- toktypes = _line_tokens(src)
- return (tokenize2.COMMENT in toktypes) or (_MULTILINE_STRING in toktypes)
-
-
-@StatelessInputTransformer.wrap
-def help_end(line):
- """Translate lines with ?/?? at the end"""
- m = _help_end_re.search(line)
- if m is None or ends_in_comment_or_string(line):
- return line
- target = m.group(1)
- esc = m.group(3)
- lspace = _initial_space_re.match(line).group(0)
-
- # If we're mid-command, put it back on the next prompt for the user.
- next_input = line.rstrip('?') if line.strip() != m.group(0) else None
-
- return _make_help_call(target, esc, lspace, next_input)
-
-
-@CoroutineInputTransformer.wrap
-def cellmagic(end_on_blank_line=False):
- """Captures & transforms cell magics.
-
- After a cell magic is started, this stores up any lines it gets until it is
- reset (sent None).
- """
- tpl = 'get_ipython().run_cell_magic(%r, %r, %r)'
- cellmagic_help_re = re.compile('%%\w+\?')
- line = ''
- while True:
- line = (yield line)
- # consume leading empty lines
- while not line:
- line = (yield line)
-
- if not line.startswith(ESC_MAGIC2):
- # This isn't a cell magic, idle waiting for reset then start over
- while line is not None:
- line = (yield line)
- continue
-
- if cellmagic_help_re.match(line):
- # This case will be handled by help_end
- continue
-
- first = line
- body = []
- line = (yield None)
- while (line is not None) and \
- ((line.strip() != '') or not end_on_blank_line):
- body.append(line)
- line = (yield None)
-
- # Output
- magic_name, _, first = first.partition(' ')
- magic_name = magic_name.lstrip(ESC_MAGIC2)
- line = tpl % (magic_name, first, u'\n'.join(body))
-
-
-def _strip_prompts(prompt_re, initial_re=None, turnoff_re=None):
- """Remove matching input prompts from a block of input.
-
- Parameters
- ----------
- prompt_re : regular expression
- A regular expression matching any input prompt (including continuation)
- initial_re : regular expression, optional
- A regular expression matching only the initial prompt, but not continuation.
- If no initial expression is given, prompt_re will be used everywhere.
- Used mainly for plain Python prompts, where the continuation prompt
- ``...`` is a valid Python expression in Python 3, so shouldn't be stripped.
-
- If initial_re and prompt_re differ,
- only initial_re will be tested against the first line.
- If any prompt is found on the first two lines,
- prompts will be stripped from the rest of the block.
- """
- if initial_re is None:
- initial_re = prompt_re
- line = ''
- while True:
- line = (yield line)
-
- # First line of cell
- if line is None:
- continue
- out, n1 = initial_re.subn('', line, count=1)
- if turnoff_re and not n1:
- if turnoff_re.match(line):
- # We're in e.g. a cell magic; disable this transformer for
- # the rest of the cell.
- while line is not None:
- line = (yield line)
- continue
-
- line = (yield out)
-
- if line is None:
- continue
- # check for any prompt on the second line of the cell,
- # because people often copy from just after the first prompt,
- # so we might not see it in the first line.
- out, n2 = prompt_re.subn('', line, count=1)
- line = (yield out)
-
- if n1 or n2:
- # Found a prompt in the first two lines - check for it in
- # the rest of the cell as well.
- while line is not None:
- line = (yield prompt_re.sub('', line, count=1))
-
- else:
- # Prompts not in input - wait for reset
- while line is not None:
- line = (yield line)
-
-@CoroutineInputTransformer.wrap
-def classic_prompt():
- """Strip the >>>/... prompts of the Python interactive shell."""
- # FIXME: non-capturing version (?:...) usable?
- prompt_re = re.compile(r'^(>>>|\.\.\.)( |$)')
- initial_re = re.compile(r'^>>>( |$)')
- # Any %magic/!system is IPython syntax, so we needn't look for >>> prompts
- turnoff_re = re.compile(r'^[%!]')
- return _strip_prompts(prompt_re, initial_re, turnoff_re)
-
-@CoroutineInputTransformer.wrap
-def ipy_prompt():
- """Strip IPython's In [1]:/...: prompts."""
- # FIXME: non-capturing version (?:...) usable?
- prompt_re = re.compile(r'^(In \[\d+\]: |\s*\.{3,}: ?)')
- # Disable prompt stripping inside cell magics
- turnoff_re = re.compile(r'^%%')
- return _strip_prompts(prompt_re, turnoff_re=turnoff_re)
-
-
-@CoroutineInputTransformer.wrap
-def leading_indent():
- """Remove leading indentation.
-
- If the first line starts with spaces or tabs, the same whitespace will be
- removed from each following line until it is reset.
- """
- space_re = re.compile(r'^[ \t]+')
- line = ''
- while True:
- line = (yield line)
-
- if line is None:
- continue
-
- m = space_re.match(line)
- if m:
- space = m.group(0)
- while line is not None:
- if line.startswith(space):
- line = line[len(space):]
- line = (yield line)
- else:
- # No leading spaces - wait for reset
- while line is not None:
- line = (yield line)
-
-
-@CoroutineInputTransformer.wrap
-def strip_encoding_cookie():
- """Remove encoding comment if found in first two lines
-
- If the first or second line has the `# coding: utf-8` comment,
- it will be removed.
- """
- line = ''
- while True:
- line = (yield line)
- # check comment on first two lines
- for i in range(2):
- if line is None:
- break
- if cookie_comment_re.match(line):
- line = (yield "")
- else:
- line = (yield line)
-
- # no-op on the rest of the cell
- while line is not None:
- line = (yield line)
-
-_assign_pat = \
-r'''(?P<lhs>(\s*)
- ([\w\.]+) # Initial identifier
- (\s*,\s*
- \*?[\w\.]+)* # Further identifiers for unpacking
- \s*?,? # Trailing comma
- )
- \s*=\s*
-'''
-
-assign_system_re = re.compile(r'{}!\s*(?P<cmd>.*)'.format(_assign_pat), re.VERBOSE)
-assign_system_template = '%s = get_ipython().getoutput(%r)'
-@StatelessInputTransformer.wrap
-def assign_from_system(line):
- """Transform assignment from system commands (e.g. files = !ls)"""
- m = assign_system_re.match(line)
- if m is None:
- return line
-
- return assign_system_template % m.group('lhs', 'cmd')
-
-assign_magic_re = re.compile(r'{}%\s*(?P<cmd>.*)'.format(_assign_pat), re.VERBOSE)
-assign_magic_template = '%s = get_ipython().magic(%r)'
-@StatelessInputTransformer.wrap
-def assign_from_magic(line):
- """Transform assignment from magic commands (e.g. a = %who_ls)"""
- m = assign_magic_re.match(line)
- if m is None:
- return line
-
- return assign_magic_template % m.group('lhs', 'cmd')
+ self.reset_tokenizer()
+ if l:
+ return l.rstrip('\n')
+
+class assemble_python_lines(TokenInputTransformer):
+ def __init__(self):
+ super(assemble_python_lines, self).__init__(None)
+
+ def output(self, tokens):
+ return self.reset()
+
+@CoroutineInputTransformer.wrap
+def assemble_logical_lines():
+ """Join lines following explicit line continuations (\)"""
+ line = ''
+ while True:
+ line = (yield line)
+ if not line or line.isspace():
+ continue
+
+ parts = []
+ while line is not None:
+ if line.endswith('\\') and (not has_comment(line)):
+ parts.append(line[:-1])
+ line = (yield None) # Get another line
+ else:
+ parts.append(line)
+ break
+
+ # Output
+ line = ''.join(parts)
+
+# Utilities
+def _make_help_call(target, esc, lspace, next_input=None):
+ """Prepares a pinfo(2)/psearch call from a target name and the escape
+ (i.e. ? or ??)"""
+ method = 'pinfo2' if esc == '??' \
+ else 'psearch' if '*' in target \
+ else 'pinfo'
+ arg = " ".join([method, target])
+ if next_input is None:
+ return '%sget_ipython().magic(%r)' % (lspace, arg)
+ else:
+ return '%sget_ipython().set_next_input(%r);get_ipython().magic(%r)' % \
+ (lspace, next_input, arg)
+
+# These define the transformations for the different escape characters.
+def _tr_system(line_info):
+ "Translate lines escaped with: !"
+ cmd = line_info.line.lstrip().lstrip(ESC_SHELL)
+ return '%sget_ipython().system(%r)' % (line_info.pre, cmd)
+
+def _tr_system2(line_info):
+ "Translate lines escaped with: !!"
+ cmd = line_info.line.lstrip()[2:]
+ return '%sget_ipython().getoutput(%r)' % (line_info.pre, cmd)
+
+def _tr_help(line_info):
+ "Translate lines escaped with: ?/??"
+ # A naked help line should just fire the intro help screen
+ if not line_info.line[1:]:
+ return 'get_ipython().show_usage()'
+
+ return _make_help_call(line_info.ifun, line_info.esc, line_info.pre)
+
+def _tr_magic(line_info):
+ "Translate lines escaped with: %"
+ tpl = '%sget_ipython().magic(%r)'
+ if line_info.line.startswith(ESC_MAGIC2):
+ return line_info.line
+ cmd = ' '.join([line_info.ifun, line_info.the_rest]).strip()
+ return tpl % (line_info.pre, cmd)
+
+def _tr_quote(line_info):
+ "Translate lines escaped with: ,"
+ return '%s%s("%s")' % (line_info.pre, line_info.ifun,
+ '", "'.join(line_info.the_rest.split()) )
+
+def _tr_quote2(line_info):
+ "Translate lines escaped with: ;"
+ return '%s%s("%s")' % (line_info.pre, line_info.ifun,
+ line_info.the_rest)
+
+def _tr_paren(line_info):
+ "Translate lines escaped with: /"
+ return '%s%s(%s)' % (line_info.pre, line_info.ifun,
+ ", ".join(line_info.the_rest.split()))
+
+tr = { ESC_SHELL : _tr_system,
+ ESC_SH_CAP : _tr_system2,
+ ESC_HELP : _tr_help,
+ ESC_HELP2 : _tr_help,
+ ESC_MAGIC : _tr_magic,
+ ESC_QUOTE : _tr_quote,
+ ESC_QUOTE2 : _tr_quote2,
+ ESC_PAREN : _tr_paren }
+
+@StatelessInputTransformer.wrap
+def escaped_commands(line):
+ """Transform escaped commands - %magic, !system, ?help + various autocalls.
+ """
+ if not line or line.isspace():
+ return line
+ lineinf = LineInfo(line)
+ if lineinf.esc not in tr:
+ return line
+
+ return tr[lineinf.esc](lineinf)
+
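An illustrative aside, not part of the patch: how the escaped_commands transformer rewrites escape-prefixed lines (exact quoting of the generated calls depends on the input type).

    from IPython.core.inputtransformer import escaped_commands

    t = escaped_commands()
    t.push('!ls -l')   # -> "get_ipython().system('ls -l')"
    t.push('%pwd')     # -> "get_ipython().magic('pwd')"
    t.push('x = 1')    # unchanged - plain Python passes through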
+_initial_space_re = re.compile(r'\s*')
+
+_help_end_re = re.compile(r"""(%{0,2}
+ [a-zA-Z_*][\w*]* # Variable name
+ (\.[a-zA-Z_*][\w*]*)* # .etc.etc
+ )
+ (\?\??)$ # ? or ??
+ """,
+ re.VERBOSE)
+
+# Extra pseudotokens for multiline strings and data structures
+_MULTILINE_STRING = object()
+_MULTILINE_STRUCTURE = object()
+
+def _line_tokens(line):
+ """Helper for has_comment and ends_in_comment_or_string."""
+ readline = StringIO(line).readline
+ toktypes = set()
+ try:
+ for t in generate_tokens(readline):
+ toktypes.add(t[0])
+ except TokenError as e:
+ # There are only two cases where a TokenError is raised.
+ if 'multi-line string' in e.args[0]:
+ toktypes.add(_MULTILINE_STRING)
+ else:
+ toktypes.add(_MULTILINE_STRUCTURE)
+ return toktypes
+
+def has_comment(src):
+ """Indicate whether an input line has (i.e. ends in, or is) a comment.
+
+ This uses tokenize, so it can distinguish comments from # inside strings.
+
+ Parameters
+ ----------
+ src : string
+ A single line input string.
+
+ Returns
+ -------
+ comment : bool
+ True if source has a comment.
+ """
+ return (tokenize2.COMMENT in _line_tokens(src))
+
+def ends_in_comment_or_string(src):
+ """Indicates whether or not an input line ends in a comment or within
+ a multiline string.
+
+ Parameters
+ ----------
+ src : string
+ A single line input string.
+
+ Returns
+ -------
+ comment : bool
+ True if source ends in a comment or multiline string.
+ """
+ toktypes = _line_tokens(src)
+ return (tokenize2.COMMENT in toktypes) or (_MULTILINE_STRING in toktypes)
+
+
+@StatelessInputTransformer.wrap
+def help_end(line):
+ """Translate lines with ?/?? at the end"""
+ m = _help_end_re.search(line)
+ if m is None or ends_in_comment_or_string(line):
+ return line
+ target = m.group(1)
+ esc = m.group(3)
+ lspace = _initial_space_re.match(line).group(0)
+
+ # If we're mid-command, put it back on the next prompt for the user.
+ next_input = line.rstrip('?') if line.strip() != m.group(0) else None
+
+ return _make_help_call(target, esc, lspace, next_input)
+
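An illustrative aside, not part of the patch: help_end maps a trailing ? or ?? onto pinfo/pinfo2 magic calls (exact quoting varies).

    from IPython.core.inputtransformer import help_end

    h = help_end()
    h.push('len?')       # -> "get_ipython().magic('pinfo len')"
    h.push('os.path??')  # -> "get_ipython().magic('pinfo2 os.path')"
    h.push('x = 1')      # unchanged - no trailing ?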
+
+@CoroutineInputTransformer.wrap
+def cellmagic(end_on_blank_line=False):
+ """Captures & transforms cell magics.
+
+ After a cell magic is started, this stores up any lines it gets until it is
+ reset (sent None).
+ """
+ tpl = 'get_ipython().run_cell_magic(%r, %r, %r)'
+ cellmagic_help_re = re.compile('%%\w+\?')
+ line = ''
+ while True:
+ line = (yield line)
+ # consume leading empty lines
+ while not line:
+ line = (yield line)
+
+ if not line.startswith(ESC_MAGIC2):
+ # This isn't a cell magic, idle waiting for reset then start over
+ while line is not None:
+ line = (yield line)
+ continue
+
+ if cellmagic_help_re.match(line):
+ # This case will be handled by help_end
+ continue
+
+ first = line
+ body = []
+ line = (yield None)
+ while (line is not None) and \
+ ((line.strip() != '') or not end_on_blank_line):
+ body.append(line)
+ line = (yield None)
+
+ # Output
+ magic_name, _, first = first.partition(' ')
+ magic_name = magic_name.lstrip(ESC_MAGIC2)
+ line = tpl % (magic_name, first, u'\n'.join(body))
+
+
+def _strip_prompts(prompt_re, initial_re=None, turnoff_re=None):
+ """Remove matching input prompts from a block of input.
+
+ Parameters
+ ----------
+ prompt_re : regular expression
+ A regular expression matching any input prompt (including continuation)
+ initial_re : regular expression, optional
+ A regular expression matching only the initial prompt, but not continuation.
+ If no initial expression is given, prompt_re will be used everywhere.
+ Used mainly for plain Python prompts, where the continuation prompt
+ ``...`` is a valid Python expression in Python 3, so shouldn't be stripped.
+
+ If initial_re and prompt_re differ,
+ only initial_re will be tested against the first line.
+ If any prompt is found on the first two lines,
+ prompts will be stripped from the rest of the block.
+ """
+ if initial_re is None:
+ initial_re = prompt_re
+ line = ''
+ while True:
+ line = (yield line)
+
+ # First line of cell
+ if line is None:
+ continue
+ out, n1 = initial_re.subn('', line, count=1)
+ if turnoff_re and not n1:
+ if turnoff_re.match(line):
+ # We're in e.g. a cell magic; disable this transformer for
+ # the rest of the cell.
+ while line is not None:
+ line = (yield line)
+ continue
+
+ line = (yield out)
+
+ if line is None:
+ continue
+ # check for any prompt on the second line of the cell,
+ # because people often copy from just after the first prompt,
+ # so we might not see it in the first line.
+ out, n2 = prompt_re.subn('', line, count=1)
+ line = (yield out)
+
+ if n1 or n2:
+ # Found a prompt in the first two lines - check for it in
+ # the rest of the cell as well.
+ while line is not None:
+ line = (yield prompt_re.sub('', line, count=1))
+
+ else:
+ # Prompts not in input - wait for reset
+ while line is not None:
+ line = (yield line)
+
+@CoroutineInputTransformer.wrap
+def classic_prompt():
+ """Strip the >>>/... prompts of the Python interactive shell."""
+ # FIXME: non-capturing version (?:...) usable?
+ prompt_re = re.compile(r'^(>>>|\.\.\.)( |$)')
+ initial_re = re.compile(r'^>>>( |$)')
+ # Any %magic/!system is IPython syntax, so we needn't look for >>> prompts
+ turnoff_re = re.compile(r'^[%!]')
+ return _strip_prompts(prompt_re, initial_re, turnoff_re)
+
+@CoroutineInputTransformer.wrap
+def ipy_prompt():
+ """Strip IPython's In [1]:/...: prompts."""
+ # FIXME: non-capturing version (?:...) usable?
+ prompt_re = re.compile(r'^(In \[\d+\]: |\s*\.{3,}: ?)')
+ # Disable prompt stripping inside cell magics
+ turnoff_re = re.compile(r'^%%')
+ return _strip_prompts(prompt_re, turnoff_re=turnoff_re)
+
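An illustrative aside, not part of the patch: the two prompt-stripping transformers above remove pasted >>> / In [n]: prompts line by line.

    from IPython.core.inputtransformer import classic_prompt, ipy_prompt

    cp = classic_prompt()
    cp.push('>>> a = 1')       # -> 'a = 1'
    cp.push('... b = 2')       # -> 'b = 2'

    ip = ipy_prompt()
    ip.push('In [3]: a = 1')   # -> 'a = 1'
    ip.push('   ...: b = 2')   # -> 'b = 2'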
+
+@CoroutineInputTransformer.wrap
+def leading_indent():
+ """Remove leading indentation.
+
+ If the first line starts with spaces or tabs, the same whitespace will be
+ removed from each following line until it is reset.
+ """
+ space_re = re.compile(r'^[ \t]+')
+ line = ''
+ while True:
+ line = (yield line)
+
+ if line is None:
+ continue
+
+ m = space_re.match(line)
+ if m:
+ space = m.group(0)
+ while line is not None:
+ if line.startswith(space):
+ line = line[len(space):]
+ line = (yield line)
+ else:
+ # No leading spaces - wait for reset
+ while line is not None:
+ line = (yield line)
+
+
+@CoroutineInputTransformer.wrap
+def strip_encoding_cookie():
+ """Remove encoding comment if found in first two lines
+
+ If the first or second line has the `# coding: utf-8` comment,
+ it will be removed.
+ """
+ line = ''
+ while True:
+ line = (yield line)
+ # check comment on first two lines
+ for i in range(2):
+ if line is None:
+ break
+ if cookie_comment_re.match(line):
+ line = (yield "")
+ else:
+ line = (yield line)
+
+ # no-op on the rest of the cell
+ while line is not None:
+ line = (yield line)
+
+_assign_pat = \
+r'''(?P<lhs>(\s*)
+ ([\w\.]+) # Initial identifier
+ (\s*,\s*
+ \*?[\w\.]+)* # Further identifiers for unpacking
+ \s*?,? # Trailing comma
+ )
+ \s*=\s*
+'''
+
+assign_system_re = re.compile(r'{}!\s*(?P<cmd>.*)'.format(_assign_pat), re.VERBOSE)
+assign_system_template = '%s = get_ipython().getoutput(%r)'
+@StatelessInputTransformer.wrap
+def assign_from_system(line):
+ """Transform assignment from system commands (e.g. files = !ls)"""
+ m = assign_system_re.match(line)
+ if m is None:
+ return line
+
+ return assign_system_template % m.group('lhs', 'cmd')
+
+assign_magic_re = re.compile(r'{}%\s*(?P<cmd>.*)'.format(_assign_pat), re.VERBOSE)
+assign_magic_template = '%s = get_ipython().magic(%r)'
+@StatelessInputTransformer.wrap
+def assign_from_magic(line):
+ """Transform assignment from magic commands (e.g. a = %who_ls)"""
+ m = assign_magic_re.match(line)
+ if m is None:
+ return line
+
+ return assign_magic_template % m.group('lhs', 'cmd')
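A final illustrative aside, not part of the patch: the two assignment transformers above capture system-command and magic output into Python names (exact quoting varies).

    from IPython.core.inputtransformer import assign_from_system, assign_from_magic

    assign_from_system().push('files = !ls')  # -> "files = get_ipython().getoutput('ls')"
    assign_from_magic().push('a = %who_ls')   # -> "a = get_ipython().magic('who_ls')"
    assign_from_magic().push('a = 1')         # unchanged - no magic on the right-hand side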
diff --git a/contrib/python/ipython/py2/IPython/core/interactiveshell.py b/contrib/python/ipython/py2/IPython/core/interactiveshell.py
index ba96cb0676..ad8824b606 100644
--- a/contrib/python/ipython/py2/IPython/core/interactiveshell.py
+++ b/contrib/python/ipython/py2/IPython/core/interactiveshell.py
@@ -1,78 +1,78 @@
-# -*- coding: utf-8 -*-
-"""Main IPython class."""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2001 Janko Hauser <jhauser@zscout.de>
-# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu>
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-from __future__ import absolute_import, print_function
-
-import __future__
-import abc
-import ast
-import atexit
-import functools
-import os
-import re
-import runpy
-import sys
-import tempfile
-import traceback
-import types
-import subprocess
-import warnings
-from io import open as io_open
-
-from pickleshare import PickleShareDB
-
-from traitlets.config.configurable import SingletonConfigurable
+# -*- coding: utf-8 -*-
+"""Main IPython class."""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2001 Janko Hauser <jhauser@zscout.de>
+# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu>
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+from __future__ import absolute_import, print_function
+
+import __future__
+import abc
+import ast
+import atexit
+import functools
+import os
+import re
+import runpy
+import sys
+import tempfile
+import traceback
+import types
+import subprocess
+import warnings
+from io import open as io_open
+
+from pickleshare import PickleShareDB
+
+from traitlets.config.configurable import SingletonConfigurable
from IPython.core import oinspect
-from IPython.core import magic
-from IPython.core import page
-from IPython.core import prefilter
-from IPython.core import shadowns
-from IPython.core import ultratb
-from IPython.core.alias import Alias, AliasManager
-from IPython.core.autocall import ExitAutocall
-from IPython.core.builtin_trap import BuiltinTrap
-from IPython.core.events import EventManager, available_events
-from IPython.core.compilerop import CachingCompiler, check_linecache_ipython
+from IPython.core import magic
+from IPython.core import page
+from IPython.core import prefilter
+from IPython.core import shadowns
+from IPython.core import ultratb
+from IPython.core.alias import Alias, AliasManager
+from IPython.core.autocall import ExitAutocall
+from IPython.core.builtin_trap import BuiltinTrap
+from IPython.core.events import EventManager, available_events
+from IPython.core.compilerop import CachingCompiler, check_linecache_ipython
from IPython.core.debugger import Pdb
-from IPython.core.display_trap import DisplayTrap
-from IPython.core.displayhook import DisplayHook
-from IPython.core.displaypub import DisplayPublisher
-from IPython.core.error import InputRejected, UsageError
-from IPython.core.extensions import ExtensionManager
-from IPython.core.formatters import DisplayFormatter
-from IPython.core.history import HistoryManager
+from IPython.core.display_trap import DisplayTrap
+from IPython.core.displayhook import DisplayHook
+from IPython.core.displaypub import DisplayPublisher
+from IPython.core.error import InputRejected, UsageError
+from IPython.core.extensions import ExtensionManager
+from IPython.core.formatters import DisplayFormatter
+from IPython.core.history import HistoryManager
from IPython.core.inputsplitter import ESC_MAGIC, ESC_MAGIC2
-from IPython.core.logger import Logger
-from IPython.core.macro import Macro
-from IPython.core.payload import PayloadManager
-from IPython.core.prefilter import PrefilterManager
-from IPython.core.profiledir import ProfileDir
-from IPython.core.usage import default_banner
+from IPython.core.logger import Logger
+from IPython.core.macro import Macro
+from IPython.core.payload import PayloadManager
+from IPython.core.prefilter import PrefilterManager
+from IPython.core.profiledir import ProfileDir
+from IPython.core.usage import default_banner
from IPython.testing.skipdoctest import skip_doctest_py2, skip_doctest
from IPython.display import display
-from IPython.utils import PyColorize
-from IPython.utils import io
-from IPython.utils import py3compat
-from IPython.utils import openpy
-from IPython.utils.decorators import undoc
-from IPython.utils.io import ask_yes_no
-from IPython.utils.ipstruct import Struct
-from IPython.paths import get_ipython_dir
+from IPython.utils import PyColorize
+from IPython.utils import io
+from IPython.utils import py3compat
+from IPython.utils import openpy
+from IPython.utils.decorators import undoc
+from IPython.utils.io import ask_yes_no
+from IPython.utils.ipstruct import Struct
+from IPython.paths import get_ipython_dir
from IPython.utils.path import get_home_dir, get_py_filename, ensure_dir_exists
-from IPython.utils.process import system, getoutput
-from IPython.utils.py3compat import (builtin_mod, unicode_type, string_types,
- with_metaclass, iteritems)
-from IPython.utils.strdispatch import StrDispatch
-from IPython.utils.syspathcontext import prepended_to_syspath
+from IPython.utils.process import system, getoutput
+from IPython.utils.py3compat import (builtin_mod, unicode_type, string_types,
+ with_metaclass, iteritems)
+from IPython.utils.strdispatch import StrDispatch
+from IPython.utils.syspathcontext import prepended_to_syspath
from IPython.utils.text import format_screen, LSString, SList, DollarFormatter
from IPython.utils.tempdir import TemporaryDirectory
from traitlets import (
@@ -81,8 +81,8 @@ from traitlets import (
)
from warnings import warn
from logging import error
-import IPython.core.hooks
-
+import IPython.core.hooks
+
# NoOpContext is deprecated, but ipykernel imports it from here.
# See https://github.com/ipython/ipykernel/issues/157
from IPython.utils.contexts import NoOpContext
@@ -106,87 +106,87 @@ class ProvisionalWarning(DeprecationWarning):
"""
pass
-#-----------------------------------------------------------------------------
-# Globals
-#-----------------------------------------------------------------------------
-
-# compiled regexps for autoindent management
-dedent_re = re.compile(r'^\s+raise|^\s+return|^\s+pass')
-
-#-----------------------------------------------------------------------------
-# Utilities
-#-----------------------------------------------------------------------------
-
-@undoc
-def softspace(file, newvalue):
- """Copied from code.py, to remove the dependency"""
-
- oldvalue = 0
- try:
- oldvalue = file.softspace
- except AttributeError:
- pass
- try:
- file.softspace = newvalue
- except (AttributeError, TypeError):
- # "attribute-less object" or "read-only attributes"
- pass
- return oldvalue
-
-@undoc
-def no_op(*a, **kw): pass
-
-
-class SpaceInInput(Exception): pass
-
-
-def get_default_colors():
+#-----------------------------------------------------------------------------
+# Globals
+#-----------------------------------------------------------------------------
+
+# compiled regexps for autoindent management
+dedent_re = re.compile(r'^\s+raise|^\s+return|^\s+pass')
+
+#-----------------------------------------------------------------------------
+# Utilities
+#-----------------------------------------------------------------------------
+
+@undoc
+def softspace(file, newvalue):
+ """Copied from code.py, to remove the dependency"""
+
+ oldvalue = 0
+ try:
+ oldvalue = file.softspace
+ except AttributeError:
+ pass
+ try:
+ file.softspace = newvalue
+ except (AttributeError, TypeError):
+ # "attribute-less object" or "read-only attributes"
+ pass
+ return oldvalue
+
+@undoc
+def no_op(*a, **kw): pass
+
+
+class SpaceInInput(Exception): pass
+
+
+def get_default_colors():
"DEPRECATED"
warn('get_default_color is Deprecated, and is `Neutral` on all platforms.',
DeprecationWarning, stacklevel=2)
return 'Neutral'
-
-
-class SeparateUnicode(Unicode):
- r"""A Unicode subclass to validate separate_in, separate_out, etc.
-
- This is a Unicode based trait that converts '0'->'' and ``'\\n'->'\n'``.
- """
-
- def validate(self, obj, value):
- if value == '0': value = ''
- value = value.replace('\\n','\n')
- return super(SeparateUnicode, self).validate(obj, value)
-
-
-@undoc
-class DummyMod(object):
- """A dummy module used for IPython's interactive module when
- a namespace must be assigned to the module's __dict__."""
- pass
-
-
-class ExecutionResult(object):
- """The result of a call to :meth:`InteractiveShell.run_cell`
-
- Stores information about what took place.
- """
- execution_count = None
- error_before_exec = None
- error_in_exec = None
- result = None
-
- @property
- def success(self):
- return (self.error_before_exec is None) and (self.error_in_exec is None)
-
- def raise_error(self):
- """Reraises error if `success` is `False`, otherwise does nothing"""
- if self.error_before_exec is not None:
- raise self.error_before_exec
- if self.error_in_exec is not None:
- raise self.error_in_exec
-
+
+
+class SeparateUnicode(Unicode):
+ r"""A Unicode subclass to validate separate_in, separate_out, etc.
+
+ This is a Unicode based trait that converts '0'->'' and ``'\\n'->'\n'``.
+ """
+
+ def validate(self, obj, value):
+ if value == '0': value = ''
+ value = value.replace('\\n','\n')
+ return super(SeparateUnicode, self).validate(obj, value)
+
+
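# A minimal standalone sketch (independent of traitlets) of the conversion rule
# documented above: '0' becomes the empty string and a literal backslash-n
# becomes a real newline. The helper name is illustrative only.
def _coerce_separator(value):
    if value == '0':
        value = ''
    return value.replace('\\n', '\n')

assert _coerce_separator('0') == ''
assert _coerce_separator('\\n') == '\n'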
+@undoc
+class DummyMod(object):
+ """A dummy module used for IPython's interactive module when
+ a namespace must be assigned to the module's __dict__."""
+ pass
+
+
+class ExecutionResult(object):
+ """The result of a call to :meth:`InteractiveShell.run_cell`
+
+ Stores information about what took place.
+ """
+ execution_count = None
+ error_before_exec = None
+ error_in_exec = None
+ result = None
+
+ @property
+ def success(self):
+ return (self.error_before_exec is None) and (self.error_in_exec is None)
+
+ def raise_error(self):
+ """Reraises error if `success` is `False`, otherwise does nothing"""
+ if self.error_before_exec is not None:
+ raise self.error_before_exec
+ if self.error_in_exec is not None:
+ raise self.error_in_exec
+
def __repr__(self):
if sys.version_info > (3,):
name = self.__class__.__qualname__
@@ -194,67 +194,67 @@ class ExecutionResult(object):
name = self.__class__.__name__
return '<%s object at %x, execution_count=%s error_before_exec=%s error_in_exec=%s result=%s>' %\
(name, id(self), self.execution_count, self.error_before_exec, self.error_in_exec, repr(self.result))
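# Usage sketch for the result object above, assuming only that IPython itself is
# importable; InteractiveShell.instance() and run_cell() are the entry points
# this module already exposes.
from IPython.core.interactiveshell import InteractiveShell

shell = InteractiveShell.instance()
res = shell.run_cell('1 + 1')
if res.success:                    # neither error_before_exec nor error_in_exec is set
    print(res.execution_count, res.result)
else:
    res.raise_error()              # re-raises whichever error was recorded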
-
-class InteractiveShell(SingletonConfigurable):
- """An enhanced, interactive shell for Python."""
-
- _instance = None
+
+class InteractiveShell(SingletonConfigurable):
+ """An enhanced, interactive shell for Python."""
+
+ _instance = None
ast_transformers = List([], help=
- """
- A list of ast.NodeTransformer subclass instances, which will be applied
- to user input before code is run.
- """
+ """
+ A list of ast.NodeTransformer subclass instances, which will be applied
+ to user input before code is run.
+ """
).tag(config=True)
-
+
autocall = Enum((0,1,2), default_value=0, help=
- """
- Make IPython automatically call any callable object even if you didn't
- type explicit parentheses. For example, 'str 43' becomes 'str(43)'
- automatically. The value can be '0' to disable the feature, '1' for
- 'smart' autocall, where it is not applied if there are no more
- arguments on the line, and '2' for 'full' autocall, where all callable
- objects are automatically called (even if no arguments are present).
- """
+ """
+ Make IPython automatically call any callable object even if you didn't
+ type explicit parentheses. For example, 'str 43' becomes 'str(43)'
+ automatically. The value can be '0' to disable the feature, '1' for
+ 'smart' autocall, where it is not applied if there are no more
+ arguments on the line, and '2' for 'full' autocall, where all callable
+ objects are automatically called (even if no arguments are present).
+ """
).tag(config=True)
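# Illustrative rewrites for the autocall levels described above (hypothetical
# interactive input on the left, what actually runs on the right):
#   autocall = 1 ('smart'):  str 43  ->  str(43)   (a bare `str` is left alone)
#   autocall = 2 ('full'):   str 43  ->  str(43),  str  ->  str()
# In ipython_config.py (where `c` is provided by get_config()) this would be:
#   c.InteractiveShell.autocall = 1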
- # TODO: remove all autoindent logic and put into frontends.
- # We can't do this yet because even runlines uses the autoindent.
+ # TODO: remove all autoindent logic and put into frontends.
+ # We can't do this yet because even runlines uses the autoindent.
autoindent = Bool(True, help=
- """
- Autoindent IPython code entered interactively.
- """
+ """
+ Autoindent IPython code entered interactively.
+ """
).tag(config=True)
automagic = Bool(True, help=
- """
- Enable magic commands to be called without the leading %.
- """
+ """
+ Enable magic commands to be called without the leading %.
+ """
).tag(config=True)
banner1 = Unicode(default_banner,
- help="""The part of the banner to be printed before the profile"""
+ help="""The part of the banner to be printed before the profile"""
).tag(config=True)
banner2 = Unicode('',
- help="""The part of the banner to be printed after the profile"""
+ help="""The part of the banner to be printed after the profile"""
).tag(config=True)
-
+
cache_size = Integer(1000, help=
- """
- Set the size of the output cache. The default is 1000, you can
- change it permanently in your config file. Setting it to 0 completely
- disables the caching system, and the minimum value accepted is 20 (if
- you provide a value less than 20, it is reset to 0 and a warning is
- issued). This limit is defined because otherwise you'll spend more
-        time re-flushing a too-small cache than working.
- """
+ """
+ Set the size of the output cache. The default is 1000, you can
+ change it permanently in your config file. Setting it to 0 completely
+ disables the caching system, and the minimum value accepted is 20 (if
+ you provide a value less than 20, it is reset to 0 and a warning is
+ issued). This limit is defined because otherwise you'll spend more
+        time re-flushing a too-small cache than working.
+ """
).tag(config=True)
color_info = Bool(True, help=
- """
- Use colors for displaying information about objects. Because this
- information is passed through a pager (like 'less'), and some pagers
- get confused with color codes, this capability can be turned off.
- """
+ """
+ Use colors for displaying information about objects. Because this
+ information is passed through a pager (like 'less'), and some pagers
+ get confused with color codes, this capability can be turned off.
+ """
).tag(config=True)
colors = CaselessStrEnum(('Neutral', 'NoColor','LightBG','Linux'),
default_value='Neutral',
@@ -262,26 +262,26 @@ class InteractiveShell(SingletonConfigurable):
).tag(config=True)
debug = Bool(False).tag(config=True)
deep_reload = Bool(False, help=
- """
- **Deprecated**
-
- Will be removed in IPython 6.0
-
- Enable deep (recursive) reloading by default. IPython can use the
- deep_reload module which reloads changes in modules recursively (it
- replaces the reload() function, so you don't need to change anything to
- use it). `deep_reload` forces a full reload of modules whose code may
- have changed, which the default reload() function does not. When
- deep_reload is off, IPython will use the normal reload(), but
- deep_reload will still be available as dreload().
- """
+ """
+ **Deprecated**
+
+ Will be removed in IPython 6.0
+
+ Enable deep (recursive) reloading by default. IPython can use the
+ deep_reload module which reloads changes in modules recursively (it
+ replaces the reload() function, so you don't need to change anything to
+ use it). `deep_reload` forces a full reload of modules whose code may
+ have changed, which the default reload() function does not. When
+ deep_reload is off, IPython will use the normal reload(), but
+ deep_reload will still be available as dreload().
+ """
).tag(config=True)
disable_failing_post_execute = Bool(False,
- help="Don't call post-execute functions that have failed in the past."
+ help="Don't call post-execute functions that have failed in the past."
).tag(config=True)
- display_formatter = Instance(DisplayFormatter, allow_none=True)
- displayhook_class = Type(DisplayHook)
- display_pub_class = Type(DisplayPublisher)
+ display_formatter = Instance(DisplayFormatter, allow_none=True)
+ displayhook_class = Type(DisplayHook)
+ display_pub_class = Type(DisplayPublisher)
sphinxify_docstring = Bool(False, help=
"""
@@ -305,58 +305,58 @@ class InteractiveShell(SingletonConfigurable):
if change['new']:
warn("`enable_html_pager` is provisional since IPython 5.0 and might change in future versions.", ProvisionalWarning)
- data_pub_class = None
-
+ data_pub_class = None
+
exit_now = Bool(False)
- exiter = Instance(ExitAutocall)
+ exiter = Instance(ExitAutocall)
@default('exiter')
- def _exiter_default(self):
- return ExitAutocall(self)
- # Monotonically increasing execution counter
- execution_count = Integer(1)
- filename = Unicode("<ipython console>")
+ def _exiter_default(self):
+ return ExitAutocall(self)
+ # Monotonically increasing execution counter
+ execution_count = Integer(1)
+ filename = Unicode("<ipython console>")
ipython_dir= Unicode('').tag(config=True) # Set to get_ipython_dir() in __init__
-
- # Input splitter, to transform input line by line and detect when a block
- # is ready to be executed.
- input_splitter = Instance('IPython.core.inputsplitter.IPythonInputSplitter',
- (), {'line_input_checker': True})
-
- # This InputSplitter instance is used to transform completed cells before
- # running them. It allows cell magics to contain blank lines.
- input_transformer_manager = Instance('IPython.core.inputsplitter.IPythonInputSplitter',
- (), {'line_input_checker': False})
-
+
+ # Input splitter, to transform input line by line and detect when a block
+ # is ready to be executed.
+ input_splitter = Instance('IPython.core.inputsplitter.IPythonInputSplitter',
+ (), {'line_input_checker': True})
+
+ # This InputSplitter instance is used to transform completed cells before
+ # running them. It allows cell magics to contain blank lines.
+ input_transformer_manager = Instance('IPython.core.inputsplitter.IPythonInputSplitter',
+ (), {'line_input_checker': False})
+
logstart = Bool(False, help=
- """
- Start logging to the default log file in overwrite mode.
- Use `logappend` to specify a log file to **append** logs to.
- """
+ """
+ Start logging to the default log file in overwrite mode.
+ Use `logappend` to specify a log file to **append** logs to.
+ """
).tag(config=True)
logfile = Unicode('', help=
- """
- The name of the logfile to use.
- """
+ """
+ The name of the logfile to use.
+ """
).tag(config=True)
logappend = Unicode('', help=
- """
- Start logging to the given file in append mode.
- Use `logfile` to specify a log file to **overwrite** logs to.
- """
+ """
+ Start logging to the given file in append mode.
+ Use `logfile` to specify a log file to **overwrite** logs to.
+ """
).tag(config=True)
- object_info_string_level = Enum((0,1,2), default_value=0,
+ object_info_string_level = Enum((0,1,2), default_value=0,
).tag(config=True)
pdb = Bool(False, help=
- """
- Automatically call the pdb debugger after every exception.
- """
+ """
+ Automatically call the pdb debugger after every exception.
+ """
).tag(config=True)
display_page = Bool(False,
- help="""If True, anything that would be passed to the pager
- will be displayed as regular output instead."""
+ help="""If True, anything that would be passed to the pager
+ will be displayed as regular output instead."""
).tag(config=True)
-
- # deprecated prompt traits:
+
+ # deprecated prompt traits:
prompt_in1 = Unicode('In [\\#]: ',
help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly."
@@ -376,1183 +376,1183 @@ class InteractiveShell(SingletonConfigurable):
name = change['name']
warn("InteractiveShell.{name} is deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly.".format(
name=name)
- )
- # protect against weird cases where self.config may not exist:
-
+ )
+ # protect against weird cases where self.config may not exist:
+
show_rewritten_input = Bool(True,
- help="Show rewritten input, e.g. for autocall."
+ help="Show rewritten input, e.g. for autocall."
).tag(config=True)
-
+
quiet = Bool(False).tag(config=True)
-
+
history_length = Integer(10000,
help='Total length of command history'
).tag(config=True)
-
+
history_load_length = Integer(1000, help=
- """
- The number of saved history entries to be loaded
+ """
+ The number of saved history entries to be loaded
into the history buffer at startup.
- """
+ """
).tag(config=True)
-
- ast_node_interactivity = Enum(['all', 'last', 'last_expr', 'none'],
+
+ ast_node_interactivity = Enum(['all', 'last', 'last_expr', 'none'],
default_value='last_expr',
- help="""
- 'all', 'last', 'last_expr' or 'none', specifying which nodes should be
+ help="""
+ 'all', 'last', 'last_expr' or 'none', specifying which nodes should be
run interactively (displaying output from expressions)."""
).tag(config=True)
-
- # TODO: this part of prompt management should be moved to the frontends.
- # Use custom TraitTypes that convert '0'->'' and '\\n'->'\n'
+
+ # TODO: this part of prompt management should be moved to the frontends.
+ # Use custom TraitTypes that convert '0'->'' and '\\n'->'\n'
separate_in = SeparateUnicode('\n').tag(config=True)
separate_out = SeparateUnicode('').tag(config=True)
separate_out2 = SeparateUnicode('').tag(config=True)
wildcards_case_sensitive = Bool(True).tag(config=True)
- xmode = CaselessStrEnum(('Context','Plain', 'Verbose'),
+ xmode = CaselessStrEnum(('Context','Plain', 'Verbose'),
default_value='Context').tag(config=True)
-
- # Subcomponents of InteractiveShell
- alias_manager = Instance('IPython.core.alias.AliasManager', allow_none=True)
- prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True)
- builtin_trap = Instance('IPython.core.builtin_trap.BuiltinTrap', allow_none=True)
- display_trap = Instance('IPython.core.display_trap.DisplayTrap', allow_none=True)
- extension_manager = Instance('IPython.core.extensions.ExtensionManager', allow_none=True)
- payload_manager = Instance('IPython.core.payload.PayloadManager', allow_none=True)
- history_manager = Instance('IPython.core.history.HistoryAccessorBase', allow_none=True)
- magics_manager = Instance('IPython.core.magic.MagicsManager', allow_none=True)
-
- profile_dir = Instance('IPython.core.application.ProfileDir', allow_none=True)
- @property
- def profile(self):
- if self.profile_dir is not None:
- name = os.path.basename(self.profile_dir.location)
- return name.replace('profile_','')
-
-
- # Private interface
- _post_execute = Dict()
-
- # Tracks any GUI loop loaded for pylab
- pylab_gui_select = None
-
+
+ # Subcomponents of InteractiveShell
+ alias_manager = Instance('IPython.core.alias.AliasManager', allow_none=True)
+ prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True)
+ builtin_trap = Instance('IPython.core.builtin_trap.BuiltinTrap', allow_none=True)
+ display_trap = Instance('IPython.core.display_trap.DisplayTrap', allow_none=True)
+ extension_manager = Instance('IPython.core.extensions.ExtensionManager', allow_none=True)
+ payload_manager = Instance('IPython.core.payload.PayloadManager', allow_none=True)
+ history_manager = Instance('IPython.core.history.HistoryAccessorBase', allow_none=True)
+ magics_manager = Instance('IPython.core.magic.MagicsManager', allow_none=True)
+
+ profile_dir = Instance('IPython.core.application.ProfileDir', allow_none=True)
+ @property
+ def profile(self):
+ if self.profile_dir is not None:
+ name = os.path.basename(self.profile_dir.location)
+ return name.replace('profile_','')
+
+
+ # Private interface
+ _post_execute = Dict()
+
+ # Tracks any GUI loop loaded for pylab
+ pylab_gui_select = None
+
    last_execution_succeeded = Bool(True, help='Did the last executed command succeed')
- def __init__(self, ipython_dir=None, profile_dir=None,
- user_module=None, user_ns=None,
- custom_exceptions=((), None), **kwargs):
-
- # This is where traits with a config_key argument are updated
- # from the values on config.
- super(InteractiveShell, self).__init__(**kwargs)
+ def __init__(self, ipython_dir=None, profile_dir=None,
+ user_module=None, user_ns=None,
+ custom_exceptions=((), None), **kwargs):
+
+ # This is where traits with a config_key argument are updated
+ # from the values on config.
+ super(InteractiveShell, self).__init__(**kwargs)
if 'PromptManager' in self.config:
warn('As of IPython 5.0 `PromptManager` config will have no effect'
' and has been replaced by TerminalInteractiveShell.prompts_class')
- self.configurables = [self]
-
- # These are relatively independent and stateless
- self.init_ipython_dir(ipython_dir)
- self.init_profile_dir(profile_dir)
- self.init_instance_attrs()
- self.init_environment()
+ self.configurables = [self]
+
+ # These are relatively independent and stateless
+ self.init_ipython_dir(ipython_dir)
+ self.init_profile_dir(profile_dir)
+ self.init_instance_attrs()
+ self.init_environment()
- # Check if we're in a virtualenv, and set up sys.path.
- self.init_virtualenv()
-
- # Create namespaces (user_ns, user_global_ns, etc.)
- self.init_create_namespaces(user_module, user_ns)
- # This has to be done after init_create_namespaces because it uses
- # something in self.user_ns, but before init_sys_modules, which
- # is the first thing to modify sys.
- # TODO: When we override sys.stdout and sys.stderr before this class
- # is created, we are saving the overridden ones here. Not sure if this
- # is what we want to do.
- self.save_sys_module_state()
- self.init_sys_modules()
-
- # While we're trying to have each part of the code directly access what
- # it needs without keeping redundant references to objects, we have too
- # much legacy code that expects ip.db to exist.
- self.db = PickleShareDB(os.path.join(self.profile_dir.location, 'db'))
-
- self.init_history()
- self.init_encoding()
- self.init_prefilter()
-
- self.init_syntax_highlighting()
- self.init_hooks()
- self.init_events()
- self.init_pushd_popd_magic()
- self.init_user_ns()
- self.init_logger()
- self.init_builtins()
-
- # The following was in post_config_initialization
- self.init_inspector()
- if py3compat.PY3:
- self.raw_input_original = input
- else:
- self.raw_input_original = raw_input
- self.init_completer()
- # TODO: init_io() needs to happen before init_traceback handlers
- # because the traceback handlers hardcode the stdout/stderr streams.
-        # This logic is in debugger.Pdb and should eventually be changed.
- self.init_io()
- self.init_traceback_handlers(custom_exceptions)
- self.init_prompts()
- self.init_display_formatter()
- self.init_display_pub()
- self.init_data_pub()
- self.init_displayhook()
- self.init_magics()
- self.init_alias()
- self.init_logstart()
- self.init_pdb()
- self.init_extension_manager()
- self.init_payload()
- self.init_deprecation_warnings()
- self.hooks.late_startup_hook()
- self.events.trigger('shell_initialized', self)
- atexit.register(self.atexit_operations)
-
- def get_ipython(self):
- """Return the currently running IPython instance."""
- return self
-
- #-------------------------------------------------------------------------
- # Trait changed handlers
- #-------------------------------------------------------------------------
+ # Check if we're in a virtualenv, and set up sys.path.
+ self.init_virtualenv()
+
+ # Create namespaces (user_ns, user_global_ns, etc.)
+ self.init_create_namespaces(user_module, user_ns)
+ # This has to be done after init_create_namespaces because it uses
+ # something in self.user_ns, but before init_sys_modules, which
+ # is the first thing to modify sys.
+ # TODO: When we override sys.stdout and sys.stderr before this class
+ # is created, we are saving the overridden ones here. Not sure if this
+ # is what we want to do.
+ self.save_sys_module_state()
+ self.init_sys_modules()
+
+ # While we're trying to have each part of the code directly access what
+ # it needs without keeping redundant references to objects, we have too
+ # much legacy code that expects ip.db to exist.
+ self.db = PickleShareDB(os.path.join(self.profile_dir.location, 'db'))
+
+ self.init_history()
+ self.init_encoding()
+ self.init_prefilter()
+
+ self.init_syntax_highlighting()
+ self.init_hooks()
+ self.init_events()
+ self.init_pushd_popd_magic()
+ self.init_user_ns()
+ self.init_logger()
+ self.init_builtins()
+
+ # The following was in post_config_initialization
+ self.init_inspector()
+ if py3compat.PY3:
+ self.raw_input_original = input
+ else:
+ self.raw_input_original = raw_input
+ self.init_completer()
+ # TODO: init_io() needs to happen before init_traceback handlers
+ # because the traceback handlers hardcode the stdout/stderr streams.
+        # This logic is in debugger.Pdb and should eventually be changed.
+ self.init_io()
+ self.init_traceback_handlers(custom_exceptions)
+ self.init_prompts()
+ self.init_display_formatter()
+ self.init_display_pub()
+ self.init_data_pub()
+ self.init_displayhook()
+ self.init_magics()
+ self.init_alias()
+ self.init_logstart()
+ self.init_pdb()
+ self.init_extension_manager()
+ self.init_payload()
+ self.init_deprecation_warnings()
+ self.hooks.late_startup_hook()
+ self.events.trigger('shell_initialized', self)
+ atexit.register(self.atexit_operations)
+
+ def get_ipython(self):
+ """Return the currently running IPython instance."""
+ return self
+
+ #-------------------------------------------------------------------------
+ # Trait changed handlers
+ #-------------------------------------------------------------------------
@observe('ipython_dir')
def _ipython_dir_changed(self, change):
ensure_dir_exists(change['new'])
-
- def set_autoindent(self,value=None):
+
+ def set_autoindent(self,value=None):
"""Set the autoindent flag.
-
- If called with no arguments, it acts as a toggle."""
- if value is None:
- self.autoindent = not self.autoindent
- else:
- self.autoindent = value
-
- #-------------------------------------------------------------------------
- # init_* methods called by __init__
- #-------------------------------------------------------------------------
-
- def init_ipython_dir(self, ipython_dir):
- if ipython_dir is not None:
- self.ipython_dir = ipython_dir
- return
-
- self.ipython_dir = get_ipython_dir()
-
- def init_profile_dir(self, profile_dir):
- if profile_dir is not None:
- self.profile_dir = profile_dir
- return
- self.profile_dir =\
- ProfileDir.create_profile_dir_by_name(self.ipython_dir, 'default')
-
- def init_instance_attrs(self):
- self.more = False
-
- # command compiler
- self.compile = CachingCompiler()
-
- # Make an empty namespace, which extension writers can rely on both
- # existing and NEVER being used by ipython itself. This gives them a
- # convenient location for storing additional information and state
- # their extensions may require, without fear of collisions with other
- # ipython names that may develop later.
- self.meta = Struct()
-
- # Temporary files used for various purposes. Deleted at exit.
- self.tempfiles = []
- self.tempdirs = []
-
- # keep track of where we started running (mainly for crash post-mortem)
- # This is not being used anywhere currently.
- self.starting_dir = py3compat.getcwd()
-
- # Indentation management
- self.indent_current_nsp = 0
-
- # Dict to track post-execution functions that have been registered
- self._post_execute = {}
-
- def init_environment(self):
- """Any changes we need to make to the user's environment."""
- pass
-
- def init_encoding(self):
-        # Get system encoding at startup time. Certain terminals (like Emacs
-        # under Win32) have it set to None, and we need to have a known valid
- # encoding to use in the raw_input() method
- try:
- self.stdin_encoding = sys.stdin.encoding or 'ascii'
- except AttributeError:
- self.stdin_encoding = 'ascii'
-
- def init_syntax_highlighting(self):
- # Python source parser/formatter for syntax highlighting
- pyformat = PyColorize.Parser().format
- self.pycolorize = lambda src: pyformat(src,'str',self.colors)
-
+
+ If called with no arguments, it acts as a toggle."""
+ if value is None:
+ self.autoindent = not self.autoindent
+ else:
+ self.autoindent = value
+
+ #-------------------------------------------------------------------------
+ # init_* methods called by __init__
+ #-------------------------------------------------------------------------
+
+ def init_ipython_dir(self, ipython_dir):
+ if ipython_dir is not None:
+ self.ipython_dir = ipython_dir
+ return
+
+ self.ipython_dir = get_ipython_dir()
+
+ def init_profile_dir(self, profile_dir):
+ if profile_dir is not None:
+ self.profile_dir = profile_dir
+ return
+ self.profile_dir =\
+ ProfileDir.create_profile_dir_by_name(self.ipython_dir, 'default')
+
+ def init_instance_attrs(self):
+ self.more = False
+
+ # command compiler
+ self.compile = CachingCompiler()
+
+ # Make an empty namespace, which extension writers can rely on both
+ # existing and NEVER being used by ipython itself. This gives them a
+ # convenient location for storing additional information and state
+ # their extensions may require, without fear of collisions with other
+ # ipython names that may develop later.
+ self.meta = Struct()
+
+ # Temporary files used for various purposes. Deleted at exit.
+ self.tempfiles = []
+ self.tempdirs = []
+
+ # keep track of where we started running (mainly for crash post-mortem)
+ # This is not being used anywhere currently.
+ self.starting_dir = py3compat.getcwd()
+
+ # Indentation management
+ self.indent_current_nsp = 0
+
+ # Dict to track post-execution functions that have been registered
+ self._post_execute = {}
+
+ def init_environment(self):
+ """Any changes we need to make to the user's environment."""
+ pass
+
+ def init_encoding(self):
+        # Get system encoding at startup time. Certain terminals (like Emacs
+        # under Win32) have it set to None, and we need to have a known valid
+ # encoding to use in the raw_input() method
+ try:
+ self.stdin_encoding = sys.stdin.encoding or 'ascii'
+ except AttributeError:
+ self.stdin_encoding = 'ascii'
+
+ def init_syntax_highlighting(self):
+ # Python source parser/formatter for syntax highlighting
+ pyformat = PyColorize.Parser().format
+ self.pycolorize = lambda src: pyformat(src,'str',self.colors)
+
def refresh_style(self):
# No-op here, used in subclass
pass
- def init_pushd_popd_magic(self):
- # for pushd/popd management
- self.home_dir = get_home_dir()
-
- self.dir_stack = []
-
- def init_logger(self):
- self.logger = Logger(self.home_dir, logfname='ipython_log.py',
- logmode='rotate')
-
- def init_logstart(self):
- """Initialize logging in case it was requested at the command line.
- """
- if self.logappend:
- self.magic('logstart %s append' % self.logappend)
- elif self.logfile:
- self.magic('logstart %s' % self.logfile)
- elif self.logstart:
- self.magic('logstart')
-
- def init_deprecation_warnings(self):
- """
-        Register the default filter for deprecation warnings.
-
-        This lets deprecation warnings raised by functions used interactively be
-        shown to users, while still hiding deprecation warnings coming from library imports.
- """
- warnings.filterwarnings("default", category=DeprecationWarning, module=self.user_ns.get("__name__"))
-
- def init_builtins(self):
- # A single, static flag that we set to True. Its presence indicates
- # that an IPython shell has been created, and we make no attempts at
- # removing on exit or representing the existence of more than one
- # IPython at a time.
- builtin_mod.__dict__['__IPYTHON__'] = True
+ def init_pushd_popd_magic(self):
+ # for pushd/popd management
+ self.home_dir = get_home_dir()
+
+ self.dir_stack = []
+
+ def init_logger(self):
+ self.logger = Logger(self.home_dir, logfname='ipython_log.py',
+ logmode='rotate')
+
+ def init_logstart(self):
+ """Initialize logging in case it was requested at the command line.
+ """
+ if self.logappend:
+ self.magic('logstart %s append' % self.logappend)
+ elif self.logfile:
+ self.magic('logstart %s' % self.logfile)
+ elif self.logstart:
+ self.magic('logstart')
+
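# Config sketch (hypothetical ipython_config.py values) matching the precedence
# implemented above: logappend is checked first, then logfile, then logstart.
#   c.InteractiveShell.logappend = 'session_log.py'  # -> %logstart session_log.py append
#   c.InteractiveShell.logfile = 'session_log.py'    # -> %logstart session_log.py
#   c.InteractiveShell.logstart = True               # -> %logstart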
+ def init_deprecation_warnings(self):
+ """
+        Register the default filter for deprecation warnings.
+
+        This lets deprecation warnings raised by functions used interactively be
+        shown to users, while still hiding deprecation warnings coming from library imports.
+ """
+ warnings.filterwarnings("default", category=DeprecationWarning, module=self.user_ns.get("__name__"))
+
+ def init_builtins(self):
+ # A single, static flag that we set to True. Its presence indicates
+ # that an IPython shell has been created, and we make no attempts at
+ # removing on exit or representing the existence of more than one
+ # IPython at a time.
+ builtin_mod.__dict__['__IPYTHON__'] = True
builtin_mod.__dict__['display'] = display
-
- self.builtin_trap = BuiltinTrap(shell=self)
-
- def init_inspector(self):
- # Object inspector
- self.inspector = oinspect.Inspector(oinspect.InspectColors,
- PyColorize.ANSICodeColors,
- 'NoColor',
- self.object_info_string_level)
-
- def init_io(self):
- # This will just use sys.stdout and sys.stderr. If you want to
- # override sys.stdout and sys.stderr themselves, you need to do that
- # *before* instantiating this class, because io holds onto
- # references to the underlying streams.
+
+ self.builtin_trap = BuiltinTrap(shell=self)
+
+ def init_inspector(self):
+ # Object inspector
+ self.inspector = oinspect.Inspector(oinspect.InspectColors,
+ PyColorize.ANSICodeColors,
+ 'NoColor',
+ self.object_info_string_level)
+
+ def init_io(self):
+ # This will just use sys.stdout and sys.stderr. If you want to
+ # override sys.stdout and sys.stderr themselves, you need to do that
+ # *before* instantiating this class, because io holds onto
+ # references to the underlying streams.
# io.std* are deprecated, but don't show our own deprecation warnings
# during initialization of the deprecated API.
with warnings.catch_warnings():
warnings.simplefilter('ignore', DeprecationWarning)
- io.stdout = io.IOStream(sys.stdout)
- io.stderr = io.IOStream(sys.stderr)
-
- def init_prompts(self):
- # Set system prompts, so that scripts can decide if they are running
- # interactively.
- sys.ps1 = 'In : '
- sys.ps2 = '...: '
- sys.ps3 = 'Out: '
-
- def init_display_formatter(self):
- self.display_formatter = DisplayFormatter(parent=self)
- self.configurables.append(self.display_formatter)
-
- def init_display_pub(self):
- self.display_pub = self.display_pub_class(parent=self)
- self.configurables.append(self.display_pub)
-
- def init_data_pub(self):
- if not self.data_pub_class:
- self.data_pub = None
- return
- self.data_pub = self.data_pub_class(parent=self)
- self.configurables.append(self.data_pub)
-
- def init_displayhook(self):
- # Initialize displayhook, set in/out prompts and printing system
- self.displayhook = self.displayhook_class(
- parent=self,
- shell=self,
- cache_size=self.cache_size,
- )
- self.configurables.append(self.displayhook)
-        # This is a context manager that installs/removes the displayhook at
- # the appropriate time.
- self.display_trap = DisplayTrap(hook=self.displayhook)
-
- def init_virtualenv(self):
- """Add a virtualenv to sys.path so the user can import modules from it.
- This isn't perfect: it doesn't use the Python interpreter with which the
- virtualenv was built, and it ignores the --no-site-packages option. A
- warning will appear suggesting the user installs IPython in the
- virtualenv, but for many cases, it probably works well enough.
+ io.stdout = io.IOStream(sys.stdout)
+ io.stderr = io.IOStream(sys.stderr)
+
+ def init_prompts(self):
+ # Set system prompts, so that scripts can decide if they are running
+ # interactively.
+ sys.ps1 = 'In : '
+ sys.ps2 = '...: '
+ sys.ps3 = 'Out: '
+
+ def init_display_formatter(self):
+ self.display_formatter = DisplayFormatter(parent=self)
+ self.configurables.append(self.display_formatter)
+
+ def init_display_pub(self):
+ self.display_pub = self.display_pub_class(parent=self)
+ self.configurables.append(self.display_pub)
+
+ def init_data_pub(self):
+ if not self.data_pub_class:
+ self.data_pub = None
+ return
+ self.data_pub = self.data_pub_class(parent=self)
+ self.configurables.append(self.data_pub)
+
+ def init_displayhook(self):
+ # Initialize displayhook, set in/out prompts and printing system
+ self.displayhook = self.displayhook_class(
+ parent=self,
+ shell=self,
+ cache_size=self.cache_size,
+ )
+ self.configurables.append(self.displayhook)
+        # This is a context manager that installs/removes the displayhook at
+ # the appropriate time.
+ self.display_trap = DisplayTrap(hook=self.displayhook)
+
+ def init_virtualenv(self):
+ """Add a virtualenv to sys.path so the user can import modules from it.
+ This isn't perfect: it doesn't use the Python interpreter with which the
+ virtualenv was built, and it ignores the --no-site-packages option. A
+ warning will appear suggesting the user installs IPython in the
+ virtualenv, but for many cases, it probably works well enough.
- Adapted from code snippets online.
+ Adapted from code snippets online.
- http://blog.ufsoft.org/2009/1/29/ipython-and-virtualenv
- """
- if 'VIRTUAL_ENV' not in os.environ:
- # Not in a virtualenv
- return
+ http://blog.ufsoft.org/2009/1/29/ipython-and-virtualenv
+ """
+ if 'VIRTUAL_ENV' not in os.environ:
+ # Not in a virtualenv
+ return
- # venv detection:
- # stdlib venv may symlink sys.executable, so we can't use realpath.
- # but others can symlink *to* the venv Python, so we can't just use sys.executable.
- # So we just check every item in the symlink tree (generally <= 3)
- p = os.path.normcase(sys.executable)
- paths = [p]
- while os.path.islink(p):
- p = os.path.normcase(os.path.join(os.path.dirname(p), os.readlink(p)))
- paths.append(p)
- p_venv = os.path.normcase(os.environ['VIRTUAL_ENV'])
- if any(p.startswith(p_venv) for p in paths):
- # Running properly in the virtualenv, don't need to do anything
- return
+ # venv detection:
+ # stdlib venv may symlink sys.executable, so we can't use realpath.
+ # but others can symlink *to* the venv Python, so we can't just use sys.executable.
+ # So we just check every item in the symlink tree (generally <= 3)
+ p = os.path.normcase(sys.executable)
+ paths = [p]
+ while os.path.islink(p):
+ p = os.path.normcase(os.path.join(os.path.dirname(p), os.readlink(p)))
+ paths.append(p)
+ p_venv = os.path.normcase(os.environ['VIRTUAL_ENV'])
+ if any(p.startswith(p_venv) for p in paths):
+ # Running properly in the virtualenv, don't need to do anything
+ return
- warn("Attempting to work in a virtualenv. If you encounter problems, please "
- "install IPython inside the virtualenv.")
- if sys.platform == "win32":
+ warn("Attempting to work in a virtualenv. If you encounter problems, please "
+ "install IPython inside the virtualenv.")
+ if sys.platform == "win32":
virtual_env = os.path.join(os.environ['VIRTUAL_ENV'], 'Lib', 'site-packages')
- else:
- virtual_env = os.path.join(os.environ['VIRTUAL_ENV'], 'lib',
- 'python%d.%d' % sys.version_info[:2], 'site-packages')
+ else:
+ virtual_env = os.path.join(os.environ['VIRTUAL_ENV'], 'lib',
+ 'python%d.%d' % sys.version_info[:2], 'site-packages')
- import site
- sys.path.insert(0, virtual_env)
- site.addsitedir(virtual_env)
-
- #-------------------------------------------------------------------------
- # Things related to injections into the sys module
- #-------------------------------------------------------------------------
-
- def save_sys_module_state(self):
- """Save the state of hooks in the sys module.
-
- This has to be called after self.user_module is created.
- """
- self._orig_sys_module_state = {'stdin': sys.stdin,
- 'stdout': sys.stdout,
- 'stderr': sys.stderr,
- 'excepthook': sys.excepthook}
- self._orig_sys_modules_main_name = self.user_module.__name__
- self._orig_sys_modules_main_mod = sys.modules.get(self.user_module.__name__)
-
- def restore_sys_module_state(self):
- """Restore the state of the sys module."""
- try:
- for k, v in iteritems(self._orig_sys_module_state):
- setattr(sys, k, v)
- except AttributeError:
- pass
-        # Reset what was done in self.init_sys_modules
- if self._orig_sys_modules_main_mod is not None:
- sys.modules[self._orig_sys_modules_main_name] = self._orig_sys_modules_main_mod
-
- #-------------------------------------------------------------------------
- # Things related to the banner
- #-------------------------------------------------------------------------
+ import site
+ sys.path.insert(0, virtual_env)
+ site.addsitedir(virtual_env)
+
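# A standalone sketch of the detection walk above, using only os and sys: follow
# the symlink chain from sys.executable and report whether any hop already lives
# under $VIRTUAL_ENV (in which case no sys.path surgery is needed).
import os
import sys

def _in_active_virtualenv():
    if 'VIRTUAL_ENV' not in os.environ:
        return False
    p = os.path.normcase(sys.executable)
    paths = [p]
    while os.path.islink(p):
        p = os.path.normcase(os.path.join(os.path.dirname(p), os.readlink(p)))
        paths.append(p)
    p_venv = os.path.normcase(os.environ['VIRTUAL_ENV'])
    return any(path.startswith(p_venv) for path in paths)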
+ #-------------------------------------------------------------------------
+ # Things related to injections into the sys module
+ #-------------------------------------------------------------------------
+
+ def save_sys_module_state(self):
+ """Save the state of hooks in the sys module.
+
+ This has to be called after self.user_module is created.
+ """
+ self._orig_sys_module_state = {'stdin': sys.stdin,
+ 'stdout': sys.stdout,
+ 'stderr': sys.stderr,
+ 'excepthook': sys.excepthook}
+ self._orig_sys_modules_main_name = self.user_module.__name__
+ self._orig_sys_modules_main_mod = sys.modules.get(self.user_module.__name__)
+
+ def restore_sys_module_state(self):
+ """Restore the state of the sys module."""
+ try:
+ for k, v in iteritems(self._orig_sys_module_state):
+ setattr(sys, k, v)
+ except AttributeError:
+ pass
+        # Reset what was done in self.init_sys_modules
+ if self._orig_sys_modules_main_mod is not None:
+ sys.modules[self._orig_sys_modules_main_name] = self._orig_sys_modules_main_mod
+
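# A minimal sketch of the same save/restore pattern applied to the plain sys
# streams, independent of the shell object (names here are illustrative only):
import sys

_saved_state = {'stdout': sys.stdout, 'stderr': sys.stderr, 'excepthook': sys.excepthook}
try:
    pass  # ... code that may rebind sys.stdout / sys.stderr / sys.excepthook ...
finally:
    for _name, _value in _saved_state.items():
        setattr(sys, _name, _value)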
+ #-------------------------------------------------------------------------
+ # Things related to the banner
+ #-------------------------------------------------------------------------
- @property
- def banner(self):
- banner = self.banner1
- if self.profile and self.profile != 'default':
- banner += '\nIPython profile: %s\n' % self.profile
- if self.banner2:
- banner += '\n' + self.banner2
- return banner
-
- def show_banner(self, banner=None):
- if banner is None:
- banner = self.banner
+ @property
+ def banner(self):
+ banner = self.banner1
+ if self.profile and self.profile != 'default':
+ banner += '\nIPython profile: %s\n' % self.profile
+ if self.banner2:
+ banner += '\n' + self.banner2
+ return banner
+
+ def show_banner(self, banner=None):
+ if banner is None:
+ banner = self.banner
sys.stdout.write(banner)
- #-------------------------------------------------------------------------
- # Things related to hooks
- #-------------------------------------------------------------------------
-
- def init_hooks(self):
- # hooks holds pointers used for user-side customizations
- self.hooks = Struct()
-
- self.strdispatchers = {}
-
- # Set all default hooks, defined in the IPython.hooks module.
- hooks = IPython.core.hooks
- for hook_name in hooks.__all__:
- # default hooks have priority 100, i.e. low; user hooks should have
- # 0-100 priority
- self.set_hook(hook_name,getattr(hooks,hook_name), 100, _warn_deprecated=False)
+ #-------------------------------------------------------------------------
+ # Things related to hooks
+ #-------------------------------------------------------------------------
+
+ def init_hooks(self):
+ # hooks holds pointers used for user-side customizations
+ self.hooks = Struct()
+
+ self.strdispatchers = {}
+
+ # Set all default hooks, defined in the IPython.hooks module.
+ hooks = IPython.core.hooks
+ for hook_name in hooks.__all__:
+ # default hooks have priority 100, i.e. low; user hooks should have
+ # 0-100 priority
+ self.set_hook(hook_name,getattr(hooks,hook_name), 100, _warn_deprecated=False)
- if self.display_page:
- self.set_hook('show_in_pager', page.as_hook(page.display_page), 90)
+ if self.display_page:
+ self.set_hook('show_in_pager', page.as_hook(page.display_page), 90)
- def set_hook(self,name,hook, priority=50, str_key=None, re_key=None,
- _warn_deprecated=True):
- """set_hook(name,hook) -> sets an internal IPython hook.
-
- IPython exposes some of its internal API as user-modifiable hooks. By
- adding your function to one of these hooks, you can modify IPython's
-        behavior to call your own routines at runtime."""
-
- # At some point in the future, this should validate the hook before it
- # accepts it. Probably at least check that the hook takes the number
- # of args it's supposed to.
-
- f = types.MethodType(hook,self)
-
- # check if the hook is for strdispatcher first
- if str_key is not None:
- sdp = self.strdispatchers.get(name, StrDispatch())
- sdp.add_s(str_key, f, priority )
- self.strdispatchers[name] = sdp
- return
- if re_key is not None:
- sdp = self.strdispatchers.get(name, StrDispatch())
- sdp.add_re(re.compile(re_key), f, priority )
- self.strdispatchers[name] = sdp
- return
-
- dp = getattr(self.hooks, name, None)
- if name not in IPython.core.hooks.__all__:
- print("Warning! Hook '%s' is not one of %s" % \
- (name, IPython.core.hooks.__all__ ))
-
- if _warn_deprecated and (name in IPython.core.hooks.deprecated):
- alternative = IPython.core.hooks.deprecated[name]
- warn("Hook {} is deprecated. Use {} instead.".format(name, alternative))
-
- if not dp:
- dp = IPython.core.hooks.CommandChainDispatcher()
-
- try:
- dp.add(f,priority)
- except AttributeError:
- # it was not commandchain, plain old func - replace
- dp = f
-
- setattr(self.hooks,name, dp)
-
- #-------------------------------------------------------------------------
- # Things related to events
- #-------------------------------------------------------------------------
-
- def init_events(self):
- self.events = EventManager(self, available_events)
-
- self.events.register("pre_execute", self._clear_warning_registry)
-
- def register_post_execute(self, func):
- """DEPRECATED: Use ip.events.register('post_run_cell', func)
+ def set_hook(self,name,hook, priority=50, str_key=None, re_key=None,
+ _warn_deprecated=True):
+ """set_hook(name,hook) -> sets an internal IPython hook.
+
+ IPython exposes some of its internal API as user-modifiable hooks. By
+ adding your function to one of these hooks, you can modify IPython's
+        behavior to call your own routines at runtime."""
+
+ # At some point in the future, this should validate the hook before it
+ # accepts it. Probably at least check that the hook takes the number
+ # of args it's supposed to.
+
+ f = types.MethodType(hook,self)
+
+ # check if the hook is for strdispatcher first
+ if str_key is not None:
+ sdp = self.strdispatchers.get(name, StrDispatch())
+ sdp.add_s(str_key, f, priority )
+ self.strdispatchers[name] = sdp
+ return
+ if re_key is not None:
+ sdp = self.strdispatchers.get(name, StrDispatch())
+ sdp.add_re(re.compile(re_key), f, priority )
+ self.strdispatchers[name] = sdp
+ return
+
+ dp = getattr(self.hooks, name, None)
+ if name not in IPython.core.hooks.__all__:
+ print("Warning! Hook '%s' is not one of %s" % \
+ (name, IPython.core.hooks.__all__ ))
+
+ if _warn_deprecated and (name in IPython.core.hooks.deprecated):
+ alternative = IPython.core.hooks.deprecated[name]
+ warn("Hook {} is deprecated. Use {} instead.".format(name, alternative))
+
+ if not dp:
+ dp = IPython.core.hooks.CommandChainDispatcher()
+
+ try:
+ dp.add(f,priority)
+ except AttributeError:
+ # it was not commandchain, plain old func - replace
+ dp = f
+
+ setattr(self.hooks,name, dp)
+
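# Usage sketch for the hook API documented above, in the spirit of the classic
# editor-hook example; `ip` stands for a running InteractiveShell instance and
# `jed` is just an example editor command.
import subprocess

def calljed(self, filename, linenum):
    # Example editor hook: open `filename` at `linenum` in the jed editor.
    subprocess.call(['jed', '+%d' % linenum, filename])

# With a running shell it would be wired up as (default hooks register at 100):
# ip.set_hook('editor', calljed, priority=50)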
+ #-------------------------------------------------------------------------
+ # Things related to events
+ #-------------------------------------------------------------------------
+
+ def init_events(self):
+ self.events = EventManager(self, available_events)
+
+ self.events.register("pre_execute", self._clear_warning_registry)
+
+ def register_post_execute(self, func):
+ """DEPRECATED: Use ip.events.register('post_run_cell', func)
- Register a function for calling after code execution.
- """
- warn("ip.register_post_execute is deprecated, use "
- "ip.events.register('post_run_cell', func) instead.")
- self.events.register('post_run_cell', func)
+ Register a function for calling after code execution.
+ """
+ warn("ip.register_post_execute is deprecated, use "
+ "ip.events.register('post_run_cell', func) instead.")
+ self.events.register('post_run_cell', func)
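# Sketch of the replacement API the warning above points to; `ip` is a running
# shell instance, and the callback is assumed to take no arguments in this
# IPython series.
def remind_to_save():
    print('cell finished')

# ip.events.register('post_run_cell', remind_to_save)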
- def _clear_warning_registry(self):
- # clear the warning registry, so that different code blocks with
- # overlapping line number ranges don't cause spurious suppression of
- # warnings (see gh-6611 for details)
- if "__warningregistry__" in self.user_global_ns:
- del self.user_global_ns["__warningregistry__"]
-
- #-------------------------------------------------------------------------
- # Things related to the "main" module
- #-------------------------------------------------------------------------
-
- def new_main_mod(self, filename, modname):
- """Return a new 'main' module object for user code execution.
+ def _clear_warning_registry(self):
+ # clear the warning registry, so that different code blocks with
+ # overlapping line number ranges don't cause spurious suppression of
+ # warnings (see gh-6611 for details)
+ if "__warningregistry__" in self.user_global_ns:
+ del self.user_global_ns["__warningregistry__"]
+
+ #-------------------------------------------------------------------------
+ # Things related to the "main" module
+ #-------------------------------------------------------------------------
+
+ def new_main_mod(self, filename, modname):
+ """Return a new 'main' module object for user code execution.
- ``filename`` should be the path of the script which will be run in the
- module. Requests with the same filename will get the same module, with
- its namespace cleared.
+ ``filename`` should be the path of the script which will be run in the
+ module. Requests with the same filename will get the same module, with
+ its namespace cleared.
- ``modname`` should be the module name - normally either '__main__' or
- the basename of the file without the extension.
+ ``modname`` should be the module name - normally either '__main__' or
+ the basename of the file without the extension.
- When scripts are executed via %run, we must keep a reference to their
- __main__ module around so that Python doesn't
- clear it, rendering references to module globals useless.
-
- This method keeps said reference in a private dict, keyed by the
- absolute path of the script. This way, for multiple executions of the
- same script we only keep one copy of the namespace (the last one),
- thus preventing memory leaks from old references while allowing the
- objects from the last execution to be accessible.
- """
- filename = os.path.abspath(filename)
- try:
- main_mod = self._main_mod_cache[filename]
- except KeyError:
- main_mod = self._main_mod_cache[filename] = types.ModuleType(
- py3compat.cast_bytes_py2(modname),
- doc="Module created for script run in IPython")
- else:
- main_mod.__dict__.clear()
- main_mod.__name__ = modname
+ When scripts are executed via %run, we must keep a reference to their
+ __main__ module around so that Python doesn't
+ clear it, rendering references to module globals useless.
+
+ This method keeps said reference in a private dict, keyed by the
+ absolute path of the script. This way, for multiple executions of the
+ same script we only keep one copy of the namespace (the last one),
+ thus preventing memory leaks from old references while allowing the
+ objects from the last execution to be accessible.
+ """
+ filename = os.path.abspath(filename)
+ try:
+ main_mod = self._main_mod_cache[filename]
+ except KeyError:
+ main_mod = self._main_mod_cache[filename] = types.ModuleType(
+ py3compat.cast_bytes_py2(modname),
+ doc="Module created for script run in IPython")
+ else:
+ main_mod.__dict__.clear()
+ main_mod.__name__ = modname
- main_mod.__file__ = filename
- # It seems pydoc (and perhaps others) needs any module instance to
- # implement a __nonzero__ method
- main_mod.__nonzero__ = lambda : True
+ main_mod.__file__ = filename
+ # It seems pydoc (and perhaps others) needs any module instance to
+ # implement a __nonzero__ method
+ main_mod.__nonzero__ = lambda : True
- return main_mod
-
- def clear_main_mod_cache(self):
- """Clear the cache of main modules.
-
- Mainly for use by utilities like %reset.
-
- Examples
- --------
-
- In [15]: import IPython
-
- In [16]: m = _ip.new_main_mod(IPython.__file__, 'IPython')
-
- In [17]: len(_ip._main_mod_cache) > 0
- Out[17]: True
-
- In [18]: _ip.clear_main_mod_cache()
-
- In [19]: len(_ip._main_mod_cache) == 0
- Out[19]: True
- """
- self._main_mod_cache.clear()
-
- #-------------------------------------------------------------------------
- # Things related to debugging
- #-------------------------------------------------------------------------
-
- def init_pdb(self):
- # Set calling of pdb on exceptions
- # self.call_pdb is a property
- self.call_pdb = self.pdb
-
- def _get_call_pdb(self):
- return self._call_pdb
-
- def _set_call_pdb(self,val):
-
- if val not in (0,1,False,True):
- raise ValueError('new call_pdb value must be boolean')
-
- # store value in instance
- self._call_pdb = val
-
- # notify the actual exception handlers
- self.InteractiveTB.call_pdb = val
-
- call_pdb = property(_get_call_pdb,_set_call_pdb,None,
- 'Control auto-activation of pdb at exceptions')
-
- def debugger(self,force=False):
+ return main_mod
+
+ def clear_main_mod_cache(self):
+ """Clear the cache of main modules.
+
+ Mainly for use by utilities like %reset.
+
+ Examples
+ --------
+
+ In [15]: import IPython
+
+ In [16]: m = _ip.new_main_mod(IPython.__file__, 'IPython')
+
+ In [17]: len(_ip._main_mod_cache) > 0
+ Out[17]: True
+
+ In [18]: _ip.clear_main_mod_cache()
+
+ In [19]: len(_ip._main_mod_cache) == 0
+ Out[19]: True
+ """
+ self._main_mod_cache.clear()
+
+ #-------------------------------------------------------------------------
+ # Things related to debugging
+ #-------------------------------------------------------------------------
+
+ def init_pdb(self):
+ # Set calling of pdb on exceptions
+ # self.call_pdb is a property
+ self.call_pdb = self.pdb
+
+ def _get_call_pdb(self):
+ return self._call_pdb
+
+ def _set_call_pdb(self,val):
+
+ if val not in (0,1,False,True):
+ raise ValueError('new call_pdb value must be boolean')
+
+ # store value in instance
+ self._call_pdb = val
+
+ # notify the actual exception handlers
+ self.InteractiveTB.call_pdb = val
+
+ call_pdb = property(_get_call_pdb,_set_call_pdb,None,
+ 'Control auto-activation of pdb at exceptions')
+
+ def debugger(self,force=False):
"""Call the pdb debugger.
-
- Keywords:
-
- - force(False): by default, this routine checks the instance call_pdb
- flag and does not actually invoke the debugger if the flag is false.
- The 'force' option forces the debugger to activate even if the flag
- is false.
- """
-
- if not (force or self.call_pdb):
- return
-
- if not hasattr(sys,'last_traceback'):
- error('No traceback has been produced, nothing to debug.')
- return
-
+
+ Keywords:
+
+ - force(False): by default, this routine checks the instance call_pdb
+ flag and does not actually invoke the debugger if the flag is false.
+ The 'force' option forces the debugger to activate even if the flag
+ is false.
+ """
+
+ if not (force or self.call_pdb):
+ return
+
+ if not hasattr(sys,'last_traceback'):
+ error('No traceback has been produced, nothing to debug.')
+ return
+
self.InteractiveTB.debugger(force=True)
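# Usage sketch for the force/call_pdb interplay described above (`shell` is a
# hypothetical InteractiveShell instance with a traceback already recorded):
# shell.call_pdb = True        # auto-activate pdb after every exception
# shell.debugger()             # honours call_pdb; silently returns if it is False
# shell.debugger(force=True)   # always drops into pdb when sys.last_traceback exists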
-
- #-------------------------------------------------------------------------
- # Things related to IPython's various namespaces
- #-------------------------------------------------------------------------
- default_user_namespaces = True
-
- def init_create_namespaces(self, user_module=None, user_ns=None):
- # Create the namespace where the user will operate. user_ns is
- # normally the only one used, and it is passed to the exec calls as
- # the locals argument. But we do carry a user_global_ns namespace
- # given as the exec 'globals' argument, This is useful in embedding
-        # given as the exec 'globals' argument. This is useful in embedding
- # distinction between locals and globals is meaningful. For
- # non-embedded contexts, it is just the same object as the user_ns dict.
-
- # FIXME. For some strange reason, __builtins__ is showing up at user
- # level as a dict instead of a module. This is a manual fix, but I
- # should really track down where the problem is coming from. Alex
- # Schmolck reported this problem first.
-
- # A useful post by Alex Martelli on this topic:
- # Re: inconsistent value from __builtins__
- # Von: Alex Martelli <aleaxit@yahoo.com>
- # Datum: Freitag 01 Oktober 2004 04:45:34 nachmittags/abends
- # Gruppen: comp.lang.python
-
- # Michael Hohn <hohn@hooknose.lbl.gov> wrote:
- # > >>> print type(builtin_check.get_global_binding('__builtins__'))
- # > <type 'dict'>
- # > >>> print type(__builtins__)
- # > <type 'module'>
- # > Is this difference in return value intentional?
-
- # Well, it's documented that '__builtins__' can be either a dictionary
- # or a module, and it's been that way for a long time. Whether it's
- # intentional (or sensible), I don't know. In any case, the idea is
- # that if you need to access the built-in namespace directly, you
- # should start with "import __builtin__" (note, no 's') which will
- # definitely give you a module. Yeah, it's somewhat confusing:-(.
-
- # These routines return a properly built module and dict as needed by
- # the rest of the code, and can also be used by extension writers to
- # generate properly initialized namespaces.
- if (user_ns is not None) or (user_module is not None):
- self.default_user_namespaces = False
- self.user_module, self.user_ns = self.prepare_user_module(user_module, user_ns)
-
- # A record of hidden variables we have added to the user namespace, so
- # we can list later only variables defined in actual interactive use.
- self.user_ns_hidden = {}
-
- # Now that FakeModule produces a real module, we've run into a nasty
- # problem: after script execution (via %run), the module where the user
- # code ran is deleted. Now that this object is a true module (needed
- # so doctest and other tools work correctly), the Python module
- # teardown mechanism runs over it, and sets to None every variable
- # present in that module. Top-level references to objects from the
- # script survive, because the user_ns is updated with them. However,
- # calling functions defined in the script that use other things from
- # the script will fail, because the function's closure had references
- # to the original objects, which are now all None. So we must protect
- # these modules from deletion by keeping a cache.
- #
- # To avoid keeping stale modules around (we only need the one from the
- # last run), we use a dict keyed with the full path to the script, so
- # only the last version of the module is held in the cache. Note,
- # however, that we must cache the module *namespace contents* (their
- # __dict__). Because if we try to cache the actual modules, old ones
- # (uncached) could be destroyed while still holding references (such as
-        # those held by GUI objects that tend to be long-lived).
- #
- # The %reset command will flush this cache. See the cache_main_mod()
- # and clear_main_mod_cache() methods for details on use.
-
- # This is the cache used for 'main' namespaces
- self._main_mod_cache = {}
-
- # A table holding all the namespaces IPython deals with, so that
- # introspection facilities can search easily.
- self.ns_table = {'user_global':self.user_module.__dict__,
- 'user_local':self.user_ns,
- 'builtin':builtin_mod.__dict__
- }
+
+ #-------------------------------------------------------------------------
+ # Things related to IPython's various namespaces
+ #-------------------------------------------------------------------------
+ default_user_namespaces = True
+
+ def init_create_namespaces(self, user_module=None, user_ns=None):
+ # Create the namespace where the user will operate. user_ns is
+ # normally the only one used, and it is passed to the exec calls as
+ # the locals argument. But we do carry a user_global_ns namespace
+ # given as the exec 'globals' argument, This is useful in embedding
+        # given as the exec 'globals' argument. This is useful in embedding
+ # distinction between locals and globals is meaningful. For
+ # non-embedded contexts, it is just the same object as the user_ns dict.
+
+ # FIXME. For some strange reason, __builtins__ is showing up at user
+ # level as a dict instead of a module. This is a manual fix, but I
+ # should really track down where the problem is coming from. Alex
+ # Schmolck reported this problem first.
+
+ # A useful post by Alex Martelli on this topic:
+ # Re: inconsistent value from __builtins__
+ # Von: Alex Martelli <aleaxit@yahoo.com>
+ # Datum: Freitag 01 Oktober 2004 04:45:34 nachmittags/abends
+ # Gruppen: comp.lang.python
+
+ # Michael Hohn <hohn@hooknose.lbl.gov> wrote:
+ # > >>> print type(builtin_check.get_global_binding('__builtins__'))
+ # > <type 'dict'>
+ # > >>> print type(__builtins__)
+ # > <type 'module'>
+ # > Is this difference in return value intentional?
+
+ # Well, it's documented that '__builtins__' can be either a dictionary
+ # or a module, and it's been that way for a long time. Whether it's
+ # intentional (or sensible), I don't know. In any case, the idea is
+ # that if you need to access the built-in namespace directly, you
+ # should start with "import __builtin__" (note, no 's') which will
+ # definitely give you a module. Yeah, it's somewhat confusing:-(.
+
+ # These routines return a properly built module and dict as needed by
+ # the rest of the code, and can also be used by extension writers to
+ # generate properly initialized namespaces.
+ if (user_ns is not None) or (user_module is not None):
+ self.default_user_namespaces = False
+ self.user_module, self.user_ns = self.prepare_user_module(user_module, user_ns)
+
+ # A record of hidden variables we have added to the user namespace, so
+ # we can list later only variables defined in actual interactive use.
+ self.user_ns_hidden = {}
+
+ # Now that FakeModule produces a real module, we've run into a nasty
+ # problem: after script execution (via %run), the module where the user
+ # code ran is deleted. Now that this object is a true module (needed
+ # so doctest and other tools work correctly), the Python module
+ # teardown mechanism runs over it, and sets to None every variable
+ # present in that module. Top-level references to objects from the
+ # script survive, because the user_ns is updated with them. However,
+ # calling functions defined in the script that use other things from
+ # the script will fail, because the function's closure had references
+ # to the original objects, which are now all None. So we must protect
+ # these modules from deletion by keeping a cache.
+ #
+ # To avoid keeping stale modules around (we only need the one from the
+ # last run), we use a dict keyed with the full path to the script, so
+ # only the last version of the module is held in the cache. Note,
+ # however, that we must cache the module *namespace contents* (their
+ # __dict__). Because if we try to cache the actual modules, old ones
+ # (uncached) could be destroyed while still holding references (such as
+        # those held by GUI objects that tend to be long-lived).
+ #
+ # The %reset command will flush this cache. See the cache_main_mod()
+ # and clear_main_mod_cache() methods for details on use.
+
+ # This is the cache used for 'main' namespaces
+ self._main_mod_cache = {}
+
+ # A table holding all the namespaces IPython deals with, so that
+ # introspection facilities can search easily.
+ self.ns_table = {'user_global':self.user_module.__dict__,
+ 'user_local':self.user_ns,
+ 'builtin':builtin_mod.__dict__
+ }
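+
+        # A small sketch of the cache described above (the path and module are
+        # hypothetical): %run-style execution would store the module keyed by
+        # the script's full path, so re-running the same file replaces the
+        # previous entry instead of accumulating stale modules:
+        #
+        #   import types
+        #   mod = types.ModuleType('__main__')            # module built for the run
+        #   self._main_mod_cache['/tmp/script.py'] = mod
+        #   # all_ns_refs below then exposes mod.__dict__ for introspection.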
- @property
- def user_global_ns(self):
- return self.user_module.__dict__
-
- def prepare_user_module(self, user_module=None, user_ns=None):
- """Prepare the module and namespace in which user code will be run.
+ @property
+ def user_global_ns(self):
+ return self.user_module.__dict__
+
+ def prepare_user_module(self, user_module=None, user_ns=None):
+ """Prepare the module and namespace in which user code will be run.
- When IPython is started normally, both parameters are None: a new module
- is created automatically, and its __dict__ used as the namespace.
+ When IPython is started normally, both parameters are None: a new module
+ is created automatically, and its __dict__ used as the namespace.
- If only user_module is provided, its __dict__ is used as the namespace.
- If only user_ns is provided, a dummy module is created, and user_ns
- becomes the global namespace. If both are provided (as they may be
- when embedding), user_ns is the local namespace, and user_module
- provides the global namespace.
-
- Parameters
- ----------
- user_module : module, optional
- The current user module in which IPython is being run. If None,
- a clean module will be created.
- user_ns : dict, optional
- A namespace in which to run interactive commands.
-
- Returns
- -------
- A tuple of user_module and user_ns, each properly initialised.
- """
- if user_module is None and user_ns is not None:
- user_ns.setdefault("__name__", "__main__")
- user_module = DummyMod()
- user_module.__dict__ = user_ns
+ If only user_module is provided, its __dict__ is used as the namespace.
+ If only user_ns is provided, a dummy module is created, and user_ns
+ becomes the global namespace. If both are provided (as they may be
+ when embedding), user_ns is the local namespace, and user_module
+ provides the global namespace.
+
+ Parameters
+ ----------
+ user_module : module, optional
+ The current user module in which IPython is being run. If None,
+ a clean module will be created.
+ user_ns : dict, optional
+ A namespace in which to run interactive commands.
+
+ Returns
+ -------
+ A tuple of user_module and user_ns, each properly initialised.
+ """
+ if user_module is None and user_ns is not None:
+ user_ns.setdefault("__name__", "__main__")
+ user_module = DummyMod()
+ user_module.__dict__ = user_ns
- if user_module is None:
- user_module = types.ModuleType("__main__",
- doc="Automatically created module for IPython interactive environment")
+ if user_module is None:
+ user_module = types.ModuleType("__main__",
+ doc="Automatically created module for IPython interactive environment")
- # We must ensure that __builtin__ (without the final 's') is always
- # available and pointing to the __builtin__ *module*. For more details:
- # http://mail.python.org/pipermail/python-dev/2001-April/014068.html
- user_module.__dict__.setdefault('__builtin__', builtin_mod)
- user_module.__dict__.setdefault('__builtins__', builtin_mod)
+ # We must ensure that __builtin__ (without the final 's') is always
+ # available and pointing to the __builtin__ *module*. For more details:
+ # http://mail.python.org/pipermail/python-dev/2001-April/014068.html
+ user_module.__dict__.setdefault('__builtin__', builtin_mod)
+ user_module.__dict__.setdefault('__builtins__', builtin_mod)
- if user_ns is None:
- user_ns = user_module.__dict__
-
- return user_module, user_ns
-
- def init_sys_modules(self):
- # We need to insert into sys.modules something that looks like a
- # module but which accesses the IPython namespace, for shelve and
- # pickle to work interactively. Normally they rely on getting
- # everything out of __main__, but for embedding purposes each IPython
- # instance has its own private namespace, so we can't go shoving
- # everything into __main__.
-
- # note, however, that we should only do this for non-embedded
- # ipythons, which really mimic the __main__.__dict__ with their own
- # namespace. Embedded instances, on the other hand, should not do
- # this because they need to manage the user local/global namespaces
- # only, but they live within a 'normal' __main__ (meaning, they
- # shouldn't overtake the execution environment of the script they're
- # embedded in).
-
- # This is overridden in the InteractiveShellEmbed subclass to a no-op.
- main_name = self.user_module.__name__
- sys.modules[main_name] = self.user_module
-
- def init_user_ns(self):
- """Initialize all user-visible namespaces to their minimum defaults.
-
- Certain history lists are also initialized here, as they effectively
- act as user namespaces.
-
- Notes
- -----
- All data structures here are only filled in, they are NOT reset by this
- method. If they were not empty before, data will simply be added to
-        them.
- """
- # This function works in two parts: first we put a few things in
- # user_ns, and we sync that contents into user_ns_hidden so that these
- # initial variables aren't shown by %who. After the sync, we add the
- # rest of what we *do* want the user to see with %who even on a new
- # session (probably nothing, so they really only see their own stuff)
-
- # The user dict must *always* have a __builtin__ reference to the
- # Python standard __builtin__ namespace, which must be imported.
- # This is so that certain operations in prompt evaluation can be
- # reliably executed with builtins. Note that we can NOT use
- # __builtins__ (note the 's'), because that can either be a dict or a
- # module, and can even mutate at runtime, depending on the context
- # (Python makes no guarantees on it). In contrast, __builtin__ is
- # always a module object, though it must be explicitly imported.
-
- # For more details:
- # http://mail.python.org/pipermail/python-dev/2001-April/014068.html
- ns = dict()
+ if user_ns is None:
+ user_ns = user_module.__dict__
+
+ return user_module, user_ns
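+
+        # Minimal usage sketch of the combinations documented above, assuming
+        # an existing InteractiveShell instance named `shell` (hypothetical):
+        #
+        #   # nothing passed: a fresh '__main__' module, ns is its __dict__
+        #   mod, ns = shell.prepare_user_module()
+        #   assert ns is mod.__dict__ and ns['__builtin__'] is builtin_mod
+        #
+        #   # only a dict passed: a DummyMod wraps it as the global namespace
+        #   my_ns = {}
+        #   mod2, ns2 = shell.prepare_user_module(user_ns=my_ns)
+        #   assert ns2 is my_ns and mod2.__dict__ is my_ns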
+
+ def init_sys_modules(self):
+ # We need to insert into sys.modules something that looks like a
+ # module but which accesses the IPython namespace, for shelve and
+ # pickle to work interactively. Normally they rely on getting
+ # everything out of __main__, but for embedding purposes each IPython
+ # instance has its own private namespace, so we can't go shoving
+ # everything into __main__.
+
+ # note, however, that we should only do this for non-embedded
+ # ipythons, which really mimic the __main__.__dict__ with their own
+ # namespace. Embedded instances, on the other hand, should not do
+ # this because they need to manage the user local/global namespaces
+ # only, but they live within a 'normal' __main__ (meaning, they
+ # shouldn't overtake the execution environment of the script they're
+ # embedded in).
+
+ # This is overridden in the InteractiveShellEmbed subclass to a no-op.
+ main_name = self.user_module.__name__
+ sys.modules[main_name] = self.user_module
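+
+        # Why this matters, sketched with a hypothetical interactive session:
+        # pickle serialises interactively-defined classes/functions by reference
+        # (module name + attribute name) and on load resolves them through
+        # sys.modules[obj.__module__]; without the entry installed above, that
+        # lookup would miss this shell's namespace, e.g.:
+        #
+        #   import pickle
+        #   # class C defined at the prompt -> C.__module__ == '__main__'
+        #   pickle.loads(pickle.dumps(C()))   # needs sys.modules['__main__']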
+
+ def init_user_ns(self):
+ """Initialize all user-visible namespaces to their minimum defaults.
+
+ Certain history lists are also initialized here, as they effectively
+ act as user namespaces.
+
+ Notes
+ -----
+ All data structures here are only filled in, they are NOT reset by this
+ method. If they were not empty before, data will simply be added to
+        them.
+ """
+ # This function works in two parts: first we put a few things in
+ # user_ns, and we sync that contents into user_ns_hidden so that these
+ # initial variables aren't shown by %who. After the sync, we add the
+ # rest of what we *do* want the user to see with %who even on a new
+ # session (probably nothing, so they really only see their own stuff)
+
+ # The user dict must *always* have a __builtin__ reference to the
+ # Python standard __builtin__ namespace, which must be imported.
+ # This is so that certain operations in prompt evaluation can be
+ # reliably executed with builtins. Note that we can NOT use
+ # __builtins__ (note the 's'), because that can either be a dict or a
+ # module, and can even mutate at runtime, depending on the context
+ # (Python makes no guarantees on it). In contrast, __builtin__ is
+ # always a module object, though it must be explicitly imported.
+
+ # For more details:
+ # http://mail.python.org/pipermail/python-dev/2001-April/014068.html
+ ns = dict()
- # make global variables for user access to the histories
- ns['_ih'] = self.history_manager.input_hist_parsed
- ns['_oh'] = self.history_manager.output_hist
- ns['_dh'] = self.history_manager.dir_hist
-
- ns['_sh'] = shadowns
-
- # user aliases to input and output histories. These shouldn't show up
- # in %who, as they can have very large reprs.
- ns['In'] = self.history_manager.input_hist_parsed
- ns['Out'] = self.history_manager.output_hist
-
- # Store myself as the public api!!!
- ns['get_ipython'] = self.get_ipython
+ # make global variables for user access to the histories
+ ns['_ih'] = self.history_manager.input_hist_parsed
+ ns['_oh'] = self.history_manager.output_hist
+ ns['_dh'] = self.history_manager.dir_hist
+
+ ns['_sh'] = shadowns
+
+ # user aliases to input and output histories. These shouldn't show up
+ # in %who, as they can have very large reprs.
+ ns['In'] = self.history_manager.input_hist_parsed
+ ns['Out'] = self.history_manager.output_hist
+
+ # Store myself as the public api!!!
+ ns['get_ipython'] = self.get_ipython
- ns['exit'] = self.exiter
- ns['quit'] = self.exiter
-
- # Sync what we've added so far to user_ns_hidden so these aren't seen
- # by %who
- self.user_ns_hidden.update(ns)
-
- # Anything put into ns now would show up in %who. Think twice before
- # putting anything here, as we really want %who to show the user their
- # stuff, not our variables.
-
- # Finally, update the real user's namespace
- self.user_ns.update(ns)
+ ns['exit'] = self.exiter
+ ns['quit'] = self.exiter
+
+ # Sync what we've added so far to user_ns_hidden so these aren't seen
+ # by %who
+ self.user_ns_hidden.update(ns)
+
+ # Anything put into ns now would show up in %who. Think twice before
+ # putting anything here, as we really want %who to show the user their
+ # stuff, not our variables.
+
+ # Finally, update the real user's namespace
+ self.user_ns.update(ns)
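+
+        # Net effect, as a rough sketch: the names seeded here ('In', 'Out',
+        # '_ih', 'exit', ...) live in user_ns but are also recorded in
+        # user_ns_hidden, so %who-style listings can filter them out, roughly:
+        #
+        #   visible = [n for n in self.user_ns
+        #              if not n.startswith('_') and n not in self.user_ns_hidden]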
- @property
- def all_ns_refs(self):
- """Get a list of references to all the namespace dictionaries in which
- IPython might store a user-created object.
+ @property
+ def all_ns_refs(self):
+ """Get a list of references to all the namespace dictionaries in which
+ IPython might store a user-created object.
- Note that this does not include the displayhook, which also caches
- objects from the output."""
- return [self.user_ns, self.user_global_ns, self.user_ns_hidden] + \
- [m.__dict__ for m in self._main_mod_cache.values()]
-
- def reset(self, new_session=True):
- """Clear all internal namespaces, and attempt to release references to
- user objects.
-
- If new_session is True, a new history session will be opened.
- """
- # Clear histories
- self.history_manager.reset(new_session)
- # Reset counter used to index all histories
- if new_session:
- self.execution_count = 1
-
- # Flush cached output items
- if self.displayhook.do_full_cache:
- self.displayhook.flush()
-
- # The main execution namespaces must be cleared very carefully,
- # skipping the deletion of the builtin-related keys, because doing so
-        # would cause errors in many objects' __del__ methods.
- if self.user_ns is not self.user_global_ns:
- self.user_ns.clear()
- ns = self.user_global_ns
- drop_keys = set(ns.keys())
- drop_keys.discard('__builtin__')
- drop_keys.discard('__builtins__')
- drop_keys.discard('__name__')
- for k in drop_keys:
- del ns[k]
+ Note that this does not include the displayhook, which also caches
+ objects from the output."""
+ return [self.user_ns, self.user_global_ns, self.user_ns_hidden] + \
+ [m.__dict__ for m in self._main_mod_cache.values()]
+
+ def reset(self, new_session=True):
+ """Clear all internal namespaces, and attempt to release references to
+ user objects.
+
+ If new_session is True, a new history session will be opened.
+ """
+ # Clear histories
+ self.history_manager.reset(new_session)
+ # Reset counter used to index all histories
+ if new_session:
+ self.execution_count = 1
+
+ # Flush cached output items
+ if self.displayhook.do_full_cache:
+ self.displayhook.flush()
+
+ # The main execution namespaces must be cleared very carefully,
+ # skipping the deletion of the builtin-related keys, because doing so
+        # would cause errors in many objects' __del__ methods.
+ if self.user_ns is not self.user_global_ns:
+ self.user_ns.clear()
+ ns = self.user_global_ns
+ drop_keys = set(ns.keys())
+ drop_keys.discard('__builtin__')
+ drop_keys.discard('__builtins__')
+ drop_keys.discard('__name__')
+ for k in drop_keys:
+ del ns[k]
- self.user_ns_hidden.clear()
+ self.user_ns_hidden.clear()
- # Restore the user namespaces to minimal usability
- self.init_user_ns()
-
- # Restore the default and user aliases
- self.alias_manager.clear_aliases()
- self.alias_manager.init_aliases()
-
- # Flush the private list of module references kept for script
- # execution protection
- self.clear_main_mod_cache()
-
- def del_var(self, varname, by_name=False):
- """Delete a variable from the various namespaces, so that, as
- far as possible, we're not keeping any hidden references to it.
-
- Parameters
- ----------
- varname : str
- The name of the variable to delete.
- by_name : bool
- If True, delete variables with the given name in each
- namespace. If False (default), find the variable in the user
- namespace, and delete references to it.
- """
- if varname in ('__builtin__', '__builtins__'):
- raise ValueError("Refusing to delete %s" % varname)
-
- ns_refs = self.all_ns_refs
+ # Restore the user namespaces to minimal usability
+ self.init_user_ns()
+
+ # Restore the default and user aliases
+ self.alias_manager.clear_aliases()
+ self.alias_manager.init_aliases()
+
+ # Flush the private list of module references kept for script
+ # execution protection
+ self.clear_main_mod_cache()
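+
+        # Rough before/after sketch (assuming a shell instance named `shell`):
+        #
+        #   shell.user_ns['x'] = 42
+        #   shell.reset(new_session=False)
+        #   assert 'x' not in shell.user_ns          # user data dropped
+        #   assert '__builtins__' in shell.user_ns   # builtin keys preserved
+        #   # ...and the minimal names from init_user_ns are re-seeded.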
+
+ def del_var(self, varname, by_name=False):
+ """Delete a variable from the various namespaces, so that, as
+ far as possible, we're not keeping any hidden references to it.
+
+ Parameters
+ ----------
+ varname : str
+ The name of the variable to delete.
+ by_name : bool
+ If True, delete variables with the given name in each
+ namespace. If False (default), find the variable in the user
+ namespace, and delete references to it.
+ """
+ if varname in ('__builtin__', '__builtins__'):
+ raise ValueError("Refusing to delete %s" % varname)
+
+ ns_refs = self.all_ns_refs
- if by_name: # Delete by name
- for ns in ns_refs:
- try:
- del ns[varname]
- except KeyError:
- pass
- else: # Delete by object
- try:
- obj = self.user_ns[varname]
- except KeyError:
- raise NameError("name '%s' is not defined" % varname)
- # Also check in output history
- ns_refs.append(self.history_manager.output_hist)
- for ns in ns_refs:
- to_delete = [n for n, o in iteritems(ns) if o is obj]
- for name in to_delete:
- del ns[name]
-
- # displayhook keeps extra references, but not in a dictionary
- for name in ('_', '__', '___'):
- if getattr(self.displayhook, name) is obj:
- setattr(self.displayhook, name, None)
-
- def reset_selective(self, regex=None):
-        """Selectively clear variables from internal namespaces based on a
- specified regular expression.
-
- Parameters
- ----------
- regex : string or compiled pattern, optional
- A regular expression pattern that will be used in searching
-            variable names in the user's namespaces.
- """
- if regex is not None:
- try:
- m = re.compile(regex)
- except TypeError:
- raise TypeError('regex must be a string or compiled pattern')
- # Search for keys in each namespace that match the given regex
- # If a match is found, delete the key/value pair.
- for ns in self.all_ns_refs:
- for var in ns:
- if m.search(var):
- del ns[var]
-
- def push(self, variables, interactive=True):
- """Inject a group of variables into the IPython user namespace.
-
- Parameters
- ----------
- variables : dict, str or list/tuple of str
- The variables to inject into the user's namespace. If a dict, a
- simple update is done. If a str, the string is assumed to have
- variable names separated by spaces. A list/tuple of str can also
- be used to give the variable names. If just the variable names are
-            given (list/tuple/str), then the variable values are looked up in the
-            caller's frame.
- interactive : bool
- If True (default), the variables will be listed with the ``who``
- magic.
- """
- vdict = None
-
- # We need a dict of name/value pairs to do namespace updates.
- if isinstance(variables, dict):
- vdict = variables
- elif isinstance(variables, string_types+(list, tuple)):
- if isinstance(variables, string_types):
- vlist = variables.split()
- else:
- vlist = variables
- vdict = {}
- cf = sys._getframe(1)
- for name in vlist:
- try:
- vdict[name] = eval(name, cf.f_globals, cf.f_locals)
- except:
- print('Could not get variable %s from %s' %
- (name,cf.f_code.co_name))
- else:
- raise ValueError('variables must be a dict/str/list/tuple')
-
- # Propagate variables to user namespace
- self.user_ns.update(vdict)
-
- # And configure interactive visibility
- user_ns_hidden = self.user_ns_hidden
- if interactive:
- for name in vdict:
- user_ns_hidden.pop(name, None)
- else:
- user_ns_hidden.update(vdict)
-
- def drop_by_id(self, variables):
- """Remove a dict of variables from the user namespace, if they are the
- same as the values in the dictionary.
+ if by_name: # Delete by name
+ for ns in ns_refs:
+ try:
+ del ns[varname]
+ except KeyError:
+ pass
+ else: # Delete by object
+ try:
+ obj = self.user_ns[varname]
+ except KeyError:
+ raise NameError("name '%s' is not defined" % varname)
+ # Also check in output history
+ ns_refs.append(self.history_manager.output_hist)
+ for ns in ns_refs:
+ to_delete = [n for n, o in iteritems(ns) if o is obj]
+ for name in to_delete:
+ del ns[name]
+
+ # displayhook keeps extra references, but not in a dictionary
+ for name in ('_', '__', '___'):
+ if getattr(self.displayhook, name) is obj:
+ setattr(self.displayhook, name, None)
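+
+        # Usage sketch (names are hypothetical):
+        #
+        #   shell.user_ns['df'] = some_object
+        #   shell.del_var('df')                  # default: delete by identity,
+        #                                        # also scrubbing Out/_ caches
+        #   shell.del_var('tmp', by_name=True)   # drop 'tmp' from every namespace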
+
+ def reset_selective(self, regex=None):
+        """Selectively clear variables from internal namespaces based on a
+ specified regular expression.
+
+ Parameters
+ ----------
+ regex : string or compiled pattern, optional
+ A regular expression pattern that will be used in searching
+            variable names in the user's namespaces.
+ """
+ if regex is not None:
+ try:
+ m = re.compile(regex)
+ except TypeError:
+ raise TypeError('regex must be a string or compiled pattern')
+ # Search for keys in each namespace that match the given regex
+ # If a match is found, delete the key/value pair.
+ for ns in self.all_ns_refs:
+ for var in ns:
+ if m.search(var):
+ del ns[var]
+
+ def push(self, variables, interactive=True):
+ """Inject a group of variables into the IPython user namespace.
+
+ Parameters
+ ----------
+ variables : dict, str or list/tuple of str
+ The variables to inject into the user's namespace. If a dict, a
+ simple update is done. If a str, the string is assumed to have
+ variable names separated by spaces. A list/tuple of str can also
+ be used to give the variable names. If just the variable names are
+            given (list/tuple/str), then the variable values are looked up in the
+            caller's frame.
+ interactive : bool
+ If True (default), the variables will be listed with the ``who``
+ magic.
+ """
+ vdict = None
+
+ # We need a dict of name/value pairs to do namespace updates.
+ if isinstance(variables, dict):
+ vdict = variables
+ elif isinstance(variables, string_types+(list, tuple)):
+ if isinstance(variables, string_types):
+ vlist = variables.split()
+ else:
+ vlist = variables
+ vdict = {}
+ cf = sys._getframe(1)
+ for name in vlist:
+ try:
+ vdict[name] = eval(name, cf.f_globals, cf.f_locals)
+ except:
+ print('Could not get variable %s from %s' %
+ (name,cf.f_code.co_name))
+ else:
+ raise ValueError('variables must be a dict/str/list/tuple')
+
+ # Propagate variables to user namespace
+ self.user_ns.update(vdict)
+
+ # And configure interactive visibility
+ user_ns_hidden = self.user_ns_hidden
+ if interactive:
+ for name in vdict:
+ user_ns_hidden.pop(name, None)
+ else:
+ user_ns_hidden.update(vdict)
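+
+        # Usage sketch (hypothetical caller code): all three calls below end up
+        # as a dict update of user_ns; the string form resolves the names in the
+        # caller's own frame via sys._getframe(1):
+        #
+        #   a, b = 1, 2
+        #   shell.push({'a': a, 'b': b})         # explicit dict
+        #   shell.push('a b')                    # same effect, names looked up here
+        #   shell.push('a', interactive=False)   # injected, but hidden from %who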
+
+ def drop_by_id(self, variables):
+ """Remove a dict of variables from the user namespace, if they are the
+ same as the values in the dictionary.
- This is intended for use by extensions: variables that they've added can
- be taken back out if they are unloaded, without removing any that the
- user has overwritten.
+ This is intended for use by extensions: variables that they've added can
+ be taken back out if they are unloaded, without removing any that the
+ user has overwritten.
- Parameters
- ----------
- variables : dict
- A dictionary mapping object names (as strings) to the objects.
- """
- for name, obj in iteritems(variables):
- if name in self.user_ns and self.user_ns[name] is obj:
- del self.user_ns[name]
- self.user_ns_hidden.pop(name, None)
-
- #-------------------------------------------------------------------------
- # Things related to object introspection
- #-------------------------------------------------------------------------
-
- def _ofind(self, oname, namespaces=None):
- """Find an object in the available namespaces.
-
- self._ofind(oname) -> dict with keys: found,obj,ospace,ismagic
-
- Has special code to detect magic functions.
- """
- oname = oname.strip()
- #print '1- oname: <%r>' % oname # dbg
- if not oname.startswith(ESC_MAGIC) and \
- not oname.startswith(ESC_MAGIC2) and \
- not py3compat.isidentifier(oname, dotted=True):
- return dict(found=False)
-
- if namespaces is None:
- # Namespaces to search in:
- # Put them in a list. The order is important so that we
- # find things in the same order that Python finds them.
- namespaces = [ ('Interactive', self.user_ns),
- ('Interactive (global)', self.user_global_ns),
- ('Python builtin', builtin_mod.__dict__),
- ]
-
- # initialize results to 'null'
- found = False; obj = None; ospace = None;
- ismagic = False; isalias = False; parent = None
-
- # We need to special-case 'print', which as of python2.6 registers as a
- # function but should only be treated as one if print_function was
- # loaded with a future import. In this case, just bail.
- if (oname == 'print' and not py3compat.PY3 and not \
- (self.compile.compiler_flags & __future__.CO_FUTURE_PRINT_FUNCTION)):
- return {'found':found, 'obj':obj, 'namespace':ospace,
- 'ismagic':ismagic, 'isalias':isalias, 'parent':parent}
-
- # Look for the given name by splitting it in parts. If the head is
- # found, then we look for all the remaining parts as members, and only
- # declare success if we can find them all.
- oname_parts = oname.split('.')
- oname_head, oname_rest = oname_parts[0],oname_parts[1:]
- for nsname,ns in namespaces:
- try:
- obj = ns[oname_head]
- except KeyError:
- continue
- else:
- #print 'oname_rest:', oname_rest # dbg
- for idx, part in enumerate(oname_rest):
- try:
- parent = obj
- # The last part is looked up in a special way to avoid
- # descriptor invocation as it may raise or have side
- # effects.
- if idx == len(oname_rest) - 1:
- obj = self._getattr_property(obj, part)
- else:
- obj = getattr(obj, part)
- except:
- # Blanket except b/c some badly implemented objects
- # allow __getattr__ to raise exceptions other than
- # AttributeError, which then crashes IPython.
- break
- else:
- # If we finish the for loop (no break), we got all members
- found = True
- ospace = nsname
- break # namespace loop
-
- # Try to see if it's magic
- if not found:
- obj = None
- if oname.startswith(ESC_MAGIC2):
- oname = oname.lstrip(ESC_MAGIC2)
- obj = self.find_cell_magic(oname)
- elif oname.startswith(ESC_MAGIC):
- oname = oname.lstrip(ESC_MAGIC)
- obj = self.find_line_magic(oname)
- else:
- # search without prefix, so run? will find %run?
- obj = self.find_line_magic(oname)
- if obj is None:
- obj = self.find_cell_magic(oname)
- if obj is not None:
- found = True
- ospace = 'IPython internal'
- ismagic = True
- isalias = isinstance(obj, Alias)
-
- # Last try: special-case some literals like '', [], {}, etc:
- if not found and oname_head in ["''",'""','[]','{}','()']:
- obj = eval(oname_head)
- found = True
- ospace = 'Interactive'
-
- return {'found':found, 'obj':obj, 'namespace':ospace,
- 'ismagic':ismagic, 'isalias':isalias, 'parent':parent}
-
- @staticmethod
- def _getattr_property(obj, attrname):
- """Property-aware getattr to use in object finding.
-
- If attrname represents a property, return it unevaluated (in case it has
-        side effects or raises an error).
-
- """
- if not isinstance(obj, type):
- try:
- # `getattr(type(obj), attrname)` is not guaranteed to return
- # `obj`, but does so for property:
- #
- # property.__get__(self, None, cls) -> self
- #
- # The universal alternative is to traverse the mro manually
- # searching for attrname in class dicts.
- attr = getattr(type(obj), attrname)
- except AttributeError:
- pass
- else:
- # This relies on the fact that data descriptors (with both
- # __get__ & __set__ magic methods) take precedence over
- # instance-level attributes:
- #
- # class A(object):
- # @property
- # def foobar(self): return 123
- # a = A()
- # a.__dict__['foobar'] = 345
- # a.foobar # == 123
- #
- # So, a property may be returned right away.
- if isinstance(attr, property):
- return attr
-
- # Nothing helped, fall back.
- return getattr(obj, attrname)
-
- def _object_find(self, oname, namespaces=None):
- """Find an object and return a struct with info about it."""
- return Struct(self._ofind(oname, namespaces))
-
- def _inspect(self, meth, oname, namespaces=None, **kw):
- """Generic interface to the inspector system.
-
+ Parameters
+ ----------
+ variables : dict
+ A dictionary mapping object names (as strings) to the objects.
+ """
+ for name, obj in iteritems(variables):
+ if name in self.user_ns and self.user_ns[name] is obj:
+ del self.user_ns[name]
+ self.user_ns_hidden.pop(name, None)
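+
+        # Sketch of the intended extension pattern (names hypothetical):
+        #
+        #   injected = {'helper': helper_func}
+        #   shell.push(injected)        # on extension load
+        #   shell.drop_by_id(injected)  # on unload: removed only if the user
+        #                               # hasn't rebound 'helper' to something else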
+
+ #-------------------------------------------------------------------------
+ # Things related to object introspection
+ #-------------------------------------------------------------------------
+
+ def _ofind(self, oname, namespaces=None):
+ """Find an object in the available namespaces.
+
+ self._ofind(oname) -> dict with keys: found,obj,ospace,ismagic
+
+ Has special code to detect magic functions.
+ """
+ oname = oname.strip()
+ #print '1- oname: <%r>' % oname # dbg
+ if not oname.startswith(ESC_MAGIC) and \
+ not oname.startswith(ESC_MAGIC2) and \
+ not py3compat.isidentifier(oname, dotted=True):
+ return dict(found=False)
+
+ if namespaces is None:
+ # Namespaces to search in:
+ # Put them in a list. The order is important so that we
+ # find things in the same order that Python finds them.
+ namespaces = [ ('Interactive', self.user_ns),
+ ('Interactive (global)', self.user_global_ns),
+ ('Python builtin', builtin_mod.__dict__),
+ ]
+
+ # initialize results to 'null'
+ found = False; obj = None; ospace = None;
+ ismagic = False; isalias = False; parent = None
+
+ # We need to special-case 'print', which as of python2.6 registers as a
+ # function but should only be treated as one if print_function was
+ # loaded with a future import. In this case, just bail.
+ if (oname == 'print' and not py3compat.PY3 and not \
+ (self.compile.compiler_flags & __future__.CO_FUTURE_PRINT_FUNCTION)):
+ return {'found':found, 'obj':obj, 'namespace':ospace,
+ 'ismagic':ismagic, 'isalias':isalias, 'parent':parent}
+
+ # Look for the given name by splitting it in parts. If the head is
+ # found, then we look for all the remaining parts as members, and only
+ # declare success if we can find them all.
+ oname_parts = oname.split('.')
+ oname_head, oname_rest = oname_parts[0],oname_parts[1:]
+ for nsname,ns in namespaces:
+ try:
+ obj = ns[oname_head]
+ except KeyError:
+ continue
+ else:
+ #print 'oname_rest:', oname_rest # dbg
+ for idx, part in enumerate(oname_rest):
+ try:
+ parent = obj
+ # The last part is looked up in a special way to avoid
+ # descriptor invocation as it may raise or have side
+ # effects.
+ if idx == len(oname_rest) - 1:
+ obj = self._getattr_property(obj, part)
+ else:
+ obj = getattr(obj, part)
+ except:
+ # Blanket except b/c some badly implemented objects
+ # allow __getattr__ to raise exceptions other than
+ # AttributeError, which then crashes IPython.
+ break
+ else:
+ # If we finish the for loop (no break), we got all members
+ found = True
+ ospace = nsname
+ break # namespace loop
+
+ # Try to see if it's magic
+ if not found:
+ obj = None
+ if oname.startswith(ESC_MAGIC2):
+ oname = oname.lstrip(ESC_MAGIC2)
+ obj = self.find_cell_magic(oname)
+ elif oname.startswith(ESC_MAGIC):
+ oname = oname.lstrip(ESC_MAGIC)
+ obj = self.find_line_magic(oname)
+ else:
+ # search without prefix, so run? will find %run?
+ obj = self.find_line_magic(oname)
+ if obj is None:
+ obj = self.find_cell_magic(oname)
+ if obj is not None:
+ found = True
+ ospace = 'IPython internal'
+ ismagic = True
+ isalias = isinstance(obj, Alias)
+
+ # Last try: special-case some literals like '', [], {}, etc:
+ if not found and oname_head in ["''",'""','[]','{}','()']:
+ obj = eval(oname_head)
+ found = True
+ ospace = 'Interactive'
+
+ return {'found':found, 'obj':obj, 'namespace':ospace,
+ 'ismagic':ismagic, 'isalias':isalias, 'parent':parent}
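+
+        # Shape of the returned mapping, sketched for hypothetical lookups
+        # (assumes `os` was imported earlier in the session):
+        #
+        #   self._ofind('os.path.join')
+        #   # -> {'found': True, 'obj': <function join>, 'namespace': 'Interactive',
+        #   #     'ismagic': False, 'isalias': False, 'parent': <module 'os.path'>}
+        #   self._ofind('%run')
+        #   # -> found via the magic machinery, with namespace 'IPython internal'
+        #   #    and ismagic=True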
+
+ @staticmethod
+ def _getattr_property(obj, attrname):
+ """Property-aware getattr to use in object finding.
+
+ If attrname represents a property, return it unevaluated (in case it has
+        side effects or raises an error).
+
+ """
+ if not isinstance(obj, type):
+ try:
+ # `getattr(type(obj), attrname)` is not guaranteed to return
+ # `obj`, but does so for property:
+ #
+ # property.__get__(self, None, cls) -> self
+ #
+ # The universal alternative is to traverse the mro manually
+ # searching for attrname in class dicts.
+ attr = getattr(type(obj), attrname)
+ except AttributeError:
+ pass
+ else:
+ # This relies on the fact that data descriptors (with both
+ # __get__ & __set__ magic methods) take precedence over
+ # instance-level attributes:
+ #
+ # class A(object):
+ # @property
+ # def foobar(self): return 123
+ # a = A()
+ # a.__dict__['foobar'] = 345
+ # a.foobar # == 123
+ #
+ # So, a property may be returned right away.
+ if isinstance(attr, property):
+ return attr
+
+ # Nothing helped, fall back.
+ return getattr(obj, attrname)
+
+ def _object_find(self, oname, namespaces=None):
+ """Find an object and return a struct with info about it."""
+ return Struct(self._ofind(oname, namespaces))
+
+ def _inspect(self, meth, oname, namespaces=None, **kw):
+ """Generic interface to the inspector system.
+
This function is meant to be called by pdef, pdoc & friends.
"""
- info = self._object_find(oname, namespaces)
+ info = self._object_find(oname, namespaces)
docformat = sphinxify if self.sphinxify_docstring else None
- if info.found:
- pmethod = getattr(self.inspector, meth)
+ if info.found:
+ pmethod = getattr(self.inspector, meth)
# TODO: only apply format_screen to the plain/text repr of the mime
# bundle.
formatter = format_screen if info.ismagic else docformat
- if meth == 'pdoc':
- pmethod(info.obj, oname, formatter)
- elif meth == 'pinfo':
+ if meth == 'pdoc':
+ pmethod(info.obj, oname, formatter)
+ elif meth == 'pinfo':
pmethod(info.obj, oname, formatter, info,
enable_html_pager=self.enable_html_pager, **kw)
- else:
- pmethod(info.obj, oname)
- else:
- print('Object `%s` not found.' % oname)
- return 'not found' # so callers can take other action
-
- def object_inspect(self, oname, detail_level=0):
- """Get object info about oname"""
- with self.builtin_trap:
- info = self._object_find(oname)
- if info.found:
- return self.inspector.info(info.obj, oname, info=info,
- detail_level=detail_level
- )
- else:
- return oinspect.object_info(name=oname, found=False)
-
- def object_inspect_text(self, oname, detail_level=0):
- """Get object info as formatted text"""
+ else:
+ pmethod(info.obj, oname)
+ else:
+ print('Object `%s` not found.' % oname)
+ return 'not found' # so callers can take other action
+
+ def object_inspect(self, oname, detail_level=0):
+ """Get object info about oname"""
+ with self.builtin_trap:
+ info = self._object_find(oname)
+ if info.found:
+ return self.inspector.info(info.obj, oname, info=info,
+ detail_level=detail_level
+ )
+ else:
+ return oinspect.object_info(name=oname, found=False)
+
+ def object_inspect_text(self, oname, detail_level=0):
+ """Get object info as formatted text"""
return self.object_inspect_mime(oname, detail_level)['text/plain']
def object_inspect_mime(self, oname, detail_level=0):
@@ -1561,1345 +1561,1345 @@ class InteractiveShell(SingletonConfigurable):
A mimebundle is a dictionary, keyed by mime-type.
It must always have the key `'text/plain'`.
"""
- with self.builtin_trap:
- info = self._object_find(oname)
- if info.found:
+ with self.builtin_trap:
+ info = self._object_find(oname)
+ if info.found:
return self.inspector._get_info(info.obj, oname, info=info,
- detail_level=detail_level
- )
- else:
- raise KeyError(oname)
-
- #-------------------------------------------------------------------------
- # Things related to history management
- #-------------------------------------------------------------------------
-
- def init_history(self):
- """Sets up the command history, and starts regular autosaves."""
- self.history_manager = HistoryManager(shell=self, parent=self)
- self.configurables.append(self.history_manager)
-
- #-------------------------------------------------------------------------
- # Things related to exception handling and tracebacks (not debugging)
- #-------------------------------------------------------------------------
-
+ detail_level=detail_level
+ )
+ else:
+ raise KeyError(oname)
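+
+        # The returned mimebundle is a plain dict keyed by MIME type and always
+        # carries 'text/plain'; a sketch with a hypothetical name `foo`:
+        #
+        #   bundle = shell.object_inspect_mime('foo')
+        #   text = bundle['text/plain']      # always present
+        #   html = bundle.get('text/html')   # present only when an HTML repr exists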
+
+ #-------------------------------------------------------------------------
+ # Things related to history management
+ #-------------------------------------------------------------------------
+
+ def init_history(self):
+ """Sets up the command history, and starts regular autosaves."""
+ self.history_manager = HistoryManager(shell=self, parent=self)
+ self.configurables.append(self.history_manager)
+
+ #-------------------------------------------------------------------------
+ # Things related to exception handling and tracebacks (not debugging)
+ #-------------------------------------------------------------------------
+
debugger_cls = Pdb
- def init_traceback_handlers(self, custom_exceptions):
- # Syntax error handler.
- self.SyntaxTB = ultratb.SyntaxTB(color_scheme='NoColor')
-
- # The interactive one is initialized with an offset, meaning we always
- # want to remove the topmost item in the traceback, which is our own
- # internal code. Valid modes: ['Plain','Context','Verbose']
- self.InteractiveTB = ultratb.AutoFormattedTB(mode = 'Plain',
- color_scheme='NoColor',
- tb_offset = 1,
+ def init_traceback_handlers(self, custom_exceptions):
+ # Syntax error handler.
+ self.SyntaxTB = ultratb.SyntaxTB(color_scheme='NoColor')
+
+ # The interactive one is initialized with an offset, meaning we always
+ # want to remove the topmost item in the traceback, which is our own
+ # internal code. Valid modes: ['Plain','Context','Verbose']
+ self.InteractiveTB = ultratb.AutoFormattedTB(mode = 'Plain',
+ color_scheme='NoColor',
+ tb_offset = 1,
check_cache=check_linecache_ipython,
debugger_cls=self.debugger_cls)
-
- # The instance will store a pointer to the system-wide exception hook,
- # so that runtime code (such as magics) can access it. This is because
- # during the read-eval loop, it may get temporarily overwritten.
- self.sys_excepthook = sys.excepthook
-
- # and add any custom exception handlers the user may have specified
- self.set_custom_exc(*custom_exceptions)
-
- # Set the exception mode
- self.InteractiveTB.set_mode(mode=self.xmode)
-
- def set_custom_exc(self, exc_tuple, handler):
+
+ # The instance will store a pointer to the system-wide exception hook,
+ # so that runtime code (such as magics) can access it. This is because
+ # during the read-eval loop, it may get temporarily overwritten.
+ self.sys_excepthook = sys.excepthook
+
+ # and add any custom exception handlers the user may have specified
+ self.set_custom_exc(*custom_exceptions)
+
+ # Set the exception mode
+ self.InteractiveTB.set_mode(mode=self.xmode)
+
+ def set_custom_exc(self, exc_tuple, handler):
"""set_custom_exc(exc_tuple, handler)
-
- Set a custom exception handler, which will be called if any of the
- exceptions in exc_tuple occur in the mainloop (specifically, in the
- run_code() method).
-
- Parameters
- ----------
-
- exc_tuple : tuple of exception classes
- A *tuple* of exception classes, for which to call the defined
- handler. It is very important that you use a tuple, and NOT A
- LIST here, because of the way Python's except statement works. If
- you only want to trap a single exception, use a singleton tuple::
-
- exc_tuple == (MyCustomException,)
-
- handler : callable
- handler must have the following signature::
-
- def my_handler(self, etype, value, tb, tb_offset=None):
- ...
- return structured_traceback
-
- Your handler must return a structured traceback (a list of strings),
- or None.
-
- This will be made into an instance method (via types.MethodType)
- of IPython itself, and it will be called if any of the exceptions
- listed in the exc_tuple are caught. If the handler is None, an
- internal basic one is used, which just prints basic info.
-
- To protect IPython from crashes, if your handler ever raises an
- exception or returns an invalid result, it will be immediately
- disabled.
-
-        WARNING: by putting your own exception handler into IPython's main
- execution loop, you run a very good chance of nasty crashes. This
- facility should only be used if you really know what you are doing."""
-
- assert type(exc_tuple)==type(()) , \
- "The custom exceptions must be given AS A TUPLE."
-
+
+ Set a custom exception handler, which will be called if any of the
+ exceptions in exc_tuple occur in the mainloop (specifically, in the
+ run_code() method).
+
+ Parameters
+ ----------
+
+ exc_tuple : tuple of exception classes
+ A *tuple* of exception classes, for which to call the defined
+ handler. It is very important that you use a tuple, and NOT A
+ LIST here, because of the way Python's except statement works. If
+ you only want to trap a single exception, use a singleton tuple::
+
+ exc_tuple == (MyCustomException,)
+
+ handler : callable
+ handler must have the following signature::
+
+ def my_handler(self, etype, value, tb, tb_offset=None):
+ ...
+ return structured_traceback
+
+ Your handler must return a structured traceback (a list of strings),
+ or None.
+
+ This will be made into an instance method (via types.MethodType)
+ of IPython itself, and it will be called if any of the exceptions
+ listed in the exc_tuple are caught. If the handler is None, an
+ internal basic one is used, which just prints basic info.
+
+ To protect IPython from crashes, if your handler ever raises an
+ exception or returns an invalid result, it will be immediately
+ disabled.
+
+        WARNING: by putting your own exception handler into IPython's main
+ execution loop, you run a very good chance of nasty crashes. This
+ facility should only be used if you really know what you are doing."""
+
+ assert type(exc_tuple)==type(()) , \
+ "The custom exceptions must be given AS A TUPLE."
+
def dummy_handler(self, etype, value, tb, tb_offset=None):
- print('*** Simple custom exception handler ***')
- print('Exception type :',etype)
- print('Exception value:',value)
- print('Traceback :',tb)
- #print 'Source code :','\n'.join(self.buffer)
+ print('*** Simple custom exception handler ***')
+ print('Exception type :',etype)
+ print('Exception value:',value)
+ print('Traceback :',tb)
+ #print 'Source code :','\n'.join(self.buffer)
- def validate_stb(stb):
- """validate structured traceback return type
+ def validate_stb(stb):
+ """validate structured traceback return type
- return type of CustomTB *should* be a list of strings, but allow
- single strings or None, which are harmless.
+ return type of CustomTB *should* be a list of strings, but allow
+ single strings or None, which are harmless.
- This function will *always* return a list of strings,
- and will raise a TypeError if stb is inappropriate.
- """
- msg = "CustomTB must return list of strings, not %r" % stb
- if stb is None:
- return []
- elif isinstance(stb, string_types):
- return [stb]
- elif not isinstance(stb, list):
- raise TypeError(msg)
- # it's a list
- for line in stb:
- # check every element
- if not isinstance(line, string_types):
- raise TypeError(msg)
- return stb
-
- if handler is None:
- wrapped = dummy_handler
- else:
- def wrapped(self,etype,value,tb,tb_offset=None):
- """wrap CustomTB handler, to protect IPython from user code
+ This function will *always* return a list of strings,
+ and will raise a TypeError if stb is inappropriate.
+ """
+ msg = "CustomTB must return list of strings, not %r" % stb
+ if stb is None:
+ return []
+ elif isinstance(stb, string_types):
+ return [stb]
+ elif not isinstance(stb, list):
+ raise TypeError(msg)
+ # it's a list
+ for line in stb:
+ # check every element
+ if not isinstance(line, string_types):
+ raise TypeError(msg)
+ return stb
+
+ if handler is None:
+ wrapped = dummy_handler
+ else:
+ def wrapped(self,etype,value,tb,tb_offset=None):
+ """wrap CustomTB handler, to protect IPython from user code
- This makes it harder (but not impossible) for custom exception
- handlers to crash IPython.
- """
- try:
- stb = handler(self,etype,value,tb,tb_offset=tb_offset)
- return validate_stb(stb)
- except:
- # clear custom handler immediately
- self.set_custom_exc((), None)
+ This makes it harder (but not impossible) for custom exception
+ handlers to crash IPython.
+ """
+ try:
+ stb = handler(self,etype,value,tb,tb_offset=tb_offset)
+ return validate_stb(stb)
+ except:
+ # clear custom handler immediately
+ self.set_custom_exc((), None)
print("Custom TB Handler failed, unregistering", file=sys.stderr)
- # show the exception in handler first
- stb = self.InteractiveTB.structured_traceback(*sys.exc_info())
+ # show the exception in handler first
+ stb = self.InteractiveTB.structured_traceback(*sys.exc_info())
print(self.InteractiveTB.stb2text(stb))
print("The original exception:")
- stb = self.InteractiveTB.structured_traceback(
- (etype,value,tb), tb_offset=tb_offset
- )
- return stb
-
- self.CustomTB = types.MethodType(wrapped,self)
- self.custom_exceptions = exc_tuple
-
- def excepthook(self, etype, value, tb):
- """One more defense for GUI apps that call sys.excepthook.
-
- GUI frameworks like wxPython trap exceptions and call
- sys.excepthook themselves. I guess this is a feature that
- enables them to keep running after exceptions that would
- otherwise kill their mainloop. This is a bother for IPython
-        which expects to catch all of the program exceptions with a try:
- except: statement.
-
- Normally, IPython sets sys.excepthook to a CrashHandler instance, so if
- any app directly invokes sys.excepthook, it will look to the user like
- IPython crashed. In order to work around this, we can disable the
- CrashHandler and replace it with this excepthook instead, which prints a
- regular traceback using our InteractiveTB. In this fashion, apps which
- call sys.excepthook will generate a regular-looking exception from
- IPython, and the CrashHandler will only be triggered by real IPython
- crashes.
-
- This hook should be used sparingly, only in places which are not likely
- to be true IPython errors.
- """
- self.showtraceback((etype, value, tb), tb_offset=0)
-
- def _get_exc_info(self, exc_tuple=None):
- """get exc_info from a given tuple, sys.exc_info() or sys.last_type etc.
+ stb = self.InteractiveTB.structured_traceback(
+ (etype,value,tb), tb_offset=tb_offset
+ )
+ return stb
+
+ self.CustomTB = types.MethodType(wrapped,self)
+ self.custom_exceptions = exc_tuple
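+
+        # Registration sketch following the docstring above (the handler body
+        # and the exception chosen are illustrative only):
+        #
+        #   def div_handler(self, etype, value, tb, tb_offset=None):
+        #       print('custom handler saw:', etype.__name__)
+        #       return self.InteractiveTB.structured_traceback(
+        #           etype, value, tb, tb_offset=tb_offset)
+        #
+        #   shell.set_custom_exc((ZeroDivisionError,), div_handler)
+        #   shell.set_custom_exc((), None)   # back to the default behaviour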
+
+ def excepthook(self, etype, value, tb):
+ """One more defense for GUI apps that call sys.excepthook.
+
+ GUI frameworks like wxPython trap exceptions and call
+ sys.excepthook themselves. I guess this is a feature that
+ enables them to keep running after exceptions that would
+ otherwise kill their mainloop. This is a bother for IPython
+        which expects to catch all of the program exceptions with a try:
+ except: statement.
+
+ Normally, IPython sets sys.excepthook to a CrashHandler instance, so if
+ any app directly invokes sys.excepthook, it will look to the user like
+ IPython crashed. In order to work around this, we can disable the
+ CrashHandler and replace it with this excepthook instead, which prints a
+ regular traceback using our InteractiveTB. In this fashion, apps which
+ call sys.excepthook will generate a regular-looking exception from
+ IPython, and the CrashHandler will only be triggered by real IPython
+ crashes.
+
+ This hook should be used sparingly, only in places which are not likely
+ to be true IPython errors.
+ """
+ self.showtraceback((etype, value, tb), tb_offset=0)
+
+ def _get_exc_info(self, exc_tuple=None):
+ """get exc_info from a given tuple, sys.exc_info() or sys.last_type etc.
- Ensures sys.last_type,value,traceback hold the exc_info we found,
- from whichever source.
+ Ensures sys.last_type,value,traceback hold the exc_info we found,
+ from whichever source.
- raises ValueError if none of these contain any information
- """
- if exc_tuple is None:
- etype, value, tb = sys.exc_info()
- else:
- etype, value, tb = exc_tuple
-
- if etype is None:
- if hasattr(sys, 'last_type'):
- etype, value, tb = sys.last_type, sys.last_value, \
- sys.last_traceback
+ raises ValueError if none of these contain any information
+ """
+ if exc_tuple is None:
+ etype, value, tb = sys.exc_info()
+ else:
+ etype, value, tb = exc_tuple
+
+ if etype is None:
+ if hasattr(sys, 'last_type'):
+ etype, value, tb = sys.last_type, sys.last_value, \
+ sys.last_traceback
- if etype is None:
- raise ValueError("No exception to find")
+ if etype is None:
+ raise ValueError("No exception to find")
- # Now store the exception info in sys.last_type etc.
- # WARNING: these variables are somewhat deprecated and not
- # necessarily safe to use in a threaded environment, but tools
- # like pdb depend on their existence, so let's set them. If we
- # find problems in the field, we'll need to revisit their use.
- sys.last_type = etype
- sys.last_value = value
- sys.last_traceback = tb
+ # Now store the exception info in sys.last_type etc.
+ # WARNING: these variables are somewhat deprecated and not
+ # necessarily safe to use in a threaded environment, but tools
+ # like pdb depend on their existence, so let's set them. If we
+ # find problems in the field, we'll need to revisit their use.
+ sys.last_type = etype
+ sys.last_value = value
+ sys.last_traceback = tb
- return etype, value, tb
+ return etype, value, tb
- def show_usage_error(self, exc):
- """Show a short message for UsageErrors
+ def show_usage_error(self, exc):
+ """Show a short message for UsageErrors
- These are special exceptions that shouldn't show a traceback.
- """
+ These are special exceptions that shouldn't show a traceback.
+ """
print("UsageError: %s" % exc, file=sys.stderr)
- def get_exception_only(self, exc_tuple=None):
- """
- Return as a string (ending with a newline) the exception that
- just occurred, without any traceback.
- """
- etype, value, tb = self._get_exc_info(exc_tuple)
- msg = traceback.format_exception_only(etype, value)
- return ''.join(msg)
-
- def showtraceback(self, exc_tuple=None, filename=None, tb_offset=None,
- exception_only=False):
- """Display the exception that just occurred.
-
- If nothing is known about the exception, this is the method which
- should be used throughout the code for presenting user tracebacks,
- rather than directly invoking the InteractiveTB object.
-
- A specific showsyntaxerror() also exists, but this method can take
- care of calling it if needed, so unless you are explicitly catching a
- SyntaxError exception, don't try to analyze the stack manually and
- simply call this method."""
-
- try:
- try:
- etype, value, tb = self._get_exc_info(exc_tuple)
- except ValueError:
+ def get_exception_only(self, exc_tuple=None):
+ """
+ Return as a string (ending with a newline) the exception that
+ just occurred, without any traceback.
+ """
+ etype, value, tb = self._get_exc_info(exc_tuple)
+ msg = traceback.format_exception_only(etype, value)
+ return ''.join(msg)
+
+ def showtraceback(self, exc_tuple=None, filename=None, tb_offset=None,
+ exception_only=False):
+ """Display the exception that just occurred.
+
+ If nothing is known about the exception, this is the method which
+ should be used throughout the code for presenting user tracebacks,
+ rather than directly invoking the InteractiveTB object.
+
+ A specific showsyntaxerror() also exists, but this method can take
+ care of calling it if needed, so unless you are explicitly catching a
+ SyntaxError exception, don't try to analyze the stack manually and
+ simply call this method."""
+
+ try:
+ try:
+ etype, value, tb = self._get_exc_info(exc_tuple)
+ except ValueError:
print('No traceback available to show.', file=sys.stderr)
- return
+ return
- if issubclass(etype, SyntaxError):
- # Though this won't be called by syntax errors in the input
- # line, there may be SyntaxError cases with imported code.
- self.showsyntaxerror(filename)
- elif etype is UsageError:
- self.show_usage_error(value)
- else:
- if exception_only:
- stb = ['An exception has occurred, use %tb to see '
- 'the full traceback.\n']
- stb.extend(self.InteractiveTB.get_exception_only(etype,
- value))
- else:
- try:
- # Exception classes can customise their traceback - we
- # use this in IPython.parallel for exceptions occurring
- # in the engines. This should return a list of strings.
- stb = value._render_traceback_()
- except Exception:
- stb = self.InteractiveTB.structured_traceback(etype,
- value, tb, tb_offset=tb_offset)
-
- self._showtraceback(etype, value, stb)
- if self.call_pdb:
- # drop into debugger
- self.debugger(force=True)
- return
-
- # Actually show the traceback
- self._showtraceback(etype, value, stb)
-
- except KeyboardInterrupt:
+ if issubclass(etype, SyntaxError):
+ # Though this won't be called by syntax errors in the input
+ # line, there may be SyntaxError cases with imported code.
+ self.showsyntaxerror(filename)
+ elif etype is UsageError:
+ self.show_usage_error(value)
+ else:
+ if exception_only:
+ stb = ['An exception has occurred, use %tb to see '
+ 'the full traceback.\n']
+ stb.extend(self.InteractiveTB.get_exception_only(etype,
+ value))
+ else:
+ try:
+ # Exception classes can customise their traceback - we
+ # use this in IPython.parallel for exceptions occurring
+ # in the engines. This should return a list of strings.
+ stb = value._render_traceback_()
+ except Exception:
+ stb = self.InteractiveTB.structured_traceback(etype,
+ value, tb, tb_offset=tb_offset)
+
+ self._showtraceback(etype, value, stb)
+ if self.call_pdb:
+ # drop into debugger
+ self.debugger(force=True)
+ return
+
+ # Actually show the traceback
+ self._showtraceback(etype, value, stb)
+
+ except KeyboardInterrupt:
print('\n' + self.get_exception_only(), file=sys.stderr)
-
- def _showtraceback(self, etype, evalue, stb):
- """Actually show a traceback.
-
- Subclasses may override this method to put the traceback on a different
- place, like a side channel.
- """
+
+ def _showtraceback(self, etype, evalue, stb):
+ """Actually show a traceback.
+
+ Subclasses may override this method to put the traceback on a different
+ place, like a side channel.
+ """
print(self.InteractiveTB.stb2text(stb))
-
- def showsyntaxerror(self, filename=None):
- """Display the syntax error that just occurred.
-
- This doesn't display a stack trace because there isn't one.
-
- If a filename is given, it is stuffed in the exception instead
- of what was there before (because Python's parser always uses
- "<string>" when reading from a string).
- """
- etype, value, last_traceback = self._get_exc_info()
-
- if filename and issubclass(etype, SyntaxError):
- try:
- value.filename = filename
- except:
- # Not the format we expect; leave it alone
- pass
+
+ def showsyntaxerror(self, filename=None):
+ """Display the syntax error that just occurred.
+
+ This doesn't display a stack trace because there isn't one.
+
+ If a filename is given, it is stuffed in the exception instead
+ of what was there before (because Python's parser always uses
+ "<string>" when reading from a string).
+ """
+ etype, value, last_traceback = self._get_exc_info()
+
+ if filename and issubclass(etype, SyntaxError):
+ try:
+ value.filename = filename
+ except:
+ # Not the format we expect; leave it alone
+ pass
- stb = self.SyntaxTB.structured_traceback(etype, value, [])
- self._showtraceback(etype, value, stb)
-
- # This is overridden in TerminalInteractiveShell to show a message about
- # the %paste magic.
- def showindentationerror(self):
- """Called by run_cell when there's an IndentationError in code entered
- at the prompt.
-
- This is overridden in TerminalInteractiveShell to show a message about
- the %paste magic."""
- self.showsyntaxerror()
-
- #-------------------------------------------------------------------------
- # Things related to readline
- #-------------------------------------------------------------------------
-
- def init_readline(self):
+ stb = self.SyntaxTB.structured_traceback(etype, value, [])
+ self._showtraceback(etype, value, stb)
+
+ # This is overridden in TerminalInteractiveShell to show a message about
+ # the %paste magic.
+ def showindentationerror(self):
+ """Called by run_cell when there's an IndentationError in code entered
+ at the prompt.
+
+ This is overridden in TerminalInteractiveShell to show a message about
+ the %paste magic."""
+ self.showsyntaxerror()
+
+ #-------------------------------------------------------------------------
+ # Things related to readline
+ #-------------------------------------------------------------------------
+
+ def init_readline(self):
"""DEPRECATED
Moved to terminal subclass, here only to simplify the init logic."""
- # Set a number of methods that depend on readline to be no-op
+ # Set a number of methods that depend on readline to be no-op
warnings.warn('`init_readline` is no-op since IPython 5.0 and is Deprecated',
DeprecationWarning, stacklevel=2)
- self.set_custom_completer = no_op
-
- @skip_doctest
- def set_next_input(self, s, replace=False):
- """ Sets the 'default' input string for the next command line.
-
- Example::
-
- In [1]: _ip.set_next_input("Hello World")
- In [2]: Hello World_ # cursor is here
- """
- self.rl_next_input = py3compat.cast_bytes_py2(s)
-
- def _indent_current_str(self):
- """return the current level of indentation as a string"""
- return self.input_splitter.indent_spaces * ' '
-
- #-------------------------------------------------------------------------
- # Things related to text completion
- #-------------------------------------------------------------------------
-
- def init_completer(self):
- """Initialize the completion machinery.
-
- This creates completion machinery that can be used by client code,
- either interactively in-process (typically triggered by the readline
- library), programmatically (such as in test suites) or out-of-process
- (typically over the network by remote frontends).
- """
- from IPython.core.completer import IPCompleter
- from IPython.core.completerlib import (module_completer,
- magic_run_completer, cd_completer, reset_completer)
-
- self.Completer = IPCompleter(shell=self,
- namespace=self.user_ns,
- global_namespace=self.user_global_ns,
+ self.set_custom_completer = no_op
+
+ @skip_doctest
+ def set_next_input(self, s, replace=False):
+ """ Sets the 'default' input string for the next command line.
+
+ Example::
+
+ In [1]: _ip.set_next_input("Hello World")
+ In [2]: Hello World_ # cursor is here
+ """
+ self.rl_next_input = py3compat.cast_bytes_py2(s)
+
+ def _indent_current_str(self):
+ """return the current level of indentation as a string"""
+ return self.input_splitter.indent_spaces * ' '
+
+ #-------------------------------------------------------------------------
+ # Things related to text completion
+ #-------------------------------------------------------------------------
+
+ def init_completer(self):
+ """Initialize the completion machinery.
+
+ This creates completion machinery that can be used by client code,
+ either interactively in-process (typically triggered by the readline
+ library), programmatically (such as in test suites) or out-of-process
+ (typically over the network by remote frontends).
+ """
+ from IPython.core.completer import IPCompleter
+ from IPython.core.completerlib import (module_completer,
+ magic_run_completer, cd_completer, reset_completer)
+
+ self.Completer = IPCompleter(shell=self,
+ namespace=self.user_ns,
+ global_namespace=self.user_global_ns,
use_readline=False,
- parent=self,
- )
- self.configurables.append(self.Completer)
-
- # Add custom completers to the basic ones built into IPCompleter
- sdisp = self.strdispatchers.get('complete_command', StrDispatch())
- self.strdispatchers['complete_command'] = sdisp
- self.Completer.custom_completers = sdisp
-
- self.set_hook('complete_command', module_completer, str_key = 'import')
- self.set_hook('complete_command', module_completer, str_key = 'from')
- self.set_hook('complete_command', module_completer, str_key = '%aimport')
- self.set_hook('complete_command', magic_run_completer, str_key = '%run')
- self.set_hook('complete_command', cd_completer, str_key = '%cd')
- self.set_hook('complete_command', reset_completer, str_key = '%reset')
-
-
+ parent=self,
+ )
+ self.configurables.append(self.Completer)
+
+ # Add custom completers to the basic ones built into IPCompleter
+ sdisp = self.strdispatchers.get('complete_command', StrDispatch())
+ self.strdispatchers['complete_command'] = sdisp
+ self.Completer.custom_completers = sdisp
+
+ self.set_hook('complete_command', module_completer, str_key = 'import')
+ self.set_hook('complete_command', module_completer, str_key = 'from')
+ self.set_hook('complete_command', module_completer, str_key = '%aimport')
+ self.set_hook('complete_command', magic_run_completer, str_key = '%run')
+ self.set_hook('complete_command', cd_completer, str_key = '%cd')
+ self.set_hook('complete_command', reset_completer, str_key = '%reset')
+
+
@skip_doctest_py2
- def complete(self, text, line=None, cursor_pos=None):
- """Return the completed text and a list of completions.
-
- Parameters
- ----------
-
- text : string
- A string of text to be completed on. It can be given as empty, in
- which case a line/position pair must be given instead; the
- completer itself will then split the line as readline does.
-
- line : string, optional
- The complete line that text is part of.
-
- cursor_pos : int, optional
- The position of the cursor on the input line.
-
- Returns
- -------
- text : string
- The actual text that was completed.
-
- matches : list
- A sorted list with all possible completions.
-
- The optional arguments allow the completion to take more context into
- account, and are part of the low-level completion API.
-
- This is a wrapper around the completion mechanism, similar to what
- readline does at the command line when the TAB key is hit. By
- exposing it as a method, it can be used by other non-readline
- environments (such as GUIs) for text completion.
-
- Simple usage example:
-
- In [1]: x = 'hello'
-
- In [2]: _ip.complete('x.l')
- Out[2]: ('x.l', ['x.ljust', 'x.lower', 'x.lstrip'])
- """
-
- # Inject names into __builtin__ so we can complete on the added names.
- with self.builtin_trap:
- return self.Completer.complete(text, line, cursor_pos)
-
- def set_custom_completer(self, completer, pos=0):
- """Adds a new custom completer function.
-
- The position argument (defaults to 0) is the index in the completers
- list where you want the completer to be inserted."""
-
- newcomp = types.MethodType(completer,self.Completer)
- self.Completer.matchers.insert(pos,newcomp)
-
- def set_completer_frame(self, frame=None):
- """Set the frame of the completer."""
- if frame:
- self.Completer.namespace = frame.f_locals
- self.Completer.global_namespace = frame.f_globals
- else:
- self.Completer.namespace = self.user_ns
- self.Completer.global_namespace = self.user_global_ns
-
- #-------------------------------------------------------------------------
- # Things related to magics
- #-------------------------------------------------------------------------
-
- def init_magics(self):
- from IPython.core import magics as m
- self.magics_manager = magic.MagicsManager(shell=self,
- parent=self,
- user_magics=m.UserMagics(self))
- self.configurables.append(self.magics_manager)
-
- # Expose as public API from the magics manager
- self.register_magics = self.magics_manager.register
-
- self.register_magics(m.AutoMagics, m.BasicMagics, m.CodeMagics,
+ def complete(self, text, line=None, cursor_pos=None):
+ """Return the completed text and a list of completions.
+
+ Parameters
+ ----------
+
+ text : string
+ A string of text to be completed on. It can be given as empty, in
+ which case a line/position pair must be given instead; the
+ completer itself will then split the line as readline does.
+
+ line : string, optional
+ The complete line that text is part of.
+
+ cursor_pos : int, optional
+ The position of the cursor on the input line.
+
+ Returns
+ -------
+ text : string
+ The actual text that was completed.
+
+ matches : list
+ A sorted list with all possible completions.
+
+ The optional arguments allow the completion to take more context into
+ account, and are part of the low-level completion API.
+
+ This is a wrapper around the completion mechanism, similar to what
+ readline does at the command line when the TAB key is hit. By
+ exposing it as a method, it can be used by other non-readline
+ environments (such as GUIs) for text completion.
+
+ Simple usage example:
+
+ In [1]: x = 'hello'
+
+ In [2]: _ip.complete('x.l')
+ Out[2]: ('x.l', ['x.ljust', 'x.lower', 'x.lstrip'])
+ """
+
+ # Inject names into __builtin__ so we can complete on the added names.
+ with self.builtin_trap:
+ return self.Completer.complete(text, line, cursor_pos)
+
+ def set_custom_completer(self, completer, pos=0):
+ """Adds a new custom completer function.
+
+ The position argument (defaults to 0) is the index in the completers
+ list where you want the completer to be inserted."""
+
+ newcomp = types.MethodType(completer,self.Completer)
+ self.Completer.matchers.insert(pos,newcomp)
+
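A minimal sketch of registering a custom completer through set_custom_completer, assuming an active IPython session; the function name and candidate words are illustrative only::

    from IPython import get_ipython

    def fruit_completer(self, text):
        # Bound onto the IPCompleter instance; expected to return a list of
        # candidate strings for the text being completed (illustrative data).
        return [w for w in ('apple', 'apricot', 'avocado') if w.startswith(text)]

    ip = get_ipython()                        # the running InteractiveShell
    ip.set_custom_completer(fruit_completer)  # inserted at position 0 of the matchers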
+ def set_completer_frame(self, frame=None):
+ """Set the frame of the completer."""
+ if frame:
+ self.Completer.namespace = frame.f_locals
+ self.Completer.global_namespace = frame.f_globals
+ else:
+ self.Completer.namespace = self.user_ns
+ self.Completer.global_namespace = self.user_global_ns
+
+ #-------------------------------------------------------------------------
+ # Things related to magics
+ #-------------------------------------------------------------------------
+
+ def init_magics(self):
+ from IPython.core import magics as m
+ self.magics_manager = magic.MagicsManager(shell=self,
+ parent=self,
+ user_magics=m.UserMagics(self))
+ self.configurables.append(self.magics_manager)
+
+ # Expose as public API from the magics manager
+ self.register_magics = self.magics_manager.register
+
+ self.register_magics(m.AutoMagics, m.BasicMagics, m.CodeMagics,
m.ConfigMagics, m.DisplayMagics, m.ExecutionMagics,
- m.ExtensionMagics, m.HistoryMagics, m.LoggingMagics,
- m.NamespaceMagics, m.OSMagics, m.PylabMagics, m.ScriptMagics,
- )
-
- # Register Magic Aliases
- mman = self.magics_manager
- # FIXME: magic aliases should be defined by the Magics classes
- # or in MagicsManager, not here
- mman.register_alias('ed', 'edit')
- mman.register_alias('hist', 'history')
- mman.register_alias('rep', 'recall')
- mman.register_alias('SVG', 'svg', 'cell')
- mman.register_alias('HTML', 'html', 'cell')
- mman.register_alias('file', 'writefile', 'cell')
-
- # FIXME: Move the color initialization to the DisplayHook, which
- # should be split into a prompt manager and displayhook. We probably
- # even need a centralized colors management object.
- self.magic('colors %s' % self.colors)
+ m.ExtensionMagics, m.HistoryMagics, m.LoggingMagics,
+ m.NamespaceMagics, m.OSMagics, m.PylabMagics, m.ScriptMagics,
+ )
+
+ # Register Magic Aliases
+ mman = self.magics_manager
+ # FIXME: magic aliases should be defined by the Magics classes
+ # or in MagicsManager, not here
+ mman.register_alias('ed', 'edit')
+ mman.register_alias('hist', 'history')
+ mman.register_alias('rep', 'recall')
+ mman.register_alias('SVG', 'svg', 'cell')
+ mman.register_alias('HTML', 'html', 'cell')
+ mman.register_alias('file', 'writefile', 'cell')
+
+ # FIXME: Move the color initialization to the DisplayHook, which
+ # should be split into a prompt manager and displayhook. We probably
+ # even need a centralized colors management object.
+ self.magic('colors %s' % self.colors)
- # Defined here so that it's included in the documentation
- @functools.wraps(magic.MagicsManager.register_function)
- def register_magic_function(self, func, magic_kind='line', magic_name=None):
+ # Defined here so that it's included in the documentation
+ @functools.wraps(magic.MagicsManager.register_function)
+ def register_magic_function(self, func, magic_kind='line', magic_name=None):
self.magics_manager.register_function(func,
- magic_kind=magic_kind, magic_name=magic_name)
-
- def run_line_magic(self, magic_name, line):
- """Execute the given line magic.
-
- Parameters
- ----------
- magic_name : str
- Name of the desired magic function, without '%' prefix.
-
- line : str
- The rest of the input line as a single string.
- """
- fn = self.find_line_magic(magic_name)
- if fn is None:
- cm = self.find_cell_magic(magic_name)
- etpl = "Line magic function `%%%s` not found%s."
- extra = '' if cm is None else (' (But cell magic `%%%%%s` exists, '
- 'did you mean that instead?)' % magic_name )
+ magic_kind=magic_kind, magic_name=magic_name)
+
+ def run_line_magic(self, magic_name, line):
+ """Execute the given line magic.
+
+ Parameters
+ ----------
+ magic_name : str
+ Name of the desired magic function, without '%' prefix.
+
+ line : str
+ The rest of the input line as a single string.
+ """
+ fn = self.find_line_magic(magic_name)
+ if fn is None:
+ cm = self.find_cell_magic(magic_name)
+ etpl = "Line magic function `%%%s` not found%s."
+ extra = '' if cm is None else (' (But cell magic `%%%%%s` exists, '
+ 'did you mean that instead?)' % magic_name )
raise UsageError(etpl % (magic_name, extra))
- else:
- # Note: this is the distance in the stack to the user's frame.
- # This will need to be updated if the internal calling logic gets
- # refactored, or else we'll be expanding the wrong variables.
- stack_depth = 2
- magic_arg_s = self.var_expand(line, stack_depth)
- # Put magic args in a list so we can call with f(*a) syntax
- args = [magic_arg_s]
- kwargs = {}
- # Grab local namespace if we need it:
- if getattr(fn, "needs_local_scope", False):
- kwargs['local_ns'] = sys._getframe(stack_depth).f_locals
- with self.builtin_trap:
- result = fn(*args,**kwargs)
- return result
-
- def run_cell_magic(self, magic_name, line, cell):
- """Execute the given cell magic.
+ else:
+ # Note: this is the distance in the stack to the user's frame.
+ # This will need to be updated if the internal calling logic gets
+ # refactored, or else we'll be expanding the wrong variables.
+ stack_depth = 2
+ magic_arg_s = self.var_expand(line, stack_depth)
+ # Put magic args in a list so we can call with f(*a) syntax
+ args = [magic_arg_s]
+ kwargs = {}
+ # Grab local namespace if we need it:
+ if getattr(fn, "needs_local_scope", False):
+ kwargs['local_ns'] = sys._getframe(stack_depth).f_locals
+ with self.builtin_trap:
+ result = fn(*args,**kwargs)
+ return result
+
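A short sketch of invoking a line magic programmatically via run_line_magic, assuming an active IPython session::

    from IPython import get_ipython

    ip = get_ipython()
    # Equivalent to typing `%timeit -n 10 sum(range(100))` at the prompt:
    ip.run_line_magic('timeit', '-n 10 sum(range(100))')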
+ def run_cell_magic(self, magic_name, line, cell):
+ """Execute the given cell magic.
- Parameters
- ----------
- magic_name : str
- Name of the desired magic function, without '%' prefix.
-
- line : str
- The rest of the first input line as a single string.
-
- cell : str
- The body of the cell as a (possibly multiline) string.
- """
- fn = self.find_cell_magic(magic_name)
- if fn is None:
- lm = self.find_line_magic(magic_name)
- etpl = "Cell magic `%%{0}` not found{1}."
- extra = '' if lm is None else (' (But line magic `%{0}` exists, '
- 'did you mean that instead?)'.format(magic_name))
+ Parameters
+ ----------
+ magic_name : str
+ Name of the desired magic function, without '%' prefix.
+
+ line : str
+ The rest of the first input line as a single string.
+
+ cell : str
+ The body of the cell as a (possibly multiline) string.
+ """
+ fn = self.find_cell_magic(magic_name)
+ if fn is None:
+ lm = self.find_line_magic(magic_name)
+ etpl = "Cell magic `%%{0}` not found{1}."
+ extra = '' if lm is None else (' (But line magic `%{0}` exists, '
+ 'did you mean that instead?)'.format(magic_name))
raise UsageError(etpl.format(magic_name, extra))
- elif cell == '':
- message = '%%{0} is a cell magic, but the cell body is empty.'.format(magic_name)
- if self.find_line_magic(magic_name) is not None:
- message += ' Did you mean the line magic %{0} (single %)?'.format(magic_name)
- raise UsageError(message)
- else:
- # Note: this is the distance in the stack to the user's frame.
- # This will need to be updated if the internal calling logic gets
- # refactored, or else we'll be expanding the wrong variables.
- stack_depth = 2
- magic_arg_s = self.var_expand(line, stack_depth)
- with self.builtin_trap:
- result = fn(magic_arg_s, cell)
- return result
-
- def find_line_magic(self, magic_name):
- """Find and return a line magic by name.
-
- Returns None if the magic isn't found."""
- return self.magics_manager.magics['line'].get(magic_name)
-
- def find_cell_magic(self, magic_name):
- """Find and return a cell magic by name.
-
- Returns None if the magic isn't found."""
- return self.magics_manager.magics['cell'].get(magic_name)
-
- def find_magic(self, magic_name, magic_kind='line'):
- """Find and return a magic of the given type by name.
-
- Returns None if the magic isn't found."""
- return self.magics_manager.magics[magic_kind].get(magic_name)
-
- def magic(self, arg_s):
- """DEPRECATED. Use run_line_magic() instead.
-
- Call a magic function by name.
-
- Input: a string containing the name of the magic function to call and
- any additional arguments to be passed to the magic.
-
- magic('name -opt foo bar') is equivalent to typing at the ipython
- prompt:
-
- In[1]: %name -opt foo bar
-
- To call a magic without arguments, simply use magic('name').
-
- This provides a proper Python function to call IPython's magics in any
- valid Python code you can type at the interpreter, including loops and
- compound statements.
- """
- # TODO: should we issue a loud deprecation warning here?
- magic_name, _, magic_arg_s = arg_s.partition(' ')
- magic_name = magic_name.lstrip(prefilter.ESC_MAGIC)
- return self.run_line_magic(magic_name, magic_arg_s)
-
- #-------------------------------------------------------------------------
- # Things related to macros
- #-------------------------------------------------------------------------
-
- def define_macro(self, name, themacro):
- """Define a new macro
-
- Parameters
- ----------
- name : str
- The name of the macro.
- themacro : str or Macro
- The action to do upon invoking the macro. If a string, a new
- Macro object is created by passing the string to it.
- """
-
- from IPython.core import macro
-
- if isinstance(themacro, string_types):
- themacro = macro.Macro(themacro)
- if not isinstance(themacro, macro.Macro):
- raise ValueError('A macro must be a string or a Macro instance.')
- self.user_ns[name] = themacro
-
- #-------------------------------------------------------------------------
- # Things related to the running of system commands
- #-------------------------------------------------------------------------
-
- def system_piped(self, cmd):
- """Call the given cmd in a subprocess, piping stdout/err
-
- Parameters
- ----------
- cmd : str
- Command to execute (cannot end in '&', as background processes are
- not supported). Should not be a command that expects input
- other than simple text.
- """
- if cmd.rstrip().endswith('&'):
- # this is *far* from a rigorous test
- # We do not support backgrounding processes because we either use
- # pexpect or pipes to read from. Users can always just call
- # os.system() or use ip.system=ip.system_raw
- # if they really want a background process.
- raise OSError("Background processes not supported.")
-
- # we explicitly do NOT return the subprocess status code, because
- # a non-None value would trigger :func:`sys.displayhook` calls.
- # Instead, we store the exit_code in user_ns.
- self.user_ns['_exit_code'] = system(self.var_expand(cmd, depth=1))
-
- def system_raw(self, cmd):
- """Call the given cmd in a subprocess using os.system on Windows or
- subprocess.call using the system shell on other platforms.
-
- Parameters
- ----------
- cmd : str
- Command to execute.
- """
- cmd = self.var_expand(cmd, depth=1)
- # protect os.system from UNC paths on Windows, which it can't handle:
- if sys.platform == 'win32':
- from IPython.utils._process_win32 import AvoidUNCPath
- with AvoidUNCPath() as path:
- if path is not None:
- cmd = '"pushd %s &&"%s' % (path, cmd)
- cmd = py3compat.unicode_to_str(cmd)
- try:
- ec = os.system(cmd)
- except KeyboardInterrupt:
+ elif cell == '':
+ message = '%%{0} is a cell magic, but the cell body is empty.'.format(magic_name)
+ if self.find_line_magic(magic_name) is not None:
+ message += ' Did you mean the line magic %{0} (single %)?'.format(magic_name)
+ raise UsageError(message)
+ else:
+ # Note: this is the distance in the stack to the user's frame.
+ # This will need to be updated if the internal calling logic gets
+ # refactored, or else we'll be expanding the wrong variables.
+ stack_depth = 2
+ magic_arg_s = self.var_expand(line, stack_depth)
+ with self.builtin_trap:
+ result = fn(magic_arg_s, cell)
+ return result
+
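A sketch of invoking a cell magic programmatically via run_cell_magic, assuming an active IPython session; `%%capture` stores its captured output object under the given name::

    from IPython import get_ipython

    ip = get_ipython()
    # Equivalent to a `%%capture captured` cell whose body is print("hi"):
    ip.run_cell_magic('capture', 'captured', 'print("hi")\n')
    print(ip.user_ns['captured'].stdout)   # the captured output, 'hi\n'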
+ def find_line_magic(self, magic_name):
+ """Find and return a line magic by name.
+
+ Returns None if the magic isn't found."""
+ return self.magics_manager.magics['line'].get(magic_name)
+
+ def find_cell_magic(self, magic_name):
+ """Find and return a cell magic by name.
+
+ Returns None if the magic isn't found."""
+ return self.magics_manager.magics['cell'].get(magic_name)
+
+ def find_magic(self, magic_name, magic_kind='line'):
+ """Find and return a magic of the given type by name.
+
+ Returns None if the magic isn't found."""
+ return self.magics_manager.magics[magic_kind].get(magic_name)
+
+ def magic(self, arg_s):
+ """DEPRECATED. Use run_line_magic() instead.
+
+ Call a magic function by name.
+
+ Input: a string containing the name of the magic function to call and
+ any additional arguments to be passed to the magic.
+
+ magic('name -opt foo bar') is equivalent to typing at the ipython
+ prompt:
+
+ In[1]: %name -opt foo bar
+
+ To call a magic without arguments, simply use magic('name').
+
+ This provides a proper Python function to call IPython's magics in any
+ valid Python code you can type at the interpreter, including loops and
+ compound statements.
+ """
+ # TODO: should we issue a loud deprecation warning here?
+ magic_name, _, magic_arg_s = arg_s.partition(' ')
+ magic_name = magic_name.lstrip(prefilter.ESC_MAGIC)
+ return self.run_line_magic(magic_name, magic_arg_s)
+
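A sketch of the equivalence between the deprecated magic() entry point and run_line_magic, assuming an active IPython session::

    from IPython import get_ipython

    ip = get_ipython()
    ip.magic('pwd')                 # older single-string form, still supported
    ip.run_line_magic('pwd', '')    # preferred spelling of the same call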
+ #-------------------------------------------------------------------------
+ # Things related to macros
+ #-------------------------------------------------------------------------
+
+ def define_macro(self, name, themacro):
+ """Define a new macro
+
+ Parameters
+ ----------
+ name : str
+ The name of the macro.
+ themacro : str or Macro
+ The action to do upon invoking the macro. If a string, a new
+ Macro object is created by passing the string to it.
+ """
+
+ from IPython.core import macro
+
+ if isinstance(themacro, string_types):
+ themacro = macro.Macro(themacro)
+ if not isinstance(themacro, macro.Macro):
+ raise ValueError('A macro must be a string or a Macro instance.')
+ self.user_ns[name] = themacro
+
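A sketch of defining and replaying a macro, assuming an active IPython session; the macro name and body are illustrative only::

    from IPython import get_ipython

    ip = get_ipython()
    ip.define_macro('hello3', 'for _i in range(3):\n    print("hello")\n')
    ip.run_cell('hello3')   # the prefilter expands the macro and replays its input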
+ #-------------------------------------------------------------------------
+ # Things related to the running of system commands
+ #-------------------------------------------------------------------------
+
+ def system_piped(self, cmd):
+ """Call the given cmd in a subprocess, piping stdout/err
+
+ Parameters
+ ----------
+ cmd : str
+ Command to execute (cannot end in '&', as background processes are
+ not supported). Should not be a command that expects input
+ other than simple text.
+ """
+ if cmd.rstrip().endswith('&'):
+ # this is *far* from a rigorous test
+ # We do not support backgrounding processes because we either use
+ # pexpect or pipes to read from. Users can always just call
+ # os.system() or use ip.system=ip.system_raw
+ # if they really want a background process.
+ raise OSError("Background processes not supported.")
+
+ # we explicitly do NOT return the subprocess status code, because
+ # a non-None value would trigger :func:`sys.displayhook` calls.
+ # Instead, we store the exit_code in user_ns.
+ self.user_ns['_exit_code'] = system(self.var_expand(cmd, depth=1))
+
+ def system_raw(self, cmd):
+ """Call the given cmd in a subprocess using os.system on Windows or
+ subprocess.call using the system shell on other platforms.
+
+ Parameters
+ ----------
+ cmd : str
+ Command to execute.
+ """
+ cmd = self.var_expand(cmd, depth=1)
+ # protect os.system from UNC paths on Windows, which it can't handle:
+ if sys.platform == 'win32':
+ from IPython.utils._process_win32 import AvoidUNCPath
+ with AvoidUNCPath() as path:
+ if path is not None:
+ cmd = '"pushd %s &&"%s' % (path, cmd)
+ cmd = py3compat.unicode_to_str(cmd)
+ try:
+ ec = os.system(cmd)
+ except KeyboardInterrupt:
print('\n' + self.get_exception_only(), file=sys.stderr)
- ec = -2
- else:
- cmd = py3compat.unicode_to_str(cmd)
- # For posix the result of the subprocess.call() below is an exit
- # code, which by convention is zero for success, positive for
- # program failure. Exit codes above 128 are reserved for signals,
- # and the formula for converting a signal to an exit code is usually
- # signal_number+128. To more easily differentiate between exit
- # codes and signals, ipython uses negative numbers. For instance
- # since control-c is signal 2 but exit code 130, ipython's
- # _exit_code variable will read -2. Note that some shells like
- # csh and fish don't follow sh/bash conventions for exit codes.
- executable = os.environ.get('SHELL', None)
- try:
- # Use env shell instead of default /bin/sh
- ec = subprocess.call(cmd, shell=True, executable=executable)
- except KeyboardInterrupt:
- # intercept control-C; a long traceback is not useful here
+ ec = -2
+ else:
+ cmd = py3compat.unicode_to_str(cmd)
+ # For posix the result of the subprocess.call() below is an exit
+ # code, which by convention is zero for success, positive for
+ # program failure. Exit codes above 128 are reserved for signals,
+ # and the formula for converting a signal to an exit code is usually
+ # signal_number+128. To more easily differentiate between exit
+ # codes and signals, ipython uses negative numbers. For instance
+ # since control-c is signal 2 but exit code 130, ipython's
+ # _exit_code variable will read -2. Note that some shells like
+ # csh and fish don't follow sh/bash conventions for exit codes.
+ executable = os.environ.get('SHELL', None)
+ try:
+ # Use env shell instead of default /bin/sh
+ ec = subprocess.call(cmd, shell=True, executable=executable)
+ except KeyboardInterrupt:
+ # intercept control-C; a long traceback is not useful here
print('\n' + self.get_exception_only(), file=sys.stderr)
- ec = 130
- if ec > 128:
- ec = -(ec - 128)
+ ec = 130
+ if ec > 128:
+ ec = -(ec - 128)
- # We explicitly do NOT return the subprocess status code, because
- # a non-None value would trigger :func:`sys.displayhook` calls.
- # Instead, we store the exit_code in user_ns. Note the semantics
- # of _exit_code: for control-c, _exit_code == -signal.SIGINT,
- # but raising SystemExit(_exit_code) will give status 254!
- self.user_ns['_exit_code'] = ec
-
- # use piped system by default, because it is better behaved
- system = system_piped
-
- def getoutput(self, cmd, split=True, depth=0):
- """Get output (possibly including stderr) from a subprocess.
-
- Parameters
- ----------
- cmd : str
- Command to execute (cannot end in '&', as background processes are
- not supported).
- split : bool, optional
- If True, split the output into an IPython SList. Otherwise, an
- IPython LSString is returned. These are objects similar to normal
- lists and strings, with a few convenience attributes for easier
- manipulation of line-based output. You can use '?' on them for
- details.
- depth : int, optional
- How many frames above the caller are the local variables which should
- be expanded in the command string? The default (0) assumes that the
- expansion variables are in the stack frame calling this function.
- """
- if cmd.rstrip().endswith('&'):
- # this is *far* from a rigorous test
- raise OSError("Background processes not supported.")
- out = getoutput(self.var_expand(cmd, depth=depth+1))
- if split:
- out = SList(out.splitlines())
- else:
- out = LSString(out)
- return out
-
- #-------------------------------------------------------------------------
- # Things related to aliases
- #-------------------------------------------------------------------------
-
- def init_alias(self):
- self.alias_manager = AliasManager(shell=self, parent=self)
- self.configurables.append(self.alias_manager)
-
- #-------------------------------------------------------------------------
- # Things related to extensions
- #-------------------------------------------------------------------------
-
- def init_extension_manager(self):
- self.extension_manager = ExtensionManager(shell=self, parent=self)
- self.configurables.append(self.extension_manager)
-
- #-------------------------------------------------------------------------
- # Things related to payloads
- #-------------------------------------------------------------------------
-
- def init_payload(self):
- self.payload_manager = PayloadManager(parent=self)
- self.configurables.append(self.payload_manager)
+ # We explicitly do NOT return the subprocess status code, because
+ # a non-None value would trigger :func:`sys.displayhook` calls.
+ # Instead, we store the exit_code in user_ns. Note the semantics
+ # of _exit_code: for control-c, _exit_code == -signal.SIGINT,
+ # but raising SystemExit(_exit_code) will give status 254!
+ self.user_ns['_exit_code'] = ec
+
+ # use piped system by default, because it is better behaved
+ system = system_piped
+
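A sketch of how a shell command's exit status surfaces in the user namespace, assuming an active IPython session on a POSIX-style shell::

    from IPython import get_ipython

    ip = get_ipython()
    ip.system('exit 3')                  # nothing is returned; see the comment above
    print(ip.user_ns['_exit_code'])      # 3 on a typical POSIX shell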
+ def getoutput(self, cmd, split=True, depth=0):
+ """Get output (possibly including stderr) from a subprocess.
+
+ Parameters
+ ----------
+ cmd : str
+ Command to execute (cannot end in '&', as background processes are
+ not supported).
+ split : bool, optional
+ If True, split the output into an IPython SList. Otherwise, an
+ IPython LSString is returned. These are objects similar to normal
+ lists and strings, with a few convenience attributes for easier
+ manipulation of line-based output. You can use '?' on them for
+ details.
+ depth : int, optional
+ How many frames above the caller are the local variables which should
+ be expanded in the command string? The default (0) assumes that the
+ expansion variables are in the stack frame calling this function.
+ """
+ if cmd.rstrip().endswith('&'):
+ # this is *far* from a rigorous test
+ raise OSError("Background processes not supported.")
+ out = getoutput(self.var_expand(cmd, depth=depth+1))
+ if split:
+ out = SList(out.splitlines())
+ else:
+ out = LSString(out)
+ return out
+
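A sketch of capturing command output with getoutput, assuming an active IPython session and a POSIX-style `ls`::

    from IPython import get_ipython

    ip = get_ipython()
    files = ip.getoutput('ls')               # SList: behaves like a list of lines
    print(files.n)                           # the same output joined by newlines
    raw = ip.getoutput('ls', split=False)    # LSString: one newline-joined string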
+ #-------------------------------------------------------------------------
+ # Things related to aliases
+ #-------------------------------------------------------------------------
+
+ def init_alias(self):
+ self.alias_manager = AliasManager(shell=self, parent=self)
+ self.configurables.append(self.alias_manager)
+
+ #-------------------------------------------------------------------------
+ # Things related to extensions
+ #-------------------------------------------------------------------------
+
+ def init_extension_manager(self):
+ self.extension_manager = ExtensionManager(shell=self, parent=self)
+ self.configurables.append(self.extension_manager)
+
+ #-------------------------------------------------------------------------
+ # Things related to payloads
+ #-------------------------------------------------------------------------
+
+ def init_payload(self):
+ self.payload_manager = PayloadManager(parent=self)
+ self.configurables.append(self.payload_manager)
- #-------------------------------------------------------------------------
- # Things related to the prefilter
- #-------------------------------------------------------------------------
-
- def init_prefilter(self):
- self.prefilter_manager = PrefilterManager(shell=self, parent=self)
- self.configurables.append(self.prefilter_manager)
- # Ultimately this will be refactored in the new interpreter code, but
- # for now, we should expose the main prefilter method (there's legacy
- # code out there that may rely on this).
- self.prefilter = self.prefilter_manager.prefilter_lines
-
- def auto_rewrite_input(self, cmd):
- """Print to the screen the rewritten form of the user's command.
-
- This shows visual feedback by rewriting input lines that cause
- automatic calling to kick in, like::
-
- /f x
-
- into::
-
- ------> f(x)
-
- after the user's input prompt. This helps the user understand that the
- input line was transformed automatically by IPython.
- """
- if not self.show_rewritten_input:
- return
-
+ #-------------------------------------------------------------------------
+ # Things related to the prefilter
+ #-------------------------------------------------------------------------
+
+ def init_prefilter(self):
+ self.prefilter_manager = PrefilterManager(shell=self, parent=self)
+ self.configurables.append(self.prefilter_manager)
+ # Ultimately this will be refactored in the new interpreter code, but
+ # for now, we should expose the main prefilter method (there's legacy
+ # code out there that may rely on this).
+ self.prefilter = self.prefilter_manager.prefilter_lines
+
+ def auto_rewrite_input(self, cmd):
+ """Print to the screen the rewritten form of the user's command.
+
+ This shows visual feedback by rewriting input lines that cause
+ automatic calling to kick in, like::
+
+ /f x
+
+ into::
+
+ ------> f(x)
+
+ after the user's input prompt. This helps the user understand that the
+ input line was transformed automatically by IPython.
+ """
+ if not self.show_rewritten_input:
+ return
+
# This is overridden in TerminalInteractiveShell to use fancy prompts
print("------> " + cmd)
-
- #-------------------------------------------------------------------------
- # Things related to extracting values/expressions from kernel and user_ns
- #-------------------------------------------------------------------------
-
- def _user_obj_error(self):
- """return simple exception dict
+
+ #-------------------------------------------------------------------------
+ # Things related to extracting values/expressions from kernel and user_ns
+ #-------------------------------------------------------------------------
+
+ def _user_obj_error(self):
+ """return simple exception dict
- for use in user_expressions
- """
+ for use in user_expressions
+ """
- etype, evalue, tb = self._get_exc_info()
- stb = self.InteractiveTB.get_exception_only(etype, evalue)
+ etype, evalue, tb = self._get_exc_info()
+ stb = self.InteractiveTB.get_exception_only(etype, evalue)
- exc_info = {
- u'status' : 'error',
- u'traceback' : stb,
- u'ename' : unicode_type(etype.__name__),
- u'evalue' : py3compat.safe_unicode(evalue),
- }
-
- return exc_info
+ exc_info = {
+ u'status' : 'error',
+ u'traceback' : stb,
+ u'ename' : unicode_type(etype.__name__),
+ u'evalue' : py3compat.safe_unicode(evalue),
+ }
+
+ return exc_info
- def _format_user_obj(self, obj):
- """format a user object to display dict
+ def _format_user_obj(self, obj):
+ """format a user object to display dict
- for use in user_expressions
- """
+ for use in user_expressions
+ """
- data, md = self.display_formatter.format(obj)
- value = {
- 'status' : 'ok',
- 'data' : data,
- 'metadata' : md,
- }
- return value
+ data, md = self.display_formatter.format(obj)
+ value = {
+ 'status' : 'ok',
+ 'data' : data,
+ 'metadata' : md,
+ }
+ return value
- def user_expressions(self, expressions):
- """Evaluate a dict of expressions in the user's namespace.
-
- Parameters
- ----------
- expressions : dict
- A dict with string keys and string values. The expression values
- should be valid Python expressions, each of which will be evaluated
- in the user namespace.
-
- Returns
- -------
- A dict, keyed like the input expressions dict, with the rich mime-typed
- display_data of each value.
- """
- out = {}
- user_ns = self.user_ns
- global_ns = self.user_global_ns
+ def user_expressions(self, expressions):
+ """Evaluate a dict of expressions in the user's namespace.
+
+ Parameters
+ ----------
+ expressions : dict
+ A dict with string keys and string values. The expression values
+ should be valid Python expressions, each of which will be evaluated
+ in the user namespace.
+
+ Returns
+ -------
+ A dict, keyed like the input expressions dict, with the rich mime-typed
+ display_data of each value.
+ """
+ out = {}
+ user_ns = self.user_ns
+ global_ns = self.user_global_ns
- for key, expr in iteritems(expressions):
- try:
- value = self._format_user_obj(eval(expr, global_ns, user_ns))
- except:
- value = self._user_obj_error()
- out[key] = value
- return out
-
- #-------------------------------------------------------------------------
- # Things related to the running of code
- #-------------------------------------------------------------------------
-
- def ex(self, cmd):
- """Execute a normal python statement in user namespace."""
- with self.builtin_trap:
- exec(cmd, self.user_global_ns, self.user_ns)
-
- def ev(self, expr):
- """Evaluate python expression expr in user namespace.
-
- Returns the result of evaluation
- """
- with self.builtin_trap:
- return eval(expr, self.user_global_ns, self.user_ns)
-
- def safe_execfile(self, fname, *where, **kw):
- """A safe version of the builtin execfile().
-
- This version will never throw an exception, but instead print
- helpful error messages to the screen. This only works on pure
- Python files with the .py extension.
-
- Parameters
- ----------
- fname : string
- The name of the file to be executed.
- where : tuple
- One or two namespaces, passed to execfile() as (globals,locals).
- If only one is given, it is passed as both.
- exit_ignore : bool (False)
- If True, then silence SystemExit for non-zero status (it is always
- silenced for zero status, as it is so common).
- raise_exceptions : bool (False)
- If True raise exceptions everywhere. Meant for testing.
- shell_futures : bool (False)
- If True, the code will share future statements with the interactive
- shell. It will both be affected by previous __future__ imports, and
- any __future__ imports in the code will affect the shell. If False,
- __future__ imports are not shared in either direction.
-
- """
- kw.setdefault('exit_ignore', False)
- kw.setdefault('raise_exceptions', False)
- kw.setdefault('shell_futures', False)
-
- fname = os.path.abspath(os.path.expanduser(fname))
-
- # Make sure we can open the file
- try:
- with open(fname):
- pass
- except:
- warn('Could not open file <%s> for safe execution.' % fname)
- return
-
- # Find things also in current directory. This is needed to mimic the
- # behavior of running a script from the system command line, where
- # Python inserts the script's directory into sys.path
- dname = os.path.dirname(fname)
-
+ for key, expr in iteritems(expressions):
+ try:
+ value = self._format_user_obj(eval(expr, global_ns, user_ns))
+ except:
+ value = self._user_obj_error()
+ out[key] = value
+ return out
+
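A sketch of evaluating a dict of expressions with user_expressions, assuming an active IPython session::

    from IPython import get_ipython

    ip = get_ipython()
    ip.user_ns['x'] = 10
    replies = ip.user_expressions({'doubled': 'x * 2', 'broken': '1/0'})
    print(replies['doubled']['status'])   # 'ok', with rich 'data'/'metadata'
    print(replies['broken']['status'])    # 'error', with 'ename'/'evalue'/'traceback'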
+ #-------------------------------------------------------------------------
+ # Things related to the running of code
+ #-------------------------------------------------------------------------
+
+ def ex(self, cmd):
+ """Execute a normal python statement in user namespace."""
+ with self.builtin_trap:
+ exec(cmd, self.user_global_ns, self.user_ns)
+
+ def ev(self, expr):
+ """Evaluate python expression expr in user namespace.
+
+ Returns the result of evaluation
+ """
+ with self.builtin_trap:
+ return eval(expr, self.user_global_ns, self.user_ns)
+
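A sketch of the ex/ev helpers, assuming an active IPython session::

    from IPython import get_ipython

    ip = get_ipython()
    ip.ex('answer = 6 * 7')        # statement, executed in the user namespace
    print(ip.ev('answer + 1'))     # expression, evaluates to 43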
+ def safe_execfile(self, fname, *where, **kw):
+ """A safe version of the builtin execfile().
+
+ This version will never throw an exception, but instead print
+ helpful error messages to the screen. This only works on pure
+ Python files with the .py extension.
+
+ Parameters
+ ----------
+ fname : string
+ The name of the file to be executed.
+ where : tuple
+ One or two namespaces, passed to execfile() as (globals,locals).
+ If only one is given, it is passed as both.
+ exit_ignore : bool (False)
+ If True, then silence SystemExit for non-zero status (it is always
+ silenced for zero status, as it is so common).
+ raise_exceptions : bool (False)
+ If True raise exceptions everywhere. Meant for testing.
+ shell_futures : bool (False)
+ If True, the code will share future statements with the interactive
+ shell. It will both be affected by previous __future__ imports, and
+ any __future__ imports in the code will affect the shell. If False,
+ __future__ imports are not shared in either direction.
+
+ """
+ kw.setdefault('exit_ignore', False)
+ kw.setdefault('raise_exceptions', False)
+ kw.setdefault('shell_futures', False)
+
+ fname = os.path.abspath(os.path.expanduser(fname))
+
+ # Make sure we can open the file
+ try:
+ with open(fname):
+ pass
+ except:
+ warn('Could not open file <%s> for safe execution.' % fname)
+ return
+
+ # Find things also in current directory. This is needed to mimic the
+ # behavior of running a script from the system command line, where
+ # Python inserts the script's directory into sys.path
+ dname = os.path.dirname(fname)
+
with prepended_to_syspath(dname), self.builtin_trap:
- try:
- glob, loc = (where + (None, ))[:2]
- py3compat.execfile(
- fname, glob, loc,
- self.compile if kw['shell_futures'] else None)
- except SystemExit as status:
- # If the call was made with 0 or None exit status (sys.exit(0)
- # or sys.exit() ), don't bother showing a traceback, as both of
- # these are considered normal by the OS:
- # > python -c'import sys;sys.exit(0)'; echo $?
- # 0
- # > python -c'import sys;sys.exit()'; echo $?
- # 0
- # For other exit status, we show the exception unless
- # explicitly silenced, but only in short form.
- if status.code:
- if kw['raise_exceptions']:
- raise
- if not kw['exit_ignore']:
- self.showtraceback(exception_only=True)
- except:
- if kw['raise_exceptions']:
- raise
- # tb offset is 2 because we wrap execfile
- self.showtraceback(tb_offset=2)
-
- def safe_execfile_ipy(self, fname, shell_futures=False, raise_exceptions=False):
- """Like safe_execfile, but for .ipy or .ipynb files with IPython syntax.
-
- Parameters
- ----------
- fname : str
- The name of the file to execute. The filename must have a
- .ipy or .ipynb extension.
- shell_futures : bool (False)
- If True, the code will share future statements with the interactive
- shell. It will both be affected by previous __future__ imports, and
- any __future__ imports in the code will affect the shell. If False,
- __future__ imports are not shared in either direction.
- raise_exceptions : bool (False)
- If True raise exceptions everywhere. Meant for testing.
- """
- fname = os.path.abspath(os.path.expanduser(fname))
-
- # Make sure we can open the file
- try:
- with open(fname):
- pass
- except:
- warn('Could not open file <%s> for safe execution.' % fname)
- return
-
- # Find things also in current directory. This is needed to mimic the
- # behavior of running a script from the system command line, where
- # Python inserts the script's directory into sys.path
- dname = os.path.dirname(fname)
+ try:
+ glob, loc = (where + (None, ))[:2]
+ py3compat.execfile(
+ fname, glob, loc,
+ self.compile if kw['shell_futures'] else None)
+ except SystemExit as status:
+ # If the call was made with 0 or None exit status (sys.exit(0)
+ # or sys.exit() ), don't bother showing a traceback, as both of
+ # these are considered normal by the OS:
+ # > python -c'import sys;sys.exit(0)'; echo $?
+ # 0
+ # > python -c'import sys;sys.exit()'; echo $?
+ # 0
+ # For other exit status, we show the exception unless
+ # explicitly silenced, but only in short form.
+ if status.code:
+ if kw['raise_exceptions']:
+ raise
+ if not kw['exit_ignore']:
+ self.showtraceback(exception_only=True)
+ except:
+ if kw['raise_exceptions']:
+ raise
+ # tb offset is 2 because we wrap execfile
+ self.showtraceback(tb_offset=2)
+
+ def safe_execfile_ipy(self, fname, shell_futures=False, raise_exceptions=False):
+ """Like safe_execfile, but for .ipy or .ipynb files with IPython syntax.
+
+ Parameters
+ ----------
+ fname : str
+ The name of the file to execute. The filename must have a
+ .ipy or .ipynb extension.
+ shell_futures : bool (False)
+ If True, the code will share future statements with the interactive
+ shell. It will both be affected by previous __future__ imports, and
+ any __future__ imports in the code will affect the shell. If False,
+ __future__ imports are not shared in either direction.
+ raise_exceptions : bool (False)
+ If True raise exceptions everywhere. Meant for testing.
+ """
+ fname = os.path.abspath(os.path.expanduser(fname))
+
+ # Make sure we can open the file
+ try:
+ with open(fname):
+ pass
+ except:
+ warn('Could not open file <%s> for safe execution.' % fname)
+ return
+
+ # Find things also in current directory. This is needed to mimic the
+ # behavior of running a script from the system command line, where
+ # Python inserts the script's directory into sys.path
+ dname = os.path.dirname(fname)
- def get_cells():
- """generator for sequence of code blocks to run"""
- if fname.endswith('.ipynb'):
- from nbformat import read
+ def get_cells():
+ """generator for sequence of code blocks to run"""
+ if fname.endswith('.ipynb'):
+ from nbformat import read
nb = read(fname, as_version=4)
if not nb.cells:
return
for cell in nb.cells:
if cell.cell_type == 'code':
yield cell.source
- else:
- with open(fname) as f:
- yield f.read()
-
- with prepended_to_syspath(dname):
- try:
- for cell in get_cells():
- result = self.run_cell(cell, silent=True, shell_futures=shell_futures)
- if raise_exceptions:
- result.raise_error()
- elif not result.success:
- break
- except:
- if raise_exceptions:
- raise
- self.showtraceback()
- warn('Unknown failure executing file: <%s>' % fname)
-
- def safe_run_module(self, mod_name, where):
- """A safe version of runpy.run_module().
-
- This version will never throw an exception, but instead print
- helpful error messages to the screen.
-
- `SystemExit` exceptions with status code 0 or None are ignored.
-
- Parameters
- ----------
- mod_name : string
- The name of the module to be executed.
- where : dict
- The globals namespace.
- """
- try:
- try:
- where.update(
- runpy.run_module(str(mod_name), run_name="__main__",
- alter_sys=True)
- )
- except SystemExit as status:
- if status.code:
- raise
- except:
- self.showtraceback()
- warn('Unknown failure executing module: <%s>' % mod_name)
-
- def run_cell(self, raw_cell, store_history=False, silent=False, shell_futures=True):
- """Run a complete IPython cell.
-
- Parameters
- ----------
- raw_cell : str
- The code (including IPython code such as %magic functions) to run.
- store_history : bool
- If True, the raw and translated cell will be stored in IPython's
- history. For user code calling back into IPython's machinery, this
- should be set to False.
- silent : bool
- If True, avoid side-effects, such as implicit displayhooks and
- logging. silent=True forces store_history=False.
- shell_futures : bool
- If True, the code will share future statements with the interactive
- shell. It will both be affected by previous __future__ imports, and
- any __future__ imports in the code will affect the shell. If False,
- __future__ imports are not shared in either direction.
-
- Returns
- -------
- result : :class:`ExecutionResult`
- """
- result = ExecutionResult()
-
- if (not raw_cell) or raw_cell.isspace():
+ else:
+ with open(fname) as f:
+ yield f.read()
+
+ with prepended_to_syspath(dname):
+ try:
+ for cell in get_cells():
+ result = self.run_cell(cell, silent=True, shell_futures=shell_futures)
+ if raise_exceptions:
+ result.raise_error()
+ elif not result.success:
+ break
+ except:
+ if raise_exceptions:
+ raise
+ self.showtraceback()
+ warn('Unknown failure executing file: <%s>' % fname)
+
+ def safe_run_module(self, mod_name, where):
+ """A safe version of runpy.run_module().
+
+ This version will never throw an exception, but instead print
+ helpful error messages to the screen.
+
+ `SystemExit` exceptions with status code 0 or None are ignored.
+
+ Parameters
+ ----------
+ mod_name : string
+ The name of the module to be executed.
+ where : dict
+ The globals namespace.
+ """
+ try:
+ try:
+ where.update(
+ runpy.run_module(str(mod_name), run_name="__main__",
+ alter_sys=True)
+ )
+ except SystemExit as status:
+ if status.code:
+ raise
+ except:
+ self.showtraceback()
+ warn('Unknown failure executing module: <%s>' % mod_name)
+
+ def run_cell(self, raw_cell, store_history=False, silent=False, shell_futures=True):
+ """Run a complete IPython cell.
+
+ Parameters
+ ----------
+ raw_cell : str
+ The code (including IPython code such as %magic functions) to run.
+ store_history : bool
+ If True, the raw and translated cell will be stored in IPython's
+ history. For user code calling back into IPython's machinery, this
+ should be set to False.
+ silent : bool
+ If True, avoid side-effects, such as implicit displayhooks and
+ logging. silent=True forces store_history=False.
+ shell_futures : bool
+ If True, the code will share future statements with the interactive
+ shell. It will both be affected by previous __future__ imports, and
+ any __future__ imports in the code will affect the shell. If False,
+ __future__ imports are not shared in either direction.
+
+ Returns
+ -------
+ result : :class:`ExecutionResult`
+ """
+ result = ExecutionResult()
+
+ if (not raw_cell) or raw_cell.isspace():
self.last_execution_succeeded = True
- return result
+ return result
- if silent:
- store_history = False
-
- if store_history:
- result.execution_count = self.execution_count
-
- def error_before_exec(value):
+ if silent:
+ store_history = False
+
+ if store_history:
+ result.execution_count = self.execution_count
+
+ def error_before_exec(value):
if store_history:
self.execution_count += 1
- result.error_before_exec = value
+ result.error_before_exec = value
self.last_execution_succeeded = False
- return result
-
- self.events.trigger('pre_execute')
- if not silent:
- self.events.trigger('pre_run_cell')
-
- # If any of our input transformation (input_transformer_manager or
- # prefilter_manager) raises an exception, we store it in this variable
- # so that we can display the error after logging the input and storing
- # it in the history.
- preprocessing_exc_tuple = None
- try:
- # Static input transformations
- cell = self.input_transformer_manager.transform_cell(raw_cell)
- except SyntaxError:
- preprocessing_exc_tuple = sys.exc_info()
- cell = raw_cell # cell has to exist so it can be stored/logged
- else:
- if len(cell.splitlines()) == 1:
- # Dynamic transformations - only applied for single line commands
- with self.builtin_trap:
- try:
- # use prefilter_lines to handle trailing newlines
- # restore trailing newline for ast.parse
- cell = self.prefilter_manager.prefilter_lines(cell) + '\n'
- except Exception:
- # don't allow prefilter errors to crash IPython
- preprocessing_exc_tuple = sys.exc_info()
-
- # Store raw and processed history
- if store_history:
- self.history_manager.store_inputs(self.execution_count,
- cell, raw_cell)
- if not silent:
- self.logger.log(cell, raw_cell)
-
- # Display the exception if input processing failed.
- if preprocessing_exc_tuple is not None:
- self.showtraceback(preprocessing_exc_tuple)
- if store_history:
- self.execution_count += 1
- return error_before_exec(preprocessing_exc_tuple[2])
-
- # Our own compiler remembers the __future__ environment. If we want to
- # run code with a separate __future__ environment, use the default
- # compiler
- compiler = self.compile if shell_futures else CachingCompiler()
-
- with self.builtin_trap:
- cell_name = self.compile.cache(cell, self.execution_count)
-
- with self.display_trap:
- # Compile to bytecode
- try:
- code_ast = compiler.ast_parse(cell, filename=cell_name)
+ return result
+
+ self.events.trigger('pre_execute')
+ if not silent:
+ self.events.trigger('pre_run_cell')
+
+ # If any of our input transformation (input_transformer_manager or
+ # prefilter_manager) raises an exception, we store it in this variable
+ # so that we can display the error after logging the input and storing
+ # it in the history.
+ preprocessing_exc_tuple = None
+ try:
+ # Static input transformations
+ cell = self.input_transformer_manager.transform_cell(raw_cell)
+ except SyntaxError:
+ preprocessing_exc_tuple = sys.exc_info()
+ cell = raw_cell # cell has to exist so it can be stored/logged
+ else:
+ if len(cell.splitlines()) == 1:
+ # Dynamic transformations - only applied for single line commands
+ with self.builtin_trap:
+ try:
+ # use prefilter_lines to handle trailing newlines
+ # restore trailing newline for ast.parse
+ cell = self.prefilter_manager.prefilter_lines(cell) + '\n'
+ except Exception:
+ # don't allow prefilter errors to crash IPython
+ preprocessing_exc_tuple = sys.exc_info()
+
+ # Store raw and processed history
+ if store_history:
+ self.history_manager.store_inputs(self.execution_count,
+ cell, raw_cell)
+ if not silent:
+ self.logger.log(cell, raw_cell)
+
+ # Display the exception if input processing failed.
+ if preprocessing_exc_tuple is not None:
+ self.showtraceback(preprocessing_exc_tuple)
+ if store_history:
+ self.execution_count += 1
+ return error_before_exec(preprocessing_exc_tuple[2])
+
+ # Our own compiler remembers the __future__ environment. If we want to
+ # run code with a separate __future__ environment, use the default
+ # compiler
+ compiler = self.compile if shell_futures else CachingCompiler()
+
+ with self.builtin_trap:
+ cell_name = self.compile.cache(cell, self.execution_count)
+
+ with self.display_trap:
+ # Compile to bytecode
+ try:
+ code_ast = compiler.ast_parse(cell, filename=cell_name)
except self.custom_exceptions as e:
etype, value, tb = sys.exc_info()
self.CustomTB(etype, value, tb)
return error_before_exec(e)
- except IndentationError as e:
- self.showindentationerror()
- return error_before_exec(e)
- except (OverflowError, SyntaxError, ValueError, TypeError,
- MemoryError) as e:
- self.showsyntaxerror()
- return error_before_exec(e)
-
- # Apply AST transformations
- try:
- code_ast = self.transform_ast(code_ast)
- except InputRejected as e:
- self.showtraceback()
- return error_before_exec(e)
-
- # Give the displayhook a reference to our ExecutionResult so it
- # can fill in the output value.
- self.displayhook.exec_result = result
-
- # Execute the user code
- interactivity = "none" if silent else self.ast_node_interactivity
+ except IndentationError as e:
+ self.showindentationerror()
+ return error_before_exec(e)
+ except (OverflowError, SyntaxError, ValueError, TypeError,
+ MemoryError) as e:
+ self.showsyntaxerror()
+ return error_before_exec(e)
+
+ # Apply AST transformations
+ try:
+ code_ast = self.transform_ast(code_ast)
+ except InputRejected as e:
+ self.showtraceback()
+ return error_before_exec(e)
+
+ # Give the displayhook a reference to our ExecutionResult so it
+ # can fill in the output value.
+ self.displayhook.exec_result = result
+
+ # Execute the user code
+ interactivity = "none" if silent else self.ast_node_interactivity
has_raised = self.run_ast_nodes(code_ast.body, cell_name,
- interactivity=interactivity, compiler=compiler, result=result)
+ interactivity=interactivity, compiler=compiler, result=result)
self.last_execution_succeeded = not has_raised
-
- # Reset this so later displayed values do not modify the
- # ExecutionResult
- self.displayhook.exec_result = None
-
- self.events.trigger('post_execute')
- if not silent:
- self.events.trigger('post_run_cell')
-
- if store_history:
- # Write output to the database. Does nothing unless
- # history output logging is enabled.
- self.history_manager.store_output(self.execution_count)
- # Each cell is a *single* input, regardless of how many lines it has
- self.execution_count += 1
-
- return result
+
+ # Reset this so later displayed values do not modify the
+ # ExecutionResult
+ self.displayhook.exec_result = None
+
+ self.events.trigger('post_execute')
+ if not silent:
+ self.events.trigger('post_run_cell')
+
+ if store_history:
+ # Write output to the database. Does nothing unless
+ # history output logging is enabled.
+ self.history_manager.store_output(self.execution_count)
+ # Each cell is a *single* input, regardless of how many lines it has
+ self.execution_count += 1
+
+ return result
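A sketch of driving the shell through run_cell and inspecting the returned ExecutionResult, assuming an active IPython session::

    from IPython import get_ipython

    ip = get_ipython()
    res = ip.run_cell('total = sum(range(5))\ntotal', store_history=True)
    print(res.success)             # True if the cell ran without raising
    print(res.execution_count)     # the In[n] number assigned to this cell
    print(ip.user_ns['total'])     # 10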
- def transform_ast(self, node):
- """Apply the AST transformations from self.ast_transformers
+ def transform_ast(self, node):
+ """Apply the AST transformations from self.ast_transformers
- Parameters
- ----------
- node : ast.Node
- The root node to be transformed. Typically called with the ast.Module
- produced by parsing user input.
+ Parameters
+ ----------
+ node : ast.Node
+ The root node to be transformed. Typically called with the ast.Module
+ produced by parsing user input.
- Returns
- -------
- An ast.Node corresponding to the node it was called with. Note that it
- may also modify the passed object, so don't rely on references to the
- original AST.
- """
- for transformer in self.ast_transformers:
- try:
- node = transformer.visit(node)
- except InputRejected:
- # User-supplied AST transformers can reject an input by raising
- # an InputRejected. Short-circuit in this case so that we
- # don't unregister the transform.
- raise
- except Exception:
- warn("AST transformer %r threw an error. It will be unregistered." % transformer)
- self.ast_transformers.remove(transformer)
+ Returns
+ -------
+ An ast.Node corresponding to the node it was called with. Note that it
+ may also modify the passed object, so don't rely on references to the
+ original AST.
+ """
+ for transformer in self.ast_transformers:
+ try:
+ node = transformer.visit(node)
+ except InputRejected:
+ # User-supplied AST transformers can reject an input by raising
+ # an InputRejected. Short-circuit in this case so that we
+ # don't unregister the transform.
+ raise
+ except Exception:
+ warn("AST transformer %r threw an error. It will be unregistered." % transformer)
+ self.ast_transformers.remove(transformer)
- if self.ast_transformers:
- ast.fix_missing_locations(node)
- return node
+ if self.ast_transformers:
+ ast.fix_missing_locations(node)
+        if self.ast_transformers:
+            ast.fix_missing_locations(node)
+        return node
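# --- Editor's illustrative sketch (not part of this diff) ---------------------
# transform_ast() above walks self.ast_transformers; any ast.NodeTransformer
# whose visit() returns a (possibly rewritten) tree can be registered. The toy
# transformer below is hypothetical and only shows the expected shape.
import ast

class NegateNumbers(ast.NodeTransformer):
    """Toy transformer: rewrite every numeric literal N as -N."""
    def visit_Num(self, node):                      # Python 2-era node type, as in this module
        return ast.copy_location(
            ast.UnaryOp(op=ast.USub(), operand=node), node)

tree = ast.parse("x = 1 + 2")
tree = NegateNumbers().visit(tree)                  # what transform_ast does per transformer
ast.fix_missing_locations(tree)                     # mirrors the call guarded above
exec(compile(tree, "<example>", "exec"))
print(x)                                            # -3: both literals were negated
# Inside IPython, the equivalent registration would be:
#     get_ipython().ast_transformers.append(NegateNumbers())
# ------------------------------------------------------------------------------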
-
- def run_ast_nodes(self, nodelist, cell_name, interactivity='last_expr',
- compiler=compile, result=None):
- """Run a sequence of AST nodes. The execution mode depends on the
- interactivity parameter.
-
- Parameters
- ----------
- nodelist : list
- A sequence of AST nodes to run.
- cell_name : str
- Will be passed to the compiler as the filename of the cell. Typically
- the value returned by ip.compile.cache(cell).
- interactivity : str
- 'all', 'last', 'last_expr' or 'none', specifying which nodes should be
- run interactively (displaying output from expressions). 'last_expr'
- will run the last node interactively only if it is an expression (i.e.
-          expressions in loops or other blocks are not displayed). Other values
- for this parameter will raise a ValueError.
- compiler : callable
- A function with the same interface as the built-in compile(), to turn
- the AST nodes into code objects. Default is the built-in compile().
- result : ExecutionResult, optional
- An object to store exceptions that occur during execution.
-
- Returns
- -------
- True if an exception occurred while running code, False if it finished
- running.
- """
- if not nodelist:
- return
-
- if interactivity == 'last_expr':
- if isinstance(nodelist[-1], ast.Expr):
- interactivity = "last"
- else:
- interactivity = "none"
-
- if interactivity == 'none':
- to_run_exec, to_run_interactive = nodelist, []
- elif interactivity == 'last':
- to_run_exec, to_run_interactive = nodelist[:-1], nodelist[-1:]
- elif interactivity == 'all':
- to_run_exec, to_run_interactive = [], nodelist
- else:
- raise ValueError("Interactivity was %r" % interactivity)
-
- try:
- for i, node in enumerate(to_run_exec):
- mod = ast.Module([node])
- code = compiler(mod, cell_name, "exec")
- if self.run_code(code, result):
- return True
-
- for i, node in enumerate(to_run_interactive):
- mod = ast.Interactive([node])
- code = compiler(mod, cell_name, "single")
- if self.run_code(code, result):
- return True
-
- # Flush softspace
- if softspace(sys.stdout, 0):
- print()
-
- except:
- # It's possible to have exceptions raised here, typically by
- # compilation of odd code (such as a naked 'return' outside a
- # function) that did parse but isn't valid. Typically the exception
- # is a SyntaxError, but it's safest just to catch anything and show
- # the user a traceback.
-
- # We do only one try/except outside the loop to minimize the impact
- # on runtime, and also because if any node in the node list is
- # broken, we should stop execution completely.
- if result:
- result.error_before_exec = sys.exc_info()[1]
- self.showtraceback()
- return True
-
- return False
-
- def run_code(self, code_obj, result=None):
- """Execute a code object.
-
- When an exception occurs, self.showtraceback() is called to display a
- traceback.
-
- Parameters
- ----------
- code_obj : code object
- A compiled code object, to be executed
- result : ExecutionResult, optional
- An object to store exceptions that occur during execution.
-
- Returns
- -------
- False : successful execution.
- True : an error occurred.
- """
- # Set our own excepthook in case the user code tries to call it
- # directly, so that the IPython crash handler doesn't get triggered
- old_excepthook, sys.excepthook = sys.excepthook, self.excepthook
-
- # we save the original sys.excepthook in the instance, in case config
- # code (such as magics) needs access to it.
- self.sys_excepthook = old_excepthook
- outflag = 1 # happens in more places, so it's easier as default
- try:
- try:
- self.hooks.pre_run_code_hook()
- #rprint('Running code', repr(code_obj)) # dbg
- exec(code_obj, self.user_global_ns, self.user_ns)
- finally:
- # Reset our crash handler in place
- sys.excepthook = old_excepthook
- except SystemExit as e:
- if result is not None:
- result.error_in_exec = e
- self.showtraceback(exception_only=True)
+
+ def run_ast_nodes(self, nodelist, cell_name, interactivity='last_expr',
+ compiler=compile, result=None):
+ """Run a sequence of AST nodes. The execution mode depends on the
+ interactivity parameter.
+
+ Parameters
+ ----------
+ nodelist : list
+ A sequence of AST nodes to run.
+ cell_name : str
+ Will be passed to the compiler as the filename of the cell. Typically
+ the value returned by ip.compile.cache(cell).
+ interactivity : str
+ 'all', 'last', 'last_expr' or 'none', specifying which nodes should be
+ run interactively (displaying output from expressions). 'last_expr'
+ will run the last node interactively only if it is an expression (i.e.
+          expressions in loops or other blocks are not displayed). Other values
+ for this parameter will raise a ValueError.
+ compiler : callable
+ A function with the same interface as the built-in compile(), to turn
+ the AST nodes into code objects. Default is the built-in compile().
+ result : ExecutionResult, optional
+ An object to store exceptions that occur during execution.
+
+ Returns
+ -------
+ True if an exception occurred while running code, False if it finished
+ running.
+ """
+ if not nodelist:
+ return
+
+ if interactivity == 'last_expr':
+ if isinstance(nodelist[-1], ast.Expr):
+ interactivity = "last"
+ else:
+ interactivity = "none"
+
+ if interactivity == 'none':
+ to_run_exec, to_run_interactive = nodelist, []
+ elif interactivity == 'last':
+ to_run_exec, to_run_interactive = nodelist[:-1], nodelist[-1:]
+ elif interactivity == 'all':
+ to_run_exec, to_run_interactive = [], nodelist
+ else:
+ raise ValueError("Interactivity was %r" % interactivity)
+
+ try:
+ for i, node in enumerate(to_run_exec):
+ mod = ast.Module([node])
+ code = compiler(mod, cell_name, "exec")
+ if self.run_code(code, result):
+ return True
+
+ for i, node in enumerate(to_run_interactive):
+ mod = ast.Interactive([node])
+ code = compiler(mod, cell_name, "single")
+ if self.run_code(code, result):
+ return True
+
+ # Flush softspace
+ if softspace(sys.stdout, 0):
+ print()
+
+ except:
+ # It's possible to have exceptions raised here, typically by
+ # compilation of odd code (such as a naked 'return' outside a
+ # function) that did parse but isn't valid. Typically the exception
+ # is a SyntaxError, but it's safest just to catch anything and show
+ # the user a traceback.
+
+ # We do only one try/except outside the loop to minimize the impact
+ # on runtime, and also because if any node in the node list is
+ # broken, we should stop execution completely.
+ if result:
+ result.error_before_exec = sys.exc_info()[1]
+ self.showtraceback()
+ return True
+
+ return False
+
+ def run_code(self, code_obj, result=None):
+ """Execute a code object.
+
+ When an exception occurs, self.showtraceback() is called to display a
+ traceback.
+
+ Parameters
+ ----------
+ code_obj : code object
+ A compiled code object, to be executed
+ result : ExecutionResult, optional
+ An object to store exceptions that occur during execution.
+
+ Returns
+ -------
+ False : successful execution.
+ True : an error occurred.
+ """
+ # Set our own excepthook in case the user code tries to call it
+ # directly, so that the IPython crash handler doesn't get triggered
+ old_excepthook, sys.excepthook = sys.excepthook, self.excepthook
+
+ # we save the original sys.excepthook in the instance, in case config
+ # code (such as magics) needs access to it.
+ self.sys_excepthook = old_excepthook
+ outflag = 1 # happens in more places, so it's easier as default
+ try:
+ try:
+ self.hooks.pre_run_code_hook()
+ #rprint('Running code', repr(code_obj)) # dbg
+ exec(code_obj, self.user_global_ns, self.user_ns)
+ finally:
+ # Reset our crash handler in place
+ sys.excepthook = old_excepthook
+ except SystemExit as e:
+ if result is not None:
+ result.error_in_exec = e
+ self.showtraceback(exception_only=True)
warn("To exit: use 'exit', 'quit', or Ctrl-D.", stacklevel=1)
- except self.custom_exceptions:
- etype, value, tb = sys.exc_info()
- if result is not None:
- result.error_in_exec = value
- self.CustomTB(etype, value, tb)
- except:
- if result is not None:
- result.error_in_exec = sys.exc_info()[1]
- self.showtraceback()
- else:
- outflag = 0
- return outflag
-
- # For backwards compatibility
- runcode = run_code
-
+ except self.custom_exceptions:
+ etype, value, tb = sys.exc_info()
+ if result is not None:
+ result.error_in_exec = value
+ self.CustomTB(etype, value, tb)
+ except:
+ if result is not None:
+ result.error_in_exec = sys.exc_info()[1]
+ self.showtraceback()
+ else:
+ outflag = 0
+ return outflag
+
+ # For backwards compatibility
+ runcode = run_code
+
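# --- Editor's illustrative sketch (not part of this diff) ---------------------
# run_ast_nodes() above splits a cell according to 'interactivity'; with the
# default 'last_expr' only a trailing expression is compiled in 'single' mode,
# which routes its value through sys.displayhook (the Out[n] mechanism).
import ast

cell = "a = 2\na * 21"
nodes = ast.parse(cell).body
to_exec, to_interactive = nodes[:-1], nodes[-1:]    # 'last_expr' with a trailing ast.Expr

for node in to_exec:
    exec(compile(ast.Module(body=[node]), "<cell>", "exec"))
for node in to_interactive:
    exec(compile(ast.Interactive(body=[node]), "<cell>", "single"))   # displays 42
# (Python 2-era ast.Module signature, matching the code above.)
# ------------------------------------------------------------------------------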
def check_complete(self, code):
"""Return whether a block of code is ready to execute, or should be continued
@@ -2920,345 +2920,345 @@ class InteractiveShell(SingletonConfigurable):
status, nspaces = self.input_splitter.check_complete(code)
return status, ' ' * (nspaces or 0)
- #-------------------------------------------------------------------------
- # Things related to GUI support and pylab
- #-------------------------------------------------------------------------
-
+ #-------------------------------------------------------------------------
+ # Things related to GUI support and pylab
+ #-------------------------------------------------------------------------
+
active_eventloop = None
- def enable_gui(self, gui=None):
- raise NotImplementedError('Implement enable_gui in a subclass')
+ def enable_gui(self, gui=None):
+ raise NotImplementedError('Implement enable_gui in a subclass')
- def enable_matplotlib(self, gui=None):
- """Enable interactive matplotlib and inline figure support.
+ def enable_matplotlib(self, gui=None):
+ """Enable interactive matplotlib and inline figure support.
- This takes the following steps:
+ This takes the following steps:
- 1. select the appropriate eventloop and matplotlib backend
- 2. set up matplotlib for interactive use with that backend
- 3. configure formatters for inline figure display
- 4. enable the selected gui eventloop
+ 1. select the appropriate eventloop and matplotlib backend
+ 2. set up matplotlib for interactive use with that backend
+ 3. configure formatters for inline figure display
+ 4. enable the selected gui eventloop
- Parameters
- ----------
- gui : optional, string
- If given, dictates the choice of matplotlib GUI backend to use
- (should be one of IPython's supported backends, 'qt', 'osx', 'tk',
- 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by
- matplotlib (as dictated by the matplotlib build-time options plus the
- user's matplotlibrc configuration file). Note that not all backends
- make sense in all contexts, for example a terminal ipython can't
-            make sense in all contexts; for example, a terminal IPython can't
- """
- from IPython.core import pylabtools as pt
- gui, backend = pt.find_gui_and_backend(gui, self.pylab_gui_select)
+ Parameters
+ ----------
+ gui : optional, string
+ If given, dictates the choice of matplotlib GUI backend to use
+ (should be one of IPython's supported backends, 'qt', 'osx', 'tk',
+ 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by
+ matplotlib (as dictated by the matplotlib build-time options plus the
+ user's matplotlibrc configuration file). Note that not all backends
+            make sense in all contexts; for example, a terminal IPython can't
+ display figures inline.
+ """
+ from IPython.core import pylabtools as pt
+ gui, backend = pt.find_gui_and_backend(gui, self.pylab_gui_select)
- if gui != 'inline':
- # If we have our first gui selection, store it
- if self.pylab_gui_select is None:
- self.pylab_gui_select = gui
- # Otherwise if they are different
- elif gui != self.pylab_gui_select:
- print ('Warning: Cannot change to a different GUI toolkit: %s.'
- ' Using %s instead.' % (gui, self.pylab_gui_select))
- gui, backend = pt.find_gui_and_backend(self.pylab_gui_select)
+ if gui != 'inline':
+ # If we have our first gui selection, store it
+ if self.pylab_gui_select is None:
+ self.pylab_gui_select = gui
+ # Otherwise if they are different
+ elif gui != self.pylab_gui_select:
+ print ('Warning: Cannot change to a different GUI toolkit: %s.'
+ ' Using %s instead.' % (gui, self.pylab_gui_select))
+ gui, backend = pt.find_gui_and_backend(self.pylab_gui_select)
- pt.activate_matplotlib(backend)
- pt.configure_inline_support(self, backend)
+ pt.activate_matplotlib(backend)
+ pt.configure_inline_support(self, backend)
- # Now we must activate the gui pylab wants to use, and fix %run to take
- # plot updates into account
- self.enable_gui(gui)
- self.magics_manager.registry['ExecutionMagics'].default_runner = \
- pt.mpl_runner(self.safe_execfile)
+ # Now we must activate the gui pylab wants to use, and fix %run to take
+ # plot updates into account
+ self.enable_gui(gui)
+ self.magics_manager.registry['ExecutionMagics'].default_runner = \
+ pt.mpl_runner(self.safe_execfile)
- return gui, backend
-
- def enable_pylab(self, gui=None, import_all=True, welcome_message=False):
- """Activate pylab support at runtime.
-
- This turns on support for matplotlib, preloads into the interactive
- namespace all of numpy and pylab, and configures IPython to correctly
- interact with the GUI event loop. The GUI backend to be used can be
- optionally selected with the optional ``gui`` argument.
+ return gui, backend
+
+ def enable_pylab(self, gui=None, import_all=True, welcome_message=False):
+ """Activate pylab support at runtime.
+
+ This turns on support for matplotlib, preloads into the interactive
+ namespace all of numpy and pylab, and configures IPython to correctly
+ interact with the GUI event loop. The GUI backend to be used can be
+ optionally selected with the optional ``gui`` argument.
- This method only adds preloading the namespace to InteractiveShell.enable_matplotlib.
-
- Parameters
- ----------
- gui : optional, string
- If given, dictates the choice of matplotlib GUI backend to use
- (should be one of IPython's supported backends, 'qt', 'osx', 'tk',
- 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by
- matplotlib (as dictated by the matplotlib build-time options plus the
- user's matplotlibrc configuration file). Note that not all backends
-            make sense in all contexts; for example, a terminal IPython can't
- display figures inline.
- import_all : optional, bool, default: True
- Whether to do `from numpy import *` and `from pylab import *`
- in addition to module imports.
- welcome_message : deprecated
-            This argument is ignored; no welcome message will be displayed.
- """
- from IPython.core.pylabtools import import_pylab
+ This method only adds preloading the namespace to InteractiveShell.enable_matplotlib.
+
+ Parameters
+ ----------
+ gui : optional, string
+ If given, dictates the choice of matplotlib GUI backend to use
+ (should be one of IPython's supported backends, 'qt', 'osx', 'tk',
+ 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by
+ matplotlib (as dictated by the matplotlib build-time options plus the
+ user's matplotlibrc configuration file). Note that not all backends
+            make sense in all contexts; for example, a terminal IPython can't
+ display figures inline.
+ import_all : optional, bool, default: True
+ Whether to do `from numpy import *` and `from pylab import *`
+ in addition to module imports.
+ welcome_message : deprecated
+            This argument is ignored; no welcome message will be displayed.
+ """
+ from IPython.core.pylabtools import import_pylab
- gui, backend = self.enable_matplotlib(gui)
+ gui, backend = self.enable_matplotlib(gui)
- # We want to prevent the loading of pylab to pollute the user's
- # namespace as shown by the %who* magics, so we execute the activation
- # code in an empty namespace, and we update *both* user_ns and
- # user_ns_hidden with this information.
- ns = {}
- import_pylab(ns, import_all)
- # warn about clobbered names
- ignored = {"__builtins__"}
- both = set(ns).intersection(self.user_ns).difference(ignored)
- clobbered = [ name for name in both if self.user_ns[name] is not ns[name] ]
- self.user_ns.update(ns)
- self.user_ns_hidden.update(ns)
- return gui, backend, clobbered
-
- #-------------------------------------------------------------------------
- # Utilities
- #-------------------------------------------------------------------------
-
- def var_expand(self, cmd, depth=0, formatter=DollarFormatter()):
- """Expand python variables in a string.
-
- The depth argument indicates how many frames above the caller should
- be walked to look for the local namespace where to expand variables.
-
- The global namespace for expansion is always the user's interactive
- namespace.
- """
- ns = self.user_ns.copy()
- try:
- frame = sys._getframe(depth+1)
- except ValueError:
- # This is thrown if there aren't that many frames on the stack,
- # e.g. if a script called run_line_magic() directly.
- pass
- else:
- ns.update(frame.f_locals)
-
- try:
- # We have to use .vformat() here, because 'self' is a valid and common
- # name, and expanding **ns for .format() would make it collide with
- # the 'self' argument of the method.
- cmd = formatter.vformat(cmd, args=[], kwargs=ns)
- except Exception:
- # if formatter couldn't format, just let it go untransformed
- pass
- return cmd
-
- def mktempfile(self, data=None, prefix='ipython_edit_'):
- """Make a new tempfile and return its filename.
-
-        This calls tempfile.mkstemp (with the file created inside a directory
-        made by tempfile.mkdtemp) and registers the created filename internally
-        so IPython cleans it up at exit time.
-
- Optional inputs:
-
- - data(None): if data is given, it gets written out to the temp file
- immediately, and the file is closed again."""
-
- dirname = tempfile.mkdtemp(prefix=prefix)
- self.tempdirs.append(dirname)
-
- handle, filename = tempfile.mkstemp('.py', prefix, dir=dirname)
- os.close(handle) # On Windows, there can only be one open handle on a file
- self.tempfiles.append(filename)
-
- if data:
- tmp_file = open(filename,'w')
- tmp_file.write(data)
- tmp_file.close()
- return filename
-
+ # We want to prevent the loading of pylab to pollute the user's
+ # namespace as shown by the %who* magics, so we execute the activation
+ # code in an empty namespace, and we update *both* user_ns and
+ # user_ns_hidden with this information.
+ ns = {}
+ import_pylab(ns, import_all)
+ # warn about clobbered names
+ ignored = {"__builtins__"}
+ both = set(ns).intersection(self.user_ns).difference(ignored)
+ clobbered = [ name for name in both if self.user_ns[name] is not ns[name] ]
+ self.user_ns.update(ns)
+ self.user_ns_hidden.update(ns)
+ return gui, backend, clobbered
+
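# --- Editor's usage sketch (runs only inside an IPython session; not part of
# this diff). enable_matplotlib()/enable_pylab() above back the %matplotlib and
# %pylab magics; the return values report what was actually selected, and
# 'clobbered' lists user names shadowed by the numpy/pylab bulk import.
ip = get_ipython()
gui, backend = ip.enable_matplotlib("qt")            # backend choice is environment-dependent
gui, backend, clobbered = ip.enable_pylab("qt", import_all=False)
print(gui, backend, clobbered)
# ------------------------------------------------------------------------------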
+ #-------------------------------------------------------------------------
+ # Utilities
+ #-------------------------------------------------------------------------
+
+ def var_expand(self, cmd, depth=0, formatter=DollarFormatter()):
+ """Expand python variables in a string.
+
+ The depth argument indicates how many frames above the caller should
+ be walked to look for the local namespace where to expand variables.
+
+ The global namespace for expansion is always the user's interactive
+ namespace.
+ """
+ ns = self.user_ns.copy()
+ try:
+ frame = sys._getframe(depth+1)
+ except ValueError:
+ # This is thrown if there aren't that many frames on the stack,
+ # e.g. if a script called run_line_magic() directly.
+ pass
+ else:
+ ns.update(frame.f_locals)
+
+ try:
+ # We have to use .vformat() here, because 'self' is a valid and common
+ # name, and expanding **ns for .format() would make it collide with
+ # the 'self' argument of the method.
+ cmd = formatter.vformat(cmd, args=[], kwargs=ns)
+ except Exception:
+ # if formatter couldn't format, just let it go untransformed
+ pass
+ return cmd
+
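# --- Editor's illustrative sketch (not part of this diff) ---------------------
# var_expand() above delegates to DollarFormatter (imported in this module),
# which substitutes $name and evaluates {expr} against the caller's namespace;
# this is what lets magics and aliases see Python variables.
from IPython.utils.text import DollarFormatter

ns = {"pattern": "*.py", "n": 3}
fmt = DollarFormatter()
print(fmt.vformat("ls $pattern | head -{n}", args=[], kwargs=ns))
# expected output (assuming DollarFormatter semantics as used above): ls *.py | head -3
# ------------------------------------------------------------------------------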
+ def mktempfile(self, data=None, prefix='ipython_edit_'):
+ """Make a new tempfile and return its filename.
+
+        This calls tempfile.mkstemp (with the file created inside a directory
+        made by tempfile.mkdtemp) and registers the created filename internally
+        so IPython cleans it up at exit time.
+
+ Optional inputs:
+
+ - data(None): if data is given, it gets written out to the temp file
+ immediately, and the file is closed again."""
+
+ dirname = tempfile.mkdtemp(prefix=prefix)
+ self.tempdirs.append(dirname)
+
+ handle, filename = tempfile.mkstemp('.py', prefix, dir=dirname)
+ os.close(handle) # On Windows, there can only be one open handle on a file
+ self.tempfiles.append(filename)
+
+ if data:
+ tmp_file = open(filename,'w')
+ tmp_file.write(data)
+ tmp_file.close()
+ return filename
+
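# --- Editor's usage sketch (inside IPython; not part of this diff) ------------
# mktempfile() above is what %edit relies on: the file lands in a per-shell temp
# directory and is removed again by atexit_operations() further down.
ip = get_ipython()
fname = ip.mktempfile(data="print('scratch buffer')\n")
print(fname)        # e.g. .../ipython_edit_XXXX/ipython_edit_YYYY.py (names are illustrative)
# ------------------------------------------------------------------------------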
@undoc
- def write(self,data):
+ def write(self,data):
"""DEPRECATED: Write a string to the default output"""
warn('InteractiveShell.write() is deprecated, use sys.stdout instead',
DeprecationWarning, stacklevel=2)
sys.stdout.write(data)
-
+
@undoc
- def write_err(self,data):
+ def write_err(self,data):
"""DEPRECATED: Write a string to the default error output"""
warn('InteractiveShell.write_err() is deprecated, use sys.stderr instead',
DeprecationWarning, stacklevel=2)
sys.stderr.write(data)
-
- def ask_yes_no(self, prompt, default=None, interrupt=None):
- if self.quiet:
- return True
- return ask_yes_no(prompt,default,interrupt)
-
- def show_usage(self):
- """Show a usage message"""
- page.page(IPython.core.usage.interactive_usage)
-
- def extract_input_lines(self, range_str, raw=False):
- """Return as a string a set of input history slices.
-
- Parameters
- ----------
- range_str : string
- The set of slices is given as a string, like "~5/6-~4/2 4:8 9",
- since this function is for use by magic functions which get their
- arguments as strings. The number before the / is the session
- number: ~n goes n back from the current session.
-
- raw : bool, optional
- By default, the processed input is used. If this is true, the raw
- input history is used instead.
-
- Notes
- -----
-
- Slices can be described with two notations:
-
- * ``N:M`` -> standard python form, means including items N...(M-1).
- * ``N-M`` -> include items N..M (closed endpoint).
- """
- lines = self.history_manager.get_range_by_str(range_str, raw=raw)
- return "\n".join(x for _, _, x in lines)
-
- def find_user_code(self, target, raw=True, py_only=False, skip_encoding_cookie=True, search_ns=False):
- """Get a code string from history, file, url, or a string or macro.
-
- This is mainly used by magic functions.
-
- Parameters
- ----------
-
- target : str
-
-          A string specifying code to retrieve. This will be tried, in order,
- as: ranges of input history (see %history for syntax), url,
- corresponding .py file, filename, or an expression evaluating to a
- string or Macro in the user namespace.
-
- raw : bool
- If true (default), retrieve raw history. Has no effect on the other
- retrieval mechanisms.
-
- py_only : bool (default False)
- Only try to fetch python code, do not try alternative methods to decode file
- if unicode fails.
-
- Returns
- -------
- A string of code.
-
- ValueError is raised if nothing is found, and TypeError if it evaluates
- to an object of another type. In each case, .args[0] is a printable
- message.
- """
- code = self.extract_input_lines(target, raw=raw) # Grab history
- if code:
- return code
- try:
+
+ def ask_yes_no(self, prompt, default=None, interrupt=None):
+ if self.quiet:
+ return True
+ return ask_yes_no(prompt,default,interrupt)
+
+ def show_usage(self):
+ """Show a usage message"""
+ page.page(IPython.core.usage.interactive_usage)
+
+ def extract_input_lines(self, range_str, raw=False):
+ """Return as a string a set of input history slices.
+
+ Parameters
+ ----------
+ range_str : string
+ The set of slices is given as a string, like "~5/6-~4/2 4:8 9",
+ since this function is for use by magic functions which get their
+ arguments as strings. The number before the / is the session
+ number: ~n goes n back from the current session.
+
+ raw : bool, optional
+ By default, the processed input is used. If this is true, the raw
+ input history is used instead.
+
+ Notes
+ -----
+
+ Slices can be described with two notations:
+
+ * ``N:M`` -> standard python form, means including items N...(M-1).
+ * ``N-M`` -> include items N..M (closed endpoint).
+ """
+ lines = self.history_manager.get_range_by_str(range_str, raw=raw)
+ return "\n".join(x for _, _, x in lines)
+
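# --- Editor's sketch of the slice syntax documented above (inside IPython;
# session/line numbers are illustrative, not part of this diff) ----------------
ip = get_ipython()
print(ip.extract_input_lines("1-3"))        # lines 1..3 of this session (closed range)
print(ip.extract_input_lines("~1/7 5:9"))   # line 7 of the previous session, then lines 5..8
# ------------------------------------------------------------------------------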
+ def find_user_code(self, target, raw=True, py_only=False, skip_encoding_cookie=True, search_ns=False):
+ """Get a code string from history, file, url, or a string or macro.
+
+ This is mainly used by magic functions.
+
+ Parameters
+ ----------
+
+ target : str
+
+          A string specifying code to retrieve. This will be tried, in order,
+ as: ranges of input history (see %history for syntax), url,
+ corresponding .py file, filename, or an expression evaluating to a
+ string or Macro in the user namespace.
+
+ raw : bool
+ If true (default), retrieve raw history. Has no effect on the other
+ retrieval mechanisms.
+
+ py_only : bool (default False)
+ Only try to fetch python code, do not try alternative methods to decode file
+ if unicode fails.
+
+ Returns
+ -------
+ A string of code.
+
+ ValueError is raised if nothing is found, and TypeError if it evaluates
+ to an object of another type. In each case, .args[0] is a printable
+ message.
+ """
+ code = self.extract_input_lines(target, raw=raw) # Grab history
+ if code:
+ return code
+ try:
if target.startswith(('http://', 'https://')):
return openpy.read_py_url(target, skip_encoding_cookie=skip_encoding_cookie)
- except UnicodeDecodeError:
- if not py_only :
- # Deferred import
- try:
- from urllib.request import urlopen # Py3
- except ImportError:
- from urllib import urlopen
- response = urlopen(target)
- return response.read().decode('latin1')
+ except UnicodeDecodeError:
+ if not py_only :
+ # Deferred import
+ try:
+ from urllib.request import urlopen # Py3
+ except ImportError:
+ from urllib import urlopen
+ response = urlopen(target)
+ return response.read().decode('latin1')
            raise ValueError(("'%s' seems to be unreadable.") % target)
-
- potential_target = [target]
- try :
- potential_target.insert(0,get_py_filename(target))
- except IOError:
- pass
-
- for tgt in potential_target :
- if os.path.isfile(tgt): # Read file
- try :
- return openpy.read_py_file(tgt, skip_encoding_cookie=skip_encoding_cookie)
- except UnicodeDecodeError :
- if not py_only :
- with io_open(tgt,'r', encoding='latin1') as f :
- return f.read()
-                raise ValueError(("'%s' seems to be unreadable.") % target)
- elif os.path.isdir(os.path.expanduser(tgt)):
- raise ValueError("'%s' is a directory, not a regular file." % target)
-
- if search_ns:
- # Inspect namespace to load object source
- object_info = self.object_inspect(target, detail_level=1)
- if object_info['found'] and object_info['source']:
- return object_info['source']
-
- try: # User namespace
- codeobj = eval(target, self.user_ns)
- except Exception:
- raise ValueError(("'%s' was not found in history, as a file, url, "
- "nor in the user namespace.") % target)
-
- if isinstance(codeobj, string_types):
- return codeobj
- elif isinstance(codeobj, Macro):
- return codeobj.value
-
- raise TypeError("%s is neither a string nor a macro." % target,
- codeobj)
-
- #-------------------------------------------------------------------------
- # Things related to IPython exiting
- #-------------------------------------------------------------------------
- def atexit_operations(self):
- """This will be executed at the time of exit.
-
- Cleanup operations and saving of persistent data that is done
- unconditionally by IPython should be performed here.
-
- For things that may depend on startup flags or platform specifics (such
- as having readline or not), register a separate atexit function in the
-        code that has the appropriate information, rather than trying to
-        clutter this method.
- """
- # Close the history session (this stores the end time and line count)
- # this must be *before* the tempfile cleanup, in case of temporary
- # history db
- self.history_manager.end_session()
-
- # Cleanup all tempfiles and folders left around
- for tfile in self.tempfiles:
- try:
- os.unlink(tfile)
- except OSError:
- pass
-
- for tdir in self.tempdirs:
- try:
- os.rmdir(tdir)
- except OSError:
- pass
-
- # Clear all user namespaces to release all references cleanly.
- self.reset(new_session=False)
-
- # Run user hooks
- self.hooks.shutdown_hook()
-
- def cleanup(self):
- self.restore_sys_module_state()
-
-
+
+ potential_target = [target]
+ try :
+ potential_target.insert(0,get_py_filename(target))
+ except IOError:
+ pass
+
+ for tgt in potential_target :
+ if os.path.isfile(tgt): # Read file
+ try :
+ return openpy.read_py_file(tgt, skip_encoding_cookie=skip_encoding_cookie)
+ except UnicodeDecodeError :
+ if not py_only :
+ with io_open(tgt,'r', encoding='latin1') as f :
+ return f.read()
+                raise ValueError(("'%s' seems to be unreadable.") % target)
+ elif os.path.isdir(os.path.expanduser(tgt)):
+ raise ValueError("'%s' is a directory, not a regular file." % target)
+
+ if search_ns:
+ # Inspect namespace to load object source
+ object_info = self.object_inspect(target, detail_level=1)
+ if object_info['found'] and object_info['source']:
+ return object_info['source']
+
+ try: # User namespace
+ codeobj = eval(target, self.user_ns)
+ except Exception:
+ raise ValueError(("'%s' was not found in history, as a file, url, "
+ "nor in the user namespace.") % target)
+
+ if isinstance(codeobj, string_types):
+ return codeobj
+ elif isinstance(codeobj, Macro):
+ return codeobj.value
+
+ raise TypeError("%s is neither a string nor a macro." % target,
+ codeobj)
+
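# --- Editor's usage sketch (inside IPython; targets are illustrative) ----------
# find_user_code() above is the lookup behind %load, %edit and %save: history
# ranges are tried first, then URLs / .py files / filenames, then the user
# namespace (strings and Macros). The names below are hypothetical.
ip = get_ipython()
src = ip.find_user_code("3-5")                          # input history range
src = ip.find_user_code("my_script.py")                 # a file on disk
src = ip.find_user_code("saved_macro", search_ns=True)  # an object in the user namespace
# ------------------------------------------------------------------------------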
+ #-------------------------------------------------------------------------
+ # Things related to IPython exiting
+ #-------------------------------------------------------------------------
+ def atexit_operations(self):
+ """This will be executed at the time of exit.
+
+ Cleanup operations and saving of persistent data that is done
+ unconditionally by IPython should be performed here.
+
+ For things that may depend on startup flags or platform specifics (such
+ as having readline or not), register a separate atexit function in the
+        code that has the appropriate information, rather than trying to
+        clutter this method.
+ """
+ # Close the history session (this stores the end time and line count)
+ # this must be *before* the tempfile cleanup, in case of temporary
+ # history db
+ self.history_manager.end_session()
+
+ # Cleanup all tempfiles and folders left around
+ for tfile in self.tempfiles:
+ try:
+ os.unlink(tfile)
+ except OSError:
+ pass
+
+ for tdir in self.tempdirs:
+ try:
+ os.rmdir(tdir)
+ except OSError:
+ pass
+
+ # Clear all user namespaces to release all references cleanly.
+ self.reset(new_session=False)
+
+ # Run user hooks
+ self.hooks.shutdown_hook()
+
+ def cleanup(self):
+ self.restore_sys_module_state()
+
+
# Overridden in terminal subclass to change prompts
def switch_doctest_mode(self, mode):
pass
-class InteractiveShellABC(with_metaclass(abc.ABCMeta, object)):
- """An abstract base class for InteractiveShell."""
-
-InteractiveShellABC.register(InteractiveShell)
+class InteractiveShellABC(with_metaclass(abc.ABCMeta, object)):
+ """An abstract base class for InteractiveShell."""
+
+InteractiveShellABC.register(InteractiveShell)
diff --git a/contrib/python/ipython/py2/IPython/core/latex_symbols.py b/contrib/python/ipython/py2/IPython/core/latex_symbols.py
index 0319b99ff8..ca7200bb59 100644
--- a/contrib/python/ipython/py2/IPython/core/latex_symbols.py
+++ b/contrib/python/ipython/py2/IPython/core/latex_symbols.py
@@ -1,1300 +1,1300 @@
-# encoding: utf-8
-
-# DO NOT EDIT THIS FILE BY HAND.
-
-# To update this file, run the script /tools/gen_latex_symbols.py using Python 3
-
-# This file is autogenerated from the file:
-# https://raw.githubusercontent.com/JuliaLang/julia/master/base/latex_symbols.jl
-# This original list is filtered to remove any unicode characters that are not valid
-# Python identifiers.
-
-latex_symbols = {
-
- "\\^a" : "ᵃ",
- "\\^b" : "ᵇ",
- "\\^c" : "ᶜ",
- "\\^d" : "ᵈ",
- "\\^e" : "ᵉ",
- "\\^f" : "ᶠ",
- "\\^g" : "ᵍ",
- "\\^h" : "ʰ",
- "\\^i" : "ⁱ",
- "\\^j" : "ʲ",
- "\\^k" : "ᵏ",
- "\\^l" : "ˡ",
- "\\^m" : "ᵐ",
- "\\^n" : "ⁿ",
- "\\^o" : "ᵒ",
- "\\^p" : "ᵖ",
- "\\^r" : "ʳ",
- "\\^s" : "ˢ",
- "\\^t" : "ᵗ",
- "\\^u" : "ᵘ",
- "\\^v" : "ᵛ",
- "\\^w" : "ʷ",
- "\\^x" : "ˣ",
- "\\^y" : "ʸ",
- "\\^z" : "ᶻ",
- "\\^A" : "ᴬ",
- "\\^B" : "ᴮ",
- "\\^D" : "ᴰ",
- "\\^E" : "ᴱ",
- "\\^G" : "ᴳ",
- "\\^H" : "ᴴ",
- "\\^I" : "ᴵ",
- "\\^J" : "ᴶ",
- "\\^K" : "ᴷ",
- "\\^L" : "ᴸ",
- "\\^M" : "ᴹ",
- "\\^N" : "ᴺ",
- "\\^O" : "ᴼ",
- "\\^P" : "ᴾ",
- "\\^R" : "ᴿ",
- "\\^T" : "ᵀ",
- "\\^U" : "ᵁ",
- "\\^V" : "ⱽ",
- "\\^W" : "ᵂ",
- "\\^alpha" : "ᵅ",
- "\\^beta" : "ᵝ",
- "\\^gamma" : "ᵞ",
- "\\^delta" : "ᵟ",
- "\\^epsilon" : "ᵋ",
- "\\^theta" : "ᶿ",
- "\\^iota" : "ᶥ",
- "\\^phi" : "ᵠ",
- "\\^chi" : "ᵡ",
- "\\^Phi" : "ᶲ",
- "\\_a" : "ₐ",
- "\\_e" : "ₑ",
- "\\_h" : "ₕ",
- "\\_i" : "ᵢ",
- "\\_j" : "ⱼ",
- "\\_k" : "ₖ",
- "\\_l" : "ₗ",
- "\\_m" : "ₘ",
- "\\_n" : "ₙ",
- "\\_o" : "ₒ",
- "\\_p" : "ₚ",
- "\\_r" : "ᵣ",
- "\\_s" : "ₛ",
- "\\_t" : "ₜ",
- "\\_u" : "ᵤ",
- "\\_v" : "ᵥ",
- "\\_x" : "ₓ",
- "\\_schwa" : "ₔ",
- "\\_beta" : "ᵦ",
- "\\_gamma" : "ᵧ",
- "\\_rho" : "ᵨ",
- "\\_phi" : "ᵩ",
- "\\_chi" : "ᵪ",
- "\\hbar" : "ħ",
- "\\sout" : "̶",
- "\\textordfeminine" : "ª",
- "\\cdotp" : "·",
- "\\textordmasculine" : "º",
- "\\AA" : "Å",
- "\\AE" : "Æ",
- "\\DH" : "Ð",
- "\\O" : "Ø",
- "\\TH" : "Þ",
- "\\ss" : "ß",
- "\\aa" : "å",
- "\\ae" : "æ",
- "\\eth" : "ð",
- "\\o" : "ø",
- "\\th" : "þ",
- "\\DJ" : "Đ",
- "\\dj" : "đ",
- "\\Elzxh" : "ħ",
- "\\imath" : "ı",
- "\\L" : "Ł",
- "\\l" : "ł",
- "\\NG" : "Ŋ",
- "\\ng" : "ŋ",
- "\\OE" : "Œ",
- "\\oe" : "œ",
- "\\texthvlig" : "ƕ",
- "\\textnrleg" : "ƞ",
- "\\textdoublepipe" : "ǂ",
- "\\Elztrna" : "ɐ",
- "\\Elztrnsa" : "ɒ",
- "\\Elzopeno" : "ɔ",
- "\\Elzrtld" : "ɖ",
- "\\Elzschwa" : "ə",
- "\\varepsilon" : "ɛ",
- "\\Elzpgamma" : "ɣ",
- "\\Elzpbgam" : "ɤ",
- "\\Elztrnh" : "ɥ",
- "\\Elzbtdl" : "ɬ",
- "\\Elzrtll" : "ɭ",
- "\\Elztrnm" : "ɯ",
- "\\Elztrnmlr" : "ɰ",
- "\\Elzltlmr" : "ɱ",
- "\\Elzltln" : "ɲ",
- "\\Elzrtln" : "ɳ",
- "\\Elzclomeg" : "ɷ",
- "\\textphi" : "ɸ",
- "\\Elztrnr" : "ɹ",
- "\\Elztrnrl" : "ɺ",
- "\\Elzrttrnr" : "ɻ",
- "\\Elzrl" : "ɼ",
- "\\Elzrtlr" : "ɽ",
- "\\Elzfhr" : "ɾ",
- "\\Elzrtls" : "ʂ",
- "\\Elzesh" : "ʃ",
- "\\Elztrnt" : "ʇ",
- "\\Elzrtlt" : "ʈ",
- "\\Elzpupsil" : "ʊ",
- "\\Elzpscrv" : "ʋ",
- "\\Elzinvv" : "ʌ",
- "\\Elzinvw" : "ʍ",
- "\\Elztrny" : "ʎ",
- "\\Elzrtlz" : "ʐ",
- "\\Elzyogh" : "ʒ",
- "\\Elzglst" : "ʔ",
- "\\Elzreglst" : "ʕ",
- "\\Elzinglst" : "ʖ",
- "\\textturnk" : "ʞ",
- "\\Elzdyogh" : "ʤ",
- "\\Elztesh" : "ʧ",
- "\\rasp" : "ʼ",
- "\\textasciicaron" : "ˇ",
- "\\Elzverts" : "ˈ",
- "\\Elzverti" : "ˌ",
- "\\Elzlmrk" : "ː",
- "\\Elzhlmrk" : "ˑ",
- "\\grave" : "̀",
- "\\acute" : "́",
- "\\hat" : "̂",
- "\\tilde" : "̃",
- "\\bar" : "̄",
- "\\breve" : "̆",
- "\\dot" : "̇",
- "\\ddot" : "̈",
- "\\ocirc" : "̊",
- "\\H" : "̋",
- "\\check" : "̌",
- "\\Elzpalh" : "̡",
- "\\Elzrh" : "̢",
- "\\c" : "̧",
- "\\k" : "̨",
- "\\Elzsbbrg" : "̪",
- "\\Elzxl" : "̵",
- "\\Elzbar" : "̶",
- "\\Alpha" : "Α",
- "\\Beta" : "Β",
- "\\Gamma" : "Γ",
- "\\Delta" : "Δ",
- "\\Epsilon" : "Ε",
- "\\Zeta" : "Ζ",
- "\\Eta" : "Η",
- "\\Theta" : "Θ",
- "\\Iota" : "Ι",
- "\\Kappa" : "Κ",
- "\\Lambda" : "Λ",
- "\\Xi" : "Ξ",
- "\\Pi" : "Π",
- "\\Rho" : "Ρ",
- "\\Sigma" : "Σ",
- "\\Tau" : "Τ",
- "\\Upsilon" : "Υ",
- "\\Phi" : "Φ",
- "\\Chi" : "Χ",
- "\\Psi" : "Ψ",
- "\\Omega" : "Ω",
- "\\alpha" : "α",
- "\\beta" : "β",
- "\\gamma" : "γ",
- "\\delta" : "δ",
- "\\zeta" : "ζ",
- "\\eta" : "η",
- "\\theta" : "θ",
- "\\iota" : "ι",
- "\\kappa" : "κ",
- "\\lambda" : "λ",
- "\\mu" : "μ",
- "\\nu" : "ν",
- "\\xi" : "ξ",
- "\\pi" : "π",
- "\\rho" : "ρ",
- "\\varsigma" : "ς",
- "\\sigma" : "σ",
- "\\tau" : "τ",
- "\\upsilon" : "υ",
- "\\varphi" : "φ",
- "\\chi" : "χ",
- "\\psi" : "ψ",
- "\\omega" : "ω",
- "\\vartheta" : "ϑ",
- "\\phi" : "ϕ",
- "\\varpi" : "ϖ",
- "\\Stigma" : "Ϛ",
- "\\Digamma" : "Ϝ",
- "\\digamma" : "ϝ",
- "\\Koppa" : "Ϟ",
- "\\Sampi" : "Ϡ",
- "\\varkappa" : "ϰ",
- "\\varrho" : "ϱ",
- "\\textTheta" : "ϴ",
- "\\epsilon" : "ϵ",
- "\\dddot" : "⃛",
- "\\ddddot" : "⃜",
- "\\hslash" : "ℏ",
- "\\Im" : "ℑ",
- "\\ell" : "ℓ",
- "\\wp" : "℘",
- "\\Re" : "ℜ",
- "\\aleph" : "ℵ",
- "\\beth" : "ℶ",
- "\\gimel" : "ℷ",
- "\\daleth" : "ℸ",
- "\\BbbPi" : "ℿ",
- "\\Zbar" : "Ƶ",
- "\\overbar" : "̅",
- "\\ovhook" : "̉",
- "\\candra" : "̐",
- "\\oturnedcomma" : "̒",
- "\\ocommatopright" : "̕",
- "\\droang" : "̚",
- "\\wideutilde" : "̰",
- "\\underbar" : "̱",
- "\\not" : "̸",
- "\\upMu" : "Μ",
- "\\upNu" : "Ν",
- "\\upOmicron" : "Ο",
- "\\upepsilon" : "ε",
- "\\upomicron" : "ο",
- "\\upvarbeta" : "ϐ",
- "\\upoldKoppa" : "Ϙ",
- "\\upoldkoppa" : "ϙ",
- "\\upstigma" : "ϛ",
- "\\upkoppa" : "ϟ",
- "\\upsampi" : "ϡ",
- "\\tieconcat" : "⁀",
- "\\leftharpoonaccent" : "⃐",
- "\\rightharpoonaccent" : "⃑",
- "\\vertoverlay" : "⃒",
- "\\overleftarrow" : "⃖",
- "\\vec" : "⃗",
- "\\overleftrightarrow" : "⃡",
- "\\annuity" : "⃧",
- "\\threeunderdot" : "⃨",
- "\\widebridgeabove" : "⃩",
- "\\BbbC" : "ℂ",
- "\\Eulerconst" : "ℇ",
- "\\mscrg" : "ℊ",
- "\\mscrH" : "ℋ",
- "\\mfrakH" : "ℌ",
- "\\BbbH" : "ℍ",
- "\\Planckconst" : "ℎ",
- "\\mscrI" : "ℐ",
- "\\mscrL" : "ℒ",
- "\\BbbN" : "ℕ",
- "\\BbbP" : "ℙ",
- "\\BbbQ" : "ℚ",
- "\\mscrR" : "ℛ",
- "\\BbbR" : "ℝ",
- "\\BbbZ" : "ℤ",
- "\\mfrakZ" : "ℨ",
- "\\Angstrom" : "Å",
- "\\mscrB" : "ℬ",
- "\\mfrakC" : "ℭ",
- "\\mscre" : "ℯ",
- "\\mscrE" : "ℰ",
- "\\mscrF" : "ℱ",
- "\\Finv" : "Ⅎ",
- "\\mscrM" : "ℳ",
- "\\mscro" : "ℴ",
- "\\Bbbgamma" : "ℽ",
- "\\BbbGamma" : "ℾ",
- "\\mitBbbD" : "ⅅ",
- "\\mitBbbd" : "ⅆ",
- "\\mitBbbe" : "ⅇ",
- "\\mitBbbi" : "ⅈ",
- "\\mitBbbj" : "ⅉ",
- "\\mbfA" : "𝐀",
- "\\mbfB" : "𝐁",
- "\\mbfC" : "𝐂",
- "\\mbfD" : "𝐃",
- "\\mbfE" : "𝐄",
- "\\mbfF" : "𝐅",
- "\\mbfG" : "𝐆",
- "\\mbfH" : "𝐇",
- "\\mbfI" : "𝐈",
- "\\mbfJ" : "𝐉",
- "\\mbfK" : "𝐊",
- "\\mbfL" : "𝐋",
- "\\mbfM" : "𝐌",
- "\\mbfN" : "𝐍",
- "\\mbfO" : "𝐎",
- "\\mbfP" : "𝐏",
- "\\mbfQ" : "𝐐",
- "\\mbfR" : "𝐑",
- "\\mbfS" : "𝐒",
- "\\mbfT" : "𝐓",
- "\\mbfU" : "𝐔",
- "\\mbfV" : "𝐕",
- "\\mbfW" : "𝐖",
- "\\mbfX" : "𝐗",
- "\\mbfY" : "𝐘",
- "\\mbfZ" : "𝐙",
- "\\mbfa" : "𝐚",
- "\\mbfb" : "𝐛",
- "\\mbfc" : "𝐜",
- "\\mbfd" : "𝐝",
- "\\mbfe" : "𝐞",
- "\\mbff" : "𝐟",
- "\\mbfg" : "𝐠",
- "\\mbfh" : "𝐡",
- "\\mbfi" : "𝐢",
- "\\mbfj" : "𝐣",
- "\\mbfk" : "𝐤",
- "\\mbfl" : "𝐥",
- "\\mbfm" : "𝐦",
- "\\mbfn" : "𝐧",
- "\\mbfo" : "𝐨",
- "\\mbfp" : "𝐩",
- "\\mbfq" : "𝐪",
- "\\mbfr" : "𝐫",
- "\\mbfs" : "𝐬",
- "\\mbft" : "𝐭",
- "\\mbfu" : "𝐮",
- "\\mbfv" : "𝐯",
- "\\mbfw" : "𝐰",
- "\\mbfx" : "𝐱",
- "\\mbfy" : "𝐲",
- "\\mbfz" : "𝐳",
- "\\mitA" : "𝐴",
- "\\mitB" : "𝐵",
- "\\mitC" : "𝐶",
- "\\mitD" : "𝐷",
- "\\mitE" : "𝐸",
- "\\mitF" : "𝐹",
- "\\mitG" : "𝐺",
- "\\mitH" : "𝐻",
- "\\mitI" : "𝐼",
- "\\mitJ" : "𝐽",
- "\\mitK" : "𝐾",
- "\\mitL" : "𝐿",
- "\\mitM" : "𝑀",
- "\\mitN" : "𝑁",
- "\\mitO" : "𝑂",
- "\\mitP" : "𝑃",
- "\\mitQ" : "𝑄",
- "\\mitR" : "𝑅",
- "\\mitS" : "𝑆",
- "\\mitT" : "𝑇",
- "\\mitU" : "𝑈",
- "\\mitV" : "𝑉",
- "\\mitW" : "𝑊",
- "\\mitX" : "𝑋",
- "\\mitY" : "𝑌",
- "\\mitZ" : "𝑍",
- "\\mita" : "𝑎",
- "\\mitb" : "𝑏",
- "\\mitc" : "𝑐",
- "\\mitd" : "𝑑",
- "\\mite" : "𝑒",
- "\\mitf" : "𝑓",
- "\\mitg" : "𝑔",
- "\\miti" : "𝑖",
- "\\mitj" : "𝑗",
- "\\mitk" : "𝑘",
- "\\mitl" : "𝑙",
- "\\mitm" : "𝑚",
- "\\mitn" : "𝑛",
- "\\mito" : "𝑜",
- "\\mitp" : "𝑝",
- "\\mitq" : "𝑞",
- "\\mitr" : "𝑟",
- "\\mits" : "𝑠",
- "\\mitt" : "𝑡",
- "\\mitu" : "𝑢",
- "\\mitv" : "𝑣",
- "\\mitw" : "𝑤",
- "\\mitx" : "𝑥",
- "\\mity" : "𝑦",
- "\\mitz" : "𝑧",
- "\\mbfitA" : "𝑨",
- "\\mbfitB" : "𝑩",
- "\\mbfitC" : "𝑪",
- "\\mbfitD" : "𝑫",
- "\\mbfitE" : "𝑬",
- "\\mbfitF" : "𝑭",
- "\\mbfitG" : "𝑮",
- "\\mbfitH" : "𝑯",
- "\\mbfitI" : "𝑰",
- "\\mbfitJ" : "𝑱",
- "\\mbfitK" : "𝑲",
- "\\mbfitL" : "𝑳",
- "\\mbfitM" : "𝑴",
- "\\mbfitN" : "𝑵",
- "\\mbfitO" : "𝑶",
- "\\mbfitP" : "𝑷",
- "\\mbfitQ" : "𝑸",
- "\\mbfitR" : "𝑹",
- "\\mbfitS" : "𝑺",
- "\\mbfitT" : "𝑻",
- "\\mbfitU" : "𝑼",
- "\\mbfitV" : "𝑽",
- "\\mbfitW" : "𝑾",
- "\\mbfitX" : "𝑿",
- "\\mbfitY" : "𝒀",
- "\\mbfitZ" : "𝒁",
- "\\mbfita" : "𝒂",
- "\\mbfitb" : "𝒃",
- "\\mbfitc" : "𝒄",
- "\\mbfitd" : "𝒅",
- "\\mbfite" : "𝒆",
- "\\mbfitf" : "𝒇",
- "\\mbfitg" : "𝒈",
- "\\mbfith" : "𝒉",
- "\\mbfiti" : "𝒊",
- "\\mbfitj" : "𝒋",
- "\\mbfitk" : "𝒌",
- "\\mbfitl" : "𝒍",
- "\\mbfitm" : "𝒎",
- "\\mbfitn" : "𝒏",
- "\\mbfito" : "𝒐",
- "\\mbfitp" : "𝒑",
- "\\mbfitq" : "𝒒",
- "\\mbfitr" : "𝒓",
- "\\mbfits" : "𝒔",
- "\\mbfitt" : "𝒕",
- "\\mbfitu" : "𝒖",
- "\\mbfitv" : "𝒗",
- "\\mbfitw" : "𝒘",
- "\\mbfitx" : "𝒙",
- "\\mbfity" : "𝒚",
- "\\mbfitz" : "𝒛",
- "\\mscrA" : "𝒜",
- "\\mscrC" : "𝒞",
- "\\mscrD" : "𝒟",
- "\\mscrG" : "𝒢",
- "\\mscrJ" : "𝒥",
- "\\mscrK" : "𝒦",
- "\\mscrN" : "𝒩",
- "\\mscrO" : "𝒪",
- "\\mscrP" : "𝒫",
- "\\mscrQ" : "𝒬",
- "\\mscrS" : "𝒮",
- "\\mscrT" : "𝒯",
- "\\mscrU" : "𝒰",
- "\\mscrV" : "𝒱",
- "\\mscrW" : "𝒲",
- "\\mscrX" : "𝒳",
- "\\mscrY" : "𝒴",
- "\\mscrZ" : "𝒵",
- "\\mscra" : "𝒶",
- "\\mscrb" : "𝒷",
- "\\mscrc" : "𝒸",
- "\\mscrd" : "𝒹",
- "\\mscrf" : "𝒻",
- "\\mscrh" : "𝒽",
- "\\mscri" : "𝒾",
- "\\mscrj" : "𝒿",
- "\\mscrk" : "𝓀",
- "\\mscrm" : "𝓂",
- "\\mscrn" : "𝓃",
- "\\mscrp" : "𝓅",
- "\\mscrq" : "𝓆",
- "\\mscrr" : "𝓇",
- "\\mscrs" : "𝓈",
- "\\mscrt" : "𝓉",
- "\\mscru" : "𝓊",
- "\\mscrv" : "𝓋",
- "\\mscrw" : "𝓌",
- "\\mscrx" : "𝓍",
- "\\mscry" : "𝓎",
- "\\mscrz" : "𝓏",
- "\\mbfscrA" : "𝓐",
- "\\mbfscrB" : "𝓑",
- "\\mbfscrC" : "𝓒",
- "\\mbfscrD" : "𝓓",
- "\\mbfscrE" : "𝓔",
- "\\mbfscrF" : "𝓕",
- "\\mbfscrG" : "𝓖",
- "\\mbfscrH" : "𝓗",
- "\\mbfscrI" : "𝓘",
- "\\mbfscrJ" : "𝓙",
- "\\mbfscrK" : "𝓚",
- "\\mbfscrL" : "𝓛",
- "\\mbfscrM" : "𝓜",
- "\\mbfscrN" : "𝓝",
- "\\mbfscrO" : "𝓞",
- "\\mbfscrP" : "𝓟",
- "\\mbfscrQ" : "𝓠",
- "\\mbfscrR" : "𝓡",
- "\\mbfscrS" : "𝓢",
- "\\mbfscrT" : "𝓣",
- "\\mbfscrU" : "𝓤",
- "\\mbfscrV" : "𝓥",
- "\\mbfscrW" : "𝓦",
- "\\mbfscrX" : "𝓧",
- "\\mbfscrY" : "𝓨",
- "\\mbfscrZ" : "𝓩",
- "\\mbfscra" : "𝓪",
- "\\mbfscrb" : "𝓫",
- "\\mbfscrc" : "𝓬",
- "\\mbfscrd" : "𝓭",
- "\\mbfscre" : "𝓮",
- "\\mbfscrf" : "𝓯",
- "\\mbfscrg" : "𝓰",
- "\\mbfscrh" : "𝓱",
- "\\mbfscri" : "𝓲",
- "\\mbfscrj" : "𝓳",
- "\\mbfscrk" : "𝓴",
- "\\mbfscrl" : "𝓵",
- "\\mbfscrm" : "𝓶",
- "\\mbfscrn" : "𝓷",
- "\\mbfscro" : "𝓸",
- "\\mbfscrp" : "𝓹",
- "\\mbfscrq" : "𝓺",
- "\\mbfscrr" : "𝓻",
- "\\mbfscrs" : "𝓼",
- "\\mbfscrt" : "𝓽",
- "\\mbfscru" : "𝓾",
- "\\mbfscrv" : "𝓿",
- "\\mbfscrw" : "𝔀",
- "\\mbfscrx" : "𝔁",
- "\\mbfscry" : "𝔂",
- "\\mbfscrz" : "𝔃",
- "\\mfrakA" : "𝔄",
- "\\mfrakB" : "𝔅",
- "\\mfrakD" : "𝔇",
- "\\mfrakE" : "𝔈",
- "\\mfrakF" : "𝔉",
- "\\mfrakG" : "𝔊",
- "\\mfrakJ" : "𝔍",
- "\\mfrakK" : "𝔎",
- "\\mfrakL" : "𝔏",
- "\\mfrakM" : "𝔐",
- "\\mfrakN" : "𝔑",
- "\\mfrakO" : "𝔒",
- "\\mfrakP" : "𝔓",
- "\\mfrakQ" : "𝔔",
- "\\mfrakS" : "𝔖",
- "\\mfrakT" : "𝔗",
- "\\mfrakU" : "𝔘",
- "\\mfrakV" : "𝔙",
- "\\mfrakW" : "𝔚",
- "\\mfrakX" : "𝔛",
- "\\mfrakY" : "𝔜",
- "\\mfraka" : "𝔞",
- "\\mfrakb" : "𝔟",
- "\\mfrakc" : "𝔠",
- "\\mfrakd" : "𝔡",
- "\\mfrake" : "𝔢",
- "\\mfrakf" : "𝔣",
- "\\mfrakg" : "𝔤",
- "\\mfrakh" : "𝔥",
- "\\mfraki" : "𝔦",
- "\\mfrakj" : "𝔧",
- "\\mfrakk" : "𝔨",
- "\\mfrakl" : "𝔩",
- "\\mfrakm" : "𝔪",
- "\\mfrakn" : "𝔫",
- "\\mfrako" : "𝔬",
- "\\mfrakp" : "𝔭",
- "\\mfrakq" : "𝔮",
- "\\mfrakr" : "𝔯",
- "\\mfraks" : "𝔰",
- "\\mfrakt" : "𝔱",
- "\\mfraku" : "𝔲",
- "\\mfrakv" : "𝔳",
- "\\mfrakw" : "𝔴",
- "\\mfrakx" : "𝔵",
- "\\mfraky" : "𝔶",
- "\\mfrakz" : "𝔷",
- "\\BbbA" : "𝔸",
- "\\BbbB" : "𝔹",
- "\\BbbD" : "𝔻",
- "\\BbbE" : "𝔼",
- "\\BbbF" : "𝔽",
- "\\BbbG" : "𝔾",
- "\\BbbI" : "𝕀",
- "\\BbbJ" : "𝕁",
- "\\BbbK" : "𝕂",
- "\\BbbL" : "𝕃",
- "\\BbbM" : "𝕄",
- "\\BbbO" : "𝕆",
- "\\BbbS" : "𝕊",
- "\\BbbT" : "𝕋",
- "\\BbbU" : "𝕌",
- "\\BbbV" : "𝕍",
- "\\BbbW" : "𝕎",
- "\\BbbX" : "𝕏",
- "\\BbbY" : "𝕐",
- "\\Bbba" : "𝕒",
- "\\Bbbb" : "𝕓",
- "\\Bbbc" : "𝕔",
- "\\Bbbd" : "𝕕",
- "\\Bbbe" : "𝕖",
- "\\Bbbf" : "𝕗",
- "\\Bbbg" : "𝕘",
- "\\Bbbh" : "𝕙",
- "\\Bbbi" : "𝕚",
- "\\Bbbj" : "𝕛",
- "\\Bbbk" : "𝕜",
- "\\Bbbl" : "𝕝",
- "\\Bbbm" : "𝕞",
- "\\Bbbn" : "𝕟",
- "\\Bbbo" : "𝕠",
- "\\Bbbp" : "𝕡",
- "\\Bbbq" : "𝕢",
- "\\Bbbr" : "𝕣",
- "\\Bbbs" : "𝕤",
- "\\Bbbt" : "𝕥",
- "\\Bbbu" : "𝕦",
- "\\Bbbv" : "𝕧",
- "\\Bbbw" : "𝕨",
- "\\Bbbx" : "𝕩",
- "\\Bbby" : "𝕪",
- "\\Bbbz" : "𝕫",
- "\\mbffrakA" : "𝕬",
- "\\mbffrakB" : "𝕭",
- "\\mbffrakC" : "𝕮",
- "\\mbffrakD" : "𝕯",
- "\\mbffrakE" : "𝕰",
- "\\mbffrakF" : "𝕱",
- "\\mbffrakG" : "𝕲",
- "\\mbffrakH" : "𝕳",
- "\\mbffrakI" : "𝕴",
- "\\mbffrakJ" : "𝕵",
- "\\mbffrakK" : "𝕶",
- "\\mbffrakL" : "𝕷",
- "\\mbffrakM" : "𝕸",
- "\\mbffrakN" : "𝕹",
- "\\mbffrakO" : "𝕺",
- "\\mbffrakP" : "𝕻",
- "\\mbffrakQ" : "𝕼",
- "\\mbffrakR" : "𝕽",
- "\\mbffrakS" : "𝕾",
- "\\mbffrakT" : "𝕿",
- "\\mbffrakU" : "𝖀",
- "\\mbffrakV" : "𝖁",
- "\\mbffrakW" : "𝖂",
- "\\mbffrakX" : "𝖃",
- "\\mbffrakY" : "𝖄",
- "\\mbffrakZ" : "𝖅",
- "\\mbffraka" : "𝖆",
- "\\mbffrakb" : "𝖇",
- "\\mbffrakc" : "𝖈",
- "\\mbffrakd" : "𝖉",
- "\\mbffrake" : "𝖊",
- "\\mbffrakf" : "𝖋",
- "\\mbffrakg" : "𝖌",
- "\\mbffrakh" : "𝖍",
- "\\mbffraki" : "𝖎",
- "\\mbffrakj" : "𝖏",
- "\\mbffrakk" : "𝖐",
- "\\mbffrakl" : "𝖑",
- "\\mbffrakm" : "𝖒",
- "\\mbffrakn" : "𝖓",
- "\\mbffrako" : "𝖔",
- "\\mbffrakp" : "𝖕",
- "\\mbffrakq" : "𝖖",
- "\\mbffrakr" : "𝖗",
- "\\mbffraks" : "𝖘",
- "\\mbffrakt" : "𝖙",
- "\\mbffraku" : "𝖚",
- "\\mbffrakv" : "𝖛",
- "\\mbffrakw" : "𝖜",
- "\\mbffrakx" : "𝖝",
- "\\mbffraky" : "𝖞",
- "\\mbffrakz" : "𝖟",
- "\\msansA" : "𝖠",
- "\\msansB" : "𝖡",
- "\\msansC" : "𝖢",
- "\\msansD" : "𝖣",
- "\\msansE" : "𝖤",
- "\\msansF" : "𝖥",
- "\\msansG" : "𝖦",
- "\\msansH" : "𝖧",
- "\\msansI" : "𝖨",
- "\\msansJ" : "𝖩",
- "\\msansK" : "𝖪",
- "\\msansL" : "𝖫",
- "\\msansM" : "𝖬",
- "\\msansN" : "𝖭",
- "\\msansO" : "𝖮",
- "\\msansP" : "𝖯",
- "\\msansQ" : "𝖰",
- "\\msansR" : "𝖱",
- "\\msansS" : "𝖲",
- "\\msansT" : "𝖳",
- "\\msansU" : "𝖴",
- "\\msansV" : "𝖵",
- "\\msansW" : "𝖶",
- "\\msansX" : "𝖷",
- "\\msansY" : "𝖸",
- "\\msansZ" : "𝖹",
- "\\msansa" : "𝖺",
- "\\msansb" : "𝖻",
- "\\msansc" : "𝖼",
- "\\msansd" : "𝖽",
- "\\msanse" : "𝖾",
- "\\msansf" : "𝖿",
- "\\msansg" : "𝗀",
- "\\msansh" : "𝗁",
- "\\msansi" : "𝗂",
- "\\msansj" : "𝗃",
- "\\msansk" : "𝗄",
- "\\msansl" : "𝗅",
- "\\msansm" : "𝗆",
- "\\msansn" : "𝗇",
- "\\msanso" : "𝗈",
- "\\msansp" : "𝗉",
- "\\msansq" : "𝗊",
- "\\msansr" : "𝗋",
- "\\msanss" : "𝗌",
- "\\msanst" : "𝗍",
- "\\msansu" : "𝗎",
- "\\msansv" : "𝗏",
- "\\msansw" : "𝗐",
- "\\msansx" : "𝗑",
- "\\msansy" : "𝗒",
- "\\msansz" : "𝗓",
- "\\mbfsansA" : "𝗔",
- "\\mbfsansB" : "𝗕",
- "\\mbfsansC" : "𝗖",
- "\\mbfsansD" : "𝗗",
- "\\mbfsansE" : "𝗘",
- "\\mbfsansF" : "𝗙",
- "\\mbfsansG" : "𝗚",
- "\\mbfsansH" : "𝗛",
- "\\mbfsansI" : "𝗜",
- "\\mbfsansJ" : "𝗝",
- "\\mbfsansK" : "𝗞",
- "\\mbfsansL" : "𝗟",
- "\\mbfsansM" : "𝗠",
- "\\mbfsansN" : "𝗡",
- "\\mbfsansO" : "𝗢",
- "\\mbfsansP" : "𝗣",
- "\\mbfsansQ" : "𝗤",
- "\\mbfsansR" : "𝗥",
- "\\mbfsansS" : "𝗦",
- "\\mbfsansT" : "𝗧",
- "\\mbfsansU" : "𝗨",
- "\\mbfsansV" : "𝗩",
- "\\mbfsansW" : "𝗪",
- "\\mbfsansX" : "𝗫",
- "\\mbfsansY" : "𝗬",
- "\\mbfsansZ" : "𝗭",
- "\\mbfsansa" : "𝗮",
- "\\mbfsansb" : "𝗯",
- "\\mbfsansc" : "𝗰",
- "\\mbfsansd" : "𝗱",
- "\\mbfsanse" : "𝗲",
- "\\mbfsansf" : "𝗳",
- "\\mbfsansg" : "𝗴",
- "\\mbfsansh" : "𝗵",
- "\\mbfsansi" : "𝗶",
- "\\mbfsansj" : "𝗷",
- "\\mbfsansk" : "𝗸",
- "\\mbfsansl" : "𝗹",
- "\\mbfsansm" : "𝗺",
- "\\mbfsansn" : "𝗻",
- "\\mbfsanso" : "𝗼",
- "\\mbfsansp" : "𝗽",
- "\\mbfsansq" : "𝗾",
- "\\mbfsansr" : "𝗿",
- "\\mbfsanss" : "𝘀",
- "\\mbfsanst" : "𝘁",
- "\\mbfsansu" : "𝘂",
- "\\mbfsansv" : "𝘃",
- "\\mbfsansw" : "𝘄",
- "\\mbfsansx" : "𝘅",
- "\\mbfsansy" : "𝘆",
- "\\mbfsansz" : "𝘇",
- "\\mitsansA" : "𝘈",
- "\\mitsansB" : "𝘉",
- "\\mitsansC" : "𝘊",
- "\\mitsansD" : "𝘋",
- "\\mitsansE" : "𝘌",
- "\\mitsansF" : "𝘍",
- "\\mitsansG" : "𝘎",
- "\\mitsansH" : "𝘏",
- "\\mitsansI" : "𝘐",
- "\\mitsansJ" : "𝘑",
- "\\mitsansK" : "𝘒",
- "\\mitsansL" : "𝘓",
- "\\mitsansM" : "𝘔",
- "\\mitsansN" : "𝘕",
- "\\mitsansO" : "𝘖",
- "\\mitsansP" : "𝘗",
- "\\mitsansQ" : "𝘘",
- "\\mitsansR" : "𝘙",
- "\\mitsansS" : "𝘚",
- "\\mitsansT" : "𝘛",
- "\\mitsansU" : "𝘜",
- "\\mitsansV" : "𝘝",
- "\\mitsansW" : "𝘞",
- "\\mitsansX" : "𝘟",
- "\\mitsansY" : "𝘠",
- "\\mitsansZ" : "𝘡",
- "\\mitsansa" : "𝘢",
- "\\mitsansb" : "𝘣",
- "\\mitsansc" : "𝘤",
- "\\mitsansd" : "𝘥",
- "\\mitsanse" : "𝘦",
- "\\mitsansf" : "𝘧",
- "\\mitsansg" : "𝘨",
- "\\mitsansh" : "𝘩",
- "\\mitsansi" : "𝘪",
- "\\mitsansj" : "𝘫",
- "\\mitsansk" : "𝘬",
- "\\mitsansl" : "𝘭",
- "\\mitsansm" : "𝘮",
- "\\mitsansn" : "𝘯",
- "\\mitsanso" : "𝘰",
- "\\mitsansp" : "𝘱",
- "\\mitsansq" : "𝘲",
- "\\mitsansr" : "𝘳",
- "\\mitsanss" : "𝘴",
- "\\mitsanst" : "𝘵",
- "\\mitsansu" : "𝘶",
- "\\mitsansv" : "𝘷",
- "\\mitsansw" : "𝘸",
- "\\mitsansx" : "𝘹",
- "\\mitsansy" : "𝘺",
- "\\mitsansz" : "𝘻",
- "\\mbfitsansA" : "𝘼",
- "\\mbfitsansB" : "𝘽",
- "\\mbfitsansC" : "𝘾",
- "\\mbfitsansD" : "𝘿",
- "\\mbfitsansE" : "𝙀",
- "\\mbfitsansF" : "𝙁",
- "\\mbfitsansG" : "𝙂",
- "\\mbfitsansH" : "𝙃",
- "\\mbfitsansI" : "𝙄",
- "\\mbfitsansJ" : "𝙅",
- "\\mbfitsansK" : "𝙆",
- "\\mbfitsansL" : "𝙇",
- "\\mbfitsansM" : "𝙈",
- "\\mbfitsansN" : "𝙉",
- "\\mbfitsansO" : "𝙊",
- "\\mbfitsansP" : "𝙋",
- "\\mbfitsansQ" : "𝙌",
- "\\mbfitsansR" : "𝙍",
- "\\mbfitsansS" : "𝙎",
- "\\mbfitsansT" : "𝙏",
- "\\mbfitsansU" : "𝙐",
- "\\mbfitsansV" : "𝙑",
- "\\mbfitsansW" : "𝙒",
- "\\mbfitsansX" : "𝙓",
- "\\mbfitsansY" : "𝙔",
- "\\mbfitsansZ" : "𝙕",
- "\\mbfitsansa" : "𝙖",
- "\\mbfitsansb" : "𝙗",
- "\\mbfitsansc" : "𝙘",
- "\\mbfitsansd" : "𝙙",
- "\\mbfitsanse" : "𝙚",
- "\\mbfitsansf" : "𝙛",
- "\\mbfitsansg" : "𝙜",
- "\\mbfitsansh" : "𝙝",
- "\\mbfitsansi" : "𝙞",
- "\\mbfitsansj" : "𝙟",
- "\\mbfitsansk" : "𝙠",
- "\\mbfitsansl" : "𝙡",
- "\\mbfitsansm" : "𝙢",
- "\\mbfitsansn" : "𝙣",
- "\\mbfitsanso" : "𝙤",
- "\\mbfitsansp" : "𝙥",
- "\\mbfitsansq" : "𝙦",
- "\\mbfitsansr" : "𝙧",
- "\\mbfitsanss" : "𝙨",
- "\\mbfitsanst" : "𝙩",
- "\\mbfitsansu" : "𝙪",
- "\\mbfitsansv" : "𝙫",
- "\\mbfitsansw" : "𝙬",
- "\\mbfitsansx" : "𝙭",
- "\\mbfitsansy" : "𝙮",
- "\\mbfitsansz" : "𝙯",
- "\\mttA" : "𝙰",
- "\\mttB" : "𝙱",
- "\\mttC" : "𝙲",
- "\\mttD" : "𝙳",
- "\\mttE" : "𝙴",
- "\\mttF" : "𝙵",
- "\\mttG" : "𝙶",
- "\\mttH" : "𝙷",
- "\\mttI" : "𝙸",
- "\\mttJ" : "𝙹",
- "\\mttK" : "𝙺",
- "\\mttL" : "𝙻",
- "\\mttM" : "𝙼",
- "\\mttN" : "𝙽",
- "\\mttO" : "𝙾",
- "\\mttP" : "𝙿",
- "\\mttQ" : "𝚀",
- "\\mttR" : "𝚁",
- "\\mttS" : "𝚂",
- "\\mttT" : "𝚃",
- "\\mttU" : "𝚄",
- "\\mttV" : "𝚅",
- "\\mttW" : "𝚆",
- "\\mttX" : "𝚇",
- "\\mttY" : "𝚈",
- "\\mttZ" : "𝚉",
- "\\mtta" : "𝚊",
- "\\mttb" : "𝚋",
- "\\mttc" : "𝚌",
- "\\mttd" : "𝚍",
- "\\mtte" : "𝚎",
- "\\mttf" : "𝚏",
- "\\mttg" : "𝚐",
- "\\mtth" : "𝚑",
- "\\mtti" : "𝚒",
- "\\mttj" : "𝚓",
- "\\mttk" : "𝚔",
- "\\mttl" : "𝚕",
- "\\mttm" : "𝚖",
- "\\mttn" : "𝚗",
- "\\mtto" : "𝚘",
- "\\mttp" : "𝚙",
- "\\mttq" : "𝚚",
- "\\mttr" : "𝚛",
- "\\mtts" : "𝚜",
- "\\mttt" : "𝚝",
- "\\mttu" : "𝚞",
- "\\mttv" : "𝚟",
- "\\mttw" : "𝚠",
- "\\mttx" : "𝚡",
- "\\mtty" : "𝚢",
- "\\mttz" : "𝚣",
- "\\mbfAlpha" : "𝚨",
- "\\mbfBeta" : "𝚩",
- "\\mbfGamma" : "𝚪",
- "\\mbfDelta" : "𝚫",
- "\\mbfEpsilon" : "𝚬",
- "\\mbfZeta" : "𝚭",
- "\\mbfEta" : "𝚮",
- "\\mbfTheta" : "𝚯",
- "\\mbfIota" : "𝚰",
- "\\mbfKappa" : "𝚱",
- "\\mbfLambda" : "𝚲",
- "\\mbfMu" : "𝚳",
- "\\mbfNu" : "𝚴",
- "\\mbfXi" : "𝚵",
- "\\mbfOmicron" : "𝚶",
- "\\mbfPi" : "𝚷",
- "\\mbfRho" : "𝚸",
- "\\mbfvarTheta" : "𝚹",
- "\\mbfSigma" : "𝚺",
- "\\mbfTau" : "𝚻",
- "\\mbfUpsilon" : "𝚼",
- "\\mbfPhi" : "𝚽",
- "\\mbfChi" : "𝚾",
- "\\mbfPsi" : "𝚿",
- "\\mbfOmega" : "𝛀",
- "\\mbfalpha" : "𝛂",
- "\\mbfbeta" : "𝛃",
- "\\mbfgamma" : "𝛄",
- "\\mbfdelta" : "𝛅",
- "\\mbfepsilon" : "𝛆",
- "\\mbfzeta" : "𝛇",
- "\\mbfeta" : "𝛈",
- "\\mbftheta" : "𝛉",
- "\\mbfiota" : "𝛊",
- "\\mbfkappa" : "𝛋",
- "\\mbflambda" : "𝛌",
- "\\mbfmu" : "𝛍",
- "\\mbfnu" : "𝛎",
- "\\mbfxi" : "𝛏",
- "\\mbfomicron" : "𝛐",
- "\\mbfpi" : "𝛑",
- "\\mbfrho" : "𝛒",
- "\\mbfvarsigma" : "𝛓",
- "\\mbfsigma" : "𝛔",
- "\\mbftau" : "𝛕",
- "\\mbfupsilon" : "𝛖",
- "\\mbfvarphi" : "𝛗",
- "\\mbfchi" : "𝛘",
- "\\mbfpsi" : "𝛙",
- "\\mbfomega" : "𝛚",
- "\\mbfvarepsilon" : "𝛜",
- "\\mbfvartheta" : "𝛝",
- "\\mbfvarkappa" : "𝛞",
- "\\mbfphi" : "𝛟",
- "\\mbfvarrho" : "𝛠",
- "\\mbfvarpi" : "𝛡",
- "\\mitAlpha" : "𝛢",
- "\\mitBeta" : "𝛣",
- "\\mitGamma" : "𝛤",
- "\\mitDelta" : "𝛥",
- "\\mitEpsilon" : "𝛦",
- "\\mitZeta" : "𝛧",
- "\\mitEta" : "𝛨",
- "\\mitTheta" : "𝛩",
- "\\mitIota" : "𝛪",
- "\\mitKappa" : "𝛫",
- "\\mitLambda" : "𝛬",
- "\\mitMu" : "𝛭",
- "\\mitNu" : "𝛮",
- "\\mitXi" : "𝛯",
- "\\mitOmicron" : "𝛰",
- "\\mitPi" : "𝛱",
- "\\mitRho" : "𝛲",
- "\\mitvarTheta" : "𝛳",
- "\\mitSigma" : "𝛴",
- "\\mitTau" : "𝛵",
- "\\mitUpsilon" : "𝛶",
- "\\mitPhi" : "𝛷",
- "\\mitChi" : "𝛸",
- "\\mitPsi" : "𝛹",
- "\\mitOmega" : "𝛺",
- "\\mitalpha" : "𝛼",
- "\\mitbeta" : "𝛽",
- "\\mitgamma" : "𝛾",
- "\\mitdelta" : "𝛿",
- "\\mitepsilon" : "𝜀",
- "\\mitzeta" : "𝜁",
- "\\miteta" : "𝜂",
- "\\mittheta" : "𝜃",
- "\\mitiota" : "𝜄",
- "\\mitkappa" : "𝜅",
- "\\mitlambda" : "𝜆",
- "\\mitmu" : "𝜇",
- "\\mitnu" : "𝜈",
- "\\mitxi" : "𝜉",
- "\\mitomicron" : "𝜊",
- "\\mitpi" : "𝜋",
- "\\mitrho" : "𝜌",
- "\\mitvarsigma" : "𝜍",
- "\\mitsigma" : "𝜎",
- "\\mittau" : "𝜏",
- "\\mitupsilon" : "𝜐",
- "\\mitphi" : "𝜑",
- "\\mitchi" : "𝜒",
- "\\mitpsi" : "𝜓",
- "\\mitomega" : "𝜔",
- "\\mitvarepsilon" : "𝜖",
- "\\mitvartheta" : "𝜗",
- "\\mitvarkappa" : "𝜘",
- "\\mitvarphi" : "𝜙",
- "\\mitvarrho" : "𝜚",
- "\\mitvarpi" : "𝜛",
- "\\mbfitAlpha" : "𝜜",
- "\\mbfitBeta" : "𝜝",
- "\\mbfitGamma" : "𝜞",
- "\\mbfitDelta" : "𝜟",
- "\\mbfitEpsilon" : "𝜠",
- "\\mbfitZeta" : "𝜡",
- "\\mbfitEta" : "𝜢",
- "\\mbfitTheta" : "𝜣",
- "\\mbfitIota" : "𝜤",
- "\\mbfitKappa" : "𝜥",
- "\\mbfitLambda" : "𝜦",
- "\\mbfitMu" : "𝜧",
- "\\mbfitNu" : "𝜨",
- "\\mbfitXi" : "𝜩",
- "\\mbfitOmicron" : "𝜪",
- "\\mbfitPi" : "𝜫",
- "\\mbfitRho" : "𝜬",
- "\\mbfitvarTheta" : "𝜭",
- "\\mbfitSigma" : "𝜮",
- "\\mbfitTau" : "𝜯",
- "\\mbfitUpsilon" : "𝜰",
- "\\mbfitPhi" : "𝜱",
- "\\mbfitChi" : "𝜲",
- "\\mbfitPsi" : "𝜳",
- "\\mbfitOmega" : "𝜴",
- "\\mbfitalpha" : "𝜶",
- "\\mbfitbeta" : "𝜷",
- "\\mbfitgamma" : "𝜸",
- "\\mbfitdelta" : "𝜹",
- "\\mbfitepsilon" : "𝜺",
- "\\mbfitzeta" : "𝜻",
- "\\mbfiteta" : "𝜼",
- "\\mbfittheta" : "𝜽",
- "\\mbfitiota" : "𝜾",
- "\\mbfitkappa" : "𝜿",
- "\\mbfitlambda" : "𝝀",
- "\\mbfitmu" : "𝝁",
- "\\mbfitnu" : "𝝂",
- "\\mbfitxi" : "𝝃",
- "\\mbfitomicron" : "𝝄",
- "\\mbfitpi" : "𝝅",
- "\\mbfitrho" : "𝝆",
- "\\mbfitvarsigma" : "𝝇",
- "\\mbfitsigma" : "𝝈",
- "\\mbfittau" : "𝝉",
- "\\mbfitupsilon" : "𝝊",
- "\\mbfitphi" : "𝝋",
- "\\mbfitchi" : "𝝌",
- "\\mbfitpsi" : "𝝍",
- "\\mbfitomega" : "𝝎",
- "\\mbfitvarepsilon" : "𝝐",
- "\\mbfitvartheta" : "𝝑",
- "\\mbfitvarkappa" : "𝝒",
- "\\mbfitvarphi" : "𝝓",
- "\\mbfitvarrho" : "𝝔",
- "\\mbfitvarpi" : "𝝕",
- "\\mbfsansAlpha" : "𝝖",
- "\\mbfsansBeta" : "𝝗",
- "\\mbfsansGamma" : "𝝘",
- "\\mbfsansDelta" : "𝝙",
- "\\mbfsansEpsilon" : "𝝚",
- "\\mbfsansZeta" : "𝝛",
- "\\mbfsansEta" : "𝝜",
- "\\mbfsansTheta" : "𝝝",
- "\\mbfsansIota" : "𝝞",
- "\\mbfsansKappa" : "𝝟",
- "\\mbfsansLambda" : "𝝠",
- "\\mbfsansMu" : "𝝡",
- "\\mbfsansNu" : "𝝢",
- "\\mbfsansXi" : "𝝣",
- "\\mbfsansOmicron" : "𝝤",
- "\\mbfsansPi" : "𝝥",
- "\\mbfsansRho" : "𝝦",
- "\\mbfsansvarTheta" : "𝝧",
- "\\mbfsansSigma" : "𝝨",
- "\\mbfsansTau" : "𝝩",
- "\\mbfsansUpsilon" : "𝝪",
- "\\mbfsansPhi" : "𝝫",
- "\\mbfsansChi" : "𝝬",
- "\\mbfsansPsi" : "𝝭",
- "\\mbfsansOmega" : "𝝮",
- "\\mbfsansalpha" : "𝝰",
- "\\mbfsansbeta" : "𝝱",
- "\\mbfsansgamma" : "𝝲",
- "\\mbfsansdelta" : "𝝳",
- "\\mbfsansepsilon" : "𝝴",
- "\\mbfsanszeta" : "𝝵",
- "\\mbfsanseta" : "𝝶",
- "\\mbfsanstheta" : "𝝷",
- "\\mbfsansiota" : "𝝸",
- "\\mbfsanskappa" : "𝝹",
- "\\mbfsanslambda" : "𝝺",
- "\\mbfsansmu" : "𝝻",
- "\\mbfsansnu" : "𝝼",
- "\\mbfsansxi" : "𝝽",
- "\\mbfsansomicron" : "𝝾",
- "\\mbfsanspi" : "𝝿",
- "\\mbfsansrho" : "𝞀",
- "\\mbfsansvarsigma" : "𝞁",
- "\\mbfsanssigma" : "𝞂",
- "\\mbfsanstau" : "𝞃",
- "\\mbfsansupsilon" : "𝞄",
- "\\mbfsansphi" : "𝞅",
- "\\mbfsanschi" : "𝞆",
- "\\mbfsanspsi" : "𝞇",
- "\\mbfsansomega" : "𝞈",
- "\\mbfsansvarepsilon" : "𝞊",
- "\\mbfsansvartheta" : "𝞋",
- "\\mbfsansvarkappa" : "𝞌",
- "\\mbfsansvarphi" : "𝞍",
- "\\mbfsansvarrho" : "𝞎",
- "\\mbfsansvarpi" : "𝞏",
- "\\mbfitsansAlpha" : "𝞐",
- "\\mbfitsansBeta" : "𝞑",
- "\\mbfitsansGamma" : "𝞒",
- "\\mbfitsansDelta" : "𝞓",
- "\\mbfitsansEpsilon" : "𝞔",
- "\\mbfitsansZeta" : "𝞕",
- "\\mbfitsansEta" : "𝞖",
- "\\mbfitsansTheta" : "𝞗",
- "\\mbfitsansIota" : "𝞘",
- "\\mbfitsansKappa" : "𝞙",
- "\\mbfitsansLambda" : "𝞚",
- "\\mbfitsansMu" : "𝞛",
- "\\mbfitsansNu" : "𝞜",
- "\\mbfitsansXi" : "𝞝",
- "\\mbfitsansOmicron" : "𝞞",
- "\\mbfitsansPi" : "𝞟",
- "\\mbfitsansRho" : "𝞠",
- "\\mbfitsansvarTheta" : "𝞡",
- "\\mbfitsansSigma" : "𝞢",
- "\\mbfitsansTau" : "𝞣",
- "\\mbfitsansUpsilon" : "𝞤",
- "\\mbfitsansPhi" : "𝞥",
- "\\mbfitsansChi" : "𝞦",
- "\\mbfitsansPsi" : "𝞧",
- "\\mbfitsansOmega" : "𝞨",
- "\\mbfitsansalpha" : "𝞪",
- "\\mbfitsansbeta" : "𝞫",
- "\\mbfitsansgamma" : "𝞬",
- "\\mbfitsansdelta" : "𝞭",
- "\\mbfitsansepsilon" : "𝞮",
- "\\mbfitsanszeta" : "𝞯",
- "\\mbfitsanseta" : "𝞰",
- "\\mbfitsanstheta" : "𝞱",
- "\\mbfitsansiota" : "𝞲",
- "\\mbfitsanskappa" : "𝞳",
- "\\mbfitsanslambda" : "𝞴",
- "\\mbfitsansmu" : "𝞵",
- "\\mbfitsansnu" : "𝞶",
- "\\mbfitsansxi" : "𝞷",
- "\\mbfitsansomicron" : "𝞸",
- "\\mbfitsanspi" : "𝞹",
- "\\mbfitsansrho" : "𝞺",
- "\\mbfitsansvarsigma" : "𝞻",
- "\\mbfitsanssigma" : "𝞼",
- "\\mbfitsanstau" : "𝞽",
- "\\mbfitsansupsilon" : "𝞾",
- "\\mbfitsansphi" : "𝞿",
- "\\mbfitsanschi" : "𝟀",
- "\\mbfitsanspsi" : "𝟁",
- "\\mbfitsansomega" : "𝟂",
- "\\mbfitsansvarepsilon" : "𝟄",
- "\\mbfitsansvartheta" : "𝟅",
- "\\mbfitsansvarkappa" : "𝟆",
- "\\mbfitsansvarphi" : "𝟇",
- "\\mbfitsansvarrho" : "𝟈",
- "\\mbfitsansvarpi" : "𝟉",
- "\\mbfzero" : "𝟎",
- "\\mbfone" : "𝟏",
- "\\mbftwo" : "𝟐",
- "\\mbfthree" : "𝟑",
- "\\mbffour" : "𝟒",
- "\\mbffive" : "𝟓",
- "\\mbfsix" : "𝟔",
- "\\mbfseven" : "𝟕",
- "\\mbfeight" : "𝟖",
- "\\mbfnine" : "𝟗",
- "\\Bbbzero" : "𝟘",
- "\\Bbbone" : "𝟙",
- "\\Bbbtwo" : "𝟚",
- "\\Bbbthree" : "𝟛",
- "\\Bbbfour" : "𝟜",
- "\\Bbbfive" : "𝟝",
- "\\Bbbsix" : "𝟞",
- "\\Bbbseven" : "𝟟",
- "\\Bbbeight" : "𝟠",
- "\\Bbbnine" : "𝟡",
- "\\msanszero" : "𝟢",
- "\\msansone" : "𝟣",
- "\\msanstwo" : "𝟤",
- "\\msansthree" : "𝟥",
- "\\msansfour" : "𝟦",
- "\\msansfive" : "𝟧",
- "\\msanssix" : "𝟨",
- "\\msansseven" : "𝟩",
- "\\msanseight" : "𝟪",
- "\\msansnine" : "𝟫",
- "\\mbfsanszero" : "𝟬",
- "\\mbfsansone" : "𝟭",
- "\\mbfsanstwo" : "𝟮",
- "\\mbfsansthree" : "𝟯",
- "\\mbfsansfour" : "𝟰",
- "\\mbfsansfive" : "𝟱",
- "\\mbfsanssix" : "𝟲",
- "\\mbfsansseven" : "𝟳",
- "\\mbfsanseight" : "𝟴",
- "\\mbfsansnine" : "𝟵",
- "\\mttzero" : "𝟶",
- "\\mttone" : "𝟷",
- "\\mtttwo" : "𝟸",
- "\\mttthree" : "𝟹",
- "\\mttfour" : "𝟺",
- "\\mttfive" : "𝟻",
- "\\mttsix" : "𝟼",
- "\\mttseven" : "𝟽",
- "\\mtteight" : "𝟾",
- "\\mttnine" : "𝟿",
-}
-
-
-reverse_latex_symbol = { v:k for k,v in latex_symbols.items()}
+# encoding: utf-8
+
+# DO NOT EDIT THIS FILE BY HAND.
+
+# To update this file, run the script /tools/gen_latex_symbols.py using Python 3
+
+# This file is autogenerated from the file:
+# https://raw.githubusercontent.com/JuliaLang/julia/master/base/latex_symbols.jl
+# This original list is filtered to remove any unicode characters that are not valid
+# Python identifiers.
+
+latex_symbols = {
+
+ "\\^a" : "ᵃ",
+ "\\^b" : "ᵇ",
+ "\\^c" : "ᶜ",
+ "\\^d" : "ᵈ",
+ "\\^e" : "ᵉ",
+ "\\^f" : "ᶠ",
+ "\\^g" : "ᵍ",
+ "\\^h" : "ʰ",
+ "\\^i" : "ⁱ",
+ "\\^j" : "ʲ",
+ "\\^k" : "ᵏ",
+ "\\^l" : "ˡ",
+ "\\^m" : "ᵐ",
+ "\\^n" : "ⁿ",
+ "\\^o" : "ᵒ",
+ "\\^p" : "ᵖ",
+ "\\^r" : "ʳ",
+ "\\^s" : "ˢ",
+ "\\^t" : "ᵗ",
+ "\\^u" : "ᵘ",
+ "\\^v" : "ᵛ",
+ "\\^w" : "ʷ",
+ "\\^x" : "ˣ",
+ "\\^y" : "ʸ",
+ "\\^z" : "ᶻ",
+ "\\^A" : "ᴬ",
+ "\\^B" : "ᴮ",
+ "\\^D" : "ᴰ",
+ "\\^E" : "ᴱ",
+ "\\^G" : "ᴳ",
+ "\\^H" : "ᴴ",
+ "\\^I" : "ᴵ",
+ "\\^J" : "ᴶ",
+ "\\^K" : "ᴷ",
+ "\\^L" : "ᴸ",
+ "\\^M" : "ᴹ",
+ "\\^N" : "ᴺ",
+ "\\^O" : "ᴼ",
+ "\\^P" : "ᴾ",
+ "\\^R" : "ᴿ",
+ "\\^T" : "ᵀ",
+ "\\^U" : "ᵁ",
+ "\\^V" : "ⱽ",
+ "\\^W" : "ᵂ",
+ "\\^alpha" : "ᵅ",
+ "\\^beta" : "ᵝ",
+ "\\^gamma" : "ᵞ",
+ "\\^delta" : "ᵟ",
+ "\\^epsilon" : "ᵋ",
+ "\\^theta" : "ᶿ",
+ "\\^iota" : "ᶥ",
+ "\\^phi" : "ᵠ",
+ "\\^chi" : "ᵡ",
+ "\\^Phi" : "ᶲ",
+ "\\_a" : "ₐ",
+ "\\_e" : "ₑ",
+ "\\_h" : "ₕ",
+ "\\_i" : "ᵢ",
+ "\\_j" : "ⱼ",
+ "\\_k" : "ₖ",
+ "\\_l" : "ₗ",
+ "\\_m" : "ₘ",
+ "\\_n" : "ₙ",
+ "\\_o" : "ₒ",
+ "\\_p" : "ₚ",
+ "\\_r" : "ᵣ",
+ "\\_s" : "ₛ",
+ "\\_t" : "ₜ",
+ "\\_u" : "ᵤ",
+ "\\_v" : "ᵥ",
+ "\\_x" : "ₓ",
+ "\\_schwa" : "ₔ",
+ "\\_beta" : "ᵦ",
+ "\\_gamma" : "ᵧ",
+ "\\_rho" : "ᵨ",
+ "\\_phi" : "ᵩ",
+ "\\_chi" : "ᵪ",
+ "\\hbar" : "ħ",
+ "\\sout" : "̶",
+ "\\textordfeminine" : "ª",
+ "\\cdotp" : "·",
+ "\\textordmasculine" : "º",
+ "\\AA" : "Å",
+ "\\AE" : "Æ",
+ "\\DH" : "Ð",
+ "\\O" : "Ø",
+ "\\TH" : "Þ",
+ "\\ss" : "ß",
+ "\\aa" : "å",
+ "\\ae" : "æ",
+ "\\eth" : "ð",
+ "\\o" : "ø",
+ "\\th" : "þ",
+ "\\DJ" : "Đ",
+ "\\dj" : "đ",
+ "\\Elzxh" : "ħ",
+ "\\imath" : "ı",
+ "\\L" : "Ł",
+ "\\l" : "ł",
+ "\\NG" : "Ŋ",
+ "\\ng" : "ŋ",
+ "\\OE" : "Œ",
+ "\\oe" : "œ",
+ "\\texthvlig" : "ƕ",
+ "\\textnrleg" : "ƞ",
+ "\\textdoublepipe" : "ǂ",
+ "\\Elztrna" : "ɐ",
+ "\\Elztrnsa" : "ɒ",
+ "\\Elzopeno" : "ɔ",
+ "\\Elzrtld" : "ɖ",
+ "\\Elzschwa" : "ə",
+ "\\varepsilon" : "ɛ",
+ "\\Elzpgamma" : "ɣ",
+ "\\Elzpbgam" : "ɤ",
+ "\\Elztrnh" : "ɥ",
+ "\\Elzbtdl" : "ɬ",
+ "\\Elzrtll" : "ɭ",
+ "\\Elztrnm" : "ɯ",
+ "\\Elztrnmlr" : "ɰ",
+ "\\Elzltlmr" : "ɱ",
+ "\\Elzltln" : "ɲ",
+ "\\Elzrtln" : "ɳ",
+ "\\Elzclomeg" : "ɷ",
+ "\\textphi" : "ɸ",
+ "\\Elztrnr" : "ɹ",
+ "\\Elztrnrl" : "ɺ",
+ "\\Elzrttrnr" : "ɻ",
+ "\\Elzrl" : "ɼ",
+ "\\Elzrtlr" : "ɽ",
+ "\\Elzfhr" : "ɾ",
+ "\\Elzrtls" : "ʂ",
+ "\\Elzesh" : "ʃ",
+ "\\Elztrnt" : "ʇ",
+ "\\Elzrtlt" : "ʈ",
+ "\\Elzpupsil" : "ʊ",
+ "\\Elzpscrv" : "ʋ",
+ "\\Elzinvv" : "ʌ",
+ "\\Elzinvw" : "ʍ",
+ "\\Elztrny" : "ʎ",
+ "\\Elzrtlz" : "ʐ",
+ "\\Elzyogh" : "ʒ",
+ "\\Elzglst" : "ʔ",
+ "\\Elzreglst" : "ʕ",
+ "\\Elzinglst" : "ʖ",
+ "\\textturnk" : "ʞ",
+ "\\Elzdyogh" : "ʤ",
+ "\\Elztesh" : "ʧ",
+ "\\rasp" : "ʼ",
+ "\\textasciicaron" : "ˇ",
+ "\\Elzverts" : "ˈ",
+ "\\Elzverti" : "ˌ",
+ "\\Elzlmrk" : "ː",
+ "\\Elzhlmrk" : "ˑ",
+ "\\grave" : "̀",
+ "\\acute" : "́",
+ "\\hat" : "̂",
+ "\\tilde" : "̃",
+ "\\bar" : "̄",
+ "\\breve" : "̆",
+ "\\dot" : "̇",
+ "\\ddot" : "̈",
+ "\\ocirc" : "̊",
+ "\\H" : "̋",
+ "\\check" : "̌",
+ "\\Elzpalh" : "̡",
+ "\\Elzrh" : "̢",
+ "\\c" : "̧",
+ "\\k" : "̨",
+ "\\Elzsbbrg" : "̪",
+ "\\Elzxl" : "̵",
+ "\\Elzbar" : "̶",
+ "\\Alpha" : "Α",
+ "\\Beta" : "Β",
+ "\\Gamma" : "Γ",
+ "\\Delta" : "Δ",
+ "\\Epsilon" : "Ε",
+ "\\Zeta" : "Ζ",
+ "\\Eta" : "Η",
+ "\\Theta" : "Θ",
+ "\\Iota" : "Ι",
+ "\\Kappa" : "Κ",
+ "\\Lambda" : "Λ",
+ "\\Xi" : "Ξ",
+ "\\Pi" : "Π",
+ "\\Rho" : "Ρ",
+ "\\Sigma" : "Σ",
+ "\\Tau" : "Τ",
+ "\\Upsilon" : "Υ",
+ "\\Phi" : "Φ",
+ "\\Chi" : "Χ",
+ "\\Psi" : "Ψ",
+ "\\Omega" : "Ω",
+ "\\alpha" : "α",
+ "\\beta" : "β",
+ "\\gamma" : "γ",
+ "\\delta" : "δ",
+ "\\zeta" : "ζ",
+ "\\eta" : "η",
+ "\\theta" : "θ",
+ "\\iota" : "ι",
+ "\\kappa" : "κ",
+ "\\lambda" : "λ",
+ "\\mu" : "μ",
+ "\\nu" : "ν",
+ "\\xi" : "ξ",
+ "\\pi" : "π",
+ "\\rho" : "ρ",
+ "\\varsigma" : "ς",
+ "\\sigma" : "σ",
+ "\\tau" : "τ",
+ "\\upsilon" : "υ",
+ "\\varphi" : "φ",
+ "\\chi" : "χ",
+ "\\psi" : "ψ",
+ "\\omega" : "ω",
+ "\\vartheta" : "ϑ",
+ "\\phi" : "ϕ",
+ "\\varpi" : "ϖ",
+ "\\Stigma" : "Ϛ",
+ "\\Digamma" : "Ϝ",
+ "\\digamma" : "ϝ",
+ "\\Koppa" : "Ϟ",
+ "\\Sampi" : "Ϡ",
+ "\\varkappa" : "ϰ",
+ "\\varrho" : "ϱ",
+ "\\textTheta" : "ϴ",
+ "\\epsilon" : "ϵ",
+ "\\dddot" : "⃛",
+ "\\ddddot" : "⃜",
+ "\\hslash" : "ℏ",
+ "\\Im" : "ℑ",
+ "\\ell" : "ℓ",
+ "\\wp" : "℘",
+ "\\Re" : "ℜ",
+ "\\aleph" : "ℵ",
+ "\\beth" : "ℶ",
+ "\\gimel" : "ℷ",
+ "\\daleth" : "ℸ",
+ "\\BbbPi" : "ℿ",
+ "\\Zbar" : "Ƶ",
+ "\\overbar" : "̅",
+ "\\ovhook" : "̉",
+ "\\candra" : "̐",
+ "\\oturnedcomma" : "̒",
+ "\\ocommatopright" : "̕",
+ "\\droang" : "̚",
+ "\\wideutilde" : "̰",
+ "\\underbar" : "̱",
+ "\\not" : "̸",
+ "\\upMu" : "Μ",
+ "\\upNu" : "Ν",
+ "\\upOmicron" : "Ο",
+ "\\upepsilon" : "ε",
+ "\\upomicron" : "ο",
+ "\\upvarbeta" : "ϐ",
+ "\\upoldKoppa" : "Ϙ",
+ "\\upoldkoppa" : "ϙ",
+ "\\upstigma" : "ϛ",
+ "\\upkoppa" : "ϟ",
+ "\\upsampi" : "ϡ",
+ "\\tieconcat" : "⁀",
+ "\\leftharpoonaccent" : "⃐",
+ "\\rightharpoonaccent" : "⃑",
+ "\\vertoverlay" : "⃒",
+ "\\overleftarrow" : "⃖",
+ "\\vec" : "⃗",
+ "\\overleftrightarrow" : "⃡",
+ "\\annuity" : "⃧",
+ "\\threeunderdot" : "⃨",
+ "\\widebridgeabove" : "⃩",
+ "\\BbbC" : "ℂ",
+ "\\Eulerconst" : "ℇ",
+ "\\mscrg" : "ℊ",
+ "\\mscrH" : "ℋ",
+ "\\mfrakH" : "ℌ",
+ "\\BbbH" : "ℍ",
+ "\\Planckconst" : "ℎ",
+ "\\mscrI" : "ℐ",
+ "\\mscrL" : "ℒ",
+ "\\BbbN" : "ℕ",
+ "\\BbbP" : "ℙ",
+ "\\BbbQ" : "ℚ",
+ "\\mscrR" : "ℛ",
+ "\\BbbR" : "ℝ",
+ "\\BbbZ" : "ℤ",
+ "\\mfrakZ" : "ℨ",
+ "\\Angstrom" : "Å",
+ "\\mscrB" : "ℬ",
+ "\\mfrakC" : "ℭ",
+ "\\mscre" : "ℯ",
+ "\\mscrE" : "ℰ",
+ "\\mscrF" : "ℱ",
+ "\\Finv" : "Ⅎ",
+ "\\mscrM" : "ℳ",
+ "\\mscro" : "ℴ",
+ "\\Bbbgamma" : "ℽ",
+ "\\BbbGamma" : "ℾ",
+ "\\mitBbbD" : "ⅅ",
+ "\\mitBbbd" : "ⅆ",
+ "\\mitBbbe" : "ⅇ",
+ "\\mitBbbi" : "ⅈ",
+ "\\mitBbbj" : "ⅉ",
+ "\\mbfA" : "𝐀",
+ "\\mbfB" : "𝐁",
+ "\\mbfC" : "𝐂",
+ "\\mbfD" : "𝐃",
+ "\\mbfE" : "𝐄",
+ "\\mbfF" : "𝐅",
+ "\\mbfG" : "𝐆",
+ "\\mbfH" : "𝐇",
+ "\\mbfI" : "𝐈",
+ "\\mbfJ" : "𝐉",
+ "\\mbfK" : "𝐊",
+ "\\mbfL" : "𝐋",
+ "\\mbfM" : "𝐌",
+ "\\mbfN" : "𝐍",
+ "\\mbfO" : "𝐎",
+ "\\mbfP" : "𝐏",
+ "\\mbfQ" : "𝐐",
+ "\\mbfR" : "𝐑",
+ "\\mbfS" : "𝐒",
+ "\\mbfT" : "𝐓",
+ "\\mbfU" : "𝐔",
+ "\\mbfV" : "𝐕",
+ "\\mbfW" : "𝐖",
+ "\\mbfX" : "𝐗",
+ "\\mbfY" : "𝐘",
+ "\\mbfZ" : "𝐙",
+ "\\mbfa" : "𝐚",
+ "\\mbfb" : "𝐛",
+ "\\mbfc" : "𝐜",
+ "\\mbfd" : "𝐝",
+ "\\mbfe" : "𝐞",
+ "\\mbff" : "𝐟",
+ "\\mbfg" : "𝐠",
+ "\\mbfh" : "𝐡",
+ "\\mbfi" : "𝐢",
+ "\\mbfj" : "𝐣",
+ "\\mbfk" : "𝐤",
+ "\\mbfl" : "𝐥",
+ "\\mbfm" : "𝐦",
+ "\\mbfn" : "𝐧",
+ "\\mbfo" : "𝐨",
+ "\\mbfp" : "𝐩",
+ "\\mbfq" : "𝐪",
+ "\\mbfr" : "𝐫",
+ "\\mbfs" : "𝐬",
+ "\\mbft" : "𝐭",
+ "\\mbfu" : "𝐮",
+ "\\mbfv" : "𝐯",
+ "\\mbfw" : "𝐰",
+ "\\mbfx" : "𝐱",
+ "\\mbfy" : "𝐲",
+ "\\mbfz" : "𝐳",
+ "\\mitA" : "𝐴",
+ "\\mitB" : "𝐵",
+ "\\mitC" : "𝐶",
+ "\\mitD" : "𝐷",
+ "\\mitE" : "𝐸",
+ "\\mitF" : "𝐹",
+ "\\mitG" : "𝐺",
+ "\\mitH" : "𝐻",
+ "\\mitI" : "𝐼",
+ "\\mitJ" : "𝐽",
+ "\\mitK" : "𝐾",
+ "\\mitL" : "𝐿",
+ "\\mitM" : "𝑀",
+ "\\mitN" : "𝑁",
+ "\\mitO" : "𝑂",
+ "\\mitP" : "𝑃",
+ "\\mitQ" : "𝑄",
+ "\\mitR" : "𝑅",
+ "\\mitS" : "𝑆",
+ "\\mitT" : "𝑇",
+ "\\mitU" : "𝑈",
+ "\\mitV" : "𝑉",
+ "\\mitW" : "𝑊",
+ "\\mitX" : "𝑋",
+ "\\mitY" : "𝑌",
+ "\\mitZ" : "𝑍",
+ "\\mita" : "𝑎",
+ "\\mitb" : "𝑏",
+ "\\mitc" : "𝑐",
+ "\\mitd" : "𝑑",
+ "\\mite" : "𝑒",
+ "\\mitf" : "𝑓",
+ "\\mitg" : "𝑔",
+ "\\miti" : "𝑖",
+ "\\mitj" : "𝑗",
+ "\\mitk" : "𝑘",
+ "\\mitl" : "𝑙",
+ "\\mitm" : "𝑚",
+ "\\mitn" : "𝑛",
+ "\\mito" : "𝑜",
+ "\\mitp" : "𝑝",
+ "\\mitq" : "𝑞",
+ "\\mitr" : "𝑟",
+ "\\mits" : "𝑠",
+ "\\mitt" : "𝑡",
+ "\\mitu" : "𝑢",
+ "\\mitv" : "𝑣",
+ "\\mitw" : "𝑤",
+ "\\mitx" : "𝑥",
+ "\\mity" : "𝑦",
+ "\\mitz" : "𝑧",
+ "\\mbfitA" : "𝑨",
+ "\\mbfitB" : "𝑩",
+ "\\mbfitC" : "𝑪",
+ "\\mbfitD" : "𝑫",
+ "\\mbfitE" : "𝑬",
+ "\\mbfitF" : "𝑭",
+ "\\mbfitG" : "𝑮",
+ "\\mbfitH" : "𝑯",
+ "\\mbfitI" : "𝑰",
+ "\\mbfitJ" : "𝑱",
+ "\\mbfitK" : "𝑲",
+ "\\mbfitL" : "𝑳",
+ "\\mbfitM" : "𝑴",
+ "\\mbfitN" : "𝑵",
+ "\\mbfitO" : "𝑶",
+ "\\mbfitP" : "𝑷",
+ "\\mbfitQ" : "𝑸",
+ "\\mbfitR" : "𝑹",
+ "\\mbfitS" : "𝑺",
+ "\\mbfitT" : "𝑻",
+ "\\mbfitU" : "𝑼",
+ "\\mbfitV" : "𝑽",
+ "\\mbfitW" : "𝑾",
+ "\\mbfitX" : "𝑿",
+ "\\mbfitY" : "𝒀",
+ "\\mbfitZ" : "𝒁",
+ "\\mbfita" : "𝒂",
+ "\\mbfitb" : "𝒃",
+ "\\mbfitc" : "𝒄",
+ "\\mbfitd" : "𝒅",
+ "\\mbfite" : "𝒆",
+ "\\mbfitf" : "𝒇",
+ "\\mbfitg" : "𝒈",
+ "\\mbfith" : "𝒉",
+ "\\mbfiti" : "𝒊",
+ "\\mbfitj" : "𝒋",
+ "\\mbfitk" : "𝒌",
+ "\\mbfitl" : "𝒍",
+ "\\mbfitm" : "𝒎",
+ "\\mbfitn" : "𝒏",
+ "\\mbfito" : "𝒐",
+ "\\mbfitp" : "𝒑",
+ "\\mbfitq" : "𝒒",
+ "\\mbfitr" : "𝒓",
+ "\\mbfits" : "𝒔",
+ "\\mbfitt" : "𝒕",
+ "\\mbfitu" : "𝒖",
+ "\\mbfitv" : "𝒗",
+ "\\mbfitw" : "𝒘",
+ "\\mbfitx" : "𝒙",
+ "\\mbfity" : "𝒚",
+ "\\mbfitz" : "𝒛",
+ "\\mscrA" : "𝒜",
+ "\\mscrC" : "𝒞",
+ "\\mscrD" : "𝒟",
+ "\\mscrG" : "𝒢",
+ "\\mscrJ" : "𝒥",
+ "\\mscrK" : "𝒦",
+ "\\mscrN" : "𝒩",
+ "\\mscrO" : "𝒪",
+ "\\mscrP" : "𝒫",
+ "\\mscrQ" : "𝒬",
+ "\\mscrS" : "𝒮",
+ "\\mscrT" : "𝒯",
+ "\\mscrU" : "𝒰",
+ "\\mscrV" : "𝒱",
+ "\\mscrW" : "𝒲",
+ "\\mscrX" : "𝒳",
+ "\\mscrY" : "𝒴",
+ "\\mscrZ" : "𝒵",
+ "\\mscra" : "𝒶",
+ "\\mscrb" : "𝒷",
+ "\\mscrc" : "𝒸",
+ "\\mscrd" : "𝒹",
+ "\\mscrf" : "𝒻",
+ "\\mscrh" : "𝒽",
+ "\\mscri" : "𝒾",
+ "\\mscrj" : "𝒿",
+ "\\mscrk" : "𝓀",
+ "\\mscrm" : "𝓂",
+ "\\mscrn" : "𝓃",
+ "\\mscrp" : "𝓅",
+ "\\mscrq" : "𝓆",
+ "\\mscrr" : "𝓇",
+ "\\mscrs" : "𝓈",
+ "\\mscrt" : "𝓉",
+ "\\mscru" : "𝓊",
+ "\\mscrv" : "𝓋",
+ "\\mscrw" : "𝓌",
+ "\\mscrx" : "𝓍",
+ "\\mscry" : "𝓎",
+ "\\mscrz" : "𝓏",
+ "\\mbfscrA" : "𝓐",
+ "\\mbfscrB" : "𝓑",
+ "\\mbfscrC" : "𝓒",
+ "\\mbfscrD" : "𝓓",
+ "\\mbfscrE" : "𝓔",
+ "\\mbfscrF" : "𝓕",
+ "\\mbfscrG" : "𝓖",
+ "\\mbfscrH" : "𝓗",
+ "\\mbfscrI" : "𝓘",
+ "\\mbfscrJ" : "𝓙",
+ "\\mbfscrK" : "𝓚",
+ "\\mbfscrL" : "𝓛",
+ "\\mbfscrM" : "𝓜",
+ "\\mbfscrN" : "𝓝",
+ "\\mbfscrO" : "𝓞",
+ "\\mbfscrP" : "𝓟",
+ "\\mbfscrQ" : "𝓠",
+ "\\mbfscrR" : "𝓡",
+ "\\mbfscrS" : "𝓢",
+ "\\mbfscrT" : "𝓣",
+ "\\mbfscrU" : "𝓤",
+ "\\mbfscrV" : "𝓥",
+ "\\mbfscrW" : "𝓦",
+ "\\mbfscrX" : "𝓧",
+ "\\mbfscrY" : "𝓨",
+ "\\mbfscrZ" : "𝓩",
+ "\\mbfscra" : "𝓪",
+ "\\mbfscrb" : "𝓫",
+ "\\mbfscrc" : "𝓬",
+ "\\mbfscrd" : "𝓭",
+ "\\mbfscre" : "𝓮",
+ "\\mbfscrf" : "𝓯",
+ "\\mbfscrg" : "𝓰",
+ "\\mbfscrh" : "𝓱",
+ "\\mbfscri" : "𝓲",
+ "\\mbfscrj" : "𝓳",
+ "\\mbfscrk" : "𝓴",
+ "\\mbfscrl" : "𝓵",
+ "\\mbfscrm" : "𝓶",
+ "\\mbfscrn" : "𝓷",
+ "\\mbfscro" : "𝓸",
+ "\\mbfscrp" : "𝓹",
+ "\\mbfscrq" : "𝓺",
+ "\\mbfscrr" : "𝓻",
+ "\\mbfscrs" : "𝓼",
+ "\\mbfscrt" : "𝓽",
+ "\\mbfscru" : "𝓾",
+ "\\mbfscrv" : "𝓿",
+ "\\mbfscrw" : "𝔀",
+ "\\mbfscrx" : "𝔁",
+ "\\mbfscry" : "𝔂",
+ "\\mbfscrz" : "𝔃",
+ "\\mfrakA" : "𝔄",
+ "\\mfrakB" : "𝔅",
+ "\\mfrakD" : "𝔇",
+ "\\mfrakE" : "𝔈",
+ "\\mfrakF" : "𝔉",
+ "\\mfrakG" : "𝔊",
+ "\\mfrakJ" : "𝔍",
+ "\\mfrakK" : "𝔎",
+ "\\mfrakL" : "𝔏",
+ "\\mfrakM" : "𝔐",
+ "\\mfrakN" : "𝔑",
+ "\\mfrakO" : "𝔒",
+ "\\mfrakP" : "𝔓",
+ "\\mfrakQ" : "𝔔",
+ "\\mfrakS" : "𝔖",
+ "\\mfrakT" : "𝔗",
+ "\\mfrakU" : "𝔘",
+ "\\mfrakV" : "𝔙",
+ "\\mfrakW" : "𝔚",
+ "\\mfrakX" : "𝔛",
+ "\\mfrakY" : "𝔜",
+ "\\mfraka" : "𝔞",
+ "\\mfrakb" : "𝔟",
+ "\\mfrakc" : "𝔠",
+ "\\mfrakd" : "𝔡",
+ "\\mfrake" : "𝔢",
+ "\\mfrakf" : "𝔣",
+ "\\mfrakg" : "𝔤",
+ "\\mfrakh" : "𝔥",
+ "\\mfraki" : "𝔦",
+ "\\mfrakj" : "𝔧",
+ "\\mfrakk" : "𝔨",
+ "\\mfrakl" : "𝔩",
+ "\\mfrakm" : "𝔪",
+ "\\mfrakn" : "𝔫",
+ "\\mfrako" : "𝔬",
+ "\\mfrakp" : "𝔭",
+ "\\mfrakq" : "𝔮",
+ "\\mfrakr" : "𝔯",
+ "\\mfraks" : "𝔰",
+ "\\mfrakt" : "𝔱",
+ "\\mfraku" : "𝔲",
+ "\\mfrakv" : "𝔳",
+ "\\mfrakw" : "𝔴",
+ "\\mfrakx" : "𝔵",
+ "\\mfraky" : "𝔶",
+ "\\mfrakz" : "𝔷",
+ "\\BbbA" : "𝔸",
+ "\\BbbB" : "𝔹",
+ "\\BbbD" : "𝔻",
+ "\\BbbE" : "𝔼",
+ "\\BbbF" : "𝔽",
+ "\\BbbG" : "𝔾",
+ "\\BbbI" : "𝕀",
+ "\\BbbJ" : "𝕁",
+ "\\BbbK" : "𝕂",
+ "\\BbbL" : "𝕃",
+ "\\BbbM" : "𝕄",
+ "\\BbbO" : "𝕆",
+ "\\BbbS" : "𝕊",
+ "\\BbbT" : "𝕋",
+ "\\BbbU" : "𝕌",
+ "\\BbbV" : "𝕍",
+ "\\BbbW" : "𝕎",
+ "\\BbbX" : "𝕏",
+ "\\BbbY" : "𝕐",
+ "\\Bbba" : "𝕒",
+ "\\Bbbb" : "𝕓",
+ "\\Bbbc" : "𝕔",
+ "\\Bbbd" : "𝕕",
+ "\\Bbbe" : "𝕖",
+ "\\Bbbf" : "𝕗",
+ "\\Bbbg" : "𝕘",
+ "\\Bbbh" : "𝕙",
+ "\\Bbbi" : "𝕚",
+ "\\Bbbj" : "𝕛",
+ "\\Bbbk" : "𝕜",
+ "\\Bbbl" : "𝕝",
+ "\\Bbbm" : "𝕞",
+ "\\Bbbn" : "𝕟",
+ "\\Bbbo" : "𝕠",
+ "\\Bbbp" : "𝕡",
+ "\\Bbbq" : "𝕢",
+ "\\Bbbr" : "𝕣",
+ "\\Bbbs" : "𝕤",
+ "\\Bbbt" : "𝕥",
+ "\\Bbbu" : "𝕦",
+ "\\Bbbv" : "𝕧",
+ "\\Bbbw" : "𝕨",
+ "\\Bbbx" : "𝕩",
+ "\\Bbby" : "𝕪",
+ "\\Bbbz" : "𝕫",
+ "\\mbffrakA" : "𝕬",
+ "\\mbffrakB" : "𝕭",
+ "\\mbffrakC" : "𝕮",
+ "\\mbffrakD" : "𝕯",
+ "\\mbffrakE" : "𝕰",
+ "\\mbffrakF" : "𝕱",
+ "\\mbffrakG" : "𝕲",
+ "\\mbffrakH" : "𝕳",
+ "\\mbffrakI" : "𝕴",
+ "\\mbffrakJ" : "𝕵",
+ "\\mbffrakK" : "𝕶",
+ "\\mbffrakL" : "𝕷",
+ "\\mbffrakM" : "𝕸",
+ "\\mbffrakN" : "𝕹",
+ "\\mbffrakO" : "𝕺",
+ "\\mbffrakP" : "𝕻",
+ "\\mbffrakQ" : "𝕼",
+ "\\mbffrakR" : "𝕽",
+ "\\mbffrakS" : "𝕾",
+ "\\mbffrakT" : "𝕿",
+ "\\mbffrakU" : "𝖀",
+ "\\mbffrakV" : "𝖁",
+ "\\mbffrakW" : "𝖂",
+ "\\mbffrakX" : "𝖃",
+ "\\mbffrakY" : "𝖄",
+ "\\mbffrakZ" : "𝖅",
+ "\\mbffraka" : "𝖆",
+ "\\mbffrakb" : "𝖇",
+ "\\mbffrakc" : "𝖈",
+ "\\mbffrakd" : "𝖉",
+ "\\mbffrake" : "𝖊",
+ "\\mbffrakf" : "𝖋",
+ "\\mbffrakg" : "𝖌",
+ "\\mbffrakh" : "𝖍",
+ "\\mbffraki" : "𝖎",
+ "\\mbffrakj" : "𝖏",
+ "\\mbffrakk" : "𝖐",
+ "\\mbffrakl" : "𝖑",
+ "\\mbffrakm" : "𝖒",
+ "\\mbffrakn" : "𝖓",
+ "\\mbffrako" : "𝖔",
+ "\\mbffrakp" : "𝖕",
+ "\\mbffrakq" : "𝖖",
+ "\\mbffrakr" : "𝖗",
+ "\\mbffraks" : "𝖘",
+ "\\mbffrakt" : "𝖙",
+ "\\mbffraku" : "𝖚",
+ "\\mbffrakv" : "𝖛",
+ "\\mbffrakw" : "𝖜",
+ "\\mbffrakx" : "𝖝",
+ "\\mbffraky" : "𝖞",
+ "\\mbffrakz" : "𝖟",
+ "\\msansA" : "𝖠",
+ "\\msansB" : "𝖡",
+ "\\msansC" : "𝖢",
+ "\\msansD" : "𝖣",
+ "\\msansE" : "𝖤",
+ "\\msansF" : "𝖥",
+ "\\msansG" : "𝖦",
+ "\\msansH" : "𝖧",
+ "\\msansI" : "𝖨",
+ "\\msansJ" : "𝖩",
+ "\\msansK" : "𝖪",
+ "\\msansL" : "𝖫",
+ "\\msansM" : "𝖬",
+ "\\msansN" : "𝖭",
+ "\\msansO" : "𝖮",
+ "\\msansP" : "𝖯",
+ "\\msansQ" : "𝖰",
+ "\\msansR" : "𝖱",
+ "\\msansS" : "𝖲",
+ "\\msansT" : "𝖳",
+ "\\msansU" : "𝖴",
+ "\\msansV" : "𝖵",
+ "\\msansW" : "𝖶",
+ "\\msansX" : "𝖷",
+ "\\msansY" : "𝖸",
+ "\\msansZ" : "𝖹",
+ "\\msansa" : "𝖺",
+ "\\msansb" : "𝖻",
+ "\\msansc" : "𝖼",
+ "\\msansd" : "𝖽",
+ "\\msanse" : "𝖾",
+ "\\msansf" : "𝖿",
+ "\\msansg" : "𝗀",
+ "\\msansh" : "𝗁",
+ "\\msansi" : "𝗂",
+ "\\msansj" : "𝗃",
+ "\\msansk" : "𝗄",
+ "\\msansl" : "𝗅",
+ "\\msansm" : "𝗆",
+ "\\msansn" : "𝗇",
+ "\\msanso" : "𝗈",
+ "\\msansp" : "𝗉",
+ "\\msansq" : "𝗊",
+ "\\msansr" : "𝗋",
+ "\\msanss" : "𝗌",
+ "\\msanst" : "𝗍",
+ "\\msansu" : "𝗎",
+ "\\msansv" : "𝗏",
+ "\\msansw" : "𝗐",
+ "\\msansx" : "𝗑",
+ "\\msansy" : "𝗒",
+ "\\msansz" : "𝗓",
+ "\\mbfsansA" : "𝗔",
+ "\\mbfsansB" : "𝗕",
+ "\\mbfsansC" : "𝗖",
+ "\\mbfsansD" : "𝗗",
+ "\\mbfsansE" : "𝗘",
+ "\\mbfsansF" : "𝗙",
+ "\\mbfsansG" : "𝗚",
+ "\\mbfsansH" : "𝗛",
+ "\\mbfsansI" : "𝗜",
+ "\\mbfsansJ" : "𝗝",
+ "\\mbfsansK" : "𝗞",
+ "\\mbfsansL" : "𝗟",
+ "\\mbfsansM" : "𝗠",
+ "\\mbfsansN" : "𝗡",
+ "\\mbfsansO" : "𝗢",
+ "\\mbfsansP" : "𝗣",
+ "\\mbfsansQ" : "𝗤",
+ "\\mbfsansR" : "𝗥",
+ "\\mbfsansS" : "𝗦",
+ "\\mbfsansT" : "𝗧",
+ "\\mbfsansU" : "𝗨",
+ "\\mbfsansV" : "𝗩",
+ "\\mbfsansW" : "𝗪",
+ "\\mbfsansX" : "𝗫",
+ "\\mbfsansY" : "𝗬",
+ "\\mbfsansZ" : "𝗭",
+ "\\mbfsansa" : "𝗮",
+ "\\mbfsansb" : "𝗯",
+ "\\mbfsansc" : "𝗰",
+ "\\mbfsansd" : "𝗱",
+ "\\mbfsanse" : "𝗲",
+ "\\mbfsansf" : "𝗳",
+ "\\mbfsansg" : "𝗴",
+ "\\mbfsansh" : "𝗵",
+ "\\mbfsansi" : "𝗶",
+ "\\mbfsansj" : "𝗷",
+ "\\mbfsansk" : "𝗸",
+ "\\mbfsansl" : "𝗹",
+ "\\mbfsansm" : "𝗺",
+ "\\mbfsansn" : "𝗻",
+ "\\mbfsanso" : "𝗼",
+ "\\mbfsansp" : "𝗽",
+ "\\mbfsansq" : "𝗾",
+ "\\mbfsansr" : "𝗿",
+ "\\mbfsanss" : "𝘀",
+ "\\mbfsanst" : "𝘁",
+ "\\mbfsansu" : "𝘂",
+ "\\mbfsansv" : "𝘃",
+ "\\mbfsansw" : "𝘄",
+ "\\mbfsansx" : "𝘅",
+ "\\mbfsansy" : "𝘆",
+ "\\mbfsansz" : "𝘇",
+ "\\mitsansA" : "𝘈",
+ "\\mitsansB" : "𝘉",
+ "\\mitsansC" : "𝘊",
+ "\\mitsansD" : "𝘋",
+ "\\mitsansE" : "𝘌",
+ "\\mitsansF" : "𝘍",
+ "\\mitsansG" : "𝘎",
+ "\\mitsansH" : "𝘏",
+ "\\mitsansI" : "𝘐",
+ "\\mitsansJ" : "𝘑",
+ "\\mitsansK" : "𝘒",
+ "\\mitsansL" : "𝘓",
+ "\\mitsansM" : "𝘔",
+ "\\mitsansN" : "𝘕",
+ "\\mitsansO" : "𝘖",
+ "\\mitsansP" : "𝘗",
+ "\\mitsansQ" : "𝘘",
+ "\\mitsansR" : "𝘙",
+ "\\mitsansS" : "𝘚",
+ "\\mitsansT" : "𝘛",
+ "\\mitsansU" : "𝘜",
+ "\\mitsansV" : "𝘝",
+ "\\mitsansW" : "𝘞",
+ "\\mitsansX" : "𝘟",
+ "\\mitsansY" : "𝘠",
+ "\\mitsansZ" : "𝘡",
+ "\\mitsansa" : "𝘢",
+ "\\mitsansb" : "𝘣",
+ "\\mitsansc" : "𝘤",
+ "\\mitsansd" : "𝘥",
+ "\\mitsanse" : "𝘦",
+ "\\mitsansf" : "𝘧",
+ "\\mitsansg" : "𝘨",
+ "\\mitsansh" : "𝘩",
+ "\\mitsansi" : "𝘪",
+ "\\mitsansj" : "𝘫",
+ "\\mitsansk" : "𝘬",
+ "\\mitsansl" : "𝘭",
+ "\\mitsansm" : "𝘮",
+ "\\mitsansn" : "𝘯",
+ "\\mitsanso" : "𝘰",
+ "\\mitsansp" : "𝘱",
+ "\\mitsansq" : "𝘲",
+ "\\mitsansr" : "𝘳",
+ "\\mitsanss" : "𝘴",
+ "\\mitsanst" : "𝘵",
+ "\\mitsansu" : "𝘶",
+ "\\mitsansv" : "𝘷",
+ "\\mitsansw" : "𝘸",
+ "\\mitsansx" : "𝘹",
+ "\\mitsansy" : "𝘺",
+ "\\mitsansz" : "𝘻",
+ "\\mbfitsansA" : "𝘼",
+ "\\mbfitsansB" : "𝘽",
+ "\\mbfitsansC" : "𝘾",
+ "\\mbfitsansD" : "𝘿",
+ "\\mbfitsansE" : "𝙀",
+ "\\mbfitsansF" : "𝙁",
+ "\\mbfitsansG" : "𝙂",
+ "\\mbfitsansH" : "𝙃",
+ "\\mbfitsansI" : "𝙄",
+ "\\mbfitsansJ" : "𝙅",
+ "\\mbfitsansK" : "𝙆",
+ "\\mbfitsansL" : "𝙇",
+ "\\mbfitsansM" : "𝙈",
+ "\\mbfitsansN" : "𝙉",
+ "\\mbfitsansO" : "𝙊",
+ "\\mbfitsansP" : "𝙋",
+ "\\mbfitsansQ" : "𝙌",
+ "\\mbfitsansR" : "𝙍",
+ "\\mbfitsansS" : "𝙎",
+ "\\mbfitsansT" : "𝙏",
+ "\\mbfitsansU" : "𝙐",
+ "\\mbfitsansV" : "𝙑",
+ "\\mbfitsansW" : "𝙒",
+ "\\mbfitsansX" : "𝙓",
+ "\\mbfitsansY" : "𝙔",
+ "\\mbfitsansZ" : "𝙕",
+ "\\mbfitsansa" : "𝙖",
+ "\\mbfitsansb" : "𝙗",
+ "\\mbfitsansc" : "𝙘",
+ "\\mbfitsansd" : "𝙙",
+ "\\mbfitsanse" : "𝙚",
+ "\\mbfitsansf" : "𝙛",
+ "\\mbfitsansg" : "𝙜",
+ "\\mbfitsansh" : "𝙝",
+ "\\mbfitsansi" : "𝙞",
+ "\\mbfitsansj" : "𝙟",
+ "\\mbfitsansk" : "𝙠",
+ "\\mbfitsansl" : "𝙡",
+ "\\mbfitsansm" : "𝙢",
+ "\\mbfitsansn" : "𝙣",
+ "\\mbfitsanso" : "𝙤",
+ "\\mbfitsansp" : "𝙥",
+ "\\mbfitsansq" : "𝙦",
+ "\\mbfitsansr" : "𝙧",
+ "\\mbfitsanss" : "𝙨",
+ "\\mbfitsanst" : "𝙩",
+ "\\mbfitsansu" : "𝙪",
+ "\\mbfitsansv" : "𝙫",
+ "\\mbfitsansw" : "𝙬",
+ "\\mbfitsansx" : "𝙭",
+ "\\mbfitsansy" : "𝙮",
+ "\\mbfitsansz" : "𝙯",
+ "\\mttA" : "𝙰",
+ "\\mttB" : "𝙱",
+ "\\mttC" : "𝙲",
+ "\\mttD" : "𝙳",
+ "\\mttE" : "𝙴",
+ "\\mttF" : "𝙵",
+ "\\mttG" : "𝙶",
+ "\\mttH" : "𝙷",
+ "\\mttI" : "𝙸",
+ "\\mttJ" : "𝙹",
+ "\\mttK" : "𝙺",
+ "\\mttL" : "𝙻",
+ "\\mttM" : "𝙼",
+ "\\mttN" : "𝙽",
+ "\\mttO" : "𝙾",
+ "\\mttP" : "𝙿",
+ "\\mttQ" : "𝚀",
+ "\\mttR" : "𝚁",
+ "\\mttS" : "𝚂",
+ "\\mttT" : "𝚃",
+ "\\mttU" : "𝚄",
+ "\\mttV" : "𝚅",
+ "\\mttW" : "𝚆",
+ "\\mttX" : "𝚇",
+ "\\mttY" : "𝚈",
+ "\\mttZ" : "𝚉",
+ "\\mtta" : "𝚊",
+ "\\mttb" : "𝚋",
+ "\\mttc" : "𝚌",
+ "\\mttd" : "𝚍",
+ "\\mtte" : "𝚎",
+ "\\mttf" : "𝚏",
+ "\\mttg" : "𝚐",
+ "\\mtth" : "𝚑",
+ "\\mtti" : "𝚒",
+ "\\mttj" : "𝚓",
+ "\\mttk" : "𝚔",
+ "\\mttl" : "𝚕",
+ "\\mttm" : "𝚖",
+ "\\mttn" : "𝚗",
+ "\\mtto" : "𝚘",
+ "\\mttp" : "𝚙",
+ "\\mttq" : "𝚚",
+ "\\mttr" : "𝚛",
+ "\\mtts" : "𝚜",
+ "\\mttt" : "𝚝",
+ "\\mttu" : "𝚞",
+ "\\mttv" : "𝚟",
+ "\\mttw" : "𝚠",
+ "\\mttx" : "𝚡",
+ "\\mtty" : "𝚢",
+ "\\mttz" : "𝚣",
+ "\\mbfAlpha" : "𝚨",
+ "\\mbfBeta" : "𝚩",
+ "\\mbfGamma" : "𝚪",
+ "\\mbfDelta" : "𝚫",
+ "\\mbfEpsilon" : "𝚬",
+ "\\mbfZeta" : "𝚭",
+ "\\mbfEta" : "𝚮",
+ "\\mbfTheta" : "𝚯",
+ "\\mbfIota" : "𝚰",
+ "\\mbfKappa" : "𝚱",
+ "\\mbfLambda" : "𝚲",
+ "\\mbfMu" : "𝚳",
+ "\\mbfNu" : "𝚴",
+ "\\mbfXi" : "𝚵",
+ "\\mbfOmicron" : "𝚶",
+ "\\mbfPi" : "𝚷",
+ "\\mbfRho" : "𝚸",
+ "\\mbfvarTheta" : "𝚹",
+ "\\mbfSigma" : "𝚺",
+ "\\mbfTau" : "𝚻",
+ "\\mbfUpsilon" : "𝚼",
+ "\\mbfPhi" : "𝚽",
+ "\\mbfChi" : "𝚾",
+ "\\mbfPsi" : "𝚿",
+ "\\mbfOmega" : "𝛀",
+ "\\mbfalpha" : "𝛂",
+ "\\mbfbeta" : "𝛃",
+ "\\mbfgamma" : "𝛄",
+ "\\mbfdelta" : "𝛅",
+ "\\mbfepsilon" : "𝛆",
+ "\\mbfzeta" : "𝛇",
+ "\\mbfeta" : "𝛈",
+ "\\mbftheta" : "𝛉",
+ "\\mbfiota" : "𝛊",
+ "\\mbfkappa" : "𝛋",
+ "\\mbflambda" : "𝛌",
+ "\\mbfmu" : "𝛍",
+ "\\mbfnu" : "𝛎",
+ "\\mbfxi" : "𝛏",
+ "\\mbfomicron" : "𝛐",
+ "\\mbfpi" : "𝛑",
+ "\\mbfrho" : "𝛒",
+ "\\mbfvarsigma" : "𝛓",
+ "\\mbfsigma" : "𝛔",
+ "\\mbftau" : "𝛕",
+ "\\mbfupsilon" : "𝛖",
+ "\\mbfvarphi" : "𝛗",
+ "\\mbfchi" : "𝛘",
+ "\\mbfpsi" : "𝛙",
+ "\\mbfomega" : "𝛚",
+ "\\mbfvarepsilon" : "𝛜",
+ "\\mbfvartheta" : "𝛝",
+ "\\mbfvarkappa" : "𝛞",
+ "\\mbfphi" : "𝛟",
+ "\\mbfvarrho" : "𝛠",
+ "\\mbfvarpi" : "𝛡",
+ "\\mitAlpha" : "𝛢",
+ "\\mitBeta" : "𝛣",
+ "\\mitGamma" : "𝛤",
+ "\\mitDelta" : "𝛥",
+ "\\mitEpsilon" : "𝛦",
+ "\\mitZeta" : "𝛧",
+ "\\mitEta" : "𝛨",
+ "\\mitTheta" : "𝛩",
+ "\\mitIota" : "𝛪",
+ "\\mitKappa" : "𝛫",
+ "\\mitLambda" : "𝛬",
+ "\\mitMu" : "𝛭",
+ "\\mitNu" : "𝛮",
+ "\\mitXi" : "𝛯",
+ "\\mitOmicron" : "𝛰",
+ "\\mitPi" : "𝛱",
+ "\\mitRho" : "𝛲",
+ "\\mitvarTheta" : "𝛳",
+ "\\mitSigma" : "𝛴",
+ "\\mitTau" : "𝛵",
+ "\\mitUpsilon" : "𝛶",
+ "\\mitPhi" : "𝛷",
+ "\\mitChi" : "𝛸",
+ "\\mitPsi" : "𝛹",
+ "\\mitOmega" : "𝛺",
+ "\\mitalpha" : "𝛼",
+ "\\mitbeta" : "𝛽",
+ "\\mitgamma" : "𝛾",
+ "\\mitdelta" : "𝛿",
+ "\\mitepsilon" : "𝜀",
+ "\\mitzeta" : "𝜁",
+ "\\miteta" : "𝜂",
+ "\\mittheta" : "𝜃",
+ "\\mitiota" : "𝜄",
+ "\\mitkappa" : "𝜅",
+ "\\mitlambda" : "𝜆",
+ "\\mitmu" : "𝜇",
+ "\\mitnu" : "𝜈",
+ "\\mitxi" : "𝜉",
+ "\\mitomicron" : "𝜊",
+ "\\mitpi" : "𝜋",
+ "\\mitrho" : "𝜌",
+ "\\mitvarsigma" : "𝜍",
+ "\\mitsigma" : "𝜎",
+ "\\mittau" : "𝜏",
+ "\\mitupsilon" : "𝜐",
+ "\\mitphi" : "𝜑",
+ "\\mitchi" : "𝜒",
+ "\\mitpsi" : "𝜓",
+ "\\mitomega" : "𝜔",
+ "\\mitvarepsilon" : "𝜖",
+ "\\mitvartheta" : "𝜗",
+ "\\mitvarkappa" : "𝜘",
+ "\\mitvarphi" : "𝜙",
+ "\\mitvarrho" : "𝜚",
+ "\\mitvarpi" : "𝜛",
+ "\\mbfitAlpha" : "𝜜",
+ "\\mbfitBeta" : "𝜝",
+ "\\mbfitGamma" : "𝜞",
+ "\\mbfitDelta" : "𝜟",
+ "\\mbfitEpsilon" : "𝜠",
+ "\\mbfitZeta" : "𝜡",
+ "\\mbfitEta" : "𝜢",
+ "\\mbfitTheta" : "𝜣",
+ "\\mbfitIota" : "𝜤",
+ "\\mbfitKappa" : "𝜥",
+ "\\mbfitLambda" : "𝜦",
+ "\\mbfitMu" : "𝜧",
+ "\\mbfitNu" : "𝜨",
+ "\\mbfitXi" : "𝜩",
+ "\\mbfitOmicron" : "𝜪",
+ "\\mbfitPi" : "𝜫",
+ "\\mbfitRho" : "𝜬",
+ "\\mbfitvarTheta" : "𝜭",
+ "\\mbfitSigma" : "𝜮",
+ "\\mbfitTau" : "𝜯",
+ "\\mbfitUpsilon" : "𝜰",
+ "\\mbfitPhi" : "𝜱",
+ "\\mbfitChi" : "𝜲",
+ "\\mbfitPsi" : "𝜳",
+ "\\mbfitOmega" : "𝜴",
+ "\\mbfitalpha" : "𝜶",
+ "\\mbfitbeta" : "𝜷",
+ "\\mbfitgamma" : "𝜸",
+ "\\mbfitdelta" : "𝜹",
+ "\\mbfitepsilon" : "𝜺",
+ "\\mbfitzeta" : "𝜻",
+ "\\mbfiteta" : "𝜼",
+ "\\mbfittheta" : "𝜽",
+ "\\mbfitiota" : "𝜾",
+ "\\mbfitkappa" : "𝜿",
+ "\\mbfitlambda" : "𝝀",
+ "\\mbfitmu" : "𝝁",
+ "\\mbfitnu" : "𝝂",
+ "\\mbfitxi" : "𝝃",
+ "\\mbfitomicron" : "𝝄",
+ "\\mbfitpi" : "𝝅",
+ "\\mbfitrho" : "𝝆",
+ "\\mbfitvarsigma" : "𝝇",
+ "\\mbfitsigma" : "𝝈",
+ "\\mbfittau" : "𝝉",
+ "\\mbfitupsilon" : "𝝊",
+ "\\mbfitphi" : "𝝋",
+ "\\mbfitchi" : "𝝌",
+ "\\mbfitpsi" : "𝝍",
+ "\\mbfitomega" : "𝝎",
+ "\\mbfitvarepsilon" : "𝝐",
+ "\\mbfitvartheta" : "𝝑",
+ "\\mbfitvarkappa" : "𝝒",
+ "\\mbfitvarphi" : "𝝓",
+ "\\mbfitvarrho" : "𝝔",
+ "\\mbfitvarpi" : "𝝕",
+ "\\mbfsansAlpha" : "𝝖",
+ "\\mbfsansBeta" : "𝝗",
+ "\\mbfsansGamma" : "𝝘",
+ "\\mbfsansDelta" : "𝝙",
+ "\\mbfsansEpsilon" : "𝝚",
+ "\\mbfsansZeta" : "𝝛",
+ "\\mbfsansEta" : "𝝜",
+ "\\mbfsansTheta" : "𝝝",
+ "\\mbfsansIota" : "𝝞",
+ "\\mbfsansKappa" : "𝝟",
+ "\\mbfsansLambda" : "𝝠",
+ "\\mbfsansMu" : "𝝡",
+ "\\mbfsansNu" : "𝝢",
+ "\\mbfsansXi" : "𝝣",
+ "\\mbfsansOmicron" : "𝝤",
+ "\\mbfsansPi" : "𝝥",
+ "\\mbfsansRho" : "𝝦",
+ "\\mbfsansvarTheta" : "𝝧",
+ "\\mbfsansSigma" : "𝝨",
+ "\\mbfsansTau" : "𝝩",
+ "\\mbfsansUpsilon" : "𝝪",
+ "\\mbfsansPhi" : "𝝫",
+ "\\mbfsansChi" : "𝝬",
+ "\\mbfsansPsi" : "𝝭",
+ "\\mbfsansOmega" : "𝝮",
+ "\\mbfsansalpha" : "𝝰",
+ "\\mbfsansbeta" : "𝝱",
+ "\\mbfsansgamma" : "𝝲",
+ "\\mbfsansdelta" : "𝝳",
+ "\\mbfsansepsilon" : "𝝴",
+ "\\mbfsanszeta" : "𝝵",
+ "\\mbfsanseta" : "𝝶",
+ "\\mbfsanstheta" : "𝝷",
+ "\\mbfsansiota" : "𝝸",
+ "\\mbfsanskappa" : "𝝹",
+ "\\mbfsanslambda" : "𝝺",
+ "\\mbfsansmu" : "𝝻",
+ "\\mbfsansnu" : "𝝼",
+ "\\mbfsansxi" : "𝝽",
+ "\\mbfsansomicron" : "𝝾",
+ "\\mbfsanspi" : "𝝿",
+ "\\mbfsansrho" : "𝞀",
+ "\\mbfsansvarsigma" : "𝞁",
+ "\\mbfsanssigma" : "𝞂",
+ "\\mbfsanstau" : "𝞃",
+ "\\mbfsansupsilon" : "𝞄",
+ "\\mbfsansphi" : "𝞅",
+ "\\mbfsanschi" : "𝞆",
+ "\\mbfsanspsi" : "𝞇",
+ "\\mbfsansomega" : "𝞈",
+ "\\mbfsansvarepsilon" : "𝞊",
+ "\\mbfsansvartheta" : "𝞋",
+ "\\mbfsansvarkappa" : "𝞌",
+ "\\mbfsansvarphi" : "𝞍",
+ "\\mbfsansvarrho" : "𝞎",
+ "\\mbfsansvarpi" : "𝞏",
+ "\\mbfitsansAlpha" : "𝞐",
+ "\\mbfitsansBeta" : "𝞑",
+ "\\mbfitsansGamma" : "𝞒",
+ "\\mbfitsansDelta" : "𝞓",
+ "\\mbfitsansEpsilon" : "𝞔",
+ "\\mbfitsansZeta" : "𝞕",
+ "\\mbfitsansEta" : "𝞖",
+ "\\mbfitsansTheta" : "𝞗",
+ "\\mbfitsansIota" : "𝞘",
+ "\\mbfitsansKappa" : "𝞙",
+ "\\mbfitsansLambda" : "𝞚",
+ "\\mbfitsansMu" : "𝞛",
+ "\\mbfitsansNu" : "𝞜",
+ "\\mbfitsansXi" : "𝞝",
+ "\\mbfitsansOmicron" : "𝞞",
+ "\\mbfitsansPi" : "𝞟",
+ "\\mbfitsansRho" : "𝞠",
+ "\\mbfitsansvarTheta" : "𝞡",
+ "\\mbfitsansSigma" : "𝞢",
+ "\\mbfitsansTau" : "𝞣",
+ "\\mbfitsansUpsilon" : "𝞤",
+ "\\mbfitsansPhi" : "𝞥",
+ "\\mbfitsansChi" : "𝞦",
+ "\\mbfitsansPsi" : "𝞧",
+ "\\mbfitsansOmega" : "𝞨",
+ "\\mbfitsansalpha" : "𝞪",
+ "\\mbfitsansbeta" : "𝞫",
+ "\\mbfitsansgamma" : "𝞬",
+ "\\mbfitsansdelta" : "𝞭",
+ "\\mbfitsansepsilon" : "𝞮",
+ "\\mbfitsanszeta" : "𝞯",
+ "\\mbfitsanseta" : "𝞰",
+ "\\mbfitsanstheta" : "𝞱",
+ "\\mbfitsansiota" : "𝞲",
+ "\\mbfitsanskappa" : "𝞳",
+ "\\mbfitsanslambda" : "𝞴",
+ "\\mbfitsansmu" : "𝞵",
+ "\\mbfitsansnu" : "𝞶",
+ "\\mbfitsansxi" : "𝞷",
+ "\\mbfitsansomicron" : "𝞸",
+ "\\mbfitsanspi" : "𝞹",
+ "\\mbfitsansrho" : "𝞺",
+ "\\mbfitsansvarsigma" : "𝞻",
+ "\\mbfitsanssigma" : "𝞼",
+ "\\mbfitsanstau" : "𝞽",
+ "\\mbfitsansupsilon" : "𝞾",
+ "\\mbfitsansphi" : "𝞿",
+ "\\mbfitsanschi" : "𝟀",
+ "\\mbfitsanspsi" : "𝟁",
+ "\\mbfitsansomega" : "𝟂",
+ "\\mbfitsansvarepsilon" : "𝟄",
+ "\\mbfitsansvartheta" : "𝟅",
+ "\\mbfitsansvarkappa" : "𝟆",
+ "\\mbfitsansvarphi" : "𝟇",
+ "\\mbfitsansvarrho" : "𝟈",
+ "\\mbfitsansvarpi" : "𝟉",
+ "\\mbfzero" : "𝟎",
+ "\\mbfone" : "𝟏",
+ "\\mbftwo" : "𝟐",
+ "\\mbfthree" : "𝟑",
+ "\\mbffour" : "𝟒",
+ "\\mbffive" : "𝟓",
+ "\\mbfsix" : "𝟔",
+ "\\mbfseven" : "𝟕",
+ "\\mbfeight" : "𝟖",
+ "\\mbfnine" : "𝟗",
+ "\\Bbbzero" : "𝟘",
+ "\\Bbbone" : "𝟙",
+ "\\Bbbtwo" : "𝟚",
+ "\\Bbbthree" : "𝟛",
+ "\\Bbbfour" : "𝟜",
+ "\\Bbbfive" : "𝟝",
+ "\\Bbbsix" : "𝟞",
+ "\\Bbbseven" : "𝟟",
+ "\\Bbbeight" : "𝟠",
+ "\\Bbbnine" : "𝟡",
+ "\\msanszero" : "𝟢",
+ "\\msansone" : "𝟣",
+ "\\msanstwo" : "𝟤",
+ "\\msansthree" : "𝟥",
+ "\\msansfour" : "𝟦",
+ "\\msansfive" : "𝟧",
+ "\\msanssix" : "𝟨",
+ "\\msansseven" : "𝟩",
+ "\\msanseight" : "𝟪",
+ "\\msansnine" : "𝟫",
+ "\\mbfsanszero" : "𝟬",
+ "\\mbfsansone" : "𝟭",
+ "\\mbfsanstwo" : "𝟮",
+ "\\mbfsansthree" : "𝟯",
+ "\\mbfsansfour" : "𝟰",
+ "\\mbfsansfive" : "𝟱",
+ "\\mbfsanssix" : "𝟲",
+ "\\mbfsansseven" : "𝟳",
+ "\\mbfsanseight" : "𝟴",
+ "\\mbfsansnine" : "𝟵",
+ "\\mttzero" : "𝟶",
+ "\\mttone" : "𝟷",
+ "\\mtttwo" : "𝟸",
+ "\\mttthree" : "𝟹",
+ "\\mttfour" : "𝟺",
+ "\\mttfive" : "𝟻",
+ "\\mttsix" : "𝟼",
+ "\\mttseven" : "𝟽",
+ "\\mtteight" : "𝟾",
+ "\\mttnine" : "𝟿",
+}
+
+
+reverse_latex_symbol = { v:k for k,v in latex_symbols.items()}
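A minimal usage sketch of the symbol tables restored above, not part of the diff itself. It assumes the vendored module imports as IPython.core.latex_symbols; the tab completer uses these two maps to turn a typed escape such as \alpha into the matching character and back.

    from IPython.core.latex_symbols import latex_symbols, reverse_latex_symbol

    # Forward lookup: LaTeX-style escape -> unicode character.
    print(latex_symbols["\\alpha"])          # α
    # Reverse lookup: unicode character -> escape name.
    print(reverse_latex_symbol[u"\u03b1"])   # \alpha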
diff --git a/contrib/python/ipython/py2/IPython/core/logger.py b/contrib/python/ipython/py2/IPython/core/logger.py
index ba5a88a73a..0e41db598f 100644
--- a/contrib/python/ipython/py2/IPython/core/logger.py
+++ b/contrib/python/ipython/py2/IPython/core/logger.py
@@ -1,221 +1,221 @@
-"""Logger class for IPython's logging facilities.
-"""
-from __future__ import print_function
-
-#*****************************************************************************
-# Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
-# Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu>
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#*****************************************************************************
-
-#****************************************************************************
-# Modules and globals
-
-# Python standard modules
-import glob
-import io
-import os
-import time
-
-from IPython.utils.py3compat import str_to_unicode
-
-#****************************************************************************
-# FIXME: This class isn't a mixin anymore, but it still needs attributes from
-# ipython and does input cache management. Finish cleanup later...
-
-class Logger(object):
- """A Logfile class with different policies for file creation"""
-
- def __init__(self, home_dir, logfname='Logger.log', loghead=u'',
- logmode='over'):
-
- # this is the full ipython instance, we need some attributes from it
- # which won't exist until later. What a mess, clean up later...
- self.home_dir = home_dir
-
- self.logfname = logfname
- self.loghead = loghead
- self.logmode = logmode
- self.logfile = None
-
- # Whether to log raw or processed input
- self.log_raw_input = False
-
- # whether to also log output
- self.log_output = False
-
- # whether to put timestamps before each log entry
- self.timestamp = False
-
- # activity control flags
- self.log_active = False
-
- # logmode is a validated property
- def _set_mode(self,mode):
- if mode not in ['append','backup','global','over','rotate']:
- raise ValueError('invalid log mode %s given' % mode)
- self._logmode = mode
-
- def _get_mode(self):
- return self._logmode
-
- logmode = property(_get_mode,_set_mode)
-
- def logstart(self, logfname=None, loghead=None, logmode=None,
- log_output=False, timestamp=False, log_raw_input=False):
- """Generate a new log-file with a default header.
-
- Raises RuntimeError if the log has already been started"""
-
- if self.logfile is not None:
- raise RuntimeError('Log file is already active: %s' %
- self.logfname)
-
- # The parameters can override constructor defaults
- if logfname is not None: self.logfname = logfname
- if loghead is not None: self.loghead = loghead
- if logmode is not None: self.logmode = logmode
-
- # Parameters not part of the constructor
- self.timestamp = timestamp
- self.log_output = log_output
- self.log_raw_input = log_raw_input
-
- # init depending on the log mode requested
- isfile = os.path.isfile
- logmode = self.logmode
-
- if logmode == 'append':
- self.logfile = io.open(self.logfname, 'a', encoding='utf-8')
-
- elif logmode == 'backup':
- if isfile(self.logfname):
- backup_logname = self.logfname+'~'
- # Manually remove any old backup, since os.rename may fail
- # under Windows.
- if isfile(backup_logname):
- os.remove(backup_logname)
- os.rename(self.logfname,backup_logname)
- self.logfile = io.open(self.logfname, 'w', encoding='utf-8')
-
- elif logmode == 'global':
- self.logfname = os.path.join(self.home_dir,self.logfname)
- self.logfile = io.open(self.logfname, 'a', encoding='utf-8')
-
- elif logmode == 'over':
- if isfile(self.logfname):
- os.remove(self.logfname)
- self.logfile = io.open(self.logfname,'w', encoding='utf-8')
-
- elif logmode == 'rotate':
- if isfile(self.logfname):
- if isfile(self.logfname+'.001~'):
- old = glob.glob(self.logfname+'.*~')
- old.sort()
- old.reverse()
- for f in old:
- root, ext = os.path.splitext(f)
- num = int(ext[1:-1])+1
- os.rename(f, root+'.'+repr(num).zfill(3)+'~')
- os.rename(self.logfname, self.logfname+'.001~')
- self.logfile = io.open(self.logfname, 'w', encoding='utf-8')
-
- if logmode != 'append':
- self.logfile.write(self.loghead)
-
- self.logfile.flush()
- self.log_active = True
-
- def switch_log(self,val):
- """Switch logging on/off. val should be ONLY a boolean."""
-
- if val not in [False,True,0,1]:
- raise ValueError('Call switch_log ONLY with a boolean argument, '
- 'not with: %s' % val)
-
- label = {0:'OFF',1:'ON',False:'OFF',True:'ON'}
-
- if self.logfile is None:
- print("""
-Logging hasn't been started yet (use logstart for that).
-
-%logon/%logoff are for temporarily starting and stopping logging for a logfile
-which already exists. But you must first start the logging process with
-%logstart (optionally giving a logfile name).""")
-
- else:
- if self.log_active == val:
- print('Logging is already',label[val])
- else:
- print('Switching logging',label[val])
- self.log_active = not self.log_active
- self.log_active_out = self.log_active
-
- def logstate(self):
- """Print a status message about the logger."""
- if self.logfile is None:
- print('Logging has not been activated.')
- else:
- state = self.log_active and 'active' or 'temporarily suspended'
- print('Filename :', self.logfname)
- print('Mode :', self.logmode)
- print('Output logging :', self.log_output)
- print('Raw input log :', self.log_raw_input)
- print('Timestamping :', self.timestamp)
- print('State :', state)
-
- def log(self, line_mod, line_ori):
- """Write the sources to a log.
-
- Inputs:
-
- - line_mod: possibly modified input, such as the transformations made
- by input prefilters or input handlers of various kinds. This should
- always be valid Python.
-
- - line_ori: unmodified input line from the user. This is not
- necessarily valid Python.
- """
-
- # Write the log line, but decide which one according to the
- # log_raw_input flag, set when the log is started.
- if self.log_raw_input:
- self.log_write(line_ori)
- else:
- self.log_write(line_mod)
-
- def log_write(self, data, kind='input'):
- """Write data to the log file, if active"""
-
- #print 'data: %r' % data # dbg
- if self.log_active and data:
- write = self.logfile.write
- if kind=='input':
- if self.timestamp:
- write(str_to_unicode(time.strftime('# %a, %d %b %Y %H:%M:%S\n',
- time.localtime())))
- write(data)
- elif kind=='output' and self.log_output:
- odata = u'\n'.join([u'#[Out]# %s' % s
- for s in data.splitlines()])
- write(u'%s\n' % odata)
- self.logfile.flush()
-
- def logstop(self):
- """Fully stop logging and close log file.
-
- In order to start logging again, a new logstart() call needs to be
- made, possibly (though not necessarily) with a new filename, mode and
- other options."""
-
- if self.logfile is not None:
- self.logfile.close()
- self.logfile = None
- else:
- print("Logging hadn't been started.")
- self.log_active = False
-
- # For backwards compatibility, in case anyone was using this.
- close_log = logstop
+"""Logger class for IPython's logging facilities.
+"""
+from __future__ import print_function
+
+#*****************************************************************************
+# Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
+# Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu>
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#*****************************************************************************
+
+#****************************************************************************
+# Modules and globals
+
+# Python standard modules
+import glob
+import io
+import os
+import time
+
+from IPython.utils.py3compat import str_to_unicode
+
+#****************************************************************************
+# FIXME: This class isn't a mixin anymore, but it still needs attributes from
+# ipython and does input cache management. Finish cleanup later...
+
+class Logger(object):
+ """A Logfile class with different policies for file creation"""
+
+ def __init__(self, home_dir, logfname='Logger.log', loghead=u'',
+ logmode='over'):
+
+ # this is the full ipython instance, we need some attributes from it
+ # which won't exist until later. What a mess, clean up later...
+ self.home_dir = home_dir
+
+ self.logfname = logfname
+ self.loghead = loghead
+ self.logmode = logmode
+ self.logfile = None
+
+ # Whether to log raw or processed input
+ self.log_raw_input = False
+
+ # whether to also log output
+ self.log_output = False
+
+ # whether to put timestamps before each log entry
+ self.timestamp = False
+
+ # activity control flags
+ self.log_active = False
+
+ # logmode is a validated property
+ def _set_mode(self,mode):
+ if mode not in ['append','backup','global','over','rotate']:
+ raise ValueError('invalid log mode %s given' % mode)
+ self._logmode = mode
+
+ def _get_mode(self):
+ return self._logmode
+
+ logmode = property(_get_mode,_set_mode)
+
+ def logstart(self, logfname=None, loghead=None, logmode=None,
+ log_output=False, timestamp=False, log_raw_input=False):
+ """Generate a new log-file with a default header.
+
+ Raises RuntimeError if the log has already been started"""
+
+ if self.logfile is not None:
+ raise RuntimeError('Log file is already active: %s' %
+ self.logfname)
+
+ # The parameters can override constructor defaults
+ if logfname is not None: self.logfname = logfname
+ if loghead is not None: self.loghead = loghead
+ if logmode is not None: self.logmode = logmode
+
+ # Parameters not part of the constructor
+ self.timestamp = timestamp
+ self.log_output = log_output
+ self.log_raw_input = log_raw_input
+
+ # init depending on the log mode requested
+ isfile = os.path.isfile
+ logmode = self.logmode
+
+ if logmode == 'append':
+ self.logfile = io.open(self.logfname, 'a', encoding='utf-8')
+
+ elif logmode == 'backup':
+ if isfile(self.logfname):
+ backup_logname = self.logfname+'~'
+ # Manually remove any old backup, since os.rename may fail
+ # under Windows.
+ if isfile(backup_logname):
+ os.remove(backup_logname)
+ os.rename(self.logfname,backup_logname)
+ self.logfile = io.open(self.logfname, 'w', encoding='utf-8')
+
+ elif logmode == 'global':
+ self.logfname = os.path.join(self.home_dir,self.logfname)
+ self.logfile = io.open(self.logfname, 'a', encoding='utf-8')
+
+ elif logmode == 'over':
+ if isfile(self.logfname):
+ os.remove(self.logfname)
+ self.logfile = io.open(self.logfname,'w', encoding='utf-8')
+
+ elif logmode == 'rotate':
+ if isfile(self.logfname):
+ if isfile(self.logfname+'.001~'):
+ old = glob.glob(self.logfname+'.*~')
+ old.sort()
+ old.reverse()
+ for f in old:
+ root, ext = os.path.splitext(f)
+ num = int(ext[1:-1])+1
+ os.rename(f, root+'.'+repr(num).zfill(3)+'~')
+ os.rename(self.logfname, self.logfname+'.001~')
+ self.logfile = io.open(self.logfname, 'w', encoding='utf-8')
+
+ if logmode != 'append':
+ self.logfile.write(self.loghead)
+
+ self.logfile.flush()
+ self.log_active = True
+
+ def switch_log(self,val):
+ """Switch logging on/off. val should be ONLY a boolean."""
+
+ if val not in [False,True,0,1]:
+ raise ValueError('Call switch_log ONLY with a boolean argument, '
+ 'not with: %s' % val)
+
+ label = {0:'OFF',1:'ON',False:'OFF',True:'ON'}
+
+ if self.logfile is None:
+ print("""
+Logging hasn't been started yet (use logstart for that).
+
+%logon/%logoff are for temporarily starting and stopping logging for a logfile
+which already exists. But you must first start the logging process with
+%logstart (optionally giving a logfile name).""")
+
+ else:
+ if self.log_active == val:
+ print('Logging is already',label[val])
+ else:
+ print('Switching logging',label[val])
+ self.log_active = not self.log_active
+ self.log_active_out = self.log_active
+
+ def logstate(self):
+ """Print a status message about the logger."""
+ if self.logfile is None:
+ print('Logging has not been activated.')
+ else:
+ state = self.log_active and 'active' or 'temporarily suspended'
+ print('Filename :', self.logfname)
+ print('Mode :', self.logmode)
+ print('Output logging :', self.log_output)
+ print('Raw input log :', self.log_raw_input)
+ print('Timestamping :', self.timestamp)
+ print('State :', state)
+
+ def log(self, line_mod, line_ori):
+ """Write the sources to a log.
+
+ Inputs:
+
+ - line_mod: possibly modified input, such as the transformations made
+ by input prefilters or input handlers of various kinds. This should
+ always be valid Python.
+
+ - line_ori: unmodified input line from the user. This is not
+ necessarily valid Python.
+ """
+
+ # Write the log line, but decide which one according to the
+ # log_raw_input flag, set when the log is started.
+ if self.log_raw_input:
+ self.log_write(line_ori)
+ else:
+ self.log_write(line_mod)
+
+ def log_write(self, data, kind='input'):
+ """Write data to the log file, if active"""
+
+ #print 'data: %r' % data # dbg
+ if self.log_active and data:
+ write = self.logfile.write
+ if kind=='input':
+ if self.timestamp:
+ write(str_to_unicode(time.strftime('# %a, %d %b %Y %H:%M:%S\n',
+ time.localtime())))
+ write(data)
+ elif kind=='output' and self.log_output:
+ odata = u'\n'.join([u'#[Out]# %s' % s
+ for s in data.splitlines()])
+ write(u'%s\n' % odata)
+ self.logfile.flush()
+
+ def logstop(self):
+ """Fully stop logging and close log file.
+
+ In order to start logging again, a new logstart() call needs to be
+ made, possibly (though not necessarily) with a new filename, mode and
+ other options."""
+
+ if self.logfile is not None:
+ self.logfile.close()
+ self.logfile = None
+ else:
+ print("Logging hadn't been started.")
+ self.log_active = False
+
+ # For backwards compatibility, in case anyone was using this.
+ close_log = logstop
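A short sketch of how the Logger restored above is typically driven, assuming plain Python with a writable working directory; the file name and log header are illustrative, not taken from the diff.

    from IPython.core.logger import Logger

    lg = Logger(home_dir='.', logfname='demo.log',
                loghead=u'# IPython log file\n', logmode='over')
    lg.logstart(timestamp=True, log_output=True)  # 'over' mode: truncate and write loghead
    lg.log_write(u'x = 1\n')                      # input line, preceded by a timestamp
    lg.log_write(u'1', kind='output')             # written as '#[Out]# 1' since log_output=True
    lg.logstate()                                 # prints filename, mode and the active flags
    lg.logstop()                                  # closes the file; a new logstart() is needed afterwards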
diff --git a/contrib/python/ipython/py2/IPython/core/macro.py b/contrib/python/ipython/py2/IPython/core/macro.py
index 803236ffe5..9032706d2b 100644
--- a/contrib/python/ipython/py2/IPython/core/macro.py
+++ b/contrib/python/ipython/py2/IPython/core/macro.py
@@ -1,57 +1,57 @@
-"""Support for interactive macros in IPython"""
-
-#*****************************************************************************
-# Copyright (C) 2001-2005 Fernando Perez <fperez@colorado.edu>
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#*****************************************************************************
-
-import re
-
-from IPython.utils import py3compat
-from IPython.utils.encoding import DEFAULT_ENCODING
-
-coding_declaration = re.compile(r"#\s*coding[:=]\s*([-\w.]+)")
-
-class Macro(object):
- """Simple class to store the value of macros as strings.
-
- Macro is just a callable that executes a string of IPython
- input when called.
- """
-
- def __init__(self,code):
- """store the macro value, as a single string which can be executed"""
- lines = []
- enc = None
- for line in code.splitlines():
- coding_match = coding_declaration.match(line)
- if coding_match:
- enc = coding_match.group(1)
- else:
- lines.append(line)
- code = "\n".join(lines)
- if isinstance(code, bytes):
- code = code.decode(enc or DEFAULT_ENCODING)
- self.value = code + '\n'
-
- def __str__(self):
- return py3compat.unicode_to_str(self.value)
-
- def __unicode__(self):
- return self.value
-
- def __repr__(self):
- return 'IPython.macro.Macro(%s)' % repr(self.value)
-
- def __getstate__(self):
- """ needed for safe pickling via %store """
- return {'value': self.value}
-
- def __add__(self, other):
- if isinstance(other, Macro):
- return Macro(self.value + other.value)
- elif isinstance(other, py3compat.string_types):
- return Macro(self.value + other)
- raise TypeError
+"""Support for interactive macros in IPython"""
+
+#*****************************************************************************
+# Copyright (C) 2001-2005 Fernando Perez <fperez@colorado.edu>
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#*****************************************************************************
+
+import re
+
+from IPython.utils import py3compat
+from IPython.utils.encoding import DEFAULT_ENCODING
+
+coding_declaration = re.compile(r"#\s*coding[:=]\s*([-\w.]+)")
+
+class Macro(object):
+ """Simple class to store the value of macros as strings.
+
+ Macro is just a callable that executes a string of IPython
+ input when called.
+ """
+
+ def __init__(self,code):
+ """store the macro value, as a single string which can be executed"""
+ lines = []
+ enc = None
+ for line in code.splitlines():
+ coding_match = coding_declaration.match(line)
+ if coding_match:
+ enc = coding_match.group(1)
+ else:
+ lines.append(line)
+ code = "\n".join(lines)
+ if isinstance(code, bytes):
+ code = code.decode(enc or DEFAULT_ENCODING)
+ self.value = code + '\n'
+
+ def __str__(self):
+ return py3compat.unicode_to_str(self.value)
+
+ def __unicode__(self):
+ return self.value
+
+ def __repr__(self):
+ return 'IPython.macro.Macro(%s)' % repr(self.value)
+
+ def __getstate__(self):
+ """ needed for safe pickling via %store """
+ return {'value': self.value}
+
+ def __add__(self, other):
+ if isinstance(other, Macro):
+ return Macro(self.value + other.value)
+ elif isinstance(other, py3compat.string_types):
+ return Macro(self.value + other)
+ raise TypeError
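A hedged sketch of the Macro behaviour shown above; the source string is illustrative. A leading coding declaration is stripped, the stored value is always newline-terminated, and '+' concatenates two macros (or a macro and a string) into a new Macro.

    from IPython.core.macro import Macro

    m = Macro("# coding: utf-8\nprint('hi')")
    print(repr(m.value))       # coding line stripped; value ends with '\n'
    m2 = m + "print('bye')"
    print(repr(m2.value))      # both bodies concatenated, still newline-terminated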
diff --git a/contrib/python/ipython/py2/IPython/core/magic.py b/contrib/python/ipython/py2/IPython/core/magic.py
index 97e7b4291e..61a929fd23 100644
--- a/contrib/python/ipython/py2/IPython/core/magic.py
+++ b/contrib/python/ipython/py2/IPython/core/magic.py
@@ -1,680 +1,680 @@
-# encoding: utf-8
-"""Magic functions for InteractiveShell.
-"""
-from __future__ import print_function
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
-# Copyright (C) 2001 Fernando Perez <fperez@colorado.edu>
-# Copyright (C) 2008 The IPython Development Team
-
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-import os
-import re
-import sys
-import types
-from getopt import getopt, GetoptError
-
-from traitlets.config.configurable import Configurable
-from IPython.core import oinspect
-from IPython.core.error import UsageError
-from IPython.core.inputsplitter import ESC_MAGIC, ESC_MAGIC2
-from decorator import decorator
-from IPython.utils.ipstruct import Struct
-from IPython.utils.process import arg_split
-from IPython.utils.py3compat import string_types, iteritems
-from IPython.utils.text import dedent
+# encoding: utf-8
+"""Magic functions for InteractiveShell.
+"""
+from __future__ import print_function
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
+# Copyright (C) 2001 Fernando Perez <fperez@colorado.edu>
+# Copyright (C) 2008 The IPython Development Team
+
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+import os
+import re
+import sys
+import types
+from getopt import getopt, GetoptError
+
+from traitlets.config.configurable import Configurable
+from IPython.core import oinspect
+from IPython.core.error import UsageError
+from IPython.core.inputsplitter import ESC_MAGIC, ESC_MAGIC2
+from decorator import decorator
+from IPython.utils.ipstruct import Struct
+from IPython.utils.process import arg_split
+from IPython.utils.py3compat import string_types, iteritems
+from IPython.utils.text import dedent
from traitlets import Bool, Dict, Instance, observe
from logging import error
-
-#-----------------------------------------------------------------------------
-# Globals
-#-----------------------------------------------------------------------------
-
-# A dict we'll use for each class that has magics, used as temporary storage to
-# pass information between the @line/cell_magic method decorators and the
-# @magics_class class decorator, because the method decorators have no
-# access to the class when they run. See for more details:
-# http://stackoverflow.com/questions/2366713/can-a-python-decorator-of-an-instance-method-access-the-class
-
-magics = dict(line={}, cell={})
-
-magic_kinds = ('line', 'cell')
-magic_spec = ('line', 'cell', 'line_cell')
-magic_escapes = dict(line=ESC_MAGIC, cell=ESC_MAGIC2)
-
-#-----------------------------------------------------------------------------
-# Utility classes and functions
-#-----------------------------------------------------------------------------
-
-class Bunch: pass
-
-
-def on_off(tag):
- """Return an ON/OFF string for a 1/0 input. Simple utility function."""
- return ['OFF','ON'][tag]
-
-
-def compress_dhist(dh):
- """Compress a directory history into a new one with at most 20 entries.
-
- Return a new list made from the first and last 10 elements of dhist after
- removal of duplicates.
- """
- head, tail = dh[:-10], dh[-10:]
-
- newhead = []
- done = set()
- for h in head:
- if h in done:
- continue
- newhead.append(h)
- done.add(h)
-
- return newhead + tail
-
-
-def needs_local_scope(func):
-    """Decorator to mark magic functions which need the local scope to run."""
- func.needs_local_scope = True
- return func
-
-#-----------------------------------------------------------------------------
-# Class and method decorators for registering magics
-#-----------------------------------------------------------------------------
-
-def magics_class(cls):
- """Class decorator for all subclasses of the main Magics class.
-
- Any class that subclasses Magics *must* also apply this decorator, to
- ensure that all the methods that have been decorated as line/cell magics
- get correctly registered in the class instance. This is necessary because
- when method decorators run, the class does not exist yet, so they
- temporarily store their information into a module global. Application of
- this class decorator copies that global data to the class instance and
- clears the global.
-
- Obviously, this mechanism is not thread-safe, which means that the
- *creation* of subclasses of Magic should only be done in a single-thread
- context. Instantiation of the classes has no restrictions. Given that
- these classes are typically created at IPython startup time and before user
- application code becomes active, in practice this should not pose any
- problems.
- """
- cls.registered = True
- cls.magics = dict(line = magics['line'],
- cell = magics['cell'])
- magics['line'] = {}
- magics['cell'] = {}
- return cls
-
-
-def record_magic(dct, magic_kind, magic_name, func):
- """Utility function to store a function as a magic of a specific kind.
-
- Parameters
- ----------
- dct : dict
- A dictionary with 'line' and 'cell' subdicts.
-
- magic_kind : str
- Kind of magic to be stored.
-
- magic_name : str
- Key to store the magic as.
-
- func : function
- Callable object to store.
- """
- if magic_kind == 'line_cell':
- dct['line'][magic_name] = dct['cell'][magic_name] = func
- else:
- dct[magic_kind][magic_name] = func
-
-
-def validate_type(magic_kind):
- """Ensure that the given magic_kind is valid.
-
- Check that the given magic_kind is one of the accepted spec types (stored
- in the global `magic_spec`), raise ValueError otherwise.
- """
- if magic_kind not in magic_spec:
- raise ValueError('magic_kind must be one of %s, %s given' %
- magic_kinds, magic_kind)
-
-
-# The docstrings for the decorator below will be fairly similar for the two
-# types (method and function), so we generate them here once and reuse the
-# templates below.
-_docstring_template = \
-"""Decorate the given {0} as {1} magic.
-
-The decorator can be used with or without arguments, as follows.
-
-i) without arguments: it will create a {1} magic named as the {0} being
-decorated::
-
- @deco
- def foo(...)
-
-will create a {1} magic named `foo`.
-
-ii) with one string argument: which will be used as the actual name of the
-resulting magic::
-
- @deco('bar')
- def foo(...)
-
-will create a {1} magic named `bar`.
-"""
-
-# These two are decorator factories. While they are conceptually very similar,
-# there are enough differences in the details that it's simpler to have them
-# written as completely standalone functions rather than trying to share code
-# and make a single one with convoluted logic.
-
-def _method_magic_marker(magic_kind):
- """Decorator factory for methods in Magics subclasses.
- """
-
- validate_type(magic_kind)
-
- # This is a closure to capture the magic_kind. We could also use a class,
- # but it's overkill for just that one bit of state.
- def magic_deco(arg):
- call = lambda f, *a, **k: f(*a, **k)
-
- if callable(arg):
- # "Naked" decorator call (just @foo, no args)
- func = arg
- name = func.__name__
- retval = decorator(call, func)
- record_magic(magics, magic_kind, name, name)
- elif isinstance(arg, string_types):
- # Decorator called with arguments (@foo('bar'))
- name = arg
- def mark(func, *a, **kw):
- record_magic(magics, magic_kind, name, func.__name__)
- return decorator(call, func)
- retval = mark
- else:
- raise TypeError("Decorator can only be called with "
- "string or function")
- return retval
-
- # Ensure the resulting decorator has a usable docstring
- magic_deco.__doc__ = _docstring_template.format('method', magic_kind)
- return magic_deco
-
-
-def _function_magic_marker(magic_kind):
- """Decorator factory for standalone functions.
- """
- validate_type(magic_kind)
-
- # This is a closure to capture the magic_kind. We could also use a class,
- # but it's overkill for just that one bit of state.
- def magic_deco(arg):
- call = lambda f, *a, **k: f(*a, **k)
-
- # Find get_ipython() in the caller's namespace
- caller = sys._getframe(1)
- for ns in ['f_locals', 'f_globals', 'f_builtins']:
- get_ipython = getattr(caller, ns).get('get_ipython')
- if get_ipython is not None:
- break
- else:
- raise NameError('Decorator can only run in context where '
- '`get_ipython` exists')
-
- ip = get_ipython()
-
- if callable(arg):
- # "Naked" decorator call (just @foo, no args)
- func = arg
- name = func.__name__
- ip.register_magic_function(func, magic_kind, name)
- retval = decorator(call, func)
- elif isinstance(arg, string_types):
- # Decorator called with arguments (@foo('bar'))
- name = arg
- def mark(func, *a, **kw):
- ip.register_magic_function(func, magic_kind, name)
- return decorator(call, func)
- retval = mark
- else:
- raise TypeError("Decorator can only be called with "
- "string or function")
- return retval
-
- # Ensure the resulting decorator has a usable docstring
- ds = _docstring_template.format('function', magic_kind)
-
- ds += dedent("""
- Note: this decorator can only be used in a context where IPython is already
- active, so that the `get_ipython()` call succeeds. You can therefore use
- it in your startup files loaded after IPython initializes, but *not* in the
- IPython configuration file itself, which is executed before IPython is
- fully up and running. Any file located in the `startup` subdirectory of
- your configuration profile will be OK in this sense.
- """)
-
- magic_deco.__doc__ = ds
- return magic_deco
-
-
-# Create the actual decorators for public use
-
-# These three are used to decorate methods in class definitions
-line_magic = _method_magic_marker('line')
-cell_magic = _method_magic_marker('cell')
-line_cell_magic = _method_magic_marker('line_cell')
-
-# These three decorate standalone functions and perform the decoration
-# immediately. They can only run where get_ipython() works
-register_line_magic = _function_magic_marker('line')
-register_cell_magic = _function_magic_marker('cell')
-register_line_cell_magic = _function_magic_marker('line_cell')
-
-#-----------------------------------------------------------------------------
-# Core Magic classes
-#-----------------------------------------------------------------------------
-
-class MagicsManager(Configurable):
- """Object that handles all magic-related functionality for IPython.
- """
- # Non-configurable class attributes
-
- # A two-level dict, first keyed by magic type, then by magic function, and
- # holding the actual callable object as value. This is the dict used for
- # magic function dispatch
- magics = Dict()
-
- # A registry of the original objects that we've been given holding magics.
- registry = Dict()
-
- shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
-
+
+#-----------------------------------------------------------------------------
+# Globals
+#-----------------------------------------------------------------------------
+
+# A dict we'll use for each class that has magics, used as temporary storage to
+# pass information between the @line/cell_magic method decorators and the
+# @magics_class class decorator, because the method decorators have no
+# access to the class when they run. See for more details:
+# http://stackoverflow.com/questions/2366713/can-a-python-decorator-of-an-instance-method-access-the-class
+
+magics = dict(line={}, cell={})
+
+magic_kinds = ('line', 'cell')
+magic_spec = ('line', 'cell', 'line_cell')
+magic_escapes = dict(line=ESC_MAGIC, cell=ESC_MAGIC2)
+
+#-----------------------------------------------------------------------------
+# Utility classes and functions
+#-----------------------------------------------------------------------------
+
+class Bunch: pass
+
+
+def on_off(tag):
+ """Return an ON/OFF string for a 1/0 input. Simple utility function."""
+ return ['OFF','ON'][tag]
+
+
+def compress_dhist(dh):
+ """Compress a directory history into a new one with at most 20 entries.
+
+ Return a new list made from the first and last 10 elements of dhist after
+ removal of duplicates.
+ """
+ head, tail = dh[:-10], dh[-10:]
+
+ newhead = []
+ done = set()
+ for h in head:
+ if h in done:
+ continue
+ newhead.append(h)
+ done.add(h)
+
+ return newhead + tail
+
+
+def needs_local_scope(func):
+    """Decorator to mark magic functions which need the local scope to run."""
+ func.needs_local_scope = True
+ return func
+
+#-----------------------------------------------------------------------------
+# Class and method decorators for registering magics
+#-----------------------------------------------------------------------------
+
+def magics_class(cls):
+ """Class decorator for all subclasses of the main Magics class.
+
+ Any class that subclasses Magics *must* also apply this decorator, to
+ ensure that all the methods that have been decorated as line/cell magics
+ get correctly registered in the class instance. This is necessary because
+ when method decorators run, the class does not exist yet, so they
+ temporarily store their information into a module global. Application of
+ this class decorator copies that global data to the class instance and
+ clears the global.
+
+ Obviously, this mechanism is not thread-safe, which means that the
+ *creation* of subclasses of Magic should only be done in a single-thread
+ context. Instantiation of the classes has no restrictions. Given that
+ these classes are typically created at IPython startup time and before user
+ application code becomes active, in practice this should not pose any
+ problems.
+ """
+ cls.registered = True
+ cls.magics = dict(line = magics['line'],
+ cell = magics['cell'])
+ magics['line'] = {}
+ magics['cell'] = {}
+ return cls
+
+
+def record_magic(dct, magic_kind, magic_name, func):
+ """Utility function to store a function as a magic of a specific kind.
+
+ Parameters
+ ----------
+ dct : dict
+ A dictionary with 'line' and 'cell' subdicts.
+
+ magic_kind : str
+ Kind of magic to be stored.
+
+ magic_name : str
+ Key to store the magic as.
+
+ func : function
+ Callable object to store.
+ """
+ if magic_kind == 'line_cell':
+ dct['line'][magic_name] = dct['cell'][magic_name] = func
+ else:
+ dct[magic_kind][magic_name] = func
+
+
+def validate_type(magic_kind):
+ """Ensure that the given magic_kind is valid.
+
+ Check that the given magic_kind is one of the accepted spec types (stored
+ in the global `magic_spec`), raise ValueError otherwise.
+ """
+ if magic_kind not in magic_spec:
+        raise ValueError('magic_kind must be one of %s, %s given' %
+                         (magic_spec, magic_kind))
+
+
+# The docstrings for the decorator below will be fairly similar for the two
+# types (method and function), so we generate them here once and reuse the
+# templates below.
+_docstring_template = \
+"""Decorate the given {0} as {1} magic.
+
+The decorator can be used with or without arguments, as follows.
+
+i) without arguments: it will create a {1} magic named as the {0} being
+decorated::
+
+ @deco
+ def foo(...)
+
+will create a {1} magic named `foo`.
+
+ii) with one string argument: which will be used as the actual name of the
+resulting magic::
+
+ @deco('bar')
+ def foo(...)
+
+will create a {1} magic named `bar`.
+"""
+
+# These two are decorator factories. While they are conceptually very similar,
+# there are enough differences in the details that it's simpler to have them
+# written as completely standalone functions rather than trying to share code
+# and make a single one with convoluted logic.
+
+def _method_magic_marker(magic_kind):
+ """Decorator factory for methods in Magics subclasses.
+ """
+
+ validate_type(magic_kind)
+
+ # This is a closure to capture the magic_kind. We could also use a class,
+ # but it's overkill for just that one bit of state.
+ def magic_deco(arg):
+ call = lambda f, *a, **k: f(*a, **k)
+
+ if callable(arg):
+ # "Naked" decorator call (just @foo, no args)
+ func = arg
+ name = func.__name__
+ retval = decorator(call, func)
+ record_magic(magics, magic_kind, name, name)
+ elif isinstance(arg, string_types):
+ # Decorator called with arguments (@foo('bar'))
+ name = arg
+ def mark(func, *a, **kw):
+ record_magic(magics, magic_kind, name, func.__name__)
+ return decorator(call, func)
+ retval = mark
+ else:
+ raise TypeError("Decorator can only be called with "
+ "string or function")
+ return retval
+
+ # Ensure the resulting decorator has a usable docstring
+ magic_deco.__doc__ = _docstring_template.format('method', magic_kind)
+ return magic_deco
+
+
+def _function_magic_marker(magic_kind):
+ """Decorator factory for standalone functions.
+ """
+ validate_type(magic_kind)
+
+ # This is a closure to capture the magic_kind. We could also use a class,
+ # but it's overkill for just that one bit of state.
+ def magic_deco(arg):
+ call = lambda f, *a, **k: f(*a, **k)
+
+ # Find get_ipython() in the caller's namespace
+ caller = sys._getframe(1)
+ for ns in ['f_locals', 'f_globals', 'f_builtins']:
+ get_ipython = getattr(caller, ns).get('get_ipython')
+ if get_ipython is not None:
+ break
+ else:
+ raise NameError('Decorator can only run in context where '
+ '`get_ipython` exists')
+
+ ip = get_ipython()
+
+ if callable(arg):
+ # "Naked" decorator call (just @foo, no args)
+ func = arg
+ name = func.__name__
+ ip.register_magic_function(func, magic_kind, name)
+ retval = decorator(call, func)
+ elif isinstance(arg, string_types):
+ # Decorator called with arguments (@foo('bar'))
+ name = arg
+ def mark(func, *a, **kw):
+ ip.register_magic_function(func, magic_kind, name)
+ return decorator(call, func)
+ retval = mark
+ else:
+ raise TypeError("Decorator can only be called with "
+ "string or function")
+ return retval
+
+ # Ensure the resulting decorator has a usable docstring
+ ds = _docstring_template.format('function', magic_kind)
+
+ ds += dedent("""
+ Note: this decorator can only be used in a context where IPython is already
+ active, so that the `get_ipython()` call succeeds. You can therefore use
+ it in your startup files loaded after IPython initializes, but *not* in the
+ IPython configuration file itself, which is executed before IPython is
+ fully up and running. Any file located in the `startup` subdirectory of
+ your configuration profile will be OK in this sense.
+ """)
+
+ magic_deco.__doc__ = ds
+ return magic_deco
+
+
+# Create the actual decorators for public use
+
+# These three are used to decorate methods in class definitions
+line_magic = _method_magic_marker('line')
+cell_magic = _method_magic_marker('cell')
+line_cell_magic = _method_magic_marker('line_cell')
+
+# These three decorate standalone functions and perform the decoration
+# immediately. They can only run where get_ipython() works
+register_line_magic = _function_magic_marker('line')
+register_cell_magic = _function_magic_marker('cell')
+register_line_cell_magic = _function_magic_marker('line_cell')
+
+#-----------------------------------------------------------------------------
+# Core Magic classes
+#-----------------------------------------------------------------------------
+
+class MagicsManager(Configurable):
+ """Object that handles all magic-related functionality for IPython.
+ """
+ # Non-configurable class attributes
+
+ # A two-level dict, first keyed by magic type, then by magic function, and
+ # holding the actual callable object as value. This is the dict used for
+ # magic function dispatch
+ magics = Dict()
+
+ # A registry of the original objects that we've been given holding magics.
+ registry = Dict()
+
+ shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
+
auto_magic = Bool(True, help=
"Automatically call line magics without requiring explicit % prefix"
).tag(config=True)
@observe('auto_magic')
def _auto_magic_changed(self, change):
self.shell.automagic = change['new']
-
- _auto_status = [
- 'Automagic is OFF, % prefix IS needed for line magics.',
- 'Automagic is ON, % prefix IS NOT needed for line magics.']
-
- user_magics = Instance('IPython.core.magics.UserMagics', allow_none=True)
-
- def __init__(self, shell=None, config=None, user_magics=None, **traits):
-
- super(MagicsManager, self).__init__(shell=shell, config=config,
- user_magics=user_magics, **traits)
- self.magics = dict(line={}, cell={})
- # Let's add the user_magics to the registry for uniformity, so *all*
- # registered magic containers can be found there.
- self.registry[user_magics.__class__.__name__] = user_magics
-
- def auto_status(self):
- """Return descriptive string with automagic status."""
- return self._auto_status[self.auto_magic]
-
- def lsmagic(self):
- """Return a dict of currently available magic functions.
-
- The return dict has the keys 'line' and 'cell', corresponding to the
- two types of magics we support. Each value is a list of names.
- """
- return self.magics
-
- def lsmagic_docs(self, brief=False, missing=''):
- """Return dict of documentation of magic functions.
-
- The return dict has the keys 'line' and 'cell', corresponding to the
- two types of magics we support. Each value is a dict keyed by magic
- name whose value is the function docstring. If a docstring is
- unavailable, the value of `missing` is used instead.
-
- If brief is True, only the first line of each docstring will be returned.
- """
- docs = {}
- for m_type in self.magics:
- m_docs = {}
- for m_name, m_func in iteritems(self.magics[m_type]):
- if m_func.__doc__:
- if brief:
- m_docs[m_name] = m_func.__doc__.split('\n', 1)[0]
- else:
- m_docs[m_name] = m_func.__doc__.rstrip()
- else:
- m_docs[m_name] = missing
- docs[m_type] = m_docs
- return docs
-
- def register(self, *magic_objects):
- """Register one or more instances of Magics.
-
- Take one or more classes or instances of classes that subclass the main
- `core.Magic` class, and register them with IPython to use the magic
- functions they provide. The registration process will then ensure that
-        any methods that have been decorated to provide line and/or cell magics will
- be recognized with the `%x`/`%%x` syntax as a line/cell magic
- respectively.
-
- If classes are given, they will be instantiated with the default
- constructor. If your classes need a custom constructor, you should
-        instantiate them first and pass the instance.
-
- The provided arguments can be an arbitrary mix of classes and instances.
-
- Parameters
- ----------
- magic_objects : one or more classes or instances
- """
- # Start by validating them to ensure they have all had their magic
- # methods registered at the instance level
- for m in magic_objects:
- if not m.registered:
-                raise ValueError("Class of magics %r was constructed without "
-                                 "the @magics_class decorator" % m)
- if isinstance(m, type):
- # If we're given an uninstantiated class
- m = m(shell=self.shell)
-
- # Now that we have an instance, we can register it and update the
- # table of callables
- self.registry[m.__class__.__name__] = m
- for mtype in magic_kinds:
- self.magics[mtype].update(m.magics[mtype])
-
- def register_function(self, func, magic_kind='line', magic_name=None):
- """Expose a standalone function as magic function for IPython.
-
- This will create an IPython magic (line, cell or both) from a
- standalone function. The functions should have the following
- signatures:
-
- * For line magics: `def f(line)`
- * For cell magics: `def f(line, cell)`
- * For a function that does both: `def f(line, cell=None)`
-
- In the latter case, the function will be called with `cell==None` when
- invoked as `%f`, and with cell as a string when invoked as `%%f`.
-
- Parameters
- ----------
- func : callable
- Function to be registered as a magic.
-
- magic_kind : str
- Kind of magic, one of 'line', 'cell' or 'line_cell'
-
- magic_name : optional str
- If given, the name the magic will have in the IPython namespace. By
- default, the name of the function itself is used.
- """
-
- # Create the new method in the user_magics and register it in the
- # global table
- validate_type(magic_kind)
- magic_name = func.__name__ if magic_name is None else magic_name
- setattr(self.user_magics, magic_name, func)
- record_magic(self.magics, magic_kind, magic_name, func)
-
- def register_alias(self, alias_name, magic_name, magic_kind='line'):
- """Register an alias to a magic function.
-
- The alias is an instance of :class:`MagicAlias`, which holds the
- name and kind of the magic it should call. Binding is done at
- call time, so if the underlying magic function is changed the alias
- will call the new function.
-
- Parameters
- ----------
- alias_name : str
- The name of the magic to be registered.
-
- magic_name : str
- The name of an existing magic.
-
- magic_kind : str
- Kind of magic, one of 'line' or 'cell'
- """
-
- # `validate_type` is too permissive, as it allows 'line_cell'
- # which we do not handle.
- if magic_kind not in magic_kinds:
-            raise ValueError('magic_kind must be one of %s, %s given' %
-                             (magic_kinds, magic_kind))
-
- alias = MagicAlias(self.shell, magic_name, magic_kind)
- setattr(self.user_magics, alias_name, alias)
- record_magic(self.magics, magic_kind, alias_name, alias)
-
-# Key base class that provides the central functionality for magics.
-
-
-class Magics(Configurable):
- """Base class for implementing magic functions.
-
- Shell functions which can be reached as %function_name. All magic
- functions should accept a string, which they can parse for their own
- needs. This can make some functions easier to type, eg `%cd ../`
- vs. `%cd("../")`
-
- Classes providing magic functions need to subclass this class, and they
- MUST:
-
- - Use the method decorators `@line_magic` and `@cell_magic` to decorate
- individual methods as magic functions, AND
-
- - Use the class decorator `@magics_class` to ensure that the magic
- methods are properly registered at the instance level upon instance
- initialization.
-
- See :mod:`magic_functions` for examples of actual implementation classes.
- """
- # Dict holding all command-line options for each magic.
- options_table = None
- # Dict for the mapping of magic names to methods, set by class decorator
- magics = None
- # Flag to check that the class decorator was properly applied
- registered = False
- # Instance of IPython shell
- shell = None
-
- def __init__(self, shell=None, **kwargs):
- if not(self.__class__.registered):
- raise ValueError('Magics subclass without registration - '
- 'did you forget to apply @magics_class?')
- if shell is not None:
- if hasattr(shell, 'configurables'):
- shell.configurables.append(self)
- if hasattr(shell, 'config'):
- kwargs.setdefault('parent', shell)
-
- self.shell = shell
- self.options_table = {}
- # The method decorators are run when the instance doesn't exist yet, so
- # they can only record the names of the methods they are supposed to
- # grab. Only now, that the instance exists, can we create the proper
- # mapping to bound methods. So we read the info off the original names
- # table and replace each method name by the actual bound method.
- # But we mustn't clobber the *class* mapping, in case of multiple instances.
- class_magics = self.magics
- self.magics = {}
- for mtype in magic_kinds:
- tab = self.magics[mtype] = {}
- cls_tab = class_magics[mtype]
- for magic_name, meth_name in iteritems(cls_tab):
- if isinstance(meth_name, string_types):
- # it's a method name, grab it
- tab[magic_name] = getattr(self, meth_name)
- else:
- # it's the real thing
- tab[magic_name] = meth_name
- # Configurable **needs** to be initiated at the end or the config
- # magics get screwed up.
- super(Magics, self).__init__(**kwargs)
-
- def arg_err(self,func):
- """Print docstring if incorrect arguments were passed"""
- print('Error in arguments:')
- print(oinspect.getdoc(func))
-
- def format_latex(self, strng):
- """Format a string for latex inclusion."""
-
- # Characters that need to be escaped for latex:
- escape_re = re.compile(r'(%|_|\$|#|&)',re.MULTILINE)
- # Magic command names as headers:
- cmd_name_re = re.compile(r'^(%s.*?):' % ESC_MAGIC,
- re.MULTILINE)
- # Magic commands
- cmd_re = re.compile(r'(?P<cmd>%s.+?\b)(?!\}\}:)' % ESC_MAGIC,
- re.MULTILINE)
- # Paragraph continue
- par_re = re.compile(r'\\$',re.MULTILINE)
-
- # The "\n" symbol
- newline_re = re.compile(r'\\n')
-
- # Now build the string for output:
- #strng = cmd_name_re.sub(r'\n\\texttt{\\textsl{\\large \1}}:',strng)
- strng = cmd_name_re.sub(r'\n\\bigskip\n\\texttt{\\textbf{ \1}}:',
- strng)
- strng = cmd_re.sub(r'\\texttt{\g<cmd>}',strng)
- strng = par_re.sub(r'\\\\',strng)
- strng = escape_re.sub(r'\\\1',strng)
- strng = newline_re.sub(r'\\textbackslash{}n',strng)
- return strng
-
- def parse_options(self, arg_str, opt_str, *long_opts, **kw):
- """Parse options passed to an argument string.
-
- The interface is similar to that of :func:`getopt.getopt`, but it
- returns a :class:`~IPython.utils.struct.Struct` with the options as keys
- and the stripped argument string still as a string.
-
-        arg_str is split into a true sys.argv vector using shlex.split.
- This allows us to easily expand variables, glob files, quote
- arguments, etc.
-
- Parameters
- ----------
-
- arg_str : str
- The arguments to parse.
-
- opt_str : str
- The options specification.
-
- mode : str, default 'string'
- If given as 'list', the argument string is returned as a list (split
- on whitespace) instead of a string.
-
- list_all : bool, default False
- Put all option values in lists. Normally only options
- appearing more than once are put in a list.
-
- posix : bool, default True
- Whether to split the input line in POSIX mode or not, as per the
- conventions outlined in the :mod:`shlex` module from the standard
- library.
- """
-
- # inject default options at the beginning of the input line
- caller = sys._getframe(1).f_code.co_name
- arg_str = '%s %s' % (self.options_table.get(caller,''),arg_str)
-
- mode = kw.get('mode','string')
- if mode not in ['string','list']:
- raise ValueError('incorrect mode given: %s' % mode)
- # Get options
- list_all = kw.get('list_all',0)
- posix = kw.get('posix', os.name == 'posix')
- strict = kw.get('strict', True)
-
- # Check if we have more than one argument to warrant extra processing:
- odict = {} # Dictionary with options
- args = arg_str.split()
- if len(args) >= 1:
- # If the list of inputs only has 0 or 1 thing in it, there's no
- # need to look for options
- argv = arg_split(arg_str, posix, strict)
- # Do regular option processing
- try:
- opts,args = getopt(argv, opt_str, long_opts)
- except GetoptError as e:
- raise UsageError('%s ( allowed: "%s" %s)' % (e.msg,opt_str,
- " ".join(long_opts)))
- for o,a in opts:
- if o.startswith('--'):
- o = o[2:]
- else:
- o = o[1:]
- try:
- odict[o].append(a)
- except AttributeError:
- odict[o] = [odict[o],a]
- except KeyError:
- if list_all:
- odict[o] = [a]
- else:
- odict[o] = a
-
- # Prepare opts,args for return
- opts = Struct(odict)
- if mode == 'string':
- args = ' '.join(args)
-
- return opts,args
-
- def default_option(self, fn, optstr):
- """Make an entry in the options_table for fn, with value optstr"""
-
- if fn not in self.lsmagic():
- error("%s is not a magic function" % fn)
- self.options_table[fn] = optstr
-
-
-class MagicAlias(object):
- """An alias to another magic function.
-
- An alias is determined by its magic name and magic kind. Lookup
- is done at call time, so if the underlying magic changes the alias
- will call the new function.
-
- Use the :meth:`MagicsManager.register_alias` method or the
- `%alias_magic` magic function to create and register a new alias.
- """
- def __init__(self, shell, magic_name, magic_kind):
- self.shell = shell
- self.magic_name = magic_name
- self.magic_kind = magic_kind
-
- self.pretty_target = '%s%s' % (magic_escapes[self.magic_kind], self.magic_name)
- self.__doc__ = "Alias for `%s`." % self.pretty_target
-
- self._in_call = False
-
- def __call__(self, *args, **kwargs):
- """Call the magic alias."""
- fn = self.shell.find_magic(self.magic_name, self.magic_kind)
- if fn is None:
- raise UsageError("Magic `%s` not found." % self.pretty_target)
-
- # Protect against infinite recursion.
- if self._in_call:
- raise UsageError("Infinite recursion detected; "
- "magic aliases cannot call themselves.")
- self._in_call = True
- try:
- return fn(*args, **kwargs)
- finally:
- self._in_call = False
+
+ _auto_status = [
+ 'Automagic is OFF, % prefix IS needed for line magics.',
+ 'Automagic is ON, % prefix IS NOT needed for line magics.']
+
+ user_magics = Instance('IPython.core.magics.UserMagics', allow_none=True)
+
+ def __init__(self, shell=None, config=None, user_magics=None, **traits):
+
+ super(MagicsManager, self).__init__(shell=shell, config=config,
+ user_magics=user_magics, **traits)
+ self.magics = dict(line={}, cell={})
+ # Let's add the user_magics to the registry for uniformity, so *all*
+ # registered magic containers can be found there.
+ self.registry[user_magics.__class__.__name__] = user_magics
+
+ def auto_status(self):
+ """Return descriptive string with automagic status."""
+ return self._auto_status[self.auto_magic]
+
+ def lsmagic(self):
+ """Return a dict of currently available magic functions.
+
+ The return dict has the keys 'line' and 'cell', corresponding to the
+ two types of magics we support. Each value is a list of names.
+ """
+ return self.magics
+
+ def lsmagic_docs(self, brief=False, missing=''):
+ """Return dict of documentation of magic functions.
+
+ The return dict has the keys 'line' and 'cell', corresponding to the
+ two types of magics we support. Each value is a dict keyed by magic
+ name whose value is the function docstring. If a docstring is
+ unavailable, the value of `missing` is used instead.
+
+ If brief is True, only the first line of each docstring will be returned.
+ """
+ docs = {}
+ for m_type in self.magics:
+ m_docs = {}
+ for m_name, m_func in iteritems(self.magics[m_type]):
+ if m_func.__doc__:
+ if brief:
+ m_docs[m_name] = m_func.__doc__.split('\n', 1)[0]
+ else:
+ m_docs[m_name] = m_func.__doc__.rstrip()
+ else:
+ m_docs[m_name] = missing
+ docs[m_type] = m_docs
+ return docs
+
+ def register(self, *magic_objects):
+ """Register one or more instances of Magics.
+
+ Take one or more classes or instances of classes that subclass the main
+ `core.Magic` class, and register them with IPython to use the magic
+ functions they provide. The registration process will then ensure that
+        any methods that have been decorated to provide line and/or cell magics will
+ be recognized with the `%x`/`%%x` syntax as a line/cell magic
+ respectively.
+
+ If classes are given, they will be instantiated with the default
+ constructor. If your classes need a custom constructor, you should
+        instantiate them first and pass the instance.
+
+ The provided arguments can be an arbitrary mix of classes and instances.
+
+ Parameters
+ ----------
+ magic_objects : one or more classes or instances
+ """
+ # Start by validating them to ensure they have all had their magic
+ # methods registered at the instance level
+ for m in magic_objects:
+ if not m.registered:
+                raise ValueError("Class of magics %r was constructed without "
+                                 "the @magics_class decorator" % m)
+ if isinstance(m, type):
+ # If we're given an uninstantiated class
+ m = m(shell=self.shell)
+
+ # Now that we have an instance, we can register it and update the
+ # table of callables
+ self.registry[m.__class__.__name__] = m
+ for mtype in magic_kinds:
+ self.magics[mtype].update(m.magics[mtype])
+
+ def register_function(self, func, magic_kind='line', magic_name=None):
+ """Expose a standalone function as magic function for IPython.
+
+ This will create an IPython magic (line, cell or both) from a
+ standalone function. The functions should have the following
+ signatures:
+
+ * For line magics: `def f(line)`
+ * For cell magics: `def f(line, cell)`
+ * For a function that does both: `def f(line, cell=None)`
+
+ In the latter case, the function will be called with `cell==None` when
+ invoked as `%f`, and with cell as a string when invoked as `%%f`.
+
+ Parameters
+ ----------
+ func : callable
+ Function to be registered as a magic.
+
+ magic_kind : str
+ Kind of magic, one of 'line', 'cell' or 'line_cell'
+
+ magic_name : optional str
+ If given, the name the magic will have in the IPython namespace. By
+ default, the name of the function itself is used.
+ """
+
+ # Create the new method in the user_magics and register it in the
+ # global table
+ validate_type(magic_kind)
+ magic_name = func.__name__ if magic_name is None else magic_name
+ setattr(self.user_magics, magic_name, func)
+ record_magic(self.magics, magic_kind, magic_name, func)
+
+ def register_alias(self, alias_name, magic_name, magic_kind='line'):
+ """Register an alias to a magic function.
+
+ The alias is an instance of :class:`MagicAlias`, which holds the
+ name and kind of the magic it should call. Binding is done at
+ call time, so if the underlying magic function is changed the alias
+ will call the new function.
+
+ Parameters
+ ----------
+ alias_name : str
+ The name of the magic to be registered.
+
+ magic_name : str
+ The name of an existing magic.
+
+ magic_kind : str
+ Kind of magic, one of 'line' or 'cell'
+ """
+
+ # `validate_type` is too permissive, as it allows 'line_cell'
+ # which we do not handle.
+ if magic_kind not in magic_kinds:
+            raise ValueError('magic_kind must be one of %s, %s given' %
+                             (magic_kinds, magic_kind))
+
+ alias = MagicAlias(self.shell, magic_name, magic_kind)
+ setattr(self.user_magics, alias_name, alias)
+ record_magic(self.magics, magic_kind, alias_name, alias)
+
+# Key base class that provides the central functionality for magics.
+
+
+class Magics(Configurable):
+ """Base class for implementing magic functions.
+
+ Shell functions which can be reached as %function_name. All magic
+ functions should accept a string, which they can parse for their own
+ needs. This can make some functions easier to type, eg `%cd ../`
+ vs. `%cd("../")`
+
+ Classes providing magic functions need to subclass this class, and they
+ MUST:
+
+ - Use the method decorators `@line_magic` and `@cell_magic` to decorate
+ individual methods as magic functions, AND
+
+ - Use the class decorator `@magics_class` to ensure that the magic
+ methods are properly registered at the instance level upon instance
+ initialization.
+
+ See :mod:`magic_functions` for examples of actual implementation classes.
+ """
+ # Dict holding all command-line options for each magic.
+ options_table = None
+ # Dict for the mapping of magic names to methods, set by class decorator
+ magics = None
+ # Flag to check that the class decorator was properly applied
+ registered = False
+ # Instance of IPython shell
+ shell = None
+
+ def __init__(self, shell=None, **kwargs):
+ if not(self.__class__.registered):
+ raise ValueError('Magics subclass without registration - '
+ 'did you forget to apply @magics_class?')
+ if shell is not None:
+ if hasattr(shell, 'configurables'):
+ shell.configurables.append(self)
+ if hasattr(shell, 'config'):
+ kwargs.setdefault('parent', shell)
+
+ self.shell = shell
+ self.options_table = {}
+ # The method decorators are run when the instance doesn't exist yet, so
+ # they can only record the names of the methods they are supposed to
+ # grab. Only now, that the instance exists, can we create the proper
+ # mapping to bound methods. So we read the info off the original names
+ # table and replace each method name by the actual bound method.
+ # But we mustn't clobber the *class* mapping, in case of multiple instances.
+ class_magics = self.magics
+ self.magics = {}
+ for mtype in magic_kinds:
+ tab = self.magics[mtype] = {}
+ cls_tab = class_magics[mtype]
+ for magic_name, meth_name in iteritems(cls_tab):
+ if isinstance(meth_name, string_types):
+ # it's a method name, grab it
+ tab[magic_name] = getattr(self, meth_name)
+ else:
+ # it's the real thing
+ tab[magic_name] = meth_name
+ # Configurable **needs** to be initiated at the end or the config
+ # magics get screwed up.
+ super(Magics, self).__init__(**kwargs)
+
+ def arg_err(self,func):
+ """Print docstring if incorrect arguments were passed"""
+ print('Error in arguments:')
+ print(oinspect.getdoc(func))
+
+ def format_latex(self, strng):
+ """Format a string for latex inclusion."""
+
+ # Characters that need to be escaped for latex:
+ escape_re = re.compile(r'(%|_|\$|#|&)',re.MULTILINE)
+ # Magic command names as headers:
+ cmd_name_re = re.compile(r'^(%s.*?):' % ESC_MAGIC,
+ re.MULTILINE)
+ # Magic commands
+ cmd_re = re.compile(r'(?P<cmd>%s.+?\b)(?!\}\}:)' % ESC_MAGIC,
+ re.MULTILINE)
+ # Paragraph continue
+ par_re = re.compile(r'\\$',re.MULTILINE)
+
+ # The "\n" symbol
+ newline_re = re.compile(r'\\n')
+
+ # Now build the string for output:
+ #strng = cmd_name_re.sub(r'\n\\texttt{\\textsl{\\large \1}}:',strng)
+ strng = cmd_name_re.sub(r'\n\\bigskip\n\\texttt{\\textbf{ \1}}:',
+ strng)
+ strng = cmd_re.sub(r'\\texttt{\g<cmd>}',strng)
+ strng = par_re.sub(r'\\\\',strng)
+ strng = escape_re.sub(r'\\\1',strng)
+ strng = newline_re.sub(r'\\textbackslash{}n',strng)
+ return strng
+
+ def parse_options(self, arg_str, opt_str, *long_opts, **kw):
+ """Parse options passed to an argument string.
+
+ The interface is similar to that of :func:`getopt.getopt`, but it
+ returns a :class:`~IPython.utils.struct.Struct` with the options as keys
+ and the stripped argument string still as a string.
+
+        arg_str is split into a true sys.argv vector using shlex.split.
+ This allows us to easily expand variables, glob files, quote
+ arguments, etc.
+
+ Parameters
+ ----------
+
+ arg_str : str
+ The arguments to parse.
+
+ opt_str : str
+ The options specification.
+
+ mode : str, default 'string'
+ If given as 'list', the argument string is returned as a list (split
+ on whitespace) instead of a string.
+
+ list_all : bool, default False
+ Put all option values in lists. Normally only options
+ appearing more than once are put in a list.
+
+ posix : bool, default True
+ Whether to split the input line in POSIX mode or not, as per the
+ conventions outlined in the :mod:`shlex` module from the standard
+ library.
+ """
+
+ # inject default options at the beginning of the input line
+ caller = sys._getframe(1).f_code.co_name
+ arg_str = '%s %s' % (self.options_table.get(caller,''),arg_str)
+
+ mode = kw.get('mode','string')
+ if mode not in ['string','list']:
+ raise ValueError('incorrect mode given: %s' % mode)
+ # Get options
+ list_all = kw.get('list_all',0)
+ posix = kw.get('posix', os.name == 'posix')
+ strict = kw.get('strict', True)
+
+ # Check if we have more than one argument to warrant extra processing:
+ odict = {} # Dictionary with options
+ args = arg_str.split()
+ if len(args) >= 1:
+ # If the list of inputs only has 0 or 1 thing in it, there's no
+ # need to look for options
+ argv = arg_split(arg_str, posix, strict)
+ # Do regular option processing
+ try:
+ opts,args = getopt(argv, opt_str, long_opts)
+ except GetoptError as e:
+ raise UsageError('%s ( allowed: "%s" %s)' % (e.msg,opt_str,
+ " ".join(long_opts)))
+ for o,a in opts:
+ if o.startswith('--'):
+ o = o[2:]
+ else:
+ o = o[1:]
+ try:
+ odict[o].append(a)
+ except AttributeError:
+ odict[o] = [odict[o],a]
+ except KeyError:
+ if list_all:
+ odict[o] = [a]
+ else:
+ odict[o] = a
+
+ # Prepare opts,args for return
+ opts = Struct(odict)
+ if mode == 'string':
+ args = ' '.join(args)
+
+ return opts,args
+
+ def default_option(self, fn, optstr):
+ """Make an entry in the options_table for fn, with value optstr"""
+
+ if fn not in self.lsmagic():
+ error("%s is not a magic function" % fn)
+ self.options_table[fn] = optstr
+
+
+class MagicAlias(object):
+ """An alias to another magic function.
+
+ An alias is determined by its magic name and magic kind. Lookup
+ is done at call time, so if the underlying magic changes the alias
+ will call the new function.
+
+ Use the :meth:`MagicsManager.register_alias` method or the
+ `%alias_magic` magic function to create and register a new alias.
+ """
+ def __init__(self, shell, magic_name, magic_kind):
+ self.shell = shell
+ self.magic_name = magic_name
+ self.magic_kind = magic_kind
+
+ self.pretty_target = '%s%s' % (magic_escapes[self.magic_kind], self.magic_name)
+ self.__doc__ = "Alias for `%s`." % self.pretty_target
+
+ self._in_call = False
+
+ def __call__(self, *args, **kwargs):
+ """Call the magic alias."""
+ fn = self.shell.find_magic(self.magic_name, self.magic_kind)
+ if fn is None:
+ raise UsageError("Magic `%s` not found." % self.pretty_target)
+
+ # Protect against infinite recursion.
+ if self._in_call:
+ raise UsageError("Infinite recursion detected; "
+ "magic aliases cannot call themselves.")
+ self._in_call = True
+ try:
+ return fn(*args, **kwargs)
+ finally:
+ self._in_call = False
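
The decorator API restored above is typically exercised as in the following minimal sketch; the class name, magic names and shell call are illustrative, not part of this patch. A Magics subclass marks methods with @line_magic/@cell_magic and must carry @magics_class so the temporary module-global table is copied onto the class:

    from IPython.core.magic import Magics, magics_class, line_magic, cell_magic

    @magics_class
    class StringMagics(Magics):
        """Illustrative container for two toy magics."""

        @line_magic
        def reverse(self, line):
            """Return the line text reversed (used as %reverse)."""
            return line[::-1]

        @cell_magic
        def shout(self, line, cell):
            """Return the cell body upper-cased (used as %%shout)."""
            return cell.upper()

    # In a running session the class is handed to the shell, which forwards to
    # MagicsManager.register() shown above:
    #     get_ipython().register_magics(StringMagics)
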
diff --git a/contrib/python/ipython/py2/IPython/core/magic_arguments.py b/contrib/python/ipython/py2/IPython/core/magic_arguments.py
index 38e03aa176..9231609572 100644
--- a/contrib/python/ipython/py2/IPython/core/magic_arguments.py
+++ b/contrib/python/ipython/py2/IPython/core/magic_arguments.py
@@ -1,278 +1,278 @@
-''' A decorator-based method of constructing IPython magics with `argparse`
-option handling.
-
-New magic functions can be defined like so::
-
- from IPython.core.magic_arguments import (argument, magic_arguments,
- parse_argstring)
-
- @magic_arguments()
- @argument('-o', '--option', help='An optional argument.')
- @argument('arg', type=int, help='An integer positional argument.')
- def magic_cool(self, arg):
- """ A really cool magic command.
-
- """
- args = parse_argstring(magic_cool, arg)
- ...
-
-The `@magic_arguments` decorator marks the function as having argparse arguments.
-The `@argument` decorator adds an argument using the same syntax as argparse's
-`add_argument()` method. More sophisticated uses may also require the
-`@argument_group` or `@kwds` decorator to customize the formatting and the
-parsing.
-
-Help text for the magic is automatically generated from the docstring and the
-arguments::
-
- In[1]: %cool?
- %cool [-o OPTION] arg
-
- A really cool magic command.
-
- positional arguments:
- arg An integer positional argument.
-
- optional arguments:
- -o OPTION, --option OPTION
- An optional argument.
-
-Inheritance diagram:
-
-.. inheritance-diagram:: IPython.core.magic_arguments
- :parts: 3
-
-'''
-#-----------------------------------------------------------------------------
-# Copyright (C) 2010-2011, IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-import argparse
-import re
-
-# Our own imports
-from IPython.core.error import UsageError
-from IPython.utils.decorators import undoc
-from IPython.utils.process import arg_split
-from IPython.utils.text import dedent
-
-NAME_RE = re.compile(r"[a-zA-Z][a-zA-Z0-9_-]*$")
-
-@undoc
-class MagicHelpFormatter(argparse.RawDescriptionHelpFormatter):
- """A HelpFormatter with a couple of changes to meet our needs.
- """
- # Modified to dedent text.
- def _fill_text(self, text, width, indent):
- return argparse.RawDescriptionHelpFormatter._fill_text(self, dedent(text), width, indent)
-
- # Modified to wrap argument placeholders in <> where necessary.
- def _format_action_invocation(self, action):
- if not action.option_strings:
- metavar, = self._metavar_formatter(action, action.dest)(1)
- return metavar
-
- else:
- parts = []
-
- # if the Optional doesn't take a value, format is:
- # -s, --long
- if action.nargs == 0:
- parts.extend(action.option_strings)
-
- # if the Optional takes a value, format is:
- # -s ARGS, --long ARGS
- else:
- default = action.dest.upper()
- args_string = self._format_args(action, default)
- # IPYTHON MODIFICATION: If args_string is not a plain name, wrap
- # it in <> so it's valid RST.
- if not NAME_RE.match(args_string):
- args_string = "<%s>" % args_string
- for option_string in action.option_strings:
- parts.append('%s %s' % (option_string, args_string))
-
- return ', '.join(parts)
-
- # Override the default prefix ('usage') to our % magic escape,
- # in a code block.
- def add_usage(self, usage, actions, groups, prefix="::\n\n %"):
- super(MagicHelpFormatter, self).add_usage(usage, actions, groups, prefix)
-
-class MagicArgumentParser(argparse.ArgumentParser):
- """ An ArgumentParser tweaked for use by IPython magics.
- """
- def __init__(self,
- prog=None,
- usage=None,
- description=None,
- epilog=None,
- parents=None,
- formatter_class=MagicHelpFormatter,
- prefix_chars='-',
- argument_default=None,
- conflict_handler='error',
- add_help=False):
- if parents is None:
- parents = []
- super(MagicArgumentParser, self).__init__(prog=prog, usage=usage,
- description=description, epilog=epilog,
- parents=parents, formatter_class=formatter_class,
- prefix_chars=prefix_chars, argument_default=argument_default,
- conflict_handler=conflict_handler, add_help=add_help)
-
- def error(self, message):
- """ Raise a catchable error instead of exiting.
- """
- raise UsageError(message)
-
- def parse_argstring(self, argstring):
- """ Split a string into an argument list and parse that argument list.
- """
- argv = arg_split(argstring)
- return self.parse_args(argv)
-
-
-def construct_parser(magic_func):
- """ Construct an argument parser using the function decorations.
- """
- kwds = getattr(magic_func, 'argcmd_kwds', {})
- if 'description' not in kwds:
- kwds['description'] = getattr(magic_func, '__doc__', None)
- arg_name = real_name(magic_func)
- parser = MagicArgumentParser(arg_name, **kwds)
- # Reverse the list of decorators in order to apply them in the
- # order in which they appear in the source.
- group = None
- for deco in magic_func.decorators[::-1]:
- result = deco.add_to_parser(parser, group)
- if result is not None:
- group = result
-
- # Replace the magic function's docstring with the full help text.
- magic_func.__doc__ = parser.format_help()
-
- return parser
-
-
-def parse_argstring(magic_func, argstring):
- """ Parse the string of arguments for the given magic function.
- """
- return magic_func.parser.parse_argstring(argstring)
-
-
-def real_name(magic_func):
- """ Find the real name of the magic.
- """
- magic_name = magic_func.__name__
- if magic_name.startswith('magic_'):
- magic_name = magic_name[len('magic_'):]
- return getattr(magic_func, 'argcmd_name', magic_name)
-
-
-class ArgDecorator(object):
- """ Base class for decorators to add ArgumentParser information to a method.
- """
-
- def __call__(self, func):
- if not getattr(func, 'has_arguments', False):
- func.has_arguments = True
- func.decorators = []
- func.decorators.append(self)
- return func
-
- def add_to_parser(self, parser, group):
- """ Add this object's information to the parser, if necessary.
- """
- pass
-
-
-class magic_arguments(ArgDecorator):
- """ Mark the magic as having argparse arguments and possibly adjust the
- name.
- """
-
- def __init__(self, name=None):
- self.name = name
-
- def __call__(self, func):
- if not getattr(func, 'has_arguments', False):
- func.has_arguments = True
- func.decorators = []
- if self.name is not None:
- func.argcmd_name = self.name
- # This should be the first decorator in the list of decorators, thus the
- # last to execute. Build the parser.
- func.parser = construct_parser(func)
- return func
-
-
-class ArgMethodWrapper(ArgDecorator):
-
- """
- Base class to define a wrapper for ArgumentParser method.
-
- Child class must define either `_method_name` or `add_to_parser`.
-
- """
-
- _method_name = None
-
- def __init__(self, *args, **kwds):
- self.args = args
- self.kwds = kwds
-
- def add_to_parser(self, parser, group):
- """ Add this object's information to the parser.
- """
- if group is not None:
- parser = group
- getattr(parser, self._method_name)(*self.args, **self.kwds)
- return None
-
-
-class argument(ArgMethodWrapper):
- """ Store arguments and keywords to pass to add_argument().
-
- Instances also serve to decorate command methods.
- """
- _method_name = 'add_argument'
-
-
-class defaults(ArgMethodWrapper):
- """ Store arguments and keywords to pass to set_defaults().
-
- Instances also serve to decorate command methods.
- """
- _method_name = 'set_defaults'
-
-
-class argument_group(ArgMethodWrapper):
- """ Store arguments and keywords to pass to add_argument_group().
-
- Instances also serve to decorate command methods.
- """
-
- def add_to_parser(self, parser, group):
- """ Add this object's information to the parser.
- """
- return parser.add_argument_group(*self.args, **self.kwds)
-
-
-class kwds(ArgDecorator):
- """ Provide other keywords to the sub-parser constructor.
- """
- def __init__(self, **kwds):
- self.kwds = kwds
-
- def __call__(self, func):
- func = super(kwds, self).__call__(func)
- func.argcmd_kwds = self.kwds
- return func
-
-
-__all__ = ['magic_arguments', 'argument', 'argument_group', 'kwds',
- 'parse_argstring']
+''' A decorator-based method of constructing IPython magics with `argparse`
+option handling.
+
+New magic functions can be defined like so::
+
+ from IPython.core.magic_arguments import (argument, magic_arguments,
+ parse_argstring)
+
+ @magic_arguments()
+ @argument('-o', '--option', help='An optional argument.')
+ @argument('arg', type=int, help='An integer positional argument.')
+ def magic_cool(self, arg):
+ """ A really cool magic command.
+
+ """
+ args = parse_argstring(magic_cool, arg)
+ ...
+
+The `@magic_arguments` decorator marks the function as having argparse arguments.
+The `@argument` decorator adds an argument using the same syntax as argparse's
+`add_argument()` method. More sophisticated uses may also require the
+`@argument_group` or `@kwds` decorator to customize the formatting and the
+parsing.
+
+Help text for the magic is automatically generated from the docstring and the
+arguments::
+
+ In[1]: %cool?
+ %cool [-o OPTION] arg
+
+ A really cool magic command.
+
+ positional arguments:
+ arg An integer positional argument.
+
+ optional arguments:
+ -o OPTION, --option OPTION
+ An optional argument.
+
+Inheritance diagram:
+
+.. inheritance-diagram:: IPython.core.magic_arguments
+ :parts: 3
+
+'''
+#-----------------------------------------------------------------------------
+# Copyright (C) 2010-2011, IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+import argparse
+import re
+
+# Our own imports
+from IPython.core.error import UsageError
+from IPython.utils.decorators import undoc
+from IPython.utils.process import arg_split
+from IPython.utils.text import dedent
+
+NAME_RE = re.compile(r"[a-zA-Z][a-zA-Z0-9_-]*$")
+
+@undoc
+class MagicHelpFormatter(argparse.RawDescriptionHelpFormatter):
+ """A HelpFormatter with a couple of changes to meet our needs.
+ """
+ # Modified to dedent text.
+ def _fill_text(self, text, width, indent):
+ return argparse.RawDescriptionHelpFormatter._fill_text(self, dedent(text), width, indent)
+
+ # Modified to wrap argument placeholders in <> where necessary.
+ def _format_action_invocation(self, action):
+ if not action.option_strings:
+ metavar, = self._metavar_formatter(action, action.dest)(1)
+ return metavar
+
+ else:
+ parts = []
+
+ # if the Optional doesn't take a value, format is:
+ # -s, --long
+ if action.nargs == 0:
+ parts.extend(action.option_strings)
+
+ # if the Optional takes a value, format is:
+ # -s ARGS, --long ARGS
+ else:
+ default = action.dest.upper()
+ args_string = self._format_args(action, default)
+ # IPYTHON MODIFICATION: If args_string is not a plain name, wrap
+ # it in <> so it's valid RST.
+ if not NAME_RE.match(args_string):
+ args_string = "<%s>" % args_string
+ for option_string in action.option_strings:
+ parts.append('%s %s' % (option_string, args_string))
+
+ return ', '.join(parts)
+
+ # Override the default prefix ('usage') to our % magic escape,
+ # in a code block.
+ def add_usage(self, usage, actions, groups, prefix="::\n\n %"):
+ super(MagicHelpFormatter, self).add_usage(usage, actions, groups, prefix)
+
+class MagicArgumentParser(argparse.ArgumentParser):
+ """ An ArgumentParser tweaked for use by IPython magics.
+ """
+ def __init__(self,
+ prog=None,
+ usage=None,
+ description=None,
+ epilog=None,
+ parents=None,
+ formatter_class=MagicHelpFormatter,
+ prefix_chars='-',
+ argument_default=None,
+ conflict_handler='error',
+ add_help=False):
+ if parents is None:
+ parents = []
+ super(MagicArgumentParser, self).__init__(prog=prog, usage=usage,
+ description=description, epilog=epilog,
+ parents=parents, formatter_class=formatter_class,
+ prefix_chars=prefix_chars, argument_default=argument_default,
+ conflict_handler=conflict_handler, add_help=add_help)
+
+ def error(self, message):
+ """ Raise a catchable error instead of exiting.
+ """
+ raise UsageError(message)
+
+ def parse_argstring(self, argstring):
+ """ Split a string into an argument list and parse that argument list.
+ """
+ argv = arg_split(argstring)
+ return self.parse_args(argv)
+
+
+def construct_parser(magic_func):
+ """ Construct an argument parser using the function decorations.
+ """
+ kwds = getattr(magic_func, 'argcmd_kwds', {})
+ if 'description' not in kwds:
+ kwds['description'] = getattr(magic_func, '__doc__', None)
+ arg_name = real_name(magic_func)
+ parser = MagicArgumentParser(arg_name, **kwds)
+ # Reverse the list of decorators in order to apply them in the
+ # order in which they appear in the source.
+ group = None
+ for deco in magic_func.decorators[::-1]:
+ result = deco.add_to_parser(parser, group)
+ if result is not None:
+ group = result
+
+ # Replace the magic function's docstring with the full help text.
+ magic_func.__doc__ = parser.format_help()
+
+ return parser
+
+
+def parse_argstring(magic_func, argstring):
+ """ Parse the string of arguments for the given magic function.
+ """
+ return magic_func.parser.parse_argstring(argstring)
+
+
+def real_name(magic_func):
+ """ Find the real name of the magic.
+ """
+ magic_name = magic_func.__name__
+ if magic_name.startswith('magic_'):
+ magic_name = magic_name[len('magic_'):]
+ return getattr(magic_func, 'argcmd_name', magic_name)
+
+
+class ArgDecorator(object):
+ """ Base class for decorators to add ArgumentParser information to a method.
+ """
+
+ def __call__(self, func):
+ if not getattr(func, 'has_arguments', False):
+ func.has_arguments = True
+ func.decorators = []
+ func.decorators.append(self)
+ return func
+
+ def add_to_parser(self, parser, group):
+ """ Add this object's information to the parser, if necessary.
+ """
+ pass
+
+
+class magic_arguments(ArgDecorator):
+ """ Mark the magic as having argparse arguments and possibly adjust the
+ name.
+ """
+
+ def __init__(self, name=None):
+ self.name = name
+
+ def __call__(self, func):
+ if not getattr(func, 'has_arguments', False):
+ func.has_arguments = True
+ func.decorators = []
+ if self.name is not None:
+ func.argcmd_name = self.name
+ # This should be the first decorator in the list of decorators, thus the
+ # last to execute. Build the parser.
+ func.parser = construct_parser(func)
+ return func
+
+
+class ArgMethodWrapper(ArgDecorator):
+
+ """
+ Base class to define a wrapper for ArgumentParser method.
+
+ Child class must define either `_method_name` or `add_to_parser`.
+
+ """
+
+ _method_name = None
+
+ def __init__(self, *args, **kwds):
+ self.args = args
+ self.kwds = kwds
+
+ def add_to_parser(self, parser, group):
+ """ Add this object's information to the parser.
+ """
+ if group is not None:
+ parser = group
+ getattr(parser, self._method_name)(*self.args, **self.kwds)
+ return None
+
+
+class argument(ArgMethodWrapper):
+ """ Store arguments and keywords to pass to add_argument().
+
+ Instances also serve to decorate command methods.
+ """
+ _method_name = 'add_argument'
+
+
+class defaults(ArgMethodWrapper):
+ """ Store arguments and keywords to pass to set_defaults().
+
+ Instances also serve to decorate command methods.
+ """
+ _method_name = 'set_defaults'
+
+
+class argument_group(ArgMethodWrapper):
+ """ Store arguments and keywords to pass to add_argument_group().
+
+ Instances also serve to decorate command methods.
+ """
+
+ def add_to_parser(self, parser, group):
+ """ Add this object's information to the parser.
+ """
+ return parser.add_argument_group(*self.args, **self.kwds)
+
+
+class kwds(ArgDecorator):
+ """ Provide other keywords to the sub-parser constructor.
+ """
+ def __init__(self, **kwds):
+ self.kwds = kwds
+
+ def __call__(self, func):
+ func = super(kwds, self).__call__(func)
+ func.argcmd_kwds = self.kwds
+ return func
+
+
+__all__ = ['magic_arguments', 'argument', 'argument_group', 'kwds',
+ 'parse_argstring']
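
A short sketch of how the decorators in this module combine with a line magic; the names and defaults are illustrative. Parse failures raise UsageError (see MagicArgumentParser.error above) instead of exiting the process, so a bad option string only aborts the magic call:

    from IPython.core.magic import Magics, magics_class, line_magic
    from IPython.core.magic_arguments import (argument, magic_arguments,
                                              parse_argstring)

    @magics_class
    class RepeatMagics(Magics):

        @magic_arguments()
        @argument('-n', '--times', type=int, default=2,
                  help='Number of repetitions.')
        @argument('text', help='Text to repeat.')
        @line_magic
        def repeat(self, line):
            """Repeat TEXT, e.g. `%repeat -n 3 hello`."""
            args = parse_argstring(self.repeat, line)
            return ' '.join([args.text] * args.times)

Registered as in the previous sketch, `%repeat -n 3 hello` would return 'hello hello hello', and `%repeat?` would show the argparse-generated help that construct_parser installs as the docstring.
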
diff --git a/contrib/python/ipython/py2/IPython/core/magics/__init__.py b/contrib/python/ipython/py2/IPython/core/magics/__init__.py
index 78d4a45aa0..d2fd5a6cfb 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/__init__.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/__init__.py
@@ -1,41 +1,41 @@
-"""Implementation of all the magic functions built into IPython.
-"""
-#-----------------------------------------------------------------------------
-# Copyright (c) 2012 The IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-from ..magic import Magics, magics_class
-from .auto import AutoMagics
-from .basic import BasicMagics
-from .code import CodeMagics, MacroToEdit
-from .config import ConfigMagics
-from .display import DisplayMagics
-from .execution import ExecutionMagics
-from .extension import ExtensionMagics
-from .history import HistoryMagics
-from .logging import LoggingMagics
-from .namespace import NamespaceMagics
-from .osm import OSMagics
-from .pylab import PylabMagics
-from .script import ScriptMagics
-
-#-----------------------------------------------------------------------------
-# Magic implementation classes
-#-----------------------------------------------------------------------------
-
-@magics_class
-class UserMagics(Magics):
- """Placeholder for user-defined magics to be added at runtime.
-
- All magics are eventually merged into a single namespace at runtime, but we
- use this class to isolate the magics defined dynamically by the user into
- their own class.
- """
+"""Implementation of all the magic functions built into IPython.
+"""
+#-----------------------------------------------------------------------------
+# Copyright (c) 2012 The IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+from ..magic import Magics, magics_class
+from .auto import AutoMagics
+from .basic import BasicMagics
+from .code import CodeMagics, MacroToEdit
+from .config import ConfigMagics
+from .display import DisplayMagics
+from .execution import ExecutionMagics
+from .extension import ExtensionMagics
+from .history import HistoryMagics
+from .logging import LoggingMagics
+from .namespace import NamespaceMagics
+from .osm import OSMagics
+from .pylab import PylabMagics
+from .script import ScriptMagics
+
+#-----------------------------------------------------------------------------
+# Magic implementation classes
+#-----------------------------------------------------------------------------
+
+@magics_class
+class UserMagics(Magics):
+ """Placeholder for user-defined magics to be added at runtime.
+
+ All magics are eventually merged into a single namespace at runtime, but we
+ use this class to isolate the magics defined dynamically by the user into
+ their own class.
+ """
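
UserMagics stays empty at import time; magics defined dynamically (for example via MagicsManager.register_function) are attached to its single instance, which keeps user-defined magics in one predictable namespace. A minimal sketch, assuming an already-running IPython shell; the function name is illustrative:

    from IPython import get_ipython

    def now(line):
        """Toy line magic that prints the current time."""
        import datetime
        print(datetime.datetime.now())

    ip = get_ipython()                    # None outside an IPython session
    mgr = ip.magics_manager
    mgr.register_function(now, magic_kind='line', magic_name='now')

    # The callable is reachable both through the dispatch table and as an
    # attribute of the UserMagics instance kept in the registry:
    assert mgr.lsmagic()['line']['now'] is now
    assert mgr.registry['UserMagics'].now is now
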
diff --git a/contrib/python/ipython/py2/IPython/core/magics/auto.py b/contrib/python/ipython/py2/IPython/core/magics/auto.py
index be6b218854..f87bafdeb1 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/auto.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/auto.py
@@ -1,130 +1,130 @@
-"""Implementation of magic functions that control various automatic behaviors.
-"""
-from __future__ import print_function
+"""Implementation of magic functions that control various automatic behaviors.
+"""
+from __future__ import print_function
from __future__ import absolute_import
-#-----------------------------------------------------------------------------
-# Copyright (c) 2012 The IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-# Our own packages
-from IPython.core.magic import Bunch, Magics, magics_class, line_magic
-from IPython.testing.skipdoctest import skip_doctest
+#-----------------------------------------------------------------------------
+# Copyright (c) 2012 The IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+# Our own packages
+from IPython.core.magic import Bunch, Magics, magics_class, line_magic
+from IPython.testing.skipdoctest import skip_doctest
from logging import error
-
-#-----------------------------------------------------------------------------
-# Magic implementation classes
-#-----------------------------------------------------------------------------
-
-@magics_class
-class AutoMagics(Magics):
- """Magics that control various autoX behaviors."""
-
- def __init__(self, shell):
- super(AutoMagics, self).__init__(shell)
- # namespace for holding state we may need
- self._magic_state = Bunch()
-
- @line_magic
- def automagic(self, parameter_s=''):
- """Make magic functions callable without having to type the initial %.
-
-        Without arguments, toggles on/off (when off, you must call it as
- %automagic, of course). With arguments it sets the value, and you can
- use any of (case insensitive):
-
- - on, 1, True: to activate
-
- - off, 0, False: to deactivate.
-
- Note that magic functions have lowest priority, so if there's a
- variable whose name collides with that of a magic fn, automagic won't
- work for that function (you get the variable instead). However, if you
- delete the variable (del var), the previously shadowed magic function
- becomes visible to automagic again."""
-
- arg = parameter_s.lower()
- mman = self.shell.magics_manager
- if arg in ('on', '1', 'true'):
- val = True
- elif arg in ('off', '0', 'false'):
- val = False
- else:
- val = not mman.auto_magic
- mman.auto_magic = val
- print('\n' + self.shell.magics_manager.auto_status())
-
- @skip_doctest
- @line_magic
- def autocall(self, parameter_s=''):
- """Make functions callable without having to type parentheses.
-
- Usage:
-
- %autocall [mode]
-
- The mode can be one of: 0->Off, 1->Smart, 2->Full. If not given, the
- value is toggled on and off (remembering the previous state).
-
- In more detail, these values mean:
-
- 0 -> fully disabled
-
- 1 -> active, but do not apply if there are no arguments on the line.
-
- In this mode, you get::
-
- In [1]: callable
- Out[1]: <built-in function callable>
-
- In [2]: callable 'hello'
- ------> callable('hello')
- Out[2]: False
-
- 2 -> Active always. Even if no arguments are present, the callable
- object is called::
-
- In [2]: float
- ------> float()
- Out[2]: 0.0
-
- Note that even with autocall off, you can still use '/' at the start of
- a line to treat the first argument on the command line as a function
- and add parentheses to it::
-
- In [8]: /str 43
- ------> str(43)
- Out[8]: '43'
-
- # all-random (note for auto-testing)
- """
-
- if parameter_s:
- arg = int(parameter_s)
- else:
- arg = 'toggle'
-
- if not arg in (0, 1, 2, 'toggle'):
-            error('Valid modes: (0->Off, 1->Smart, 2->Full)')
- return
-
- if arg in (0, 1, 2):
- self.shell.autocall = arg
- else: # toggle
- if self.shell.autocall:
- self._magic_state.autocall_save = self.shell.autocall
- self.shell.autocall = 0
- else:
- try:
- self.shell.autocall = self._magic_state.autocall_save
- except AttributeError:
- self.shell.autocall = self._magic_state.autocall_save = 1
-
- print("Automatic calling is:",['OFF','Smart','Full'][self.shell.autocall])
+
+#-----------------------------------------------------------------------------
+# Magic implementation classes
+#-----------------------------------------------------------------------------
+
+@magics_class
+class AutoMagics(Magics):
+ """Magics that control various autoX behaviors."""
+
+ def __init__(self, shell):
+ super(AutoMagics, self).__init__(shell)
+ # namespace for holding state we may need
+ self._magic_state = Bunch()
+
+ @line_magic
+ def automagic(self, parameter_s=''):
+ """Make magic functions callable without having to type the initial %.
+
+        Without arguments, toggles on/off (when off, you must call it as
+ %automagic, of course). With arguments it sets the value, and you can
+ use any of (case insensitive):
+
+ - on, 1, True: to activate
+
+ - off, 0, False: to deactivate.
+
+ Note that magic functions have lowest priority, so if there's a
+ variable whose name collides with that of a magic fn, automagic won't
+ work for that function (you get the variable instead). However, if you
+ delete the variable (del var), the previously shadowed magic function
+ becomes visible to automagic again."""
+
+ arg = parameter_s.lower()
+ mman = self.shell.magics_manager
+ if arg in ('on', '1', 'true'):
+ val = True
+ elif arg in ('off', '0', 'false'):
+ val = False
+ else:
+ val = not mman.auto_magic
+ mman.auto_magic = val
+ print('\n' + self.shell.magics_manager.auto_status())
+
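
A brief usage sketch for %automagic, driven programmatically here only for illustration (assumes an importable IPython)::

    from IPython.core.interactiveshell import InteractiveShell

    ip = InteractiveShell.instance()
    ip.run_line_magic('automagic', 'off')     # explicit value
    print(ip.magics_manager.auto_magic)       # False
    ip.run_line_magic('automagic', '')        # no argument: toggles back on
    print(ip.magics_manager.auto_magic)       # True
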
+ @skip_doctest
+ @line_magic
+ def autocall(self, parameter_s=''):
+ """Make functions callable without having to type parentheses.
+
+ Usage:
+
+ %autocall [mode]
+
+ The mode can be one of: 0->Off, 1->Smart, 2->Full. If not given, the
+ value is toggled on and off (remembering the previous state).
+
+ In more detail, these values mean:
+
+ 0 -> fully disabled
+
+ 1 -> active, but do not apply if there are no arguments on the line.
+
+ In this mode, you get::
+
+ In [1]: callable
+ Out[1]: <built-in function callable>
+
+ In [2]: callable 'hello'
+ ------> callable('hello')
+ Out[2]: False
+
+ 2 -> Active always. Even if no arguments are present, the callable
+ object is called::
+
+ In [2]: float
+ ------> float()
+ Out[2]: 0.0
+
+ Note that even with autocall off, you can still use '/' at the start of
+ a line to treat the first argument on the command line as a function
+ and add parentheses to it::
+
+ In [8]: /str 43
+ ------> str(43)
+ Out[8]: '43'
+
+ # all-random (note for auto-testing)
+ """
+
+ if parameter_s:
+ arg = int(parameter_s)
+ else:
+ arg = 'toggle'
+
+ if not arg in (0, 1, 2, 'toggle'):
+            error('Valid modes: (0->Off, 1->Smart, 2->Full)')
+ return
+
+ if arg in (0, 1, 2):
+ self.shell.autocall = arg
+ else: # toggle
+ if self.shell.autocall:
+ self._magic_state.autocall_save = self.shell.autocall
+ self.shell.autocall = 0
+ else:
+ try:
+ self.shell.autocall = self._magic_state.autocall_save
+ except AttributeError:
+ self.shell.autocall = self._magic_state.autocall_save = 1
+
+ print("Automatic calling is:",['OFF','Smart','Full'][self.shell.autocall])
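
The toggle branch above remembers the last non-zero autocall mode before switching off, so a second toggle restores it. A standalone sketch of that state machine (FakeShell and the dict are stand-ins, not IPython objects)::

    class FakeShell(object):
        autocall = 1                      # start in Smart mode

    state = {}                            # plays the role of self._magic_state

    def toggle_autocall(shell):
        if shell.autocall:                # on -> remember current mode, switch off
            state['saved'] = shell.autocall
            shell.autocall = 0
        else:                             # off -> restore the remembered mode (default 1)
            shell.autocall = state.get('saved', 1)

    shell = FakeShell()
    toggle_autocall(shell); print(shell.autocall)   # 0
    toggle_autocall(shell); print(shell.autocall)   # 1
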
diff --git a/contrib/python/ipython/py2/IPython/core/magics/basic.py b/contrib/python/ipython/py2/IPython/core/magics/basic.py
index 75b1275211..ca69e2e698 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/basic.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/basic.py
@@ -1,575 +1,575 @@
-"""Implementation of basic magic functions."""
-
-from __future__ import print_function
+"""Implementation of basic magic functions."""
+
+from __future__ import print_function
from __future__ import absolute_import
-
+
import argparse
-import io
-import sys
-from pprint import pformat
-
-from IPython.core import magic_arguments, page
-from IPython.core.error import UsageError
-from IPython.core.magic import Magics, magics_class, line_magic, magic_escapes
-from IPython.utils.text import format_screen, dedent, indent
-from IPython.testing.skipdoctest import skip_doctest
-from IPython.utils.ipstruct import Struct
-from IPython.utils.py3compat import unicode_type
+import io
+import sys
+from pprint import pformat
+
+from IPython.core import magic_arguments, page
+from IPython.core.error import UsageError
+from IPython.core.magic import Magics, magics_class, line_magic, magic_escapes
+from IPython.utils.text import format_screen, dedent, indent
+from IPython.testing.skipdoctest import skip_doctest
+from IPython.utils.ipstruct import Struct
+from IPython.utils.py3compat import unicode_type
from warnings import warn
from logging import error
-
-
-class MagicsDisplay(object):
- def __init__(self, magics_manager):
- self.magics_manager = magics_manager
-
- def _lsmagic(self):
-        """The main implementation of the %lsmagic magic."""
- mesc = magic_escapes['line']
- cesc = magic_escapes['cell']
- mman = self.magics_manager
- magics = mman.lsmagic()
- out = ['Available line magics:',
- mesc + (' '+mesc).join(sorted(magics['line'])),
- '',
- 'Available cell magics:',
- cesc + (' '+cesc).join(sorted(magics['cell'])),
- '',
- mman.auto_status()]
- return '\n'.join(out)
-
- def _repr_pretty_(self, p, cycle):
- p.text(self._lsmagic())
-
- def __str__(self):
- return self._lsmagic()
-
- def _jsonable(self):
- """turn magics dict into jsonable dict of the same structure
-
- replaces object instances with their class names as strings
- """
- magic_dict = {}
- mman = self.magics_manager
- magics = mman.lsmagic()
- for key, subdict in magics.items():
- d = {}
- magic_dict[key] = d
- for name, obj in subdict.items():
- try:
- classname = obj.__self__.__class__.__name__
- except AttributeError:
- classname = 'Other'
-
- d[name] = classname
- return magic_dict
-
- def _repr_json_(self):
- return self._jsonable()
-
-
-@magics_class
-class BasicMagics(Magics):
- """Magics that provide central IPython functionality.
-
- These are various magics that don't fit into specific categories but that
- are all part of the base 'IPython experience'."""
-
- @magic_arguments.magic_arguments()
- @magic_arguments.argument(
- '-l', '--line', action='store_true',
- help="""Create a line magic alias."""
- )
- @magic_arguments.argument(
- '-c', '--cell', action='store_true',
- help="""Create a cell magic alias."""
- )
- @magic_arguments.argument(
- 'name',
- help="""Name of the magic to be created."""
- )
- @magic_arguments.argument(
- 'target',
- help="""Name of the existing line or cell magic."""
- )
- @line_magic
- def alias_magic(self, line=''):
- """Create an alias for an existing line or cell magic.
-
- Examples
- --------
- ::
-
- In [1]: %alias_magic t timeit
- Created `%t` as an alias for `%timeit`.
- Created `%%t` as an alias for `%%timeit`.
-
- In [2]: %t -n1 pass
- 1 loops, best of 3: 954 ns per loop
-
- In [3]: %%t -n1
- ...: pass
- ...:
- 1 loops, best of 3: 954 ns per loop
-
- In [4]: %alias_magic --cell whereami pwd
- UsageError: Cell magic function `%%pwd` not found.
- In [5]: %alias_magic --line whereami pwd
- Created `%whereami` as an alias for `%pwd`.
-
- In [6]: %whereami
- Out[6]: u'/home/testuser'
- """
- args = magic_arguments.parse_argstring(self.alias_magic, line)
- shell = self.shell
- mman = self.shell.magics_manager
- escs = ''.join(magic_escapes.values())
-
- target = args.target.lstrip(escs)
- name = args.name.lstrip(escs)
-
- # Find the requested magics.
- m_line = shell.find_magic(target, 'line')
- m_cell = shell.find_magic(target, 'cell')
- if args.line and m_line is None:
- raise UsageError('Line magic function `%s%s` not found.' %
- (magic_escapes['line'], target))
- if args.cell and m_cell is None:
- raise UsageError('Cell magic function `%s%s` not found.' %
- (magic_escapes['cell'], target))
-
- # If --line and --cell are not specified, default to the ones
- # that are available.
- if not args.line and not args.cell:
- if not m_line and not m_cell:
- raise UsageError(
- 'No line or cell magic with name `%s` found.' % target
- )
- args.line = bool(m_line)
- args.cell = bool(m_cell)
-
- if args.line:
- mman.register_alias(name, target, 'line')
- print('Created `%s%s` as an alias for `%s%s`.' % (
- magic_escapes['line'], name,
- magic_escapes['line'], target))
-
- if args.cell:
- mman.register_alias(name, target, 'cell')
- print('Created `%s%s` as an alias for `%s%s`.' % (
- magic_escapes['cell'], name,
- magic_escapes['cell'], target))
-
- @line_magic
- def lsmagic(self, parameter_s=''):
- """List currently available magic functions."""
- return MagicsDisplay(self.shell.magics_manager)
-
- def _magic_docs(self, brief=False, rest=False):
- """Return docstrings from magic functions."""
- mman = self.shell.magics_manager
- docs = mman.lsmagic_docs(brief, missing='No documentation')
-
- if rest:
- format_string = '**%s%s**::\n\n%s\n\n'
- else:
- format_string = '%s%s:\n%s\n'
-
- return ''.join(
- [format_string % (magic_escapes['line'], fname,
- indent(dedent(fndoc)))
- for fname, fndoc in sorted(docs['line'].items())]
- +
- [format_string % (magic_escapes['cell'], fname,
- indent(dedent(fndoc)))
- for fname, fndoc in sorted(docs['cell'].items())]
- )
-
- @line_magic
- def magic(self, parameter_s=''):
- """Print information about the magic function system.
-
- Supported formats: -latex, -brief, -rest
- """
-
- mode = ''
- try:
- mode = parameter_s.split()[0][1:]
- except IndexError:
- pass
-
- brief = (mode == 'brief')
- rest = (mode == 'rest')
- magic_docs = self._magic_docs(brief, rest)
-
- if mode == 'latex':
- print(self.format_latex(magic_docs))
- return
- else:
- magic_docs = format_screen(magic_docs)
-
- out = ["""
-IPython's 'magic' functions
-===========================
-
-The magic function system provides a series of functions which allow you to
-control the behavior of IPython itself, plus a lot of system-type
-features. There are two kinds of magics, line-oriented and cell-oriented.
-
-Line magics are prefixed with the % character and work much like OS
-command-line calls: they get as an argument the rest of the line, where
-arguments are passed without parentheses or quotes. For example, this will
-time the given statement::
-
- %timeit range(1000)
-
-Cell magics are prefixed with a double %%, and they are functions that get as
-an argument not only the rest of the line, but also the lines below it in a
-separate argument. These magics are called with two arguments: the rest of the
-call line and the body of the cell, consisting of the lines below the first.
-For example::
-
-  %%timeit x = numpy.random.randn(100, 100)
- numpy.linalg.svd(x)
-
-will time the execution of the numpy svd routine, running the assignment of x
-as part of the setup phase, which is not timed.
-
-In a line-oriented client (the terminal or Qt console IPython), starting a new
-input with %% will automatically enter cell mode, and IPython will continue
-reading input until a blank line is given. In the notebook, simply type the
-whole cell as one entity, but keep in mind that the %% escape can only be at
-the very start of the cell.
-
-NOTE: If you have 'automagic' enabled (via the command line option or with the
-%automagic function), you don't need to type in the % explicitly for line
-magics; cell magics always require an explicit '%%' escape. By default,
-IPython ships with automagic on, so you should only rarely need the % escape.
-
-Example: typing '%cd mydir' (without the quotes) changes your working directory
-to 'mydir', if it exists.
-
-For a list of the available magic functions, use %lsmagic. For a description
-of any of them, type %magic_name?, e.g. '%cd?'.
-
-Currently the magic system has the following functions:""",
- magic_docs,
- "Summary of magic functions (from %slsmagic):" % magic_escapes['line'],
- str(self.lsmagic()),
- ]
- page.page('\n'.join(out))
-
-
- @line_magic
- def page(self, parameter_s=''):
- """Pretty print the object and display it through a pager.
-
- %page [options] OBJECT
-
- If no object is given, use _ (last output).
-
- Options:
-
- -r: page str(object), don't pretty-print it."""
-
- # After a function contributed by Olivier Aubert, slightly modified.
-
- # Process options/args
- opts, args = self.parse_options(parameter_s, 'r')
- raw = 'r' in opts
-
- oname = args and args or '_'
- info = self.shell._ofind(oname)
- if info['found']:
- txt = (raw and str or pformat)( info['obj'] )
- page.page(txt)
- else:
- print('Object `%s` not found' % oname)
-
- @line_magic
- def profile(self, parameter_s=''):
+
+
+class MagicsDisplay(object):
+ def __init__(self, magics_manager):
+ self.magics_manager = magics_manager
+
+ def _lsmagic(self):
+        """The main implementation of the %lsmagic magic."""
+ mesc = magic_escapes['line']
+ cesc = magic_escapes['cell']
+ mman = self.magics_manager
+ magics = mman.lsmagic()
+ out = ['Available line magics:',
+ mesc + (' '+mesc).join(sorted(magics['line'])),
+ '',
+ 'Available cell magics:',
+ cesc + (' '+cesc).join(sorted(magics['cell'])),
+ '',
+ mman.auto_status()]
+ return '\n'.join(out)
+
+ def _repr_pretty_(self, p, cycle):
+ p.text(self._lsmagic())
+
+ def __str__(self):
+ return self._lsmagic()
+
+ def _jsonable(self):
+ """turn magics dict into jsonable dict of the same structure
+
+ replaces object instances with their class names as strings
+ """
+ magic_dict = {}
+ mman = self.magics_manager
+ magics = mman.lsmagic()
+ for key, subdict in magics.items():
+ d = {}
+ magic_dict[key] = d
+ for name, obj in subdict.items():
+ try:
+ classname = obj.__self__.__class__.__name__
+ except AttributeError:
+ classname = 'Other'
+
+ d[name] = classname
+ return magic_dict
+
+ def _repr_json_(self):
+ return self._jsonable()
+
+
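
A sketch of the two representations MagicsDisplay exposes for %lsmagic in this version (assumes an importable IPython)::

    from IPython.core.interactiveshell import InteractiveShell

    ip = InteractiveShell.instance()
    disp = ip.run_line_magic('lsmagic', '')    # a MagicsDisplay instance

    print(str(disp).splitlines()[0])           # Available line magics:
    as_json = disp._repr_json_()               # {'line': {...}, 'cell': {...}}
    print(as_json['line']['lsmagic'])          # BasicMagics
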
+@magics_class
+class BasicMagics(Magics):
+ """Magics that provide central IPython functionality.
+
+ These are various magics that don't fit into specific categories but that
+ are all part of the base 'IPython experience'."""
+
+ @magic_arguments.magic_arguments()
+ @magic_arguments.argument(
+ '-l', '--line', action='store_true',
+ help="""Create a line magic alias."""
+ )
+ @magic_arguments.argument(
+ '-c', '--cell', action='store_true',
+ help="""Create a cell magic alias."""
+ )
+ @magic_arguments.argument(
+ 'name',
+ help="""Name of the magic to be created."""
+ )
+ @magic_arguments.argument(
+ 'target',
+ help="""Name of the existing line or cell magic."""
+ )
+ @line_magic
+ def alias_magic(self, line=''):
+ """Create an alias for an existing line or cell magic.
+
+ Examples
+ --------
+ ::
+
+ In [1]: %alias_magic t timeit
+ Created `%t` as an alias for `%timeit`.
+ Created `%%t` as an alias for `%%timeit`.
+
+ In [2]: %t -n1 pass
+ 1 loops, best of 3: 954 ns per loop
+
+ In [3]: %%t -n1
+ ...: pass
+ ...:
+ 1 loops, best of 3: 954 ns per loop
+
+ In [4]: %alias_magic --cell whereami pwd
+ UsageError: Cell magic function `%%pwd` not found.
+ In [5]: %alias_magic --line whereami pwd
+ Created `%whereami` as an alias for `%pwd`.
+
+ In [6]: %whereami
+ Out[6]: u'/home/testuser'
+ """
+ args = magic_arguments.parse_argstring(self.alias_magic, line)
+ shell = self.shell
+ mman = self.shell.magics_manager
+ escs = ''.join(magic_escapes.values())
+
+ target = args.target.lstrip(escs)
+ name = args.name.lstrip(escs)
+
+ # Find the requested magics.
+ m_line = shell.find_magic(target, 'line')
+ m_cell = shell.find_magic(target, 'cell')
+ if args.line and m_line is None:
+ raise UsageError('Line magic function `%s%s` not found.' %
+ (magic_escapes['line'], target))
+ if args.cell and m_cell is None:
+ raise UsageError('Cell magic function `%s%s` not found.' %
+ (magic_escapes['cell'], target))
+
+ # If --line and --cell are not specified, default to the ones
+ # that are available.
+ if not args.line and not args.cell:
+ if not m_line and not m_cell:
+ raise UsageError(
+ 'No line or cell magic with name `%s` found.' % target
+ )
+ args.line = bool(m_line)
+ args.cell = bool(m_cell)
+
+ if args.line:
+ mman.register_alias(name, target, 'line')
+ print('Created `%s%s` as an alias for `%s%s`.' % (
+ magic_escapes['line'], name,
+ magic_escapes['line'], target))
+
+ if args.cell:
+ mman.register_alias(name, target, 'cell')
+ print('Created `%s%s` as an alias for `%s%s`.' % (
+ magic_escapes['cell'], name,
+ magic_escapes['cell'], target))
+
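
Under the hood, %alias_magic boils down to MagicsManager.register_alias calls; a minimal programmatic sketch, with the alias name `t` chosen purely for illustration (assumes an importable IPython)::

    from IPython.core.interactiveshell import InteractiveShell

    ip = InteractiveShell.instance()
    ip.magics_manager.register_alias('t', 'timeit', 'line')   # like %alias_magic -l t timeit
    print('t' in ip.magics_manager.lsmagic()['line'])         # True
    ip.run_line_magic('t', '-n1 -r1 pass')                    # behaves like %timeit
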
+ @line_magic
+ def lsmagic(self, parameter_s=''):
+ """List currently available magic functions."""
+ return MagicsDisplay(self.shell.magics_manager)
+
+ def _magic_docs(self, brief=False, rest=False):
+ """Return docstrings from magic functions."""
+ mman = self.shell.magics_manager
+ docs = mman.lsmagic_docs(brief, missing='No documentation')
+
+ if rest:
+ format_string = '**%s%s**::\n\n%s\n\n'
+ else:
+ format_string = '%s%s:\n%s\n'
+
+ return ''.join(
+ [format_string % (magic_escapes['line'], fname,
+ indent(dedent(fndoc)))
+ for fname, fndoc in sorted(docs['line'].items())]
+ +
+ [format_string % (magic_escapes['cell'], fname,
+ indent(dedent(fndoc)))
+ for fname, fndoc in sorted(docs['cell'].items())]
+ )
+
+ @line_magic
+ def magic(self, parameter_s=''):
+ """Print information about the magic function system.
+
+ Supported formats: -latex, -brief, -rest
+ """
+
+ mode = ''
+ try:
+ mode = parameter_s.split()[0][1:]
+ except IndexError:
+ pass
+
+ brief = (mode == 'brief')
+ rest = (mode == 'rest')
+ magic_docs = self._magic_docs(brief, rest)
+
+ if mode == 'latex':
+ print(self.format_latex(magic_docs))
+ return
+ else:
+ magic_docs = format_screen(magic_docs)
+
+ out = ["""
+IPython's 'magic' functions
+===========================
+
+The magic function system provides a series of functions which allow you to
+control the behavior of IPython itself, plus a lot of system-type
+features. There are two kinds of magics, line-oriented and cell-oriented.
+
+Line magics are prefixed with the % character and work much like OS
+command-line calls: they get as an argument the rest of the line, where
+arguments are passed without parentheses or quotes. For example, this will
+time the given statement::
+
+ %timeit range(1000)
+
+Cell magics are prefixed with a double %%, and they are functions that get as
+an argument not only the rest of the line, but also the lines below it in a
+separate argument. These magics are called with two arguments: the rest of the
+call line and the body of the cell, consisting of the lines below the first.
+For example::
+
+  %%timeit x = numpy.random.randn(100, 100)
+ numpy.linalg.svd(x)
+
+will time the execution of the numpy svd routine, running the assignment of x
+as part of the setup phase, which is not timed.
+
+In a line-oriented client (the terminal or Qt console IPython), starting a new
+input with %% will automatically enter cell mode, and IPython will continue
+reading input until a blank line is given. In the notebook, simply type the
+whole cell as one entity, but keep in mind that the %% escape can only be at
+the very start of the cell.
+
+NOTE: If you have 'automagic' enabled (via the command line option or with the
+%automagic function), you don't need to type in the % explicitly for line
+magics; cell magics always require an explicit '%%' escape. By default,
+IPython ships with automagic on, so you should only rarely need the % escape.
+
+Example: typing '%cd mydir' (without the quotes) changes your working directory
+to 'mydir', if it exists.
+
+For a list of the available magic functions, use %lsmagic. For a description
+of any of them, type %magic_name?, e.g. '%cd?'.
+
+Currently the magic system has the following functions:""",
+ magic_docs,
+ "Summary of magic functions (from %slsmagic):" % magic_escapes['line'],
+ str(self.lsmagic()),
+ ]
+ page.page('\n'.join(out))
+
+
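
To make the line-versus-cell distinction described above concrete, this is how the two kinds of magic are driven programmatically; a sketch assuming an importable IPython, with the %%capture target name `captured` chosen for illustration::

    from IPython.core.interactiveshell import InteractiveShell

    ip = InteractiveShell.instance()
    print(ip.run_line_magic('pwd', ''))          # line magic: gets only the rest of the line
    ip.run_cell_magic('capture', 'captured',     # cell magic: gets the line *and* a body
                      'print("hello from the cell body")')
    print(ip.user_ns['captured'].stdout)         # hello from the cell body
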
+ @line_magic
+ def page(self, parameter_s=''):
+ """Pretty print the object and display it through a pager.
+
+ %page [options] OBJECT
+
+ If no object is given, use _ (last output).
+
+ Options:
+
+ -r: page str(object), don't pretty-print it."""
+
+ # After a function contributed by Olivier Aubert, slightly modified.
+
+ # Process options/args
+ opts, args = self.parse_options(parameter_s, 'r')
+ raw = 'r' in opts
+
+ oname = args and args or '_'
+ info = self.shell._ofind(oname)
+ if info['found']:
+ txt = (raw and str or pformat)( info['obj'] )
+ page.page(txt)
+ else:
+ print('Object `%s` not found' % oname)
+
+ @line_magic
+ def profile(self, parameter_s=''):
"""DEPRECATED since IPython 2.0.
-
+
Raise `UsageError`. To profile code use the :magic:`prun` magic.
- See Also
- --------
+ See Also
+ --------
prun : run code using the Python profiler (:magic:`prun`)
- """
- warn("%profile is now deprecated. Please use get_ipython().profile instead.")
- from IPython.core.application import BaseIPythonApplication
- if BaseIPythonApplication.initialized():
- print(BaseIPythonApplication.instance().profile)
- else:
- error("profile is an application-level value, but you don't appear to be in an IPython application")
-
- @line_magic
- def pprint(self, parameter_s=''):
- """Toggle pretty printing on/off."""
- ptformatter = self.shell.display_formatter.formatters['text/plain']
- ptformatter.pprint = bool(1 - ptformatter.pprint)
- print('Pretty printing has been turned',
- ['OFF','ON'][ptformatter.pprint])
-
- @line_magic
- def colors(self, parameter_s=''):
- """Switch color scheme for prompts, info system and exception handlers.
-
- Currently implemented schemes: NoColor, Linux, LightBG.
-
- Color scheme names are not case-sensitive.
-
- Examples
- --------
- To get a plain black and white terminal::
-
- %colors nocolor
- """
- def color_switch_err(name):
- warn('Error changing %s color schemes.\n%s' %
+ """
+ warn("%profile is now deprecated. Please use get_ipython().profile instead.")
+ from IPython.core.application import BaseIPythonApplication
+ if BaseIPythonApplication.initialized():
+ print(BaseIPythonApplication.instance().profile)
+ else:
+ error("profile is an application-level value, but you don't appear to be in an IPython application")
+
+ @line_magic
+ def pprint(self, parameter_s=''):
+ """Toggle pretty printing on/off."""
+ ptformatter = self.shell.display_formatter.formatters['text/plain']
+ ptformatter.pprint = bool(1 - ptformatter.pprint)
+ print('Pretty printing has been turned',
+ ['OFF','ON'][ptformatter.pprint])
+
+ @line_magic
+ def colors(self, parameter_s=''):
+ """Switch color scheme for prompts, info system and exception handlers.
+
+ Currently implemented schemes: NoColor, Linux, LightBG.
+
+ Color scheme names are not case-sensitive.
+
+ Examples
+ --------
+ To get a plain black and white terminal::
+
+ %colors nocolor
+ """
+ def color_switch_err(name):
+ warn('Error changing %s color schemes.\n%s' %
(name, sys.exc_info()[1]), stacklevel=2)
-
-
- new_scheme = parameter_s.strip()
- if not new_scheme:
- raise UsageError(
- "%colors: you must specify a color scheme. See '%colors?'")
- # local shortcut
- shell = self.shell
-
+
+
+ new_scheme = parameter_s.strip()
+ if not new_scheme:
+ raise UsageError(
+ "%colors: you must specify a color scheme. See '%colors?'")
+ # local shortcut
+ shell = self.shell
+
# Set shell colour scheme
- try:
+ try:
shell.colors = new_scheme
shell.refresh_style()
- except:
+ except:
color_switch_err('shell')
- # Set exception colors
- try:
- shell.InteractiveTB.set_colors(scheme = new_scheme)
- shell.SyntaxTB.set_colors(scheme = new_scheme)
- except:
- color_switch_err('exception')
-
- # Set info (for 'object?') colors
- if shell.color_info:
- try:
- shell.inspector.set_active_scheme(new_scheme)
- except:
- color_switch_err('object inspector')
- else:
- shell.inspector.set_active_scheme('NoColor')
-
- @line_magic
- def xmode(self, parameter_s=''):
- """Switch modes for the exception handlers.
-
- Valid modes: Plain, Context and Verbose.
-
- If called without arguments, acts as a toggle."""
-
- def xmode_switch_err(name):
- warn('Error changing %s exception modes.\n%s' %
- (name,sys.exc_info()[1]))
-
- shell = self.shell
- new_mode = parameter_s.strip().capitalize()
- try:
- shell.InteractiveTB.set_mode(mode=new_mode)
- print('Exception reporting mode:',shell.InteractiveTB.mode)
- except:
- xmode_switch_err('user')
-
- @line_magic
- def quickref(self,arg):
- """ Show a quick reference sheet """
- from IPython.core.usage import quick_reference
- qr = quick_reference + self._magic_docs(brief=True)
- page.page(qr)
-
- @line_magic
- def doctest_mode(self, parameter_s=''):
- """Toggle doctest mode on and off.
-
- This mode is intended to make IPython behave as much as possible like a
- plain Python shell, from the perspective of how its prompts, exceptions
- and output look. This makes it easy to copy and paste parts of a
- session into doctests. It does so by:
-
- - Changing the prompts to the classic ``>>>`` ones.
- - Changing the exception reporting mode to 'Plain'.
- - Disabling pretty-printing of output.
-
- Note that IPython also supports the pasting of code snippets that have
- leading '>>>' and '...' prompts in them. This means that you can paste
- doctests from files or docstrings (even if they have leading
- whitespace), and the code will execute correctly. You can then use
- '%history -t' to see the translated history; this will give you the
- input after removal of all the leading prompts and whitespace, which
- can be pasted back into an editor.
-
- With these features, you can switch into this mode easily whenever you
- need to do testing and changes to doctests, without having to leave
- your existing IPython session.
- """
-
- # Shorthands
- shell = self.shell
- meta = shell.meta
- disp_formatter = self.shell.display_formatter
- ptformatter = disp_formatter.formatters['text/plain']
- # dstore is a data store kept in the instance metadata bag to track any
- # changes we make, so we can undo them later.
- dstore = meta.setdefault('doctest_mode',Struct())
- save_dstore = dstore.setdefault
-
- # save a few values we'll need to recover later
- mode = save_dstore('mode',False)
- save_dstore('rc_pprint',ptformatter.pprint)
- save_dstore('xmode',shell.InteractiveTB.mode)
- save_dstore('rc_separate_out',shell.separate_out)
- save_dstore('rc_separate_out2',shell.separate_out2)
- save_dstore('rc_separate_in',shell.separate_in)
- save_dstore('rc_active_types',disp_formatter.active_types)
-
- if not mode:
- # turn on
-
- # Prompt separators like plain python
- shell.separate_in = ''
- shell.separate_out = ''
- shell.separate_out2 = ''
-
-
- ptformatter.pprint = False
- disp_formatter.active_types = ['text/plain']
-
- shell.magic('xmode Plain')
- else:
- # turn off
- shell.separate_in = dstore.rc_separate_in
-
- shell.separate_out = dstore.rc_separate_out
- shell.separate_out2 = dstore.rc_separate_out2
-
- ptformatter.pprint = dstore.rc_pprint
- disp_formatter.active_types = dstore.rc_active_types
-
- shell.magic('xmode ' + dstore.xmode)
-
+ # Set exception colors
+ try:
+ shell.InteractiveTB.set_colors(scheme = new_scheme)
+ shell.SyntaxTB.set_colors(scheme = new_scheme)
+ except:
+ color_switch_err('exception')
+
+ # Set info (for 'object?') colors
+ if shell.color_info:
+ try:
+ shell.inspector.set_active_scheme(new_scheme)
+ except:
+ color_switch_err('object inspector')
+ else:
+ shell.inspector.set_active_scheme('NoColor')
+
+ @line_magic
+ def xmode(self, parameter_s=''):
+ """Switch modes for the exception handlers.
+
+ Valid modes: Plain, Context and Verbose.
+
+ If called without arguments, acts as a toggle."""
+
+ def xmode_switch_err(name):
+ warn('Error changing %s exception modes.\n%s' %
+ (name,sys.exc_info()[1]))
+
+ shell = self.shell
+ new_mode = parameter_s.strip().capitalize()
+ try:
+ shell.InteractiveTB.set_mode(mode=new_mode)
+ print('Exception reporting mode:',shell.InteractiveTB.mode)
+ except:
+ xmode_switch_err('user')
+
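
A short usage sketch for %xmode (assumes an importable IPython)::

    from IPython.core.interactiveshell import InteractiveShell

    ip = InteractiveShell.instance()
    ip.run_line_magic('xmode', 'Plain')     # prints: Exception reporting mode: Plain
    print(ip.InteractiveTB.mode)            # Plain
    ip.run_line_magic('xmode', 'Verbose')   # back to detailed tracebacks
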
+ @line_magic
+ def quickref(self,arg):
+ """ Show a quick reference sheet """
+ from IPython.core.usage import quick_reference
+ qr = quick_reference + self._magic_docs(brief=True)
+ page.page(qr)
+
+ @line_magic
+ def doctest_mode(self, parameter_s=''):
+ """Toggle doctest mode on and off.
+
+ This mode is intended to make IPython behave as much as possible like a
+ plain Python shell, from the perspective of how its prompts, exceptions
+ and output look. This makes it easy to copy and paste parts of a
+ session into doctests. It does so by:
+
+ - Changing the prompts to the classic ``>>>`` ones.
+ - Changing the exception reporting mode to 'Plain'.
+ - Disabling pretty-printing of output.
+
+ Note that IPython also supports the pasting of code snippets that have
+ leading '>>>' and '...' prompts in them. This means that you can paste
+ doctests from files or docstrings (even if they have leading
+ whitespace), and the code will execute correctly. You can then use
+ '%history -t' to see the translated history; this will give you the
+ input after removal of all the leading prompts and whitespace, which
+ can be pasted back into an editor.
+
+ With these features, you can switch into this mode easily whenever you
+ need to do testing and changes to doctests, without having to leave
+ your existing IPython session.
+ """
+
+ # Shorthands
+ shell = self.shell
+ meta = shell.meta
+ disp_formatter = self.shell.display_formatter
+ ptformatter = disp_formatter.formatters['text/plain']
+ # dstore is a data store kept in the instance metadata bag to track any
+ # changes we make, so we can undo them later.
+ dstore = meta.setdefault('doctest_mode',Struct())
+ save_dstore = dstore.setdefault
+
+ # save a few values we'll need to recover later
+ mode = save_dstore('mode',False)
+ save_dstore('rc_pprint',ptformatter.pprint)
+ save_dstore('xmode',shell.InteractiveTB.mode)
+ save_dstore('rc_separate_out',shell.separate_out)
+ save_dstore('rc_separate_out2',shell.separate_out2)
+ save_dstore('rc_separate_in',shell.separate_in)
+ save_dstore('rc_active_types',disp_formatter.active_types)
+
+ if not mode:
+ # turn on
+
+ # Prompt separators like plain python
+ shell.separate_in = ''
+ shell.separate_out = ''
+ shell.separate_out2 = ''
+
+
+ ptformatter.pprint = False
+ disp_formatter.active_types = ['text/plain']
+
+ shell.magic('xmode Plain')
+ else:
+ # turn off
+ shell.separate_in = dstore.rc_separate_in
+
+ shell.separate_out = dstore.rc_separate_out
+ shell.separate_out2 = dstore.rc_separate_out2
+
+ ptformatter.pprint = dstore.rc_pprint
+ disp_formatter.active_types = dstore.rc_active_types
+
+ shell.magic('xmode ' + dstore.xmode)
+
# mode here is the state before we switch; switch_doctest_mode takes
# the mode we're switching to.
shell.switch_doctest_mode(not mode)
- # Store new mode and inform
+ # Store new mode and inform
dstore.mode = bool(not mode)
- mode_label = ['OFF','ON'][dstore.mode]
- print('Doctest mode is:', mode_label)
-
- @line_magic
- def gui(self, parameter_s=''):
- """Enable or disable IPython GUI event loop integration.
-
- %gui [GUINAME]
-
- This magic replaces IPython's threaded shells that were activated
- using the (pylab/wthread/etc.) command line flags. GUI toolkits
- can now be enabled at runtime and keyboard
- interrupts should work without any problems. The following toolkits
- are supported: wxPython, PyQt4, PyGTK, Tk and Cocoa (OSX)::
-
- %gui wx # enable wxPython event loop integration
- %gui qt4|qt # enable PyQt4 event loop integration
- %gui qt5 # enable PyQt5 event loop integration
- %gui gtk # enable PyGTK event loop integration
- %gui gtk3 # enable Gtk3 event loop integration
- %gui tk # enable Tk event loop integration
- %gui osx # enable Cocoa event loop integration
- # (requires %matplotlib 1.1)
- %gui # disable all event loop integration
-
- WARNING: after any of these has been called you can simply create
- an application object, but DO NOT start the event loop yourself, as
- we have already handled that.
- """
- opts, arg = self.parse_options(parameter_s, '')
- if arg=='': arg = None
- try:
- return self.shell.enable_gui(arg)
- except Exception as e:
- # print simple error message, rather than traceback if we can't
- # hook up the GUI
- error(str(e))
-
- @skip_doctest
- @line_magic
- def precision(self, s=''):
- """Set floating point precision for pretty printing.
-
- Can set either integer precision or a format string.
-
- If numpy has been imported and precision is an int,
- numpy display precision will also be set, via ``numpy.set_printoptions``.
-
- If no argument is given, defaults will be restored.
-
- Examples
- --------
- ::
-
- In [1]: from math import pi
-
- In [2]: %precision 3
- Out[2]: u'%.3f'
-
- In [3]: pi
- Out[3]: 3.142
-
- In [4]: %precision %i
- Out[4]: u'%i'
-
- In [5]: pi
- Out[5]: 3
-
- In [6]: %precision %e
- Out[6]: u'%e'
-
- In [7]: pi**10
- Out[7]: 9.364805e+04
-
- In [8]: %precision
- Out[8]: u'%r'
-
- In [9]: pi**10
- Out[9]: 93648.047476082982
- """
- ptformatter = self.shell.display_formatter.formatters['text/plain']
- ptformatter.float_precision = s
- return ptformatter.float_format
-
- @magic_arguments.magic_arguments()
- @magic_arguments.argument(
- '-e', '--export', action='store_true', default=False,
+ mode_label = ['OFF','ON'][dstore.mode]
+ print('Doctest mode is:', mode_label)
+
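
The save/restore bookkeeping above relies on setdefault recording a value only the first time it is called for a key, so the original settings survive repeated toggles; a standalone sketch of that idiom::

    dstore = {}                       # stands in for the Struct kept in shell.meta
    save = dstore.setdefault

    rc_pprint = True
    save('rc_pprint', rc_pprint)      # first toggle: remembers the original setting
    rc_pprint = False                 # doctest mode switches pretty-printing off
    save('rc_pprint', rc_pprint)      # later toggles: the stored value is untouched
    print(dstore['rc_pprint'])        # True -> what gets restored when toggling off
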
+ @line_magic
+ def gui(self, parameter_s=''):
+ """Enable or disable IPython GUI event loop integration.
+
+ %gui [GUINAME]
+
+ This magic replaces IPython's threaded shells that were activated
+ using the (pylab/wthread/etc.) command line flags. GUI toolkits
+ can now be enabled at runtime and keyboard
+ interrupts should work without any problems. The following toolkits
+ are supported: wxPython, PyQt4, PyGTK, Tk and Cocoa (OSX)::
+
+ %gui wx # enable wxPython event loop integration
+ %gui qt4|qt # enable PyQt4 event loop integration
+ %gui qt5 # enable PyQt5 event loop integration
+ %gui gtk # enable PyGTK event loop integration
+ %gui gtk3 # enable Gtk3 event loop integration
+ %gui tk # enable Tk event loop integration
+ %gui osx # enable Cocoa event loop integration
+ # (requires %matplotlib 1.1)
+ %gui # disable all event loop integration
+
+ WARNING: after any of these has been called you can simply create
+ an application object, but DO NOT start the event loop yourself, as
+ we have already handled that.
+ """
+ opts, arg = self.parse_options(parameter_s, '')
+ if arg=='': arg = None
+ try:
+ return self.shell.enable_gui(arg)
+ except Exception as e:
+ # print simple error message, rather than traceback if we can't
+ # hook up the GUI
+ error(str(e))
+
+ @skip_doctest
+ @line_magic
+ def precision(self, s=''):
+ """Set floating point precision for pretty printing.
+
+ Can set either integer precision or a format string.
+
+ If numpy has been imported and precision is an int,
+ numpy display precision will also be set, via ``numpy.set_printoptions``.
+
+ If no argument is given, defaults will be restored.
+
+ Examples
+ --------
+ ::
+
+ In [1]: from math import pi
+
+ In [2]: %precision 3
+ Out[2]: u'%.3f'
+
+ In [3]: pi
+ Out[3]: 3.142
+
+ In [4]: %precision %i
+ Out[4]: u'%i'
+
+ In [5]: pi
+ Out[5]: 3
+
+ In [6]: %precision %e
+ Out[6]: u'%e'
+
+ In [7]: pi**10
+ Out[7]: 9.364805e+04
+
+ In [8]: %precision
+ Out[8]: u'%r'
+
+ In [9]: pi**10
+ Out[9]: 93648.047476082982
+ """
+ ptformatter = self.shell.display_formatter.formatters['text/plain']
+ ptformatter.float_precision = s
+ return ptformatter.float_format
+
+ @magic_arguments.magic_arguments()
+ @magic_arguments.argument(
+ '-e', '--export', action='store_true', default=False,
help=argparse.SUPPRESS
- )
- @magic_arguments.argument(
- 'filename', type=unicode_type,
- help='Notebook name or filename'
- )
- @line_magic
- def notebook(self, s):
- """Export and convert IPython notebooks.
-
- This function can export the current IPython history to a notebook file.
+ )
+ @magic_arguments.argument(
+ 'filename', type=unicode_type,
+ help='Notebook name or filename'
+ )
+ @line_magic
+ def notebook(self, s):
+ """Export and convert IPython notebooks.
+
+ This function can export the current IPython history to a notebook file.
For example, to export the history to "foo.ipynb" do "%notebook foo.ipynb".
The -e or --export flag is deprecated in IPython 5.2, and will be
removed in the future.
- """
- args = magic_arguments.parse_argstring(self.notebook, s)
-
- from nbformat import write, v4
+ """
+ args = magic_arguments.parse_argstring(self.notebook, s)
+
+ from nbformat import write, v4
cells = []
hist = list(self.shell.history_manager.get_range())
diff --git a/contrib/python/ipython/py2/IPython/core/magics/code.py b/contrib/python/ipython/py2/IPython/core/magics/code.py
index 4f17cda0c0..4c1a40f197 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/code.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/code.py
@@ -1,146 +1,146 @@
-"""Implementation of code management magic functions.
-"""
-from __future__ import print_function
+"""Implementation of code management magic functions.
+"""
+from __future__ import print_function
from __future__ import absolute_import
-#-----------------------------------------------------------------------------
-# Copyright (c) 2012 The IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-# Stdlib
-import inspect
-import io
-import os
-import re
-import sys
-import ast
-from itertools import chain
-
-# Our own packages
-from IPython.core.error import TryNext, StdinNotImplementedError, UsageError
-from IPython.core.macro import Macro
-from IPython.core.magic import Magics, magics_class, line_magic
-from IPython.core.oinspect import find_file, find_source_lines
-from IPython.testing.skipdoctest import skip_doctest
-from IPython.utils import py3compat
-from IPython.utils.py3compat import string_types
-from IPython.utils.contexts import preserve_keys
+#-----------------------------------------------------------------------------
+# Copyright (c) 2012 The IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+# Stdlib
+import inspect
+import io
+import os
+import re
+import sys
+import ast
+from itertools import chain
+
+# Our own packages
+from IPython.core.error import TryNext, StdinNotImplementedError, UsageError
+from IPython.core.macro import Macro
+from IPython.core.magic import Magics, magics_class, line_magic
+from IPython.core.oinspect import find_file, find_source_lines
+from IPython.testing.skipdoctest import skip_doctest
+from IPython.utils import py3compat
+from IPython.utils.py3compat import string_types
+from IPython.utils.contexts import preserve_keys
from IPython.utils.path import get_py_filename
from warnings import warn
from logging import error
-from IPython.utils.text import get_text_list
-
-#-----------------------------------------------------------------------------
-# Magic implementation classes
-#-----------------------------------------------------------------------------
-
-# Used for exception handling in magic_edit
-class MacroToEdit(ValueError): pass
-
-ipython_input_pat = re.compile(r"<ipython\-input\-(\d+)-[a-z\d]+>$")
-
-# To match, e.g. 8-10 1:5 :10 3-
-range_re = re.compile(r"""
-(?P<start>\d+)?
-((?P<sep>[\-:])
- (?P<end>\d+)?)?
-$""", re.VERBOSE)
-
-
-def extract_code_ranges(ranges_str):
-    """Turn a string of ranges for %load into 2-tuples of (start, stop)
-    ready to use as a slice of the content split by lines.
-
- Examples
- --------
-    list(extract_code_ranges("5-10 2"))
- [(4, 10), (1, 2)]
- """
- for range_str in ranges_str.split():
- rmatch = range_re.match(range_str)
- if not rmatch:
- continue
- sep = rmatch.group("sep")
- start = rmatch.group("start")
- end = rmatch.group("end")
-
- if sep == '-':
- start = int(start) - 1 if start else None
- end = int(end) if end else None
- elif sep == ':':
- start = int(start) - 1 if start else None
- end = int(end) - 1 if end else None
- else:
- end = int(start)
- start = int(start) - 1
- yield (start, end)
-
-
-@skip_doctest
-def extract_symbols(code, symbols):
- """
- Return a tuple (blocks, not_found)
- where ``blocks`` is a list of code fragments
- for each symbol parsed from code, and ``not_found`` are
- symbols not found in the code.
-
- For example::
-
- >>> code = '''a = 10
-
- def b(): return 42
-
- class A: pass'''
-
- >>> extract_symbols(code, 'A,b,z')
- (["class A: pass", "def b(): return 42"], ['z'])
- """
- symbols = symbols.split(',')
-
- # this will raise SyntaxError if code isn't valid Python
- py_code = ast.parse(code)
-
- marks = [(getattr(s, 'name', None), s.lineno) for s in py_code.body]
- code = code.split('\n')
-
- symbols_lines = {}
-
- # we already know the start_lineno of each symbol (marks).
- # To find each end_lineno, we traverse in reverse order until each
- # non-blank line
- end = len(code)
- for name, start in reversed(marks):
- while not code[end - 1].strip():
- end -= 1
- if name:
- symbols_lines[name] = (start - 1, end)
- end = start - 1
-
- # Now symbols_lines is a map
- # {'symbol_name': (start_lineno, end_lineno), ...}
-
- # fill a list with chunks of codes for each requested symbol
- blocks = []
- not_found = []
- for symbol in symbols:
- if symbol in symbols_lines:
- start, end = symbols_lines[symbol]
- blocks.append('\n'.join(code[start:end]) + '\n')
- else:
- not_found.append(symbol)
-
- return blocks, not_found
-
+from IPython.utils.text import get_text_list
+
+#-----------------------------------------------------------------------------
+# Magic implementation classes
+#-----------------------------------------------------------------------------
+
+# Used for exception handling in magic_edit
+class MacroToEdit(ValueError): pass
+
+ipython_input_pat = re.compile(r"<ipython\-input\-(\d+)-[a-z\d]+>$")
+
+# To match, e.g. 8-10 1:5 :10 3-
+range_re = re.compile(r"""
+(?P<start>\d+)?
+((?P<sep>[\-:])
+ (?P<end>\d+)?)?
+$""", re.VERBOSE)
+
+
+def extract_code_ranges(ranges_str):
+    """Turn a string of ranges for %load into 2-tuples of (start, stop)
+    ready to use as a slice of the content split by lines.
+
+ Examples
+ --------
+    list(extract_code_ranges("5-10 2"))
+ [(4, 10), (1, 2)]
+ """
+ for range_str in ranges_str.split():
+ rmatch = range_re.match(range_str)
+ if not rmatch:
+ continue
+ sep = rmatch.group("sep")
+ start = rmatch.group("start")
+ end = rmatch.group("end")
+
+ if sep == '-':
+ start = int(start) - 1 if start else None
+ end = int(end) if end else None
+ elif sep == ':':
+ start = int(start) - 1 if start else None
+ end = int(end) - 1 if end else None
+ else:
+ end = int(start)
+ start = int(start) - 1
+ yield (start, end)
+
+
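
A quick check of the range grammar handled above ('-' ranges include the end line, ':' ranges are Python-style, a bare number selects one line), assuming the module import path used in this tree::

    from IPython.core.magics.code import extract_code_ranges

    print(list(extract_code_ranges("5-10 2")))      # [(4, 10), (1, 2)]
    print(list(extract_code_ranges("1:5 :10 3-")))  # [(0, 4), (None, 9), (2, None)]
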
+@skip_doctest
+def extract_symbols(code, symbols):
+ """
+ Return a tuple (blocks, not_found)
+ where ``blocks`` is a list of code fragments
+ for each symbol parsed from code, and ``not_found`` are
+ symbols not found in the code.
+
+ For example::
+
+ >>> code = '''a = 10
+
+ def b(): return 42
+
+ class A: pass'''
+
+ >>> extract_symbols(code, 'A,b,z')
+ (["class A: pass", "def b(): return 42"], ['z'])
+ """
+ symbols = symbols.split(',')
+
+ # this will raise SyntaxError if code isn't valid Python
+ py_code = ast.parse(code)
+
+ marks = [(getattr(s, 'name', None), s.lineno) for s in py_code.body]
+ code = code.split('\n')
+
+ symbols_lines = {}
+
+ # we already know the start_lineno of each symbol (marks).
+ # To find each end_lineno, we traverse in reverse order until each
+ # non-blank line
+ end = len(code)
+ for name, start in reversed(marks):
+ while not code[end - 1].strip():
+ end -= 1
+ if name:
+ symbols_lines[name] = (start - 1, end)
+ end = start - 1
+
+ # Now symbols_lines is a map
+ # {'symbol_name': (start_lineno, end_lineno), ...}
+
+ # fill a list with chunks of codes for each requested symbol
+ blocks = []
+ not_found = []
+ for symbol in symbols:
+ if symbol in symbols_lines:
+ start, end = symbols_lines[symbol]
+ blocks.append('\n'.join(code[start:end]) + '\n')
+ else:
+ not_found.append(symbol)
+
+ return blocks, not_found
+
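
And the docstring example above, exercised directly (same import-path assumption)::

    from IPython.core.magics.code import extract_symbols

    code = "a = 10\n\ndef b(): return 42\n\nclass A: pass"
    blocks, not_found = extract_symbols(code, 'A,b,z')
    print(not_found)     # ['z']
    print(blocks[0])     # class A: pass
    print(blocks[1])     # def b(): return 42
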
def strip_initial_indent(lines):
"""For %load, strip indent from lines until finding an unindented line.
-
+
https://github.com/ipython/ipython/issues/9775
"""
indent_re = re.compile(r'\s+')
@@ -169,578 +169,578 @@ def strip_initial_indent(lines):
yield line
-class InteractivelyDefined(Exception):
- """Exception for interactively defined variable in magic_edit"""
- def __init__(self, index):
- self.index = index
-
-
-@magics_class
-class CodeMagics(Magics):
- """Magics related to code management (loading, saving, editing, ...)."""
-
- def __init__(self, *args, **kwargs):
- self._knowntemps = set()
- super(CodeMagics, self).__init__(*args, **kwargs)
-
- @line_magic
- def save(self, parameter_s=''):
- """Save a set of lines or a macro to a given filename.
-
- Usage:\\
- %save [options] filename n1-n2 n3-n4 ... n5 .. n6 ...
-
- Options:
-
- -r: use 'raw' input. By default, the 'processed' history is used,
- so that magics are loaded in their transformed version to valid
-          Python. If this option is given, the raw input as typed at the
- command line is used instead.
-
- -f: force overwrite. If file exists, %save will prompt for overwrite
- unless -f is given.
-
- -a: append to the file instead of overwriting it.
-
- This function uses the same syntax as %history for input ranges,
- then saves the lines to the filename you specify.
-
- It adds a '.py' extension to the file if you don't do so yourself, and
- it asks for confirmation before overwriting existing files.
-
- If `-r` option is used, the default extension is `.ipy`.
- """
-
- opts,args = self.parse_options(parameter_s,'fra',mode='list')
- if not args:
- raise UsageError('Missing filename.')
- raw = 'r' in opts
- force = 'f' in opts
- append = 'a' in opts
- mode = 'a' if append else 'w'
- ext = u'.ipy' if raw else u'.py'
+class InteractivelyDefined(Exception):
+ """Exception for interactively defined variable in magic_edit"""
+ def __init__(self, index):
+ self.index = index
+
+
+@magics_class
+class CodeMagics(Magics):
+ """Magics related to code management (loading, saving, editing, ...)."""
+
+ def __init__(self, *args, **kwargs):
+ self._knowntemps = set()
+ super(CodeMagics, self).__init__(*args, **kwargs)
+
+ @line_magic
+ def save(self, parameter_s=''):
+ """Save a set of lines or a macro to a given filename.
+
+ Usage:\\
+ %save [options] filename n1-n2 n3-n4 ... n5 .. n6 ...
+
+ Options:
+
+ -r: use 'raw' input. By default, the 'processed' history is used,
+ so that magics are loaded in their transformed version to valid
+          Python. If this option is given, the raw input as typed at the
+ command line is used instead.
+
+ -f: force overwrite. If file exists, %save will prompt for overwrite
+ unless -f is given.
+
+ -a: append to the file instead of overwriting it.
+
+ This function uses the same syntax as %history for input ranges,
+ then saves the lines to the filename you specify.
+
+ It adds a '.py' extension to the file if you don't do so yourself, and
+ it asks for confirmation before overwriting existing files.
+
+ If `-r` option is used, the default extension is `.ipy`.
+ """
+
+ opts,args = self.parse_options(parameter_s,'fra',mode='list')
+ if not args:
+ raise UsageError('Missing filename.')
+ raw = 'r' in opts
+ force = 'f' in opts
+ append = 'a' in opts
+ mode = 'a' if append else 'w'
+ ext = u'.ipy' if raw else u'.py'
fname, codefrom = args[0], " ".join(args[1:])
- if not fname.endswith((u'.py',u'.ipy')):
- fname += ext
- file_exists = os.path.isfile(fname)
- if file_exists and not force and not append:
- try:
- overwrite = self.shell.ask_yes_no('File `%s` exists. Overwrite (y/[N])? ' % fname, default='n')
- except StdinNotImplementedError:
- print("File `%s` exists. Use `%%save -f %s` to force overwrite" % (fname, parameter_s))
- return
- if not overwrite :
- print('Operation cancelled.')
- return
- try:
- cmds = self.shell.find_user_code(codefrom,raw)
- except (TypeError, ValueError) as e:
- print(e.args[0])
- return
- out = py3compat.cast_unicode(cmds)
- with io.open(fname, mode, encoding="utf-8") as f:
- if not file_exists or not append:
- f.write(u"# coding: utf-8\n")
- f.write(out)
- # make sure we end on a newline
- if not out.endswith(u'\n'):
- f.write(u'\n')
- print('The following commands were written to file `%s`:' % fname)
- print(cmds)
-
- @line_magic
- def pastebin(self, parameter_s=''):
-        """Upload code to GitHub's Gist paste bin, returning the URL.
-
- Usage:\\
- %pastebin [-d "Custom description"] 1-7
-
- The argument can be an input history range, a filename, or the name of a
- string or macro.
-
- Options:
-
- -d: Pass a custom description for the gist. The default will say
- "Pasted from IPython".
- """
- opts, args = self.parse_options(parameter_s, 'd:')
-
- try:
- code = self.shell.find_user_code(args)
- except (ValueError, TypeError) as e:
- print(e.args[0])
- return
-
- # Deferred import
- try:
- from urllib.request import urlopen # Py 3
- except ImportError:
- from urllib2 import urlopen
- import json
- post_data = json.dumps({
- "description": opts.get('d', "Pasted from IPython"),
- "public": True,
- "files": {
- "file1.py": {
- "content": code
- }
- }
- }).encode('utf-8')
-
- response = urlopen("https://api.github.com/gists", post_data)
- response_data = json.loads(response.read().decode('utf-8'))
- return response_data['html_url']
-
- @line_magic
- def loadpy(self, arg_s):
- """Alias of `%load`
-
- `%loadpy` has gained some flexibility and dropped the requirement of a `.py`
-        extension. So it has been renamed simply to %load. You can look at
- `%load`'s docstring for more info.
- """
- self.load(arg_s)
-
- @line_magic
- def load(self, arg_s):
- """Load code into the current frontend.
-
- Usage:\\
- %load [options] source
-
- where source can be a filename, URL, input history range, macro, or
- element in the user namespace
-
- Options:
-
- -r <lines>: Specify lines or ranges of lines to load from the source.
- Ranges could be specified as x-y (x..y) or in python-style x:y
- (x..(y-1)). Both limits x and y can be left blank (meaning the
- beginning and end of the file, respectively).
-
- -s <symbols>: Specify function or classes to load from python source.
-
- -y : Don't ask confirmation for loading source above 200 000 characters.
-
- -n : Include the user's namespace when searching for source code.
-
-        This magic command can take a local filename, a URL, a history
-        range (see %history) or a macro as argument. It will prompt for
- confirmation before loading source with more than 200 000 characters, unless
-        the -y flag is passed or the frontend does not support raw_input::
-
- %load myscript.py
- %load 7-27
- %load myMacro
- %load http://www.example.com/myscript.py
- %load -r 5-10 myscript.py
- %load -r 10-20,30,40: foo.py
- %load -s MyClass,wonder_function myscript.py
- %load -n MyClass
- %load -n my_module.wonder_function
- """
- opts,args = self.parse_options(arg_s,'yns:r:')
-
- if not args:
- raise UsageError('Missing filename, URL, input history range, '
- 'macro, or element in the user namespace.')
-
- search_ns = 'n' in opts
-
- contents = self.shell.find_user_code(args, search_ns=search_ns)
-
- if 's' in opts:
- try:
- blocks, not_found = extract_symbols(contents, opts['s'])
- except SyntaxError:
- # non python code
- error("Unable to parse the input as valid Python code")
- return
-
- if len(not_found) == 1:
- warn('The symbol `%s` was not found' % not_found[0])
- elif len(not_found) > 1:
- warn('The symbols %s were not found' % get_text_list(not_found,
- wrap_item_with='`')
- )
-
- contents = '\n'.join(blocks)
-
- if 'r' in opts:
- ranges = opts['r'].replace(',', ' ')
- lines = contents.split('\n')
- slices = extract_code_ranges(ranges)
- contents = [lines[slice(*slc)] for slc in slices]
+ if not fname.endswith((u'.py',u'.ipy')):
+ fname += ext
+ file_exists = os.path.isfile(fname)
+ if file_exists and not force and not append:
+ try:
+ overwrite = self.shell.ask_yes_no('File `%s` exists. Overwrite (y/[N])? ' % fname, default='n')
+ except StdinNotImplementedError:
+ print("File `%s` exists. Use `%%save -f %s` to force overwrite" % (fname, parameter_s))
+ return
+ if not overwrite :
+ print('Operation cancelled.')
+ return
+ try:
+ cmds = self.shell.find_user_code(codefrom,raw)
+ except (TypeError, ValueError) as e:
+ print(e.args[0])
+ return
+ out = py3compat.cast_unicode(cmds)
+ with io.open(fname, mode, encoding="utf-8") as f:
+ if not file_exists or not append:
+ f.write(u"# coding: utf-8\n")
+ f.write(out)
+ # make sure we end on a newline
+ if not out.endswith(u'\n'):
+ f.write(u'\n')
+ print('The following commands were written to file `%s`:' % fname)
+ print(cmds)
+
+ @line_magic
+ def pastebin(self, parameter_s=''):
+        """Upload code to GitHub's Gist paste bin, returning the URL.
+
+ Usage:\\
+ %pastebin [-d "Custom description"] 1-7
+
+ The argument can be an input history range, a filename, or the name of a
+ string or macro.
+
+ Options:
+
+ -d: Pass a custom description for the gist. The default will say
+ "Pasted from IPython".
+ """
+ opts, args = self.parse_options(parameter_s, 'd:')
+
+ try:
+ code = self.shell.find_user_code(args)
+ except (ValueError, TypeError) as e:
+ print(e.args[0])
+ return
+
+ # Deferred import
+ try:
+ from urllib.request import urlopen # Py 3
+ except ImportError:
+ from urllib2 import urlopen
+ import json
+ post_data = json.dumps({
+ "description": opts.get('d', "Pasted from IPython"),
+ "public": True,
+ "files": {
+ "file1.py": {
+ "content": code
+ }
+ }
+ }).encode('utf-8')
+
+ response = urlopen("https://api.github.com/gists", post_data)
+ response_data = json.loads(response.read().decode('utf-8'))
+ return response_data['html_url']
+
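
The gist upload above is a single anonymous JSON POST. A rough standalone equivalent of
that request is sketched below (payload values illustrative); note that GitHub has since
restricted anonymous gist creation, so the unauthenticated call may now be rejected.

    # Rough standalone equivalent of the POST made by %pastebin (values illustrative).
    import json
    try:
        from urllib.request import urlopen   # Python 3
    except ImportError:
        from urllib2 import urlopen          # Python 2
    payload = json.dumps({
        "description": "Pasted from IPython",
        "public": True,
        "files": {"file1.py": {"content": "print('hi')"}},
    }).encode('utf-8')
    response = urlopen("https://api.github.com/gists", payload)
    print(json.loads(response.read().decode('utf-8'))['html_url'])
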
+ @line_magic
+ def loadpy(self, arg_s):
+ """Alias of `%load`
+
+ `%loadpy` has gained some flexibility and dropped the requirement of a `.py`
+        extension. So it has been renamed simply to %load. You can look at
+ `%load`'s docstring for more info.
+ """
+ self.load(arg_s)
+
+ @line_magic
+ def load(self, arg_s):
+ """Load code into the current frontend.
+
+ Usage:\\
+ %load [options] source
+
+ where source can be a filename, URL, input history range, macro, or
+ element in the user namespace
+
+ Options:
+
+ -r <lines>: Specify lines or ranges of lines to load from the source.
+ Ranges could be specified as x-y (x..y) or in python-style x:y
+ (x..(y-1)). Both limits x and y can be left blank (meaning the
+ beginning and end of the file, respectively).
+
+        -s <symbols>: Specify functions or classes to load from python source.
+
+        -y : Don't ask for confirmation when loading source above 200 000 characters.
+
+        -n : Include the user's namespace when searching for source code.
+
+        This magic command can take a local filename, a URL, a history
+        range (see %history) or a macro as argument. It will prompt for
+        confirmation before loading source with more than 200 000 characters,
+        unless the -y flag is passed or the frontend does not support raw_input::
+
+ %load myscript.py
+ %load 7-27
+ %load myMacro
+ %load http://www.example.com/myscript.py
+ %load -r 5-10 myscript.py
+ %load -r 10-20,30,40: foo.py
+ %load -s MyClass,wonder_function myscript.py
+ %load -n MyClass
+ %load -n my_module.wonder_function
+ """
+ opts,args = self.parse_options(arg_s,'yns:r:')
+
+ if not args:
+ raise UsageError('Missing filename, URL, input history range, '
+ 'macro, or element in the user namespace.')
+
+ search_ns = 'n' in opts
+
+ contents = self.shell.find_user_code(args, search_ns=search_ns)
+
+ if 's' in opts:
+ try:
+ blocks, not_found = extract_symbols(contents, opts['s'])
+ except SyntaxError:
+ # non python code
+ error("Unable to parse the input as valid Python code")
+ return
+
+ if len(not_found) == 1:
+ warn('The symbol `%s` was not found' % not_found[0])
+ elif len(not_found) > 1:
+ warn('The symbols %s were not found' % get_text_list(not_found,
+ wrap_item_with='`')
+ )
+
+ contents = '\n'.join(blocks)
+
+ if 'r' in opts:
+ ranges = opts['r'].replace(',', ' ')
+ lines = contents.split('\n')
+ slices = extract_code_ranges(ranges)
+ contents = [lines[slice(*slc)] for slc in slices]
contents = '\n'.join(strip_initial_indent(chain.from_iterable(contents)))
-
- l = len(contents)
-
-        # 200 000 is ~ 2500 full 80 character lines
-        # so on average, more than 5000 lines
- if l > 200000 and 'y' not in opts:
- try:
- ans = self.shell.ask_yes_no(("The text you're trying to load seems pretty big"\
- " (%d characters). Continue (y/[N]) ?" % l), default='n' )
- except StdinNotImplementedError:
-                # assume yes if raw input is not implemented
- ans = True
-
- if ans is False :
- print('Operation cancelled.')
- return
-
- contents = "# %load {}\n".format(arg_s) + contents
-
- self.shell.set_next_input(contents, replace=True)
-
- @staticmethod
- def _find_edit_target(shell, args, opts, last_call):
- """Utility method used by magic_edit to find what to edit."""
-
- def make_filename(arg):
- "Make a filename from the given args"
- try:
- filename = get_py_filename(arg)
- except IOError:
- # If it ends with .py but doesn't already exist, assume we want
- # a new file.
- if arg.endswith('.py'):
- filename = arg
- else:
- filename = None
- return filename
-
- # Set a few locals from the options for convenience:
- opts_prev = 'p' in opts
- opts_raw = 'r' in opts
-
- # custom exceptions
- class DataIsObject(Exception): pass
-
- # Default line number value
- lineno = opts.get('n',None)
-
- if opts_prev:
- args = '_%s' % last_call[0]
- if args not in shell.user_ns:
- args = last_call[1]
-
- # by default this is done with temp files, except when the given
- # arg is a filename
- use_temp = True
-
- data = ''
-
- # First, see if the arguments should be a filename.
- filename = make_filename(args)
- if filename:
- use_temp = False
- elif args:
- # Mode where user specifies ranges of lines, like in %macro.
- data = shell.extract_input_lines(args, opts_raw)
- if not data:
- try:
- # Load the parameter given as a variable. If not a string,
- # process it as an object instead (below)
-
- #print '*** args',args,'type',type(args) # dbg
- data = eval(args, shell.user_ns)
- if not isinstance(data, string_types):
- raise DataIsObject
-
- except (NameError,SyntaxError):
- # given argument is not a variable, try as a filename
- filename = make_filename(args)
- if filename is None:
- warn("Argument given (%s) can't be found as a variable "
- "or as a filename." % args)
- return (None, None, None)
- use_temp = False
-
- except DataIsObject:
- # macros have a special edit function
- if isinstance(data, Macro):
- raise MacroToEdit(data)
-
- # For objects, try to edit the file where they are defined
- filename = find_file(data)
- if filename:
- if 'fakemodule' in filename.lower() and \
- inspect.isclass(data):
- # class created by %edit? Try to find source
- # by looking for method definitions instead, the
- # __module__ in those classes is FakeModule.
- attrs = [getattr(data, aname) for aname in dir(data)]
- for attr in attrs:
- if not inspect.ismethod(attr):
- continue
- filename = find_file(attr)
- if filename and \
- 'fakemodule' not in filename.lower():
- # change the attribute to be the edit
- # target instead
- data = attr
- break
-
- m = ipython_input_pat.match(os.path.basename(filename))
- if m:
- raise InteractivelyDefined(int(m.groups()[0]))
-
- datafile = 1
- if filename is None:
- filename = make_filename(args)
- datafile = 1
- if filename is not None:
- # only warn about this if we get a real name
- warn('Could not find file where `%s` is defined.\n'
- 'Opening a file named `%s`' % (args, filename))
- # Now, make sure we can actually read the source (if it was
- # in a temp file it's gone by now).
- if datafile:
- if lineno is None:
- lineno = find_source_lines(data)
- if lineno is None:
- filename = make_filename(args)
- if filename is None:
- warn('The file where `%s` was defined '
- 'cannot be read or found.' % data)
- return (None, None, None)
- use_temp = False
-
- if use_temp:
- filename = shell.mktempfile(data)
- print('IPython will make a temporary file named:',filename)
-
- # use last_call to remember the state of the previous call, but don't
- # let it be clobbered by successive '-p' calls.
- try:
- last_call[0] = shell.displayhook.prompt_count
- if not opts_prev:
- last_call[1] = args
- except:
- pass
-
-
- return filename, lineno, use_temp
-
- def _edit_macro(self,mname,macro):
- """open an editor with the macro data in a file"""
- filename = self.shell.mktempfile(macro.value)
- self.shell.hooks.editor(filename)
-
- # and make a new macro object, to replace the old one
- with open(filename) as mfile:
- mvalue = mfile.read()
- self.shell.user_ns[mname] = Macro(mvalue)
-
- @skip_doctest
- @line_magic
- def edit(self, parameter_s='',last_call=['','']):
- """Bring up an editor and execute the resulting code.
-
- Usage:
- %edit [options] [args]
-
- %edit runs IPython's editor hook. The default version of this hook is
- set to call the editor specified by your $EDITOR environment variable.
- If this isn't found, it will default to vi under Linux/Unix and to
- notepad under Windows. See the end of this docstring for how to change
- the editor hook.
-
- You can also set the value of this editor via the
- ``TerminalInteractiveShell.editor`` option in your configuration file.
- This is useful if you wish to use a different editor from your typical
- default with IPython (and for Windows users who typically don't set
- environment variables).
-
- This command allows you to conveniently edit multi-line code right in
- your IPython session.
-
- If called without arguments, %edit opens up an empty editor with a
- temporary file and will execute the contents of this file when you
- close it (don't forget to save it!).
-
-
- Options:
-
- -n <number>: open the editor at a specified line number. By default,
- the IPython editor hook uses the unix syntax 'editor +N filename', but
- you can configure this by providing your own modified hook if your
- favorite editor supports line-number specifications with a different
- syntax.
-
- -p: this will call the editor with the same data as the previous time
- it was used, regardless of how long ago (in your current session) it
- was.
-
- -r: use 'raw' input. This option only applies to input taken from the
- user's history. By default, the 'processed' history is used, so that
-        magics are loaded in their transformed version, as valid Python. If
-        this option is given, the raw input as typed at the command line is
- used instead. When you exit the editor, it will be executed by
- IPython's own processor.
-
- -x: do not execute the edited code immediately upon exit. This is
- mainly useful if you are editing programs which need to be called with
- command line arguments, which you can then do using %run.
-
-
- Arguments:
-
- If arguments are given, the following possibilities exist:
-
- - If the argument is a filename, IPython will load that into the
- editor. It will execute its contents with execfile() when you exit,
- loading any code in the file into your interactive namespace.
-
- - The arguments are ranges of input history, e.g. "7 ~1/4-6".
- The syntax is the same as in the %history magic.
-
- - If the argument is a string variable, its contents are loaded
- into the editor. You can thus edit any string which contains
- python code (including the result of previous edits).
-
- - If the argument is the name of an object (other than a string),
- IPython will try to locate the file where it was defined and open the
- editor at the point where it is defined. You can use `%edit function`
- to load an editor exactly at the point where 'function' is defined,
- edit it and have the file be executed automatically.
-
- - If the object is a macro (see %macro for details), this opens up your
- specified editor with a temporary file containing the macro's data.
- Upon exit, the macro is reloaded with the contents of the file.
-
- Note: opening at an exact line is only supported under Unix, and some
- editors (like kedit and gedit up to Gnome 2.8) do not understand the
- '+NUMBER' parameter necessary for this feature. Good editors like
- (X)Emacs, vi, jed, pico and joe all do.
-
- After executing your code, %edit will return as output the code you
- typed in the editor (except when it was an existing file). This way
- you can reload the code in further invocations of %edit as a variable,
- via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of
- the output.
-
- Note that %edit is also available through the alias %ed.
-
- This is an example of creating a simple function inside the editor and
- then modifying it. First, start up the editor::
-
- In [1]: edit
- Editing... done. Executing edited code...
- Out[1]: 'def foo():\\n print "foo() was defined in an editing
- session"\\n'
-
- We can then call the function foo()::
-
- In [2]: foo()
- foo() was defined in an editing session
-
- Now we edit foo. IPython automatically loads the editor with the
- (temporary) file where foo() was previously defined::
-
- In [3]: edit foo
- Editing... done. Executing edited code...
-
- And if we call foo() again we get the modified version::
-
- In [4]: foo()
- foo() has now been changed!
-
- Here is an example of how to edit a code snippet successive
- times. First we call the editor::
-
- In [5]: edit
- Editing... done. Executing edited code...
- hello
- Out[5]: "print 'hello'\\n"
-
- Now we call it again with the previous output (stored in _)::
-
- In [6]: edit _
- Editing... done. Executing edited code...
- hello world
- Out[6]: "print 'hello world'\\n"
-
- Now we call it with the output #8 (stored in _8, also as Out[8])::
-
- In [7]: edit _8
- Editing... done. Executing edited code...
- hello again
- Out[7]: "print 'hello again'\\n"
-
-
- Changing the default editor hook:
-
- If you wish to write your own editor hook, you can put it in a
- configuration file which you load at startup time. The default hook
- is defined in the IPython.core.hooks module, and you can use that as a
- starting example for further modifications. That file also has
- general instructions on how to set a new hook for use once you've
- defined it."""
- opts,args = self.parse_options(parameter_s,'prxn:')
-
- try:
- filename, lineno, is_temp = self._find_edit_target(self.shell,
- args, opts, last_call)
- except MacroToEdit as e:
- self._edit_macro(args, e.args[0])
- return
- except InteractivelyDefined as e:
- print("Editing In[%i]" % e.index)
- args = str(e.index)
- filename, lineno, is_temp = self._find_edit_target(self.shell,
- args, opts, last_call)
- if filename is None:
- # nothing was found, warnings have already been issued,
- # just give up.
- return
-
- if is_temp:
- self._knowntemps.add(filename)
- elif (filename in self._knowntemps):
- is_temp = True
-
-
- # do actual editing here
- print('Editing...', end=' ')
- sys.stdout.flush()
- try:
- # Quote filenames that may have spaces in them
- if ' ' in filename:
- filename = "'%s'" % filename
- self.shell.hooks.editor(filename,lineno)
- except TryNext:
- warn('Could not open editor')
- return
-
- # XXX TODO: should this be generalized for all string vars?
- # For now, this is special-cased to blocks created by cpaste
- if args.strip() == 'pasted_block':
- with open(filename, 'r') as f:
- self.shell.user_ns['pasted_block'] = f.read()
-
- if 'x' in opts: # -x prevents actual execution
- print()
- else:
- print('done. Executing edited code...')
- with preserve_keys(self.shell.user_ns, '__file__'):
- if not is_temp:
- self.shell.user_ns['__file__'] = filename
- if 'r' in opts: # Untranslated IPython code
- with open(filename, 'r') as f:
- source = f.read()
- self.shell.run_cell(source, store_history=False)
- else:
- self.shell.safe_execfile(filename, self.shell.user_ns,
- self.shell.user_ns)
-
- if is_temp:
- try:
- return open(filename).read()
- except IOError as msg:
- if msg.filename == filename:
- warn('File not found. Did you forget to save?')
- return
- else:
- self.shell.showtraceback()
+
+ l = len(contents)
+
+        # 200 000 is ~ 2500 full 80 character lines
+        # so on average, more than 5000 lines
+ if l > 200000 and 'y' not in opts:
+ try:
+ ans = self.shell.ask_yes_no(("The text you're trying to load seems pretty big"\
+ " (%d characters). Continue (y/[N]) ?" % l), default='n' )
+ except StdinNotImplementedError:
+                # assume yes if raw input is not implemented
+ ans = True
+
+ if ans is False :
+ print('Operation cancelled.')
+ return
+
+ contents = "# %load {}\n".format(arg_s) + contents
+
+ self.shell.set_next_input(contents, replace=True)
+
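
The ``-r`` handling above assumes that ``extract_code_ranges`` yields zero-based,
stop-exclusive ``(start, stop)`` pairs, which are then applied as slices and flattened.
A self-contained illustration of that slicing step (sample data stands in for the real helper):

    # Illustration of the -r slicing step; sample ranges stand in for
    # what extract_code_ranges would produce.
    from itertools import chain
    lines = "a\nb\nc\nd\ne".split('\n')
    slices = [(1, 3), (4, None)]                    # lines 2-3, then 5 to the end
    picked = [lines[slice(*slc)] for slc in slices]
    print('\n'.join(chain.from_iterable(picked)))   # b, c, e
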
+ @staticmethod
+ def _find_edit_target(shell, args, opts, last_call):
+ """Utility method used by magic_edit to find what to edit."""
+
+ def make_filename(arg):
+ "Make a filename from the given args"
+ try:
+ filename = get_py_filename(arg)
+ except IOError:
+ # If it ends with .py but doesn't already exist, assume we want
+ # a new file.
+ if arg.endswith('.py'):
+ filename = arg
+ else:
+ filename = None
+ return filename
+
+ # Set a few locals from the options for convenience:
+ opts_prev = 'p' in opts
+ opts_raw = 'r' in opts
+
+ # custom exceptions
+ class DataIsObject(Exception): pass
+
+ # Default line number value
+ lineno = opts.get('n',None)
+
+ if opts_prev:
+ args = '_%s' % last_call[0]
+ if args not in shell.user_ns:
+ args = last_call[1]
+
+ # by default this is done with temp files, except when the given
+ # arg is a filename
+ use_temp = True
+
+ data = ''
+
+ # First, see if the arguments should be a filename.
+ filename = make_filename(args)
+ if filename:
+ use_temp = False
+ elif args:
+ # Mode where user specifies ranges of lines, like in %macro.
+ data = shell.extract_input_lines(args, opts_raw)
+ if not data:
+ try:
+ # Load the parameter given as a variable. If not a string,
+ # process it as an object instead (below)
+
+ #print '*** args',args,'type',type(args) # dbg
+ data = eval(args, shell.user_ns)
+ if not isinstance(data, string_types):
+ raise DataIsObject
+
+ except (NameError,SyntaxError):
+ # given argument is not a variable, try as a filename
+ filename = make_filename(args)
+ if filename is None:
+ warn("Argument given (%s) can't be found as a variable "
+ "or as a filename." % args)
+ return (None, None, None)
+ use_temp = False
+
+ except DataIsObject:
+ # macros have a special edit function
+ if isinstance(data, Macro):
+ raise MacroToEdit(data)
+
+ # For objects, try to edit the file where they are defined
+ filename = find_file(data)
+ if filename:
+ if 'fakemodule' in filename.lower() and \
+ inspect.isclass(data):
+ # class created by %edit? Try to find source
+ # by looking for method definitions instead, the
+ # __module__ in those classes is FakeModule.
+ attrs = [getattr(data, aname) for aname in dir(data)]
+ for attr in attrs:
+ if not inspect.ismethod(attr):
+ continue
+ filename = find_file(attr)
+ if filename and \
+ 'fakemodule' not in filename.lower():
+ # change the attribute to be the edit
+ # target instead
+ data = attr
+ break
+
+ m = ipython_input_pat.match(os.path.basename(filename))
+ if m:
+ raise InteractivelyDefined(int(m.groups()[0]))
+
+ datafile = 1
+ if filename is None:
+ filename = make_filename(args)
+ datafile = 1
+ if filename is not None:
+ # only warn about this if we get a real name
+ warn('Could not find file where `%s` is defined.\n'
+ 'Opening a file named `%s`' % (args, filename))
+ # Now, make sure we can actually read the source (if it was
+ # in a temp file it's gone by now).
+ if datafile:
+ if lineno is None:
+ lineno = find_source_lines(data)
+ if lineno is None:
+ filename = make_filename(args)
+ if filename is None:
+ warn('The file where `%s` was defined '
+ 'cannot be read or found.' % data)
+ return (None, None, None)
+ use_temp = False
+
+ if use_temp:
+ filename = shell.mktempfile(data)
+ print('IPython will make a temporary file named:',filename)
+
+ # use last_call to remember the state of the previous call, but don't
+ # let it be clobbered by successive '-p' calls.
+ try:
+ last_call[0] = shell.displayhook.prompt_count
+ if not opts_prev:
+ last_call[1] = args
+ except:
+ pass
+
+
+ return filename, lineno, use_temp
+
+ def _edit_macro(self,mname,macro):
+ """open an editor with the macro data in a file"""
+ filename = self.shell.mktempfile(macro.value)
+ self.shell.hooks.editor(filename)
+
+ # and make a new macro object, to replace the old one
+ with open(filename) as mfile:
+ mvalue = mfile.read()
+ self.shell.user_ns[mname] = Macro(mvalue)
+
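
``_edit_macro`` above round-trips a macro through a temporary file and rebuilds it. A
``Macro`` is essentially just a holder for source text, e.g. (source string illustrative):

    # Macro simply stores source text to be re-run later (source illustrative).
    from IPython.core.macro import Macro
    m = Macro(u"x = 1\nprint(x)\n")
    print(m.value)    # the stored source; %edit rewrites this via a temp file
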
+ @skip_doctest
+ @line_magic
+ def edit(self, parameter_s='',last_call=['','']):
+ """Bring up an editor and execute the resulting code.
+
+ Usage:
+ %edit [options] [args]
+
+ %edit runs IPython's editor hook. The default version of this hook is
+ set to call the editor specified by your $EDITOR environment variable.
+ If this isn't found, it will default to vi under Linux/Unix and to
+ notepad under Windows. See the end of this docstring for how to change
+ the editor hook.
+
+ You can also set the value of this editor via the
+ ``TerminalInteractiveShell.editor`` option in your configuration file.
+ This is useful if you wish to use a different editor from your typical
+ default with IPython (and for Windows users who typically don't set
+ environment variables).
+
+ This command allows you to conveniently edit multi-line code right in
+ your IPython session.
+
+ If called without arguments, %edit opens up an empty editor with a
+ temporary file and will execute the contents of this file when you
+ close it (don't forget to save it!).
+
+
+ Options:
+
+ -n <number>: open the editor at a specified line number. By default,
+ the IPython editor hook uses the unix syntax 'editor +N filename', but
+ you can configure this by providing your own modified hook if your
+ favorite editor supports line-number specifications with a different
+ syntax.
+
+ -p: this will call the editor with the same data as the previous time
+ it was used, regardless of how long ago (in your current session) it
+ was.
+
+ -r: use 'raw' input. This option only applies to input taken from the
+ user's history. By default, the 'processed' history is used, so that
+        magics are loaded in their transformed version, as valid Python. If
+        this option is given, the raw input as typed at the command line is
+ used instead. When you exit the editor, it will be executed by
+ IPython's own processor.
+
+ -x: do not execute the edited code immediately upon exit. This is
+ mainly useful if you are editing programs which need to be called with
+ command line arguments, which you can then do using %run.
+
+
+ Arguments:
+
+ If arguments are given, the following possibilities exist:
+
+ - If the argument is a filename, IPython will load that into the
+ editor. It will execute its contents with execfile() when you exit,
+ loading any code in the file into your interactive namespace.
+
+ - The arguments are ranges of input history, e.g. "7 ~1/4-6".
+ The syntax is the same as in the %history magic.
+
+ - If the argument is a string variable, its contents are loaded
+ into the editor. You can thus edit any string which contains
+ python code (including the result of previous edits).
+
+ - If the argument is the name of an object (other than a string),
+ IPython will try to locate the file where it was defined and open the
+ editor at the point where it is defined. You can use `%edit function`
+ to load an editor exactly at the point where 'function' is defined,
+ edit it and have the file be executed automatically.
+
+ - If the object is a macro (see %macro for details), this opens up your
+ specified editor with a temporary file containing the macro's data.
+ Upon exit, the macro is reloaded with the contents of the file.
+
+ Note: opening at an exact line is only supported under Unix, and some
+ editors (like kedit and gedit up to Gnome 2.8) do not understand the
+ '+NUMBER' parameter necessary for this feature. Good editors like
+ (X)Emacs, vi, jed, pico and joe all do.
+
+ After executing your code, %edit will return as output the code you
+ typed in the editor (except when it was an existing file). This way
+ you can reload the code in further invocations of %edit as a variable,
+ via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of
+ the output.
+
+ Note that %edit is also available through the alias %ed.
+
+ This is an example of creating a simple function inside the editor and
+ then modifying it. First, start up the editor::
+
+ In [1]: edit
+ Editing... done. Executing edited code...
+ Out[1]: 'def foo():\\n print "foo() was defined in an editing
+ session"\\n'
+
+ We can then call the function foo()::
+
+ In [2]: foo()
+ foo() was defined in an editing session
+
+ Now we edit foo. IPython automatically loads the editor with the
+ (temporary) file where foo() was previously defined::
+
+ In [3]: edit foo
+ Editing... done. Executing edited code...
+
+ And if we call foo() again we get the modified version::
+
+ In [4]: foo()
+ foo() has now been changed!
+
+ Here is an example of how to edit a code snippet successive
+ times. First we call the editor::
+
+ In [5]: edit
+ Editing... done. Executing edited code...
+ hello
+ Out[5]: "print 'hello'\\n"
+
+ Now we call it again with the previous output (stored in _)::
+
+ In [6]: edit _
+ Editing... done. Executing edited code...
+ hello world
+ Out[6]: "print 'hello world'\\n"
+
+ Now we call it with the output #8 (stored in _8, also as Out[8])::
+
+ In [7]: edit _8
+ Editing... done. Executing edited code...
+ hello again
+ Out[7]: "print 'hello again'\\n"
+
+
+ Changing the default editor hook:
+
+ If you wish to write your own editor hook, you can put it in a
+ configuration file which you load at startup time. The default hook
+ is defined in the IPython.core.hooks module, and you can use that as a
+ starting example for further modifications. That file also has
+ general instructions on how to set a new hook for use once you've
+ defined it."""
+ opts,args = self.parse_options(parameter_s,'prxn:')
+
+ try:
+ filename, lineno, is_temp = self._find_edit_target(self.shell,
+ args, opts, last_call)
+ except MacroToEdit as e:
+ self._edit_macro(args, e.args[0])
+ return
+ except InteractivelyDefined as e:
+ print("Editing In[%i]" % e.index)
+ args = str(e.index)
+ filename, lineno, is_temp = self._find_edit_target(self.shell,
+ args, opts, last_call)
+ if filename is None:
+ # nothing was found, warnings have already been issued,
+ # just give up.
+ return
+
+ if is_temp:
+ self._knowntemps.add(filename)
+ elif (filename in self._knowntemps):
+ is_temp = True
+
+
+ # do actual editing here
+ print('Editing...', end=' ')
+ sys.stdout.flush()
+ try:
+ # Quote filenames that may have spaces in them
+ if ' ' in filename:
+ filename = "'%s'" % filename
+ self.shell.hooks.editor(filename,lineno)
+ except TryNext:
+ warn('Could not open editor')
+ return
+
+ # XXX TODO: should this be generalized for all string vars?
+ # For now, this is special-cased to blocks created by cpaste
+ if args.strip() == 'pasted_block':
+ with open(filename, 'r') as f:
+ self.shell.user_ns['pasted_block'] = f.read()
+
+ if 'x' in opts: # -x prevents actual execution
+ print()
+ else:
+ print('done. Executing edited code...')
+ with preserve_keys(self.shell.user_ns, '__file__'):
+ if not is_temp:
+ self.shell.user_ns['__file__'] = filename
+ if 'r' in opts: # Untranslated IPython code
+ with open(filename, 'r') as f:
+ source = f.read()
+ self.shell.run_cell(source, store_history=False)
+ else:
+ self.shell.safe_execfile(filename, self.shell.user_ns,
+ self.shell.user_ns)
+
+ if is_temp:
+ try:
+ return open(filename).read()
+ except IOError as msg:
+ if msg.filename == filename:
+ warn('File not found. Did you forget to save?')
+ return
+ else:
+ self.shell.showtraceback()
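
As the %edit docstring notes, the editor hook can be pointed at a specific program from a
configuration file instead of $EDITOR. A minimal profile snippet (profile path and editor
choice assumed) would be:

    # In e.g. ~/.ipython/profile_default/ipython_config.py (path assumed):
    c = get_config()
    c.TerminalInteractiveShell.editor = 'vim'   # program launched by %edit
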
diff --git a/contrib/python/ipython/py2/IPython/core/magics/config.py b/contrib/python/ipython/py2/IPython/core/magics/config.py
index f023cf9267..9505697791 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/config.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/config.py
@@ -1,159 +1,159 @@
-"""Implementation of configuration-related magic functions.
-"""
-from __future__ import print_function
+"""Implementation of configuration-related magic functions.
+"""
+from __future__ import print_function
from __future__ import absolute_import
-#-----------------------------------------------------------------------------
-# Copyright (c) 2012 The IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-# Stdlib
-import re
-
-# Our own packages
-from IPython.core.error import UsageError
-from IPython.core.magic import Magics, magics_class, line_magic
+#-----------------------------------------------------------------------------
+# Copyright (c) 2012 The IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+# Stdlib
+import re
+
+# Our own packages
+from IPython.core.error import UsageError
+from IPython.core.magic import Magics, magics_class, line_magic
from logging import error
-
-#-----------------------------------------------------------------------------
-# Magic implementation classes
-#-----------------------------------------------------------------------------
-
-reg = re.compile(r'^\w+\.\w+$')
-@magics_class
-class ConfigMagics(Magics):
-
- def __init__(self, shell):
- super(ConfigMagics, self).__init__(shell)
- self.configurables = []
-
- @line_magic
- def config(self, s):
- """configure IPython
-
- %config Class[.trait=value]
-
- This magic exposes most of the IPython config system. Any
- Configurable class should be able to be configured with the simple
- line::
-
- %config Class.trait=value
-
- Where `value` will be resolved in the user's namespace, if it is an
- expression or variable name.
-
- Examples
- --------
-
- To see what classes are available for config, pass no arguments::
-
- In [1]: %config
- Available objects for config:
- TerminalInteractiveShell
- HistoryManager
- PrefilterManager
- AliasManager
- IPCompleter
- DisplayFormatter
-
- To view what is configurable on a given class, just pass the class
- name::
-
- In [2]: %config IPCompleter
- IPCompleter options
- -----------------
- IPCompleter.omit__names=<Enum>
- Current: 2
- Choices: (0, 1, 2)
- Instruct the completer to omit private method names
- Specifically, when completing on ``object.<tab>``.
- When 2 [default]: all names that start with '_' will be excluded.
- When 1: all 'magic' names (``__foo__``) will be excluded.
- When 0: nothing will be excluded.
- IPCompleter.merge_completions=<CBool>
- Current: True
- Whether to merge completion results into a single list
- If False, only the completion results from the first non-empty
- completer will be returned.
- IPCompleter.limit_to__all__=<CBool>
- Current: False
- Instruct the completer to use __all__ for the completion
- Specifically, when completing on ``object.<tab>``.
- When True: only those names in obj.__all__ will be included.
- When False [default]: the __all__ attribute is ignored
- IPCompleter.greedy=<CBool>
- Current: False
- Activate greedy completion
- This will enable completion on elements of lists, results of
- function calls, etc., but can be unsafe because the code is
- actually evaluated on TAB.
-
- but the real use is in setting values::
-
- In [3]: %config IPCompleter.greedy = True
-
- and these values are read from the user_ns if they are variables::
-
- In [4]: feeling_greedy=False
-
- In [5]: %config IPCompleter.greedy = feeling_greedy
-
- """
- from traitlets.config.loader import Config
- # some IPython objects are Configurable, but do not yet have
- # any configurable traits. Exclude them from the effects of
- # this magic, as their presence is just noise:
- configurables = [ c for c in self.shell.configurables
- if c.__class__.class_traits(config=True) ]
- classnames = [ c.__class__.__name__ for c in configurables ]
-
- line = s.strip()
- if not line:
- # print available configurable names
- print("Available objects for config:")
- for name in classnames:
- print(" ", name)
- return
- elif line in classnames:
- # `%config TerminalInteractiveShell` will print trait info for
- # TerminalInteractiveShell
- c = configurables[classnames.index(line)]
- cls = c.__class__
- help = cls.class_get_help(c)
- # strip leading '--' from cl-args:
- help = re.sub(re.compile(r'^--', re.MULTILINE), '', help)
- print(help)
- return
- elif reg.match(line):
- cls, attr = line.split('.')
- return getattr(configurables[classnames.index(cls)],attr)
- elif '=' not in line:
- msg = "Invalid config statement: %r, "\
- "should be `Class.trait = value`."
-
- ll = line.lower()
- for classname in classnames:
- if ll == classname.lower():
- msg = msg + '\nDid you mean %s (note the case)?' % classname
- break
-
- raise UsageError( msg % line)
-
- # otherwise, assume we are setting configurables.
- # leave quotes on args when splitting, because we want
- # unquoted args to eval in user_ns
- cfg = Config()
- exec("cfg."+line, locals(), self.shell.user_ns)
-
- for configurable in configurables:
- try:
- configurable.update_config(cfg)
- except Exception as e:
- error(e)
+
+#-----------------------------------------------------------------------------
+# Magic implementation classes
+#-----------------------------------------------------------------------------
+
+reg = re.compile(r'^\w+\.\w+$')
+@magics_class
+class ConfigMagics(Magics):
+
+ def __init__(self, shell):
+ super(ConfigMagics, self).__init__(shell)
+ self.configurables = []
+
+ @line_magic
+ def config(self, s):
+ """configure IPython
+
+ %config Class[.trait=value]
+
+ This magic exposes most of the IPython config system. Any
+ Configurable class should be able to be configured with the simple
+ line::
+
+ %config Class.trait=value
+
+ Where `value` will be resolved in the user's namespace, if it is an
+ expression or variable name.
+
+ Examples
+ --------
+
+ To see what classes are available for config, pass no arguments::
+
+ In [1]: %config
+ Available objects for config:
+ TerminalInteractiveShell
+ HistoryManager
+ PrefilterManager
+ AliasManager
+ IPCompleter
+ DisplayFormatter
+
+ To view what is configurable on a given class, just pass the class
+ name::
+
+ In [2]: %config IPCompleter
+ IPCompleter options
+ -----------------
+ IPCompleter.omit__names=<Enum>
+ Current: 2
+ Choices: (0, 1, 2)
+ Instruct the completer to omit private method names
+ Specifically, when completing on ``object.<tab>``.
+ When 2 [default]: all names that start with '_' will be excluded.
+ When 1: all 'magic' names (``__foo__``) will be excluded.
+ When 0: nothing will be excluded.
+ IPCompleter.merge_completions=<CBool>
+ Current: True
+ Whether to merge completion results into a single list
+ If False, only the completion results from the first non-empty
+ completer will be returned.
+ IPCompleter.limit_to__all__=<CBool>
+ Current: False
+ Instruct the completer to use __all__ for the completion
+ Specifically, when completing on ``object.<tab>``.
+ When True: only those names in obj.__all__ will be included.
+ When False [default]: the __all__ attribute is ignored
+ IPCompleter.greedy=<CBool>
+ Current: False
+ Activate greedy completion
+ This will enable completion on elements of lists, results of
+ function calls, etc., but can be unsafe because the code is
+ actually evaluated on TAB.
+
+ but the real use is in setting values::
+
+ In [3]: %config IPCompleter.greedy = True
+
+ and these values are read from the user_ns if they are variables::
+
+ In [4]: feeling_greedy=False
+
+ In [5]: %config IPCompleter.greedy = feeling_greedy
+
+ """
+ from traitlets.config.loader import Config
+ # some IPython objects are Configurable, but do not yet have
+ # any configurable traits. Exclude them from the effects of
+ # this magic, as their presence is just noise:
+ configurables = [ c for c in self.shell.configurables
+ if c.__class__.class_traits(config=True) ]
+ classnames = [ c.__class__.__name__ for c in configurables ]
+
+ line = s.strip()
+ if not line:
+ # print available configurable names
+ print("Available objects for config:")
+ for name in classnames:
+ print(" ", name)
+ return
+ elif line in classnames:
+ # `%config TerminalInteractiveShell` will print trait info for
+ # TerminalInteractiveShell
+ c = configurables[classnames.index(line)]
+ cls = c.__class__
+ help = cls.class_get_help(c)
+ # strip leading '--' from cl-args:
+ help = re.sub(re.compile(r'^--', re.MULTILINE), '', help)
+ print(help)
+ return
+ elif reg.match(line):
+ cls, attr = line.split('.')
+ return getattr(configurables[classnames.index(cls)],attr)
+ elif '=' not in line:
+ msg = "Invalid config statement: %r, "\
+ "should be `Class.trait = value`."
+
+ ll = line.lower()
+ for classname in classnames:
+ if ll == classname.lower():
+ msg = msg + '\nDid you mean %s (note the case)?' % classname
+ break
+
+ raise UsageError( msg % line)
+
+ # otherwise, assume we are setting configurables.
+ # leave quotes on args when splitting, because we want
+ # unquoted args to eval in user_ns
+ cfg = Config()
+ exec("cfg."+line, locals(), self.shell.user_ns)
+
+ for configurable in configurables:
+ try:
+ configurable.update_config(cfg)
+ except Exception as e:
+ error(e)
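
Under the hood, a statement such as ``%config IPCompleter.greedy = True`` builds a fresh
``traitlets`` ``Config`` object and pushes it to every live configurable, roughly as in this
condensed sketch of the method above:

    # Condensed sketch of what `%config IPCompleter.greedy = True` builds.
    from traitlets.config.loader import Config
    cfg = Config()
    cfg.IPCompleter.greedy = True          # the magic exec()s this line against user_ns
    # every registered configurable then receives:
    #     configurable.update_config(cfg)
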
diff --git a/contrib/python/ipython/py2/IPython/core/magics/display.py b/contrib/python/ipython/py2/IPython/core/magics/display.py
index 156a86b10a..c4a8f44d9a 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/display.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/display.py
@@ -1,36 +1,36 @@
-"""Simple magics for display formats"""
-#-----------------------------------------------------------------------------
-# Copyright (c) 2012 The IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-# Our own packages
-from IPython.core.display import display, Javascript, Latex, SVG, HTML
-from IPython.core.magic import (
- Magics, magics_class, cell_magic
-)
-
-#-----------------------------------------------------------------------------
-# Magic implementation classes
-#-----------------------------------------------------------------------------
-
-
-@magics_class
-class DisplayMagics(Magics):
- """Magics for displaying various output types with literals
+"""Simple magics for display formats"""
+#-----------------------------------------------------------------------------
+# Copyright (c) 2012 The IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+# Our own packages
+from IPython.core.display import display, Javascript, Latex, SVG, HTML
+from IPython.core.magic import (
+ Magics, magics_class, cell_magic
+)
+
+#-----------------------------------------------------------------------------
+# Magic implementation classes
+#-----------------------------------------------------------------------------
+
+
+@magics_class
+class DisplayMagics(Magics):
+ """Magics for displaying various output types with literals
Defines javascript/latex/svg/html cell magics for writing
- blocks in those languages, to be rendered in the frontend.
- """
+ blocks in those languages, to be rendered in the frontend.
+ """
- @cell_magic
+ @cell_magic
def js(self, line, cell):
"""Run the cell block of Javascript code
@@ -39,27 +39,27 @@ class DisplayMagics(Magics):
self.javascript(line, cell)
@cell_magic
- def javascript(self, line, cell):
- """Run the cell block of Javascript code"""
- display(Javascript(cell))
+ def javascript(self, line, cell):
+ """Run the cell block of Javascript code"""
+ display(Javascript(cell))
- @cell_magic
- def latex(self, line, cell):
- """Render the cell as a block of latex
+ @cell_magic
+ def latex(self, line, cell):
+ """Render the cell as a block of latex
-            The subset of latex which is supported depends on the implementation in
+        The subset of latex which is supported depends on the implementation in
the client. In the Jupyter Notebook, this magic only renders the subset
of latex defined by MathJax
- [here](https://docs.mathjax.org/en/v2.5-latest/tex.html)."""
- display(Latex(cell))
-
- @cell_magic
- def svg(self, line, cell):
- """Render the cell as an SVG literal"""
- display(SVG(cell))
-
- @cell_magic
- def html(self, line, cell):
- """Render the cell as a block of HTML"""
- display(HTML(cell))
+ [here](https://docs.mathjax.org/en/v2.5-latest/tex.html)."""
+ display(Latex(cell))
+
+ @cell_magic
+ def svg(self, line, cell):
+ """Render the cell as an SVG literal"""
+ display(SVG(cell))
+
+ @cell_magic
+ def html(self, line, cell):
+ """Render the cell as a block of HTML"""
+ display(HTML(cell))
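
The cell magics above are thin wrappers around display objects; the same rendering can be
triggered programmatically (the public ``IPython.display`` module re-exports these classes):

    # Programmatic equivalent of the %%html / %%latex cell magics.
    from IPython.display import display, HTML, Latex
    display(HTML(u"<b>rendered as HTML</b>"))
    display(Latex(r"$e^{i\pi} + 1 = 0$"))
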
diff --git a/contrib/python/ipython/py2/IPython/core/magics/execution.py b/contrib/python/ipython/py2/IPython/core/magics/execution.py
index a10c3409bc..3734b0cdae 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/execution.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/execution.py
@@ -1,710 +1,710 @@
-# -*- coding: utf-8 -*-
-"""Implementation of execution-related magic functions."""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import print_function
+# -*- coding: utf-8 -*-
+"""Implementation of execution-related magic functions."""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import print_function
from __future__ import absolute_import
-
-import ast
-import bdb
-import gc
-import itertools
-import os
-import sys
-import time
-import timeit
-from pdb import Restart
-
-# cProfile was added in Python2.5
-try:
- import cProfile as profile
- import pstats
-except ImportError:
- # profile isn't bundled by default in Debian for license reasons
- try:
- import profile, pstats
- except ImportError:
- profile = pstats = None
-
+
+import ast
+import bdb
+import gc
+import itertools
+import os
+import sys
+import time
+import timeit
+from pdb import Restart
+
+# cProfile was added in Python2.5
+try:
+ import cProfile as profile
+ import pstats
+except ImportError:
+ # profile isn't bundled by default in Debian for license reasons
+ try:
+ import profile, pstats
+ except ImportError:
+ profile = pstats = None
+
from IPython.core import oinspect
-from IPython.core import magic_arguments
-from IPython.core import page
-from IPython.core.error import UsageError
-from IPython.core.macro import Macro
-from IPython.core.magic import (Magics, magics_class, line_magic, cell_magic,
- line_cell_magic, on_off, needs_local_scope)
-from IPython.testing.skipdoctest import skip_doctest
-from IPython.utils import py3compat
-from IPython.utils.py3compat import builtin_mod, iteritems, PY3
-from IPython.utils.contexts import preserve_keys
-from IPython.utils.capture import capture_output
-from IPython.utils.ipstruct import Struct
-from IPython.utils.module_paths import find_mod
+from IPython.core import magic_arguments
+from IPython.core import page
+from IPython.core.error import UsageError
+from IPython.core.macro import Macro
+from IPython.core.magic import (Magics, magics_class, line_magic, cell_magic,
+ line_cell_magic, on_off, needs_local_scope)
+from IPython.testing.skipdoctest import skip_doctest
+from IPython.utils import py3compat
+from IPython.utils.py3compat import builtin_mod, iteritems, PY3
+from IPython.utils.contexts import preserve_keys
+from IPython.utils.capture import capture_output
+from IPython.utils.ipstruct import Struct
+from IPython.utils.module_paths import find_mod
from IPython.utils.path import get_py_filename, shellglob
-from IPython.utils.timing import clock, clock2
+from IPython.utils.timing import clock, clock2
from warnings import warn
from logging import error
-
-if PY3:
- from io import StringIO
-else:
- from StringIO import StringIO
-
-#-----------------------------------------------------------------------------
-# Magic implementation classes
-#-----------------------------------------------------------------------------
-
-
-class TimeitResult(object):
- """
- Object returned by the timeit magic with info about the run.
-
+
+if PY3:
+ from io import StringIO
+else:
+ from StringIO import StringIO
+
+#-----------------------------------------------------------------------------
+# Magic implementation classes
+#-----------------------------------------------------------------------------
+
+
+class TimeitResult(object):
+ """
+ Object returned by the timeit magic with info about the run.
+
Contains the following attributes :
-
+
loops: (int) number of loops done per measurement
repeat: (int) number of times the measurement has been repeated
best: (float) best execution time / number
all_runs: (list of float) execution time of each run (in s)
- compile_time: (float) time of statement compilation (s)
-
- """
-
- def __init__(self, loops, repeat, best, worst, all_runs, compile_time, precision):
- self.loops = loops
- self.repeat = repeat
- self.best = best
- self.worst = worst
- self.all_runs = all_runs
- self.compile_time = compile_time
- self._precision = precision
-
- def _repr_pretty_(self, p , cycle):
- if self.loops == 1: # No s at "loops" if only one loop
- unic = u"%d loop, best of %d: %s per loop" % (self.loops, self.repeat,
- _format_time(self.best, self._precision))
- else:
- unic = u"%d loops, best of %d: %s per loop" % (self.loops, self.repeat,
- _format_time(self.best, self._precision))
- p.text(u'<TimeitResult : '+unic+u'>')
-
-
-class TimeitTemplateFiller(ast.NodeTransformer):
- """Fill in the AST template for timing execution.
-
- This is quite closely tied to the template definition, which is in
- :meth:`ExecutionMagics.timeit`.
- """
- def __init__(self, ast_setup, ast_stmt):
- self.ast_setup = ast_setup
- self.ast_stmt = ast_stmt
-
- def visit_FunctionDef(self, node):
- "Fill in the setup statement"
- self.generic_visit(node)
- if node.name == "inner":
- node.body[:1] = self.ast_setup.body
-
- return node
-
- def visit_For(self, node):
- "Fill in the statement to be timed"
- if getattr(getattr(node.body[0], 'value', None), 'id', None) == 'stmt':
- node.body = self.ast_stmt.body
- return node
-
-
-class Timer(timeit.Timer):
- """Timer class that explicitly uses self.inner
-
- which is an undocumented implementation detail of CPython,
- not shared by PyPy.
- """
- # Timer.timeit copied from CPython 3.4.2
- def timeit(self, number=timeit.default_number):
- """Time 'number' executions of the main statement.
-
- To be precise, this executes the setup statement once, and
- then returns the time it takes to execute the main statement
- a number of times, as a float measured in seconds. The
- argument is the number of times through the loop, defaulting
- to one million. The main statement, the setup statement and
- the timer function to be used are passed to the constructor.
- """
- it = itertools.repeat(None, number)
- gcold = gc.isenabled()
- gc.disable()
- try:
- timing = self.inner(it, self.timer)
- finally:
- if gcold:
- gc.enable()
- return timing
-
-
-@magics_class
-class ExecutionMagics(Magics):
- """Magics related to code execution, debugging, profiling, etc.
-
- """
-
- def __init__(self, shell):
- super(ExecutionMagics, self).__init__(shell)
- if profile is None:
- self.prun = self.profile_missing_notice
- # Default execution function used to actually run user code.
- self.default_runner = None
-
- def profile_missing_notice(self, *args, **kwargs):
- error("""\
-The profile module could not be found. It has been removed from the standard
-python packages because of its non-free license. To use profiling, install the
-python-profiler package from non-free.""")
-
- @skip_doctest
- @line_cell_magic
- def prun(self, parameter_s='', cell=None):
-
- """Run a statement through the python code profiler.
-
- Usage, in line mode:
- %prun [options] statement
-
- Usage, in cell mode:
- %%prun [options] [statement]
- code...
- code...
-
- In cell mode, the additional code lines are appended to the (possibly
- empty) statement in the first line. Cell mode allows you to easily
- profile multiline blocks without having to put them in a separate
- function.
-
- The given statement (which doesn't require quote marks) is run via the
- python profiler in a manner similar to the profile.run() function.
- Namespaces are internally managed to work correctly; profile.run
- cannot be used in IPython because it makes certain assumptions about
- namespaces which do not hold under IPython.
-
- Options:
-
- -l <limit>
- you can place restrictions on what or how much of the
- profile gets printed. The limit value can be:
-
- * A string: only information for function names containing this string
- is printed.
-
-          * An integer: only this many lines are printed.
-
- * A float (between 0 and 1): this fraction of the report is printed
- (for example, use a limit of 0.4 to see the topmost 40% only).
-
- You can combine several limits with repeated use of the option. For
- example, ``-l __init__ -l 5`` will print only the topmost 5 lines of
- information about class constructors.
-
- -r
- return the pstats.Stats object generated by the profiling. This
- object has all the information about the profile in it, and you can
- later use it for further analysis or in other functions.
-
- -s <key>
- sort profile by given key. You can provide more than one key
- by using the option several times: '-s key1 -s key2 -s key3...'. The
- default sorting key is 'time'.
-
- The following is copied verbatim from the profile documentation
- referenced below:
-
- When more than one key is provided, additional keys are used as
- secondary criteria when the there is equality in all keys selected
- before them.
-
- Abbreviations can be used for any key names, as long as the
- abbreviation is unambiguous. The following are the keys currently
- defined:
-
- ============ =====================
- Valid Arg Meaning
- ============ =====================
- "calls" call count
- "cumulative" cumulative time
- "file" file name
- "module" file name
- "pcalls" primitive call count
- "line" line number
- "name" function name
- "nfl" name/file/line
- "stdname" standard name
- "time" internal time
- ============ =====================
-
- Note that all sorts on statistics are in descending order (placing
- most time consuming items first), where as name, file, and line number
- searches are in ascending order (i.e., alphabetical). The subtle
- distinction between "nfl" and "stdname" is that the standard name is a
- sort of the name as printed, which means that the embedded line
- numbers get compared in an odd way. For example, lines 3, 20, and 40
- would (if the file names were the same) appear in the string order
- "20" "3" and "40". In contrast, "nfl" does a numeric compare of the
- line numbers. In fact, sort_stats("nfl") is the same as
- sort_stats("name", "file", "line").
-
- -T <filename>
- save profile results as shown on screen to a text
- file. The profile is still shown on screen.
-
- -D <filename>
- save (via dump_stats) profile statistics to given
- filename. This data is in a format understood by the pstats module, and
- is generated by a call to the dump_stats() method of profile
- objects. The profile is still shown on screen.
-
- -q
- suppress output to the pager. Best used with -T and/or -D above.
-
- If you want to run complete programs under the profiler's control, use
- ``%run -p [prof_opts] filename.py [args to program]`` where prof_opts
- contains profiler specific options as described here.
-
- You can read the complete documentation for the profile module with::
-
- In [1]: import profile; profile.help()
- """
- opts, arg_str = self.parse_options(parameter_s, 'D:l:rs:T:q',
- list_all=True, posix=False)
- if cell is not None:
- arg_str += '\n' + cell
- arg_str = self.shell.input_splitter.transform_cell(arg_str)
- return self._run_with_profiler(arg_str, opts, self.shell.user_ns)
-
- def _run_with_profiler(self, code, opts, namespace):
- """
- Run `code` with profiler. Used by ``%prun`` and ``%run -p``.
-
- Parameters
- ----------
- code : str
- Code to be executed.
- opts : Struct
- Options parsed by `self.parse_options`.
- namespace : dict
- A dictionary for Python namespace (e.g., `self.shell.user_ns`).
-
- """
-
- # Fill default values for unspecified options:
- opts.merge(Struct(D=[''], l=[], s=['time'], T=['']))
-
- prof = profile.Profile()
- try:
- prof = prof.runctx(code, namespace, namespace)
- sys_exit = ''
- except SystemExit:
- sys_exit = """*** SystemExit exception caught in code being profiled."""
-
- stats = pstats.Stats(prof).strip_dirs().sort_stats(*opts.s)
-
- lims = opts.l
- if lims:
- lims = [] # rebuild lims with ints/floats/strings
- for lim in opts.l:
- try:
- lims.append(int(lim))
- except ValueError:
- try:
- lims.append(float(lim))
- except ValueError:
- lims.append(lim)
-
- # Trap output.
- stdout_trap = StringIO()
- stats_stream = stats.stream
- try:
- stats.stream = stdout_trap
- stats.print_stats(*lims)
- finally:
- stats.stream = stats_stream
-
- output = stdout_trap.getvalue()
- output = output.rstrip()
-
- if 'q' not in opts:
- page.page(output)
- print(sys_exit, end=' ')
-
- dump_file = opts.D[0]
- text_file = opts.T[0]
- if dump_file:
- prof.dump_stats(dump_file)
- print('\n*** Profile stats marshalled to file',\
- repr(dump_file)+'.',sys_exit)
- if text_file:
- pfile = open(text_file,'w')
- pfile.write(output)
- pfile.close()
- print('\n*** Profile printout saved to text file',\
- repr(text_file)+'.',sys_exit)
-
- if 'r' in opts:
- return stats
- else:
- return None
-
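
Stripped of the option handling, ``_run_with_profiler`` above boils down to driving
``cProfile`` by hand and rendering the stats into a trapped stream. A self-contained version
of that core pipeline (statement and sort key illustrative):

    # Core cProfile -> pstats pipeline used by %prun / %run -p (illustrative values).
    import cProfile, pstats
    try:
        from io import StringIO          # Python 3
    except ImportError:
        from StringIO import StringIO    # Python 2
    prof = cProfile.Profile()
    prof.runctx("sum(range(10000))", globals(), locals())
    buf = StringIO()
    stats = pstats.Stats(prof, stream=buf).strip_dirs().sort_stats('time')
    stats.print_stats(10)                # show only the top 10 lines
    print(buf.getvalue())
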
- @line_magic
- def pdb(self, parameter_s=''):
- """Control the automatic calling of the pdb interactive debugger.
-
- Call as '%pdb on', '%pdb 1', '%pdb off' or '%pdb 0'. If called without
- argument it works as a toggle.
-
- When an exception is triggered, IPython can optionally call the
- interactive pdb debugger after the traceback printout. %pdb toggles
- this feature on and off.
-
- The initial state of this feature is set in your configuration
- file (the option is ``InteractiveShell.pdb``).
-
- If you want to just activate the debugger AFTER an exception has fired,
- without having to type '%pdb on' and rerunning your code, you can use
- the %debug magic."""
-
- par = parameter_s.strip().lower()
-
- if par:
- try:
- new_pdb = {'off':0,'0':0,'on':1,'1':1}[par]
- except KeyError:
- print ('Incorrect argument. Use on/1, off/0, '
- 'or nothing for a toggle.')
- return
- else:
- # toggle
- new_pdb = not self.shell.call_pdb
-
- # set on the shell
- self.shell.call_pdb = new_pdb
- print('Automatic pdb calling has been turned',on_off(new_pdb))
-
- @skip_doctest
- @magic_arguments.magic_arguments()
- @magic_arguments.argument('--breakpoint', '-b', metavar='FILE:LINE',
- help="""
- Set break point at LINE in FILE.
- """
- )
- @magic_arguments.argument('statement', nargs='*',
- help="""
- Code to run in debugger.
- You can omit this in cell magic mode.
- """
- )
- @line_cell_magic
- def debug(self, line='', cell=None):
- """Activate the interactive debugger.
-
-        This magic command supports two ways of activating the debugger.
-        One is to activate the debugger before executing code. This way, you
-        can set a break point and step through the code from that point.
- You can use this mode by giving statements to execute and optionally
- a breakpoint.
-
-        The other one is to activate the debugger in post-mortem mode. You can
-        activate this mode by simply running %debug without any argument.
- If an exception has just occurred, this lets you inspect its stack
- frames interactively. Note that this will always work only on the last
- traceback that occurred, so you must call this quickly after an
- exception that you wish to inspect has fired, because if another one
- occurs, it clobbers the previous one.
-
- If you want IPython to automatically do this on every exception, see
- the %pdb magic for more details.
- """
- args = magic_arguments.parse_argstring(self.debug, line)
-
- if not (args.breakpoint or args.statement or cell):
- self._debug_post_mortem()
- else:
- code = "\n".join(args.statement)
- if cell:
- code += "\n" + cell
- self._debug_exec(code, args.breakpoint)
-
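
In practice the two modes described above look like this (file name, line number, and the
called function are purely illustrative):

    In [1]: 1/0                               # raises ZeroDivisionError
    In [2]: %debug                            # post-mortem on the last traceback
    In [3]: %debug -b mymodule.py:42 main()   # pre-set a breakpoint, then run
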
- def _debug_post_mortem(self):
- self.shell.debugger(force=True)
-
- def _debug_exec(self, code, breakpoint):
- if breakpoint:
+ compile_time: (float) time of statement compilation (s)
+
+ """
+
+ def __init__(self, loops, repeat, best, worst, all_runs, compile_time, precision):
+ self.loops = loops
+ self.repeat = repeat
+ self.best = best
+ self.worst = worst
+ self.all_runs = all_runs
+ self.compile_time = compile_time
+ self._precision = precision
+
+ def _repr_pretty_(self, p , cycle):
+ if self.loops == 1: # No s at "loops" if only one loop
+ unic = u"%d loop, best of %d: %s per loop" % (self.loops, self.repeat,
+ _format_time(self.best, self._precision))
+ else:
+ unic = u"%d loops, best of %d: %s per loop" % (self.loops, self.repeat,
+ _format_time(self.best, self._precision))
+ p.text(u'<TimeitResult : '+unic+u'>')
+
+
+class TimeitTemplateFiller(ast.NodeTransformer):
+ """Fill in the AST template for timing execution.
+
+ This is quite closely tied to the template definition, which is in
+ :meth:`ExecutionMagics.timeit`.
+ """
+ def __init__(self, ast_setup, ast_stmt):
+ self.ast_setup = ast_setup
+ self.ast_stmt = ast_stmt
+
+ def visit_FunctionDef(self, node):
+ "Fill in the setup statement"
+ self.generic_visit(node)
+ if node.name == "inner":
+ node.body[:1] = self.ast_setup.body
+
+ return node
+
+ def visit_For(self, node):
+ "Fill in the statement to be timed"
+ if getattr(getattr(node.body[0], 'value', None), 'id', None) == 'stmt':
+ node.body = self.ast_stmt.body
+ return node
+
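A minimal sketch of the transformer in action, assuming this module is importable as IPython.core.magics.execution; the setup/statement strings are arbitrary examples and the template mirrors the one built inside %timeit below:

    import ast
    from IPython.core.magics.execution import TimeitTemplateFiller

    template = ast.parse('def inner(_it, _timer):\n'
                         '    setup\n'
                         '    _t0 = _timer()\n'
                         '    for _i in _it:\n'
                         '        stmt\n'
                         '    _t1 = _timer()\n'
                         '    return _t1 - _t0\n')
    ast_setup = ast.parse('data = list(range(100))')   # replaces the bare `setup` marker
    ast_stmt = ast.parse('sum(data)')                  # replaces the bare `stmt` marker
    filled = ast.fix_missing_locations(
        TimeitTemplateFiller(ast_setup, ast_stmt).visit(template))
    code = compile(filled, '<timeit-example>', 'exec')
    # exec(code, {}, ns) would then define ns['inner'] for the Timer class below.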
+
+class Timer(timeit.Timer):
+ """Timer class that explicitly uses self.inner
+
+ which is an undocumented implementation detail of CPython,
+ not shared by PyPy.
+ """
+ # Timer.timeit copied from CPython 3.4.2
+ def timeit(self, number=timeit.default_number):
+ """Time 'number' executions of the main statement.
+
+ To be precise, this executes the setup statement once, and
+ then returns the time it takes to execute the main statement
+ a number of times, as a float measured in seconds. The
+ argument is the number of times through the loop, defaulting
+ to one million. The main statement, the setup statement and
+ the timer function to be used are passed to the constructor.
+ """
+ it = itertools.repeat(None, number)
+ gcold = gc.isenabled()
+ gc.disable()
+ try:
+ timing = self.inner(it, self.timer)
+ finally:
+ if gcold:
+ gc.enable()
+ return timing
+
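A short, hedged illustration of the `inner` hook this subclass relies on; here `inner` is written by hand instead of being exec'd from the filled template, and the timed body is an arbitrary example:

    import timeit
    from IPython.core.magics.execution import Timer

    def inner(_it, _timer):            # stands in for the exec'd template function
        _t0 = _timer()
        for _i in _it:
            sum(range(100))
        _t1 = _timer()
        return _t1 - _t0

    t = Timer(timer=timeit.default_timer)
    t.inner = inner                    # the attribute Timer.timeit() above calls
    print(t.timeit(number=1000))       # total seconds for 1000 iterations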
+
+@magics_class
+class ExecutionMagics(Magics):
+ """Magics related to code execution, debugging, profiling, etc.
+
+ """
+
+ def __init__(self, shell):
+ super(ExecutionMagics, self).__init__(shell)
+ if profile is None:
+ self.prun = self.profile_missing_notice
+ # Default execution function used to actually run user code.
+ self.default_runner = None
+
+ def profile_missing_notice(self, *args, **kwargs):
+ error("""\
+The profile module could not be found. It has been removed from the standard
+python packages because of its non-free license. To use profiling, install the
+python-profiler package from non-free.""")
+
+ @skip_doctest
+ @line_cell_magic
+ def prun(self, parameter_s='', cell=None):
+
+ """Run a statement through the python code profiler.
+
+ Usage, in line mode:
+ %prun [options] statement
+
+ Usage, in cell mode:
+ %%prun [options] [statement]
+ code...
+ code...
+
+ In cell mode, the additional code lines are appended to the (possibly
+ empty) statement in the first line. Cell mode allows you to easily
+ profile multiline blocks without having to put them in a separate
+ function.
+
+ The given statement (which doesn't require quote marks) is run via the
+ python profiler in a manner similar to the profile.run() function.
+ Namespaces are internally managed to work correctly; profile.run
+ cannot be used in IPython because it makes certain assumptions about
+ namespaces which do not hold under IPython.
+
+ Options:
+
+ -l <limit>
+ you can place restrictions on what or how much of the
+ profile gets printed. The limit value can be:
+
+ * A string: only information for function names containing this string
+ is printed.
+
+          * An integer: only this many lines are printed.
+
+ * A float (between 0 and 1): this fraction of the report is printed
+ (for example, use a limit of 0.4 to see the topmost 40% only).
+
+ You can combine several limits with repeated use of the option. For
+ example, ``-l __init__ -l 5`` will print only the topmost 5 lines of
+ information about class constructors.
+
+ -r
+ return the pstats.Stats object generated by the profiling. This
+ object has all the information about the profile in it, and you can
+ later use it for further analysis or in other functions.
+
+ -s <key>
+ sort profile by given key. You can provide more than one key
+ by using the option several times: '-s key1 -s key2 -s key3...'. The
+ default sorting key is 'time'.
+
+ The following is copied verbatim from the profile documentation
+ referenced below:
+
+ When more than one key is provided, additional keys are used as
+        secondary criteria when there is equality in all keys selected
+ before them.
+
+ Abbreviations can be used for any key names, as long as the
+ abbreviation is unambiguous. The following are the keys currently
+ defined:
+
+ ============ =====================
+ Valid Arg Meaning
+ ============ =====================
+ "calls" call count
+ "cumulative" cumulative time
+ "file" file name
+ "module" file name
+ "pcalls" primitive call count
+ "line" line number
+ "name" function name
+ "nfl" name/file/line
+ "stdname" standard name
+ "time" internal time
+ ============ =====================
+
+ Note that all sorts on statistics are in descending order (placing
+        most time consuming items first), whereas name, file, and line number
+ searches are in ascending order (i.e., alphabetical). The subtle
+ distinction between "nfl" and "stdname" is that the standard name is a
+ sort of the name as printed, which means that the embedded line
+ numbers get compared in an odd way. For example, lines 3, 20, and 40
+ would (if the file names were the same) appear in the string order
+ "20" "3" and "40". In contrast, "nfl" does a numeric compare of the
+ line numbers. In fact, sort_stats("nfl") is the same as
+ sort_stats("name", "file", "line").
+
+ -T <filename>
+ save profile results as shown on screen to a text
+ file. The profile is still shown on screen.
+
+ -D <filename>
+ save (via dump_stats) profile statistics to given
+ filename. This data is in a format understood by the pstats module, and
+ is generated by a call to the dump_stats() method of profile
+ objects. The profile is still shown on screen.
+
+ -q
+ suppress output to the pager. Best used with -T and/or -D above.
+
+ If you want to run complete programs under the profiler's control, use
+ ``%run -p [prof_opts] filename.py [args to program]`` where prof_opts
+ contains profiler specific options as described here.
+
+ You can read the complete documentation for the profile module with::
+
+ In [1]: import profile; profile.help()
+ """
+ opts, arg_str = self.parse_options(parameter_s, 'D:l:rs:T:q',
+ list_all=True, posix=False)
+ if cell is not None:
+ arg_str += '\n' + cell
+ arg_str = self.shell.input_splitter.transform_cell(arg_str)
+ return self._run_with_profiler(arg_str, opts, self.shell.user_ns)
+
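A hedged example of driving %prun programmatically via IPython's run_line_magic entry point (assumes an active IPython session; the profiled expression is an arbitrary placeholder):

    from IPython import get_ipython

    ip = get_ipython()
    # -q: skip the pager, -r: return the pstats.Stats object,
    # -l 5: keep only 5 entries, -s cumulative: sort by cumulative time.
    stats = ip.run_line_magic('prun', '-q -r -l 5 -s cumulative sum(x * x for x in range(10**5))')
    stats.sort_stats('time').print_stats(3)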
+ def _run_with_profiler(self, code, opts, namespace):
+ """
+ Run `code` with profiler. Used by ``%prun`` and ``%run -p``.
+
+ Parameters
+ ----------
+ code : str
+ Code to be executed.
+ opts : Struct
+ Options parsed by `self.parse_options`.
+ namespace : dict
+ A dictionary for Python namespace (e.g., `self.shell.user_ns`).
+
+ """
+
+ # Fill default values for unspecified options:
+ opts.merge(Struct(D=[''], l=[], s=['time'], T=['']))
+
+ prof = profile.Profile()
+ try:
+ prof = prof.runctx(code, namespace, namespace)
+ sys_exit = ''
+ except SystemExit:
+ sys_exit = """*** SystemExit exception caught in code being profiled."""
+
+ stats = pstats.Stats(prof).strip_dirs().sort_stats(*opts.s)
+
+ lims = opts.l
+ if lims:
+ lims = [] # rebuild lims with ints/floats/strings
+ for lim in opts.l:
+ try:
+ lims.append(int(lim))
+ except ValueError:
+ try:
+ lims.append(float(lim))
+ except ValueError:
+ lims.append(lim)
+
+ # Trap output.
+ stdout_trap = StringIO()
+ stats_stream = stats.stream
+ try:
+ stats.stream = stdout_trap
+ stats.print_stats(*lims)
+ finally:
+ stats.stream = stats_stream
+
+ output = stdout_trap.getvalue()
+ output = output.rstrip()
+
+ if 'q' not in opts:
+ page.page(output)
+ print(sys_exit, end=' ')
+
+ dump_file = opts.D[0]
+ text_file = opts.T[0]
+ if dump_file:
+ prof.dump_stats(dump_file)
+ print('\n*** Profile stats marshalled to file',\
+ repr(dump_file)+'.',sys_exit)
+ if text_file:
+ pfile = open(text_file,'w')
+ pfile.write(output)
+ pfile.close()
+ print('\n*** Profile printout saved to text file',\
+ repr(text_file)+'.',sys_exit)
+
+ if 'r' in opts:
+ return stats
+ else:
+ return None
+
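The -l limits above are coerced string by string (int first, then float, then a literal name-substring filter); a tiny hedged sketch with a hypothetical helper name:

    def coerce_limit(lim):
        for cast in (int, float):
            try:
                return cast(lim)
            except ValueError:
                pass
        return lim   # falls back to a function-name substring filter

    print([coerce_limit(s) for s in ('5', '0.4', '__init__')])   # [5, 0.4, '__init__']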
+ @line_magic
+ def pdb(self, parameter_s=''):
+ """Control the automatic calling of the pdb interactive debugger.
+
+ Call as '%pdb on', '%pdb 1', '%pdb off' or '%pdb 0'. If called without
+ argument it works as a toggle.
+
+ When an exception is triggered, IPython can optionally call the
+ interactive pdb debugger after the traceback printout. %pdb toggles
+ this feature on and off.
+
+ The initial state of this feature is set in your configuration
+ file (the option is ``InteractiveShell.pdb``).
+
+ If you want to just activate the debugger AFTER an exception has fired,
+        without having to type '%pdb on' and rerun your code, you can use
+ the %debug magic."""
+
+ par = parameter_s.strip().lower()
+
+ if par:
+ try:
+ new_pdb = {'off':0,'0':0,'on':1,'1':1}[par]
+ except KeyError:
+ print ('Incorrect argument. Use on/1, off/0, '
+ 'or nothing for a toggle.')
+ return
+ else:
+ # toggle
+ new_pdb = not self.shell.call_pdb
+
+ # set on the shell
+ self.shell.call_pdb = new_pdb
+ print('Automatic pdb calling has been turned',on_off(new_pdb))
+
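A hedged example of flipping the same flag %pdb controls, driven programmatically (assumes an active IPython session):

    from IPython import get_ipython

    ip = get_ipython()
    ip.run_line_magic('pdb', 'on')   # same as typing %pdb on
    print(ip.call_pdb)               # True: tracebacks will now drop into pdb
    ip.run_line_magic('pdb', '')     # no argument toggles it back off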
+ @skip_doctest
+ @magic_arguments.magic_arguments()
+ @magic_arguments.argument('--breakpoint', '-b', metavar='FILE:LINE',
+ help="""
+ Set break point at LINE in FILE.
+ """
+ )
+ @magic_arguments.argument('statement', nargs='*',
+ help="""
+ Code to run in debugger.
+ You can omit this in cell magic mode.
+ """
+ )
+ @line_cell_magic
+ def debug(self, line='', cell=None):
+ """Activate the interactive debugger.
+
+        This magic command supports two ways of activating the debugger.
+        One is to activate the debugger before executing code. This way, you
+        can set a breakpoint and step through the code from that point.
+        You can use this mode by giving statements to execute and optionally
+        a breakpoint.
+
+        The other is to activate the debugger in post-mortem mode. You can
+        activate this mode by simply running %debug without any argument.
+ If an exception has just occurred, this lets you inspect its stack
+ frames interactively. Note that this will always work only on the last
+ traceback that occurred, so you must call this quickly after an
+ exception that you wish to inspect has fired, because if another one
+ occurs, it clobbers the previous one.
+
+ If you want IPython to automatically do this on every exception, see
+ the %pdb magic for more details.
+ """
+ args = magic_arguments.parse_argstring(self.debug, line)
+
+ if not (args.breakpoint or args.statement or cell):
+ self._debug_post_mortem()
+ else:
+ code = "\n".join(args.statement)
+ if cell:
+ code += "\n" + cell
+ self._debug_exec(code, args.breakpoint)
+
+ def _debug_post_mortem(self):
+ self.shell.debugger(force=True)
+
+ def _debug_exec(self, code, breakpoint):
+ if breakpoint:
(filename, bp_line) = breakpoint.rsplit(':', 1)
- bp_line = int(bp_line)
- else:
- (filename, bp_line) = (None, None)
- self._run_with_debugger(code, self.shell.user_ns, filename, bp_line)
-
- @line_magic
- def tb(self, s):
- """Print the last traceback with the currently active exception mode.
-
- See %xmode for changing exception reporting modes."""
- self.shell.showtraceback()
-
- @skip_doctest
- @line_magic
- def run(self, parameter_s='', runner=None,
- file_finder=get_py_filename):
- """Run the named file inside IPython as a program.
-
- Usage::
-
- %run [-n -i -e -G]
- [( -t [-N<N>] | -d [-b<N>] | -p [profile options] )]
- ( -m mod | file ) [args]
-
- Parameters after the filename are passed as command-line arguments to
- the program (put in sys.argv). Then, control returns to IPython's
- prompt.
-
- This is similar to running at a system prompt ``python file args``,
- but with the advantage of giving you IPython's tracebacks, and of
- loading all variables into your interactive namespace for further use
- (unless -p is used, see below).
-
- The file is executed in a namespace initially consisting only of
- ``__name__=='__main__'`` and sys.argv constructed as indicated. It thus
- sees its environment as if it were being run as a stand-alone program
- (except for sharing global objects such as previously imported
- modules). But after execution, the IPython interactive namespace gets
- updated with all variables defined in the program (except for __name__
- and sys.argv). This allows for very convenient loading of code for
- interactive work, while giving each program a 'clean sheet' to run in.
-
- Arguments are expanded using shell-like glob match. Patterns
- '*', '?', '[seq]' and '[!seq]' can be used. Additionally,
- tilde '~' will be expanded into user's home directory. Unlike
- real shells, quotation does not suppress expansions. Use
- *two* back slashes (e.g. ``\\\\*``) to suppress expansions.
-        To completely disable these expansions, you can use the -G flag.
-
- Options:
-
- -n
- __name__ is NOT set to '__main__', but to the running file's name
- without extension (as python does under import). This allows running
- scripts and reloading the definitions in them without calling code
- protected by an ``if __name__ == "__main__"`` clause.
-
- -i
- run the file in IPython's namespace instead of an empty one. This
- is useful if you are experimenting with code written in a text editor
- which depends on variables defined interactively.
-
- -e
- ignore sys.exit() calls or SystemExit exceptions in the script
- being run. This is particularly useful if IPython is being used to
- run unittests, which always exit with a sys.exit() call. In such
- cases you are interested in the output of the test results, not in
- seeing a traceback of the unittest module.
-
- -t
- print timing information at the end of the run. IPython will give
- you an estimated CPU time consumption for your script, which under
- Unix uses the resource module to avoid the wraparound problems of
- time.clock(). Under Unix, an estimate of time spent on system tasks
- is also given (for Windows platforms this is reported as 0.0).
-
- If -t is given, an additional ``-N<N>`` option can be given, where <N>
- must be an integer indicating how many times you want the script to
- run. The final timing report will include total and per run results.
-
- For example (testing the script uniq_stable.py)::
-
- In [1]: run -t uniq_stable
-
- IPython CPU timings (estimated):
- User : 0.19597 s.
- System: 0.0 s.
-
- In [2]: run -t -N5 uniq_stable
-
- IPython CPU timings (estimated):
- Total runs performed: 5
- Times : Total Per run
- User : 0.910862 s, 0.1821724 s.
- System: 0.0 s, 0.0 s.
-
- -d
- run your program under the control of pdb, the Python debugger.
- This allows you to execute your program step by step, watch variables,
- etc. Internally, what IPython does is similar to calling::
-
- pdb.run('execfile("YOURFILENAME")')
-
- with a breakpoint set on line 1 of your file. You can change the line
- number for this automatic breakpoint to be <N> by using the -bN option
- (where N must be an integer). For example::
-
- %run -d -b40 myscript
-
- will set the first breakpoint at line 40 in myscript.py. Note that
- the first breakpoint must be set on a line which actually does
- something (not a comment or docstring) for it to stop execution.
-
- Or you can specify a breakpoint in a different file::
-
- %run -d -b myotherfile.py:20 myscript
-
- When the pdb debugger starts, you will see a (Pdb) prompt. You must
- first enter 'c' (without quotes) to start execution up to the first
- breakpoint.
-
- Entering 'help' gives information about the use of the debugger. You
- can easily see pdb's full documentation with "import pdb;pdb.help()"
- at a prompt.
-
- -p
- run program under the control of the Python profiler module (which
- prints a detailed report of execution times, function calls, etc).
-
- You can pass other options after -p which affect the behavior of the
- profiler itself. See the docs for %prun for details.
-
- In this mode, the program's variables do NOT propagate back to the
- IPython interactive namespace (because they remain in the namespace
- where the profiler executes them).
-
- Internally this triggers a call to %prun, see its documentation for
- details on the options available specifically for profiling.
-
- There is one special usage for which the text above doesn't apply:
-          if the filename ends with .ipy[nb], the file is run as an IPython script,
-          just as if the commands were typed at the IPython prompt.
-
- -m
- specify module name to load instead of script path. Similar to
- the -m option for the python interpreter. Use this option last if you
-          want to combine with other %run options. Unlike the python interpreter,
-          only source modules are allowed; no .pyc or .pyo files.
- For example::
-
- %run -m example
-
- will run the example module.
-
- -G
- disable shell-like glob expansion of arguments.
-
- """
-
- # get arguments and set sys.argv for program to be run.
- opts, arg_lst = self.parse_options(parameter_s,
- 'nidtN:b:pD:l:rs:T:em:G',
- mode='list', list_all=1)
- if "m" in opts:
- modulename = opts["m"][0]
- modpath = find_mod(modulename)
- if modpath is None:
- warn('%r is not a valid modulename on sys.path'%modulename)
- return
- arg_lst = [modpath] + arg_lst
- try:
- filename = file_finder(arg_lst[0])
- except IndexError:
- warn('you must provide at least a filename.')
- print('\n%run:\n', oinspect.getdoc(self.run))
- return
- except IOError as e:
- try:
- msg = str(e)
- except UnicodeError:
- msg = e.message
- error(msg)
- return
-
- if filename.lower().endswith(('.ipy', '.ipynb')):
- with preserve_keys(self.shell.user_ns, '__file__'):
- self.shell.user_ns['__file__'] = filename
- self.shell.safe_execfile_ipy(filename)
- return
-
- # Control the response to exit() calls made by the script being run
- exit_ignore = 'e' in opts
-
- # Make sure that the running script gets a proper sys.argv as if it
- # were run from a system shell.
- save_argv = sys.argv # save it for later restoring
-
- if 'G' in opts:
- args = arg_lst[1:]
- else:
- # tilde and glob expansion
- args = shellglob(map(os.path.expanduser, arg_lst[1:]))
-
- sys.argv = [filename] + args # put in the proper filename
- # protect sys.argv from potential unicode strings on Python 2:
- if not py3compat.PY3:
- sys.argv = [ py3compat.cast_bytes(a) for a in sys.argv ]
-
- if 'i' in opts:
- # Run in user's interactive namespace
- prog_ns = self.shell.user_ns
- __name__save = self.shell.user_ns['__name__']
- prog_ns['__name__'] = '__main__'
- main_mod = self.shell.user_module
-
- # Since '%run foo' emulates 'python foo.py' at the cmd line, we must
- # set the __file__ global in the script's namespace
- # TK: Is this necessary in interactive mode?
- prog_ns['__file__'] = filename
- else:
- # Run in a fresh, empty namespace
- if 'n' in opts:
- name = os.path.splitext(os.path.basename(filename))[0]
- else:
- name = '__main__'
-
- # The shell MUST hold a reference to prog_ns so after %run
- # exits, the python deletion mechanism doesn't zero it out
- # (leaving dangling references). See interactiveshell for details
- main_mod = self.shell.new_main_mod(filename, name)
- prog_ns = main_mod.__dict__
-
- # pickle fix. See interactiveshell for an explanation. But we need to
- # make sure that, if we overwrite __main__, we replace it at the end
- main_mod_name = prog_ns['__name__']
-
- if main_mod_name == '__main__':
- restore_main = sys.modules['__main__']
- else:
- restore_main = False
-
- # This needs to be undone at the end to prevent holding references to
- # every single object ever created.
- sys.modules[main_mod_name] = main_mod
-
- if 'p' in opts or 'd' in opts:
- if 'm' in opts:
- code = 'run_module(modulename, prog_ns)'
- code_ns = {
- 'run_module': self.shell.safe_run_module,
- 'prog_ns': prog_ns,
- 'modulename': modulename,
- }
- else:
- if 'd' in opts:
- # allow exceptions to raise in debug mode
- code = 'execfile(filename, prog_ns, raise_exceptions=True)'
- else:
- code = 'execfile(filename, prog_ns)'
- code_ns = {
- 'execfile': self.shell.safe_execfile,
- 'prog_ns': prog_ns,
- 'filename': get_py_filename(filename),
- }
-
- try:
- stats = None
+ bp_line = int(bp_line)
+ else:
+ (filename, bp_line) = (None, None)
+ self._run_with_debugger(code, self.shell.user_ns, filename, bp_line)
+
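Hedged examples of the two %debug modes described above (assumes an active IPython session; the script path and statement are placeholders, and both calls open an interactive pdb prompt, so they are shown commented out):

    from IPython import get_ipython

    ip = get_ipython()
    # Post-mortem on the most recent traceback:
    # ip.run_line_magic('debug', '')
    # Pre-execution, stopping at line 12 of a file before running a statement:
    # ip.run_line_magic('debug', '--breakpoint myscript.py:12 main()')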
+ @line_magic
+ def tb(self, s):
+ """Print the last traceback with the currently active exception mode.
+
+ See %xmode for changing exception reporting modes."""
+ self.shell.showtraceback()
+
+ @skip_doctest
+ @line_magic
+ def run(self, parameter_s='', runner=None,
+ file_finder=get_py_filename):
+ """Run the named file inside IPython as a program.
+
+ Usage::
+
+ %run [-n -i -e -G]
+ [( -t [-N<N>] | -d [-b<N>] | -p [profile options] )]
+ ( -m mod | file ) [args]
+
+ Parameters after the filename are passed as command-line arguments to
+ the program (put in sys.argv). Then, control returns to IPython's
+ prompt.
+
+ This is similar to running at a system prompt ``python file args``,
+ but with the advantage of giving you IPython's tracebacks, and of
+ loading all variables into your interactive namespace for further use
+ (unless -p is used, see below).
+
+ The file is executed in a namespace initially consisting only of
+ ``__name__=='__main__'`` and sys.argv constructed as indicated. It thus
+ sees its environment as if it were being run as a stand-alone program
+ (except for sharing global objects such as previously imported
+ modules). But after execution, the IPython interactive namespace gets
+ updated with all variables defined in the program (except for __name__
+ and sys.argv). This allows for very convenient loading of code for
+ interactive work, while giving each program a 'clean sheet' to run in.
+
+ Arguments are expanded using shell-like glob match. Patterns
+ '*', '?', '[seq]' and '[!seq]' can be used. Additionally,
+ tilde '~' will be expanded into user's home directory. Unlike
+ real shells, quotation does not suppress expansions. Use
+ *two* back slashes (e.g. ``\\\\*``) to suppress expansions.
+        To completely disable these expansions, you can use the -G flag.
+
+ Options:
+
+ -n
+ __name__ is NOT set to '__main__', but to the running file's name
+ without extension (as python does under import). This allows running
+ scripts and reloading the definitions in them without calling code
+ protected by an ``if __name__ == "__main__"`` clause.
+
+ -i
+ run the file in IPython's namespace instead of an empty one. This
+ is useful if you are experimenting with code written in a text editor
+ which depends on variables defined interactively.
+
+ -e
+ ignore sys.exit() calls or SystemExit exceptions in the script
+ being run. This is particularly useful if IPython is being used to
+ run unittests, which always exit with a sys.exit() call. In such
+ cases you are interested in the output of the test results, not in
+ seeing a traceback of the unittest module.
+
+ -t
+ print timing information at the end of the run. IPython will give
+ you an estimated CPU time consumption for your script, which under
+ Unix uses the resource module to avoid the wraparound problems of
+ time.clock(). Under Unix, an estimate of time spent on system tasks
+ is also given (for Windows platforms this is reported as 0.0).
+
+ If -t is given, an additional ``-N<N>`` option can be given, where <N>
+ must be an integer indicating how many times you want the script to
+ run. The final timing report will include total and per run results.
+
+ For example (testing the script uniq_stable.py)::
+
+ In [1]: run -t uniq_stable
+
+ IPython CPU timings (estimated):
+ User : 0.19597 s.
+ System: 0.0 s.
+
+ In [2]: run -t -N5 uniq_stable
+
+ IPython CPU timings (estimated):
+ Total runs performed: 5
+ Times : Total Per run
+ User : 0.910862 s, 0.1821724 s.
+ System: 0.0 s, 0.0 s.
+
+ -d
+ run your program under the control of pdb, the Python debugger.
+ This allows you to execute your program step by step, watch variables,
+ etc. Internally, what IPython does is similar to calling::
+
+ pdb.run('execfile("YOURFILENAME")')
+
+ with a breakpoint set on line 1 of your file. You can change the line
+ number for this automatic breakpoint to be <N> by using the -bN option
+ (where N must be an integer). For example::
+
+ %run -d -b40 myscript
+
+ will set the first breakpoint at line 40 in myscript.py. Note that
+ the first breakpoint must be set on a line which actually does
+ something (not a comment or docstring) for it to stop execution.
+
+ Or you can specify a breakpoint in a different file::
+
+ %run -d -b myotherfile.py:20 myscript
+
+ When the pdb debugger starts, you will see a (Pdb) prompt. You must
+ first enter 'c' (without quotes) to start execution up to the first
+ breakpoint.
+
+ Entering 'help' gives information about the use of the debugger. You
+ can easily see pdb's full documentation with "import pdb;pdb.help()"
+ at a prompt.
+
+ -p
+ run program under the control of the Python profiler module (which
+ prints a detailed report of execution times, function calls, etc).
+
+ You can pass other options after -p which affect the behavior of the
+ profiler itself. See the docs for %prun for details.
+
+ In this mode, the program's variables do NOT propagate back to the
+ IPython interactive namespace (because they remain in the namespace
+ where the profiler executes them).
+
+ Internally this triggers a call to %prun, see its documentation for
+ details on the options available specifically for profiling.
+
+ There is one special usage for which the text above doesn't apply:
+          if the filename ends with .ipy[nb], the file is run as an IPython script,
+          just as if the commands were typed at the IPython prompt.
+
+ -m
+ specify module name to load instead of script path. Similar to
+ the -m option for the python interpreter. Use this option last if you
+          want to combine with other %run options. Unlike the python interpreter,
+          only source modules are allowed; no .pyc or .pyo files.
+ For example::
+
+ %run -m example
+
+ will run the example module.
+
+ -G
+ disable shell-like glob expansion of arguments.
+
+ """
+
+ # get arguments and set sys.argv for program to be run.
+ opts, arg_lst = self.parse_options(parameter_s,
+ 'nidtN:b:pD:l:rs:T:em:G',
+ mode='list', list_all=1)
+ if "m" in opts:
+ modulename = opts["m"][0]
+ modpath = find_mod(modulename)
+ if modpath is None:
+ warn('%r is not a valid modulename on sys.path'%modulename)
+ return
+ arg_lst = [modpath] + arg_lst
+ try:
+ filename = file_finder(arg_lst[0])
+ except IndexError:
+ warn('you must provide at least a filename.')
+ print('\n%run:\n', oinspect.getdoc(self.run))
+ return
+ except IOError as e:
+ try:
+ msg = str(e)
+ except UnicodeError:
+ msg = e.message
+ error(msg)
+ return
+
+ if filename.lower().endswith(('.ipy', '.ipynb')):
+ with preserve_keys(self.shell.user_ns, '__file__'):
+ self.shell.user_ns['__file__'] = filename
+ self.shell.safe_execfile_ipy(filename)
+ return
+
+ # Control the response to exit() calls made by the script being run
+ exit_ignore = 'e' in opts
+
+ # Make sure that the running script gets a proper sys.argv as if it
+ # were run from a system shell.
+ save_argv = sys.argv # save it for later restoring
+
+ if 'G' in opts:
+ args = arg_lst[1:]
+ else:
+ # tilde and glob expansion
+ args = shellglob(map(os.path.expanduser, arg_lst[1:]))
+
+ sys.argv = [filename] + args # put in the proper filename
+ # protect sys.argv from potential unicode strings on Python 2:
+ if not py3compat.PY3:
+ sys.argv = [ py3compat.cast_bytes(a) for a in sys.argv ]
+
+ if 'i' in opts:
+ # Run in user's interactive namespace
+ prog_ns = self.shell.user_ns
+ __name__save = self.shell.user_ns['__name__']
+ prog_ns['__name__'] = '__main__'
+ main_mod = self.shell.user_module
+
+ # Since '%run foo' emulates 'python foo.py' at the cmd line, we must
+ # set the __file__ global in the script's namespace
+ # TK: Is this necessary in interactive mode?
+ prog_ns['__file__'] = filename
+ else:
+ # Run in a fresh, empty namespace
+ if 'n' in opts:
+ name = os.path.splitext(os.path.basename(filename))[0]
+ else:
+ name = '__main__'
+
+ # The shell MUST hold a reference to prog_ns so after %run
+ # exits, the python deletion mechanism doesn't zero it out
+ # (leaving dangling references). See interactiveshell for details
+ main_mod = self.shell.new_main_mod(filename, name)
+ prog_ns = main_mod.__dict__
+
+ # pickle fix. See interactiveshell for an explanation. But we need to
+ # make sure that, if we overwrite __main__, we replace it at the end
+ main_mod_name = prog_ns['__name__']
+
+ if main_mod_name == '__main__':
+ restore_main = sys.modules['__main__']
+ else:
+ restore_main = False
+
+ # This needs to be undone at the end to prevent holding references to
+ # every single object ever created.
+ sys.modules[main_mod_name] = main_mod
+
+ if 'p' in opts or 'd' in opts:
+ if 'm' in opts:
+ code = 'run_module(modulename, prog_ns)'
+ code_ns = {
+ 'run_module': self.shell.safe_run_module,
+ 'prog_ns': prog_ns,
+ 'modulename': modulename,
+ }
+ else:
+ if 'd' in opts:
+ # allow exceptions to raise in debug mode
+ code = 'execfile(filename, prog_ns, raise_exceptions=True)'
+ else:
+ code = 'execfile(filename, prog_ns)'
+ code_ns = {
+ 'execfile': self.shell.safe_execfile,
+ 'prog_ns': prog_ns,
+ 'filename': get_py_filename(filename),
+ }
+
+ try:
+ stats = None
if 'p' in opts:
stats = self._run_with_profiler(code, opts, code_ns)
else:
@@ -713,20 +713,20 @@ python-profiler package from non-free.""")
opts.get('b', ['1'])[0], filename)
self._run_with_debugger(
code, code_ns, filename, bp_line, bp_file)
- else:
+ else:
if 'm' in opts:
def run():
self.shell.safe_run_module(modulename, prog_ns)
- else:
+ else:
if runner is None:
runner = self.default_runner
if runner is None:
runner = self.shell.safe_execfile
-
+
def run():
runner(filename, prog_ns, prog_ns,
exit_ignore=exit_ignore)
-
+
if 't' in opts:
# timed execution
try:
@@ -740,67 +740,67 @@ python-profiler package from non-free.""")
else:
# regular execution
run()
-
+
if 'i' in opts:
self.shell.user_ns['__name__'] = __name__save
else:
# update IPython interactive namespace
-
+
# Some forms of read errors on the file may mean the
# __name__ key was never set; using pop we don't have to
# worry about a possible KeyError.
prog_ns.pop('__name__', None)
-
+
with preserve_keys(self.shell.user_ns, '__file__'):
self.shell.user_ns.update(prog_ns)
- finally:
- # It's a bit of a mystery why, but __builtins__ can change from
- # being a module to becoming a dict missing some key data after
- # %run. As best I can see, this is NOT something IPython is doing
- # at all, and similar problems have been reported before:
- # http://coding.derkeiler.com/Archive/Python/comp.lang.python/2004-10/0188.html
- # Since this seems to be done by the interpreter itself, the best
- # we can do is to at least restore __builtins__ for the user on
- # exit.
- self.shell.user_ns['__builtins__'] = builtin_mod
-
- # Ensure key global structures are restored
- sys.argv = save_argv
- if restore_main:
- sys.modules['__main__'] = restore_main
- else:
- # Remove from sys.modules the reference to main_mod we'd
- # added. Otherwise it will trap references to objects
- # contained therein.
- del sys.modules[main_mod_name]
-
- return stats
-
- def _run_with_debugger(self, code, code_ns, filename=None,
- bp_line=None, bp_file=None):
- """
- Run `code` in debugger with a break point.
-
- Parameters
- ----------
- code : str
- Code to execute.
- code_ns : dict
- A namespace in which `code` is executed.
- filename : str
-            `code` is run as if it were in `filename`.
- bp_line : int, optional
- Line number of the break point.
- bp_file : str, optional
- Path to the file in which break point is specified.
- `filename` is used if not given.
-
- Raises
- ------
- UsageError
- If the break point given by `bp_line` is not valid.
-
- """
+ finally:
+ # It's a bit of a mystery why, but __builtins__ can change from
+ # being a module to becoming a dict missing some key data after
+ # %run. As best I can see, this is NOT something IPython is doing
+ # at all, and similar problems have been reported before:
+ # http://coding.derkeiler.com/Archive/Python/comp.lang.python/2004-10/0188.html
+ # Since this seems to be done by the interpreter itself, the best
+ # we can do is to at least restore __builtins__ for the user on
+ # exit.
+ self.shell.user_ns['__builtins__'] = builtin_mod
+
+ # Ensure key global structures are restored
+ sys.argv = save_argv
+ if restore_main:
+ sys.modules['__main__'] = restore_main
+ else:
+ # Remove from sys.modules the reference to main_mod we'd
+ # added. Otherwise it will trap references to objects
+ # contained therein.
+ del sys.modules[main_mod_name]
+
+ return stats
+
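Hedged %run examples in programmatic form (assumes an active IPython session; 'myscript.py' is a placeholder path):

    from IPython import get_ipython

    ip = get_ipython()
    ip.run_line_magic('run', 'myscript.py arg1 arg2')   # like: python myscript.py arg1 arg2
    ip.run_line_magic('run', '-t -N3 myscript.py')      # report CPU timings over 3 runs
    ip.run_line_magic('run', '-i myscript.py')          # execute in the interactive namespace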
+ def _run_with_debugger(self, code, code_ns, filename=None,
+ bp_line=None, bp_file=None):
+ """
+ Run `code` in debugger with a break point.
+
+ Parameters
+ ----------
+ code : str
+ Code to execute.
+ code_ns : dict
+ A namespace in which `code` is executed.
+ filename : str
+            `code` is run as if it were in `filename`.
+ bp_line : int, optional
+ Line number of the break point.
+ bp_file : str, optional
+ Path to the file in which break point is specified.
+ `filename` is used if not given.
+
+ Raises
+ ------
+ UsageError
+ If the break point given by `bp_line` is not valid.
+
+ """
deb = self.shell.InteractiveTB.pdb
if not deb:
self.shell.InteractiveTB.pdb = self.shell.InteractiveTB.debugger_cls()
@@ -811,208 +811,208 @@ python-profiler package from non-free.""")
if hasattr(deb, 'curframe'):
del deb.curframe
- # reset Breakpoint state, which is moronically kept
- # in a class
- bdb.Breakpoint.next = 1
- bdb.Breakpoint.bplist = {}
- bdb.Breakpoint.bpbynumber = [None]
+ # reset Breakpoint state, which is moronically kept
+ # in a class
+ bdb.Breakpoint.next = 1
+ bdb.Breakpoint.bplist = {}
+ bdb.Breakpoint.bpbynumber = [None]
deb.clear_all_breaks()
- if bp_line is not None:
- # Set an initial breakpoint to stop execution
- maxtries = 10
- bp_file = bp_file or filename
- checkline = deb.checkline(bp_file, bp_line)
- if not checkline:
- for bp in range(bp_line + 1, bp_line + maxtries + 1):
- if deb.checkline(bp_file, bp):
- break
- else:
- msg = ("\nI failed to find a valid line to set "
- "a breakpoint\n"
- "after trying up to line: %s.\n"
- "Please set a valid breakpoint manually "
- "with the -b option." % bp)
- raise UsageError(msg)
- # if we find a good linenumber, set the breakpoint
- deb.do_break('%s:%s' % (bp_file, bp_line))
-
- if filename:
- # Mimic Pdb._runscript(...)
- deb._wait_for_mainpyfile = True
- deb.mainpyfile = deb.canonic(filename)
-
- # Start file run
- print("NOTE: Enter 'c' at the %s prompt to continue execution." % deb.prompt)
- try:
- if filename:
- # save filename so it can be used by methods on the deb object
- deb._exec_filename = filename
- while True:
- try:
- deb.run(code, code_ns)
- except Restart:
- print("Restarting")
- if filename:
- deb._wait_for_mainpyfile = True
- deb.mainpyfile = deb.canonic(filename)
- continue
- else:
- break
-
-
- except:
- etype, value, tb = sys.exc_info()
- # Skip three frames in the traceback: the %run one,
- # one inside bdb.py, and the command-line typed by the
- # user (run by exec in pdb itself).
- self.shell.InteractiveTB(etype, value, tb, tb_offset=3)
-
- @staticmethod
- def _run_with_timing(run, nruns):
- """
- Run function `run` and print timing information.
-
- Parameters
- ----------
- run : callable
- Any callable object which takes no argument.
- nruns : int
- Number of times to execute `run`.
-
- """
- twall0 = time.time()
- if nruns == 1:
- t0 = clock2()
- run()
- t1 = clock2()
- t_usr = t1[0] - t0[0]
- t_sys = t1[1] - t0[1]
- print("\nIPython CPU timings (estimated):")
- print(" User : %10.2f s." % t_usr)
- print(" System : %10.2f s." % t_sys)
- else:
- runs = range(nruns)
- t0 = clock2()
- for nr in runs:
- run()
- t1 = clock2()
- t_usr = t1[0] - t0[0]
- t_sys = t1[1] - t0[1]
- print("\nIPython CPU timings (estimated):")
- print("Total runs performed:", nruns)
- print(" Times : %10s %10s" % ('Total', 'Per run'))
- print(" User : %10.2f s, %10.2f s." % (t_usr, t_usr / nruns))
- print(" System : %10.2f s, %10.2f s." % (t_sys, t_sys / nruns))
- twall1 = time.time()
- print("Wall time: %10.2f s." % (twall1 - twall0))
-
- @skip_doctest
- @line_cell_magic
- def timeit(self, line='', cell=None):
- """Time execution of a Python statement or expression
-
- Usage, in line mode:
- %timeit [-n<N> -r<R> [-t|-c] -q -p<P> -o] statement
- or in cell mode:
- %%timeit [-n<N> -r<R> [-t|-c] -q -p<P> -o] setup_code
- code
- code...
-
- Time execution of a Python statement or expression using the timeit
- module. This function can be used both as a line and cell magic:
-
- - In line mode you can time a single-line statement (though multiple
-          ones can be chained using semicolons).
-
- - In cell mode, the statement in the first line is used as setup code
- (executed but not timed) and the body of the cell is timed. The cell
- body has access to any variables created in the setup code.
-
- Options:
- -n<N>: execute the given statement <N> times in a loop. If this value
- is not given, a fitting value is chosen.
-
- -r<R>: repeat the loop iteration <R> times and take the best result.
- Default: 3
-
- -t: use time.time to measure the time, which is the default on Unix.
- This function measures wall time.
-
- -c: use time.clock to measure the time, which is the default on
- Windows and measures wall time. On Unix, resource.getrusage is used
- instead and returns the CPU user time.
-
- -p<P>: use a precision of <P> digits to display the timing result.
- Default: 3
-
- -q: Quiet, do not print result.
-
- -o: return a TimeitResult that can be stored in a variable to inspect
-        the result in more detail.
-
-
- Examples
- --------
- ::
-
- In [1]: %timeit pass
- 10000000 loops, best of 3: 53.3 ns per loop
-
- In [2]: u = None
-
- In [3]: %timeit u is None
- 10000000 loops, best of 3: 184 ns per loop
-
- In [4]: %timeit -r 4 u == None
- 1000000 loops, best of 4: 242 ns per loop
-
- In [5]: import time
-
- In [6]: %timeit -n1 time.sleep(2)
- 1 loop, best of 3: 2 s per loop
-
-
- The times reported by %timeit will be slightly higher than those
- reported by the timeit.py script when variables are accessed. This is
- due to the fact that %timeit executes the statement in the namespace
- of the shell, compared with timeit.py, which uses a single setup
-        statement to import functions or create variables. Generally, the bias
- does not matter as long as results from timeit.py are not mixed with
- those from %timeit."""
-
- opts, stmt = self.parse_options(line,'n:r:tcp:qo',
- posix=False, strict=False)
- if stmt == "" and cell is None:
- return
-
- timefunc = timeit.default_timer
- number = int(getattr(opts, "n", 0))
- repeat = int(getattr(opts, "r", timeit.default_repeat))
- precision = int(getattr(opts, "p", 3))
- quiet = 'q' in opts
- return_result = 'o' in opts
- if hasattr(opts, "t"):
- timefunc = time.time
- if hasattr(opts, "c"):
- timefunc = clock
-
- timer = Timer(timer=timefunc)
- # this code has tight coupling to the inner workings of timeit.Timer,
- # but is there a better way to achieve that the code stmt has access
- # to the shell namespace?
- transform = self.shell.input_splitter.transform_cell
-
- if cell is None:
- # called as line magic
- ast_setup = self.shell.compile.ast_parse("pass")
- ast_stmt = self.shell.compile.ast_parse(transform(stmt))
- else:
- ast_setup = self.shell.compile.ast_parse(transform(stmt))
- ast_stmt = self.shell.compile.ast_parse(transform(cell))
-
- ast_setup = self.shell.transform_ast(ast_setup)
- ast_stmt = self.shell.transform_ast(ast_stmt)
-
+ if bp_line is not None:
+ # Set an initial breakpoint to stop execution
+ maxtries = 10
+ bp_file = bp_file or filename
+ checkline = deb.checkline(bp_file, bp_line)
+ if not checkline:
+ for bp in range(bp_line + 1, bp_line + maxtries + 1):
+ if deb.checkline(bp_file, bp):
+ break
+ else:
+ msg = ("\nI failed to find a valid line to set "
+ "a breakpoint\n"
+ "after trying up to line: %s.\n"
+ "Please set a valid breakpoint manually "
+ "with the -b option." % bp)
+ raise UsageError(msg)
+ # if we find a good linenumber, set the breakpoint
+ deb.do_break('%s:%s' % (bp_file, bp_line))
+
+ if filename:
+ # Mimic Pdb._runscript(...)
+ deb._wait_for_mainpyfile = True
+ deb.mainpyfile = deb.canonic(filename)
+
+ # Start file run
+ print("NOTE: Enter 'c' at the %s prompt to continue execution." % deb.prompt)
+ try:
+ if filename:
+ # save filename so it can be used by methods on the deb object
+ deb._exec_filename = filename
+ while True:
+ try:
+ deb.run(code, code_ns)
+ except Restart:
+ print("Restarting")
+ if filename:
+ deb._wait_for_mainpyfile = True
+ deb.mainpyfile = deb.canonic(filename)
+ continue
+ else:
+ break
+
+
+ except:
+ etype, value, tb = sys.exc_info()
+ # Skip three frames in the traceback: the %run one,
+ # one inside bdb.py, and the command-line typed by the
+ # user (run by exec in pdb itself).
+ self.shell.InteractiveTB(etype, value, tb, tb_offset=3)
+
+ @staticmethod
+ def _run_with_timing(run, nruns):
+ """
+ Run function `run` and print timing information.
+
+ Parameters
+ ----------
+ run : callable
+ Any callable object which takes no argument.
+ nruns : int
+ Number of times to execute `run`.
+
+ """
+ twall0 = time.time()
+ if nruns == 1:
+ t0 = clock2()
+ run()
+ t1 = clock2()
+ t_usr = t1[0] - t0[0]
+ t_sys = t1[1] - t0[1]
+ print("\nIPython CPU timings (estimated):")
+ print(" User : %10.2f s." % t_usr)
+ print(" System : %10.2f s." % t_sys)
+ else:
+ runs = range(nruns)
+ t0 = clock2()
+ for nr in runs:
+ run()
+ t1 = clock2()
+ t_usr = t1[0] - t0[0]
+ t_sys = t1[1] - t0[1]
+ print("\nIPython CPU timings (estimated):")
+ print("Total runs performed:", nruns)
+ print(" Times : %10s %10s" % ('Total', 'Per run'))
+ print(" User : %10.2f s, %10.2f s." % (t_usr, t_usr / nruns))
+ print(" System : %10.2f s, %10.2f s." % (t_sys, t_sys / nruns))
+ twall1 = time.time()
+ print("Wall time: %10.2f s." % (twall1 - twall0))
+
+ @skip_doctest
+ @line_cell_magic
+ def timeit(self, line='', cell=None):
+ """Time execution of a Python statement or expression
+
+ Usage, in line mode:
+ %timeit [-n<N> -r<R> [-t|-c] -q -p<P> -o] statement
+ or in cell mode:
+ %%timeit [-n<N> -r<R> [-t|-c] -q -p<P> -o] setup_code
+ code
+ code...
+
+ Time execution of a Python statement or expression using the timeit
+ module. This function can be used both as a line and cell magic:
+
+ - In line mode you can time a single-line statement (though multiple
+          ones can be chained using semicolons).
+
+ - In cell mode, the statement in the first line is used as setup code
+ (executed but not timed) and the body of the cell is timed. The cell
+ body has access to any variables created in the setup code.
+
+ Options:
+ -n<N>: execute the given statement <N> times in a loop. If this value
+ is not given, a fitting value is chosen.
+
+ -r<R>: repeat the loop iteration <R> times and take the best result.
+ Default: 3
+
+ -t: use time.time to measure the time, which is the default on Unix.
+ This function measures wall time.
+
+ -c: use time.clock to measure the time, which is the default on
+ Windows and measures wall time. On Unix, resource.getrusage is used
+ instead and returns the CPU user time.
+
+ -p<P>: use a precision of <P> digits to display the timing result.
+ Default: 3
+
+ -q: Quiet, do not print result.
+
+ -o: return a TimeitResult that can be stored in a variable to inspect
+        the result in more detail.
+
+
+ Examples
+ --------
+ ::
+
+ In [1]: %timeit pass
+ 10000000 loops, best of 3: 53.3 ns per loop
+
+ In [2]: u = None
+
+ In [3]: %timeit u is None
+ 10000000 loops, best of 3: 184 ns per loop
+
+ In [4]: %timeit -r 4 u == None
+ 1000000 loops, best of 4: 242 ns per loop
+
+ In [5]: import time
+
+ In [6]: %timeit -n1 time.sleep(2)
+ 1 loop, best of 3: 2 s per loop
+
+
+ The times reported by %timeit will be slightly higher than those
+ reported by the timeit.py script when variables are accessed. This is
+ due to the fact that %timeit executes the statement in the namespace
+ of the shell, compared with timeit.py, which uses a single setup
+        statement to import functions or create variables. Generally, the bias
+ does not matter as long as results from timeit.py are not mixed with
+ those from %timeit."""
+
+ opts, stmt = self.parse_options(line,'n:r:tcp:qo',
+ posix=False, strict=False)
+ if stmt == "" and cell is None:
+ return
+
+ timefunc = timeit.default_timer
+ number = int(getattr(opts, "n", 0))
+ repeat = int(getattr(opts, "r", timeit.default_repeat))
+ precision = int(getattr(opts, "p", 3))
+ quiet = 'q' in opts
+ return_result = 'o' in opts
+ if hasattr(opts, "t"):
+ timefunc = time.time
+ if hasattr(opts, "c"):
+ timefunc = clock
+
+ timer = Timer(timer=timefunc)
+ # this code has tight coupling to the inner workings of timeit.Timer,
+ # but is there a better way to achieve that the code stmt has access
+ # to the shell namespace?
+ transform = self.shell.input_splitter.transform_cell
+
+ if cell is None:
+ # called as line magic
+ ast_setup = self.shell.compile.ast_parse("pass")
+ ast_stmt = self.shell.compile.ast_parse(transform(stmt))
+ else:
+ ast_setup = self.shell.compile.ast_parse(transform(stmt))
+ ast_stmt = self.shell.compile.ast_parse(transform(cell))
+
+ ast_setup = self.shell.transform_ast(ast_setup)
+ ast_stmt = self.shell.transform_ast(ast_stmt)
+
# Check that these compile to valid Python code *outside* the timer func
# Invalid code may become valid when put inside the function & loop,
# which messes up error messages.
@@ -1020,359 +1020,359 @@ python-profiler package from non-free.""")
self.shell.compile(ast_setup, "<magic-timeit-setup>", "exec")
self.shell.compile(ast_stmt, "<magic-timeit-stmt>", "exec")
- # This codestring is taken from timeit.template - we fill it in as an
- # AST, so that we can apply our AST transformations to the user code
- # without affecting the timing code.
- timeit_ast_template = ast.parse('def inner(_it, _timer):\n'
- ' setup\n'
- ' _t0 = _timer()\n'
- ' for _i in _it:\n'
- ' stmt\n'
- ' _t1 = _timer()\n'
- ' return _t1 - _t0\n')
-
- timeit_ast = TimeitTemplateFiller(ast_setup, ast_stmt).visit(timeit_ast_template)
- timeit_ast = ast.fix_missing_locations(timeit_ast)
-
- # Track compilation time so it can be reported if too long
- # Minimum time above which compilation time will be reported
- tc_min = 0.1
-
- t0 = clock()
- code = self.shell.compile(timeit_ast, "<magic-timeit>", "exec")
- tc = clock()-t0
-
- ns = {}
- exec(code, self.shell.user_ns, ns)
- timer.inner = ns["inner"]
-
- # This is used to check if there is a huge difference between the
- # best and worst timings.
- # Issue: https://github.com/ipython/ipython/issues/6471
- worst_tuning = 0
- if number == 0:
- # determine number so that 0.2 <= total time < 2.0
- number = 1
- for _ in range(1, 10):
- time_number = timer.timeit(number)
- worst_tuning = max(worst_tuning, time_number / number)
- if time_number >= 0.2:
- break
- number *= 10
- all_runs = timer.repeat(repeat, number)
- best = min(all_runs) / number
-
- worst = max(all_runs) / number
- if worst_tuning:
- worst = max(worst, worst_tuning)
-
- if not quiet :
- # Check best timing is greater than zero to avoid a
- # ZeroDivisionError.
-            # In cases where the slowest timing is less than a microsecond
- # we assume that it does not really matter if the fastest
- # timing is 4 times faster than the slowest timing or not.
- if worst > 4 * best and best > 0 and worst > 1e-6:
- print("The slowest run took %0.2f times longer than the "
- "fastest. This could mean that an intermediate result "
- "is being cached." % (worst / best))
- if number == 1: # No s at "loops" if only one loop
- print(u"%d loop, best of %d: %s per loop" % (number, repeat,
- _format_time(best, precision)))
- else:
- print(u"%d loops, best of %d: %s per loop" % (number, repeat,
- _format_time(best, precision)))
- if tc > tc_min:
- print("Compiler time: %.2f s" % tc)
- if return_result:
- return TimeitResult(number, repeat, best, worst, all_runs, tc, precision)
-
- @skip_doctest
- @needs_local_scope
- @line_cell_magic
- def time(self,line='', cell=None, local_ns=None):
- """Time execution of a Python statement or expression.
-
- The CPU and wall clock times are printed, and the value of the
- expression (if any) is returned. Note that under Win32, system time
- is always reported as 0, since it can not be measured.
-
- This function can be used both as a line and cell magic:
-
- - In line mode you can time a single-line statement (though multiple
-          ones can be chained using semicolons).
-
- - In cell mode, you can time the cell body (a directly
- following statement raises an error).
-
- This function provides very basic timing functionality. Use the timeit
- magic for more control over the measurement.
-
- Examples
- --------
- ::
-
- In [1]: %time 2**128
- CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
- Wall time: 0.00
- Out[1]: 340282366920938463463374607431768211456L
-
- In [2]: n = 1000000
-
- In [3]: %time sum(range(n))
- CPU times: user 1.20 s, sys: 0.05 s, total: 1.25 s
- Wall time: 1.37
- Out[3]: 499999500000L
-
- In [4]: %time print 'hello world'
- hello world
- CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
- Wall time: 0.00
-
- Note that the time needed by Python to compile the given expression
- will be reported if it is more than 0.1s. In this example, the
- actual exponentiation is done by Python at compilation time, so while
- the expression can take a noticeable amount of time to compute, that
- time is purely due to the compilation:
-
- In [5]: %time 3**9999;
- CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
- Wall time: 0.00 s
-
- In [6]: %time 3**999999;
- CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
- Wall time: 0.00 s
- Compiler : 0.78 s
- """
-
- # fail immediately if the given expression can't be compiled
-
- if line and cell:
- raise UsageError("Can't use statement directly after '%%time'!")
-
- if cell:
- expr = self.shell.input_transformer_manager.transform_cell(cell)
- else:
- expr = self.shell.input_transformer_manager.transform_cell(line)
-
- # Minimum time above which parse time will be reported
- tp_min = 0.1
-
- t0 = clock()
- expr_ast = self.shell.compile.ast_parse(expr)
- tp = clock()-t0
-
- # Apply AST transformations
- expr_ast = self.shell.transform_ast(expr_ast)
-
- # Minimum time above which compilation time will be reported
- tc_min = 0.1
-
- if len(expr_ast.body)==1 and isinstance(expr_ast.body[0], ast.Expr):
- mode = 'eval'
- source = '<timed eval>'
- expr_ast = ast.Expression(expr_ast.body[0].value)
- else:
- mode = 'exec'
- source = '<timed exec>'
- t0 = clock()
- code = self.shell.compile(expr_ast, source, mode)
- tc = clock()-t0
-
- # skew measurement as little as possible
- glob = self.shell.user_ns
- wtime = time.time
- # time execution
- wall_st = wtime()
- if mode=='eval':
- st = clock2()
- out = eval(code, glob, local_ns)
- end = clock2()
- else:
- st = clock2()
- exec(code, glob, local_ns)
- end = clock2()
- out = None
- wall_end = wtime()
- # Compute actual times and report
- wall_time = wall_end-wall_st
- cpu_user = end[0]-st[0]
- cpu_sys = end[1]-st[1]
- cpu_tot = cpu_user+cpu_sys
- # On windows cpu_sys is always zero, so no new information to the next print
- if sys.platform != 'win32':
- print("CPU times: user %s, sys: %s, total: %s" % \
- (_format_time(cpu_user),_format_time(cpu_sys),_format_time(cpu_tot)))
- print("Wall time: %s" % _format_time(wall_time))
- if tc > tc_min:
- print("Compiler : %s" % _format_time(tc))
- if tp > tp_min:
- print("Parser : %s" % _format_time(tp))
- return out
-
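A hedged programmatic %time call (assumes an active IPython session); in line mode the value of the timed expression is returned, as the `return out` above shows:

    from IPython import get_ipython

    ip = get_ipython()
    total = ip.run_line_magic('time', 'sum(range(10**6))')
    print(total)   # 499999500000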
- @skip_doctest
- @line_magic
- def macro(self, parameter_s=''):
- """Define a macro for future re-execution. It accepts ranges of history,
- filenames or string objects.
-
- Usage:\\
- %macro [options] name n1-n2 n3-n4 ... n5 .. n6 ...
-
- Options:
-
- -r: use 'raw' input. By default, the 'processed' history is used,
- so that magics are loaded in their transformed version to valid
- Python. If this option is given, the raw input as typed at the
- command line is used instead.
-
- -q: quiet macro definition. By default, a tag line is printed
- to indicate the macro has been created, and then the contents of
- the macro are printed. If this option is given, then no printout
- is produced once the macro is created.
-
- This will define a global variable called `name` which is a string
- made of joining the slices and lines you specify (n1,n2,... numbers
- above) from your input history into a single string. This variable
- acts like an automatic function which re-executes those lines as if
- you had typed them. You just type 'name' at the prompt and the code
- executes.
-
- The syntax for indicating input ranges is described in %history.
-
- Note: as a 'hidden' feature, you can also use traditional python slice
- notation, where N:M means numbers N through M-1.
-
- For example, if your history contains (print using %hist -n )::
-
- 44: x=1
- 45: y=3
- 46: z=x+y
- 47: print x
- 48: a=5
- 49: print 'x',x,'y',y
-
- you can create a macro with lines 44 through 47 (included) and line 49
- called my_macro with::
-
- In [55]: %macro my_macro 44-47 49
-
- Now, typing `my_macro` (without quotes) will re-execute all this code
- in one pass.
-
- You don't need to give the line-numbers in order, and any given line
- number can appear multiple times. You can assemble macros with any
- lines from your input history in any order.
-
- The macro is a simple object which holds its value in an attribute,
- but IPython's display system checks for macros and executes them as
- code instead of printing them when you type their name.
-
- You can view a macro's contents by explicitly printing it with::
-
- print macro_name
-
- """
- opts,args = self.parse_options(parameter_s,'rq',mode='list')
- if not args: # List existing macros
- return sorted(k for k,v in iteritems(self.shell.user_ns) if\
- isinstance(v, Macro))
- if len(args) == 1:
- raise UsageError(
-                "%macro insufficient args; usage: '%macro name n1-n2 n3-n4...'")
- name, codefrom = args[0], " ".join(args[1:])
-
- #print 'rng',ranges # dbg
- try:
- lines = self.shell.find_user_code(codefrom, 'r' in opts)
- except (ValueError, TypeError) as e:
- print(e.args[0])
- return
- macro = Macro(lines)
- self.shell.define_macro(name, macro)
- if not ( 'q' in opts) :
- print('Macro `%s` created. To execute, type its name (without quotes).' % name)
- print('=== Macro contents: ===')
- print(macro, end=' ')
-
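A hedged %macro example (assumes an active IPython session whose history actually contains the referenced lines, as in the docstring above):

    from IPython import get_ipython

    ip = get_ipython()
    ip.run_line_magic('macro', 'my_macro 44-47 49')   # bundle history lines 44-47 and 49
    print(ip.user_ns['my_macro'])                     # the macro's source, ready to replay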
- @magic_arguments.magic_arguments()
- @magic_arguments.argument('output', type=str, default='', nargs='?',
- help="""The name of the variable in which to store output.
- This is a utils.io.CapturedIO object with stdout/err attributes
- for the text of the captured output.
-
- CapturedOutput also has a show() method for displaying the output,
- and __call__ as well, so you can use that to quickly display the
- output.
-
- If unspecified, captured output is discarded.
- """
- )
- @magic_arguments.argument('--no-stderr', action="store_true",
- help="""Don't capture stderr."""
- )
- @magic_arguments.argument('--no-stdout', action="store_true",
- help="""Don't capture stdout."""
- )
- @magic_arguments.argument('--no-display', action="store_true",
- help="""Don't capture IPython's rich display."""
- )
- @cell_magic
- def capture(self, line, cell):
- """run the cell, capturing stdout, stderr, and IPython's rich display() calls."""
- args = magic_arguments.parse_argstring(self.capture, line)
- out = not args.no_stdout
- err = not args.no_stderr
- disp = not args.no_display
- with capture_output(out, err, disp) as io:
- self.shell.run_cell(cell)
- if args.output:
- self.shell.user_ns[args.output] = io
-
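A hedged %%capture example in programmatic form (assumes an active IPython session; 'captured' is an arbitrary variable name):

    from IPython import get_ipython

    ip = get_ipython()
    ip.run_cell_magic('capture', 'captured', "print('hidden from the console')")
    io = ip.user_ns['captured']      # a utils.io.CapturedIO instance
    print(io.stdout)                 # the captured text; io.show() replays all output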
-def parse_breakpoint(text, current_file):
- '''Returns (file, line) for file:line and (current_file, line) for line'''
- colon = text.find(':')
- if colon == -1:
- return current_file, int(text)
- else:
- return text[:colon], int(text[colon+1:])
-
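parse_breakpoint resolves a -b argument against the file being run; two quick checks, assuming the function is imported from this module:

    print(parse_breakpoint('myotherfile.py:20', 'myscript.py'))   # ('myotherfile.py', 20)
    print(parse_breakpoint('40', 'myscript.py'))                  # ('myscript.py', 40)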
-def _format_time(timespan, precision=3):
- """Formats the timespan in a human readable form"""
- import math
-
- if timespan >= 60.0:
- # we have more than a minute, format that in a human readable form
- # Idea from http://snipplr.com/view/5713/
- parts = [("d", 60*60*24),("h", 60*60),("min", 60), ("s", 1)]
- time = []
- leftover = timespan
- for suffix, length in parts:
- value = int(leftover / length)
- if value > 0:
- leftover = leftover % length
- time.append(u'%s%s' % (str(value), suffix))
- if leftover < 1:
- break
- return " ".join(time)
-
-
- # Unfortunately the unicode 'micro' symbol can cause problems in
- # certain terminals.
- # See bug: https://bugs.launchpad.net/ipython/+bug/348466
-    # Try to prevent crashes by being more secure than it needs to be.
-    # E.g. eclipse is able to print a µ, but has no sys.stdout.encoding set.
-    units = [u"s", u"ms",u'us',"ns"] # the safe value
- if hasattr(sys.stdout, 'encoding') and sys.stdout.encoding:
- try:
- u'\xb5'.encode(sys.stdout.encoding)
- units = [u"s", u"ms",u'\xb5s',"ns"]
- except:
- pass
- scaling = [1, 1e3, 1e6, 1e9]
-
- if timespan > 0.0:
- order = min(-int(math.floor(math.log10(timespan)) // 3), 3)
- else:
- order = 3
- return u"%.*g %s" % (precision, timespan * scaling[order], units[order])
+ # This codestring is taken from timeit.template - we fill it in as an
+ # AST, so that we can apply our AST transformations to the user code
+ # without affecting the timing code.
+ timeit_ast_template = ast.parse('def inner(_it, _timer):\n'
+ ' setup\n'
+ ' _t0 = _timer()\n'
+ ' for _i in _it:\n'
+ ' stmt\n'
+ ' _t1 = _timer()\n'
+ ' return _t1 - _t0\n')
+
+ timeit_ast = TimeitTemplateFiller(ast_setup, ast_stmt).visit(timeit_ast_template)
+ timeit_ast = ast.fix_missing_locations(timeit_ast)
+
+ # Track compilation time so it can be reported if too long
+ # Minimum time above which compilation time will be reported
+ tc_min = 0.1
+
+ t0 = clock()
+ code = self.shell.compile(timeit_ast, "<magic-timeit>", "exec")
+ tc = clock()-t0
+
+ ns = {}
+ exec(code, self.shell.user_ns, ns)
+ timer.inner = ns["inner"]
+
+ # This is used to check if there is a huge difference between the
+ # best and worst timings.
+ # Issue: https://github.com/ipython/ipython/issues/6471
+ worst_tuning = 0
+ if number == 0:
+ # determine number so that 0.2 <= total time < 2.0
+ number = 1
+ for _ in range(1, 10):
+ time_number = timer.timeit(number)
+ worst_tuning = max(worst_tuning, time_number / number)
+ if time_number >= 0.2:
+ break
+ number *= 10
+ all_runs = timer.repeat(repeat, number)
+ best = min(all_runs) / number
+
+ worst = max(all_runs) / number
+ if worst_tuning:
+ worst = max(worst, worst_tuning)
+
+        if not quiet:
+ # Check best timing is greater than zero to avoid a
+ # ZeroDivisionError.
+            # In cases where the slowest timing is less than a microsecond,
+            # we assume it does not really matter whether the fastest timing
+            # is 4 times faster than the slowest or not.
+ if worst > 4 * best and best > 0 and worst > 1e-6:
+ print("The slowest run took %0.2f times longer than the "
+ "fastest. This could mean that an intermediate result "
+ "is being cached." % (worst / best))
+ if number == 1: # No s at "loops" if only one loop
+ print(u"%d loop, best of %d: %s per loop" % (number, repeat,
+ _format_time(best, precision)))
+ else:
+ print(u"%d loops, best of %d: %s per loop" % (number, repeat,
+ _format_time(best, precision)))
+ if tc > tc_min:
+ print("Compiler time: %.2f s" % tc)
+ if return_result:
+ return TimeitResult(number, repeat, best, worst, all_runs, tc, precision)
+
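For reference, the loop-count tuning above (keep multiplying `number` by ten until one batch takes at least 0.2 s) can be reproduced with the standard-library timeit module alone. This is only an illustrative sketch; the helper name estimate_number is ours, not part of IPython::

    import timeit

    def estimate_number(stmt="pass", setup="pass"):
        # Mirror of the tuning loop in %timeit: grow the loop count by powers
        # of ten until a single batch of `number` runs takes >= 0.2 seconds.
        timer = timeit.Timer(stmt, setup)
        number = 1
        worst_tuning = 0.0
        for _ in range(1, 10):
            elapsed = timer.timeit(number)
            worst_tuning = max(worst_tuning, elapsed / number)
            if elapsed >= 0.2:
                break
            number *= 10
        return number, worst_tuning

    print(estimate_number("sum(range(100))"))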
+ @skip_doctest
+ @needs_local_scope
+ @line_cell_magic
+ def time(self,line='', cell=None, local_ns=None):
+ """Time execution of a Python statement or expression.
+
+ The CPU and wall clock times are printed, and the value of the
+ expression (if any) is returned. Note that under Win32, system time
+ is always reported as 0, since it can not be measured.
+
+ This function can be used both as a line and cell magic:
+
+ - In line mode you can time a single-line statement (though multiple
+          ones can be chained using semicolons).
+
+ - In cell mode, you can time the cell body (a directly
+ following statement raises an error).
+
+ This function provides very basic timing functionality. Use the timeit
+ magic for more control over the measurement.
+
+ Examples
+ --------
+ ::
+
+ In [1]: %time 2**128
+ CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
+ Wall time: 0.00
+ Out[1]: 340282366920938463463374607431768211456L
+
+ In [2]: n = 1000000
+
+ In [3]: %time sum(range(n))
+ CPU times: user 1.20 s, sys: 0.05 s, total: 1.25 s
+ Wall time: 1.37
+ Out[3]: 499999500000L
+
+ In [4]: %time print 'hello world'
+ hello world
+ CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
+ Wall time: 0.00
+
+ Note that the time needed by Python to compile the given expression
+ will be reported if it is more than 0.1s. In this example, the
+ actual exponentiation is done by Python at compilation time, so while
+ the expression can take a noticeable amount of time to compute, that
+ time is purely due to the compilation:
+
+ In [5]: %time 3**9999;
+ CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
+ Wall time: 0.00 s
+
+ In [6]: %time 3**999999;
+ CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
+ Wall time: 0.00 s
+ Compiler : 0.78 s
+ """
+
+ # fail immediately if the given expression can't be compiled
+
+ if line and cell:
+ raise UsageError("Can't use statement directly after '%%time'!")
+
+ if cell:
+ expr = self.shell.input_transformer_manager.transform_cell(cell)
+ else:
+ expr = self.shell.input_transformer_manager.transform_cell(line)
+
+ # Minimum time above which parse time will be reported
+ tp_min = 0.1
+
+ t0 = clock()
+ expr_ast = self.shell.compile.ast_parse(expr)
+ tp = clock()-t0
+
+ # Apply AST transformations
+ expr_ast = self.shell.transform_ast(expr_ast)
+
+ # Minimum time above which compilation time will be reported
+ tc_min = 0.1
+
+ if len(expr_ast.body)==1 and isinstance(expr_ast.body[0], ast.Expr):
+ mode = 'eval'
+ source = '<timed eval>'
+ expr_ast = ast.Expression(expr_ast.body[0].value)
+ else:
+ mode = 'exec'
+ source = '<timed exec>'
+ t0 = clock()
+ code = self.shell.compile(expr_ast, source, mode)
+ tc = clock()-t0
+
+ # skew measurement as little as possible
+ glob = self.shell.user_ns
+ wtime = time.time
+ # time execution
+ wall_st = wtime()
+ if mode=='eval':
+ st = clock2()
+ out = eval(code, glob, local_ns)
+ end = clock2()
+ else:
+ st = clock2()
+ exec(code, glob, local_ns)
+ end = clock2()
+ out = None
+ wall_end = wtime()
+ # Compute actual times and report
+ wall_time = wall_end-wall_st
+ cpu_user = end[0]-st[0]
+ cpu_sys = end[1]-st[1]
+ cpu_tot = cpu_user+cpu_sys
+        # On Windows cpu_sys is always zero, so it adds no information to the print below
+ if sys.platform != 'win32':
+ print("CPU times: user %s, sys: %s, total: %s" % \
+ (_format_time(cpu_user),_format_time(cpu_sys),_format_time(cpu_tot)))
+ print("Wall time: %s" % _format_time(wall_time))
+ if tc > tc_min:
+ print("Compiler : %s" % _format_time(tc))
+ if tp > tp_min:
+ print("Parser : %s" % _format_time(tp))
+ return out
+
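As a minimal usage sketch (assuming an active IPython session; the timed expression is arbitrary), %time can also be invoked programmatically and its return value captured::

    from IPython import get_ipython

    ip = get_ipython()
    if ip is not None:
        # Same as typing "%time sum(range(10**6))" at the prompt: the timings
        # are printed and the value of the expression is returned.
        total = ip.run_line_magic('time', 'sum(range(10**6))')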
+ @skip_doctest
+ @line_magic
+ def macro(self, parameter_s=''):
+ """Define a macro for future re-execution. It accepts ranges of history,
+ filenames or string objects.
+
+ Usage:\\
+ %macro [options] name n1-n2 n3-n4 ... n5 .. n6 ...
+
+ Options:
+
+ -r: use 'raw' input. By default, the 'processed' history is used,
+ so that magics are loaded in their transformed version to valid
+ Python. If this option is given, the raw input as typed at the
+ command line is used instead.
+
+ -q: quiet macro definition. By default, a tag line is printed
+ to indicate the macro has been created, and then the contents of
+ the macro are printed. If this option is given, then no printout
+ is produced once the macro is created.
+
+ This will define a global variable called `name` which is a string
+ made of joining the slices and lines you specify (n1,n2,... numbers
+ above) from your input history into a single string. This variable
+ acts like an automatic function which re-executes those lines as if
+ you had typed them. You just type 'name' at the prompt and the code
+ executes.
+
+ The syntax for indicating input ranges is described in %history.
+
+ Note: as a 'hidden' feature, you can also use traditional python slice
+ notation, where N:M means numbers N through M-1.
+
+ For example, if your history contains (print using %hist -n )::
+
+ 44: x=1
+ 45: y=3
+ 46: z=x+y
+ 47: print x
+ 48: a=5
+ 49: print 'x',x,'y',y
+
+ you can create a macro with lines 44 through 47 (included) and line 49
+ called my_macro with::
+
+ In [55]: %macro my_macro 44-47 49
+
+ Now, typing `my_macro` (without quotes) will re-execute all this code
+ in one pass.
+
+ You don't need to give the line-numbers in order, and any given line
+ number can appear multiple times. You can assemble macros with any
+ lines from your input history in any order.
+
+ The macro is a simple object which holds its value in an attribute,
+ but IPython's display system checks for macros and executes them as
+ code instead of printing them when you type their name.
+
+ You can view a macro's contents by explicitly printing it with::
+
+ print macro_name
+
+ """
+ opts,args = self.parse_options(parameter_s,'rq',mode='list')
+ if not args: # List existing macros
+ return sorted(k for k,v in iteritems(self.shell.user_ns) if\
+ isinstance(v, Macro))
+ if len(args) == 1:
+ raise UsageError(
+                "%macro insufficient args; usage '%macro name n1-n2 n3-n4 ...'")
+ name, codefrom = args[0], " ".join(args[1:])
+
+ #print 'rng',ranges # dbg
+ try:
+ lines = self.shell.find_user_code(codefrom, 'r' in opts)
+ except (ValueError, TypeError) as e:
+ print(e.args[0])
+ return
+ macro = Macro(lines)
+ self.shell.define_macro(name, macro)
+        if 'q' not in opts:
+ print('Macro `%s` created. To execute, type its name (without quotes).' % name)
+ print('=== Macro contents: ===')
+ print(macro, end=' ')
+
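As a sketch of what %macro does under the hood (the macro name and source lines below are invented), the same effect can be obtained by building a Macro object directly::

    from IPython import get_ipython
    from IPython.core.macro import Macro

    ip = get_ipython()
    if ip is not None:
        # Equivalent to %macro greet <ranges>, but with the source supplied inline.
        ip.define_macro('greet', Macro("print('hello')\nprint('world')\n"))
        # Typing `greet` at the prompt now re-executes both lines.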
+ @magic_arguments.magic_arguments()
+ @magic_arguments.argument('output', type=str, default='', nargs='?',
+ help="""The name of the variable in which to store output.
+ This is a utils.io.CapturedIO object with stdout/err attributes
+ for the text of the captured output.
+
+ CapturedOutput also has a show() method for displaying the output,
+ and __call__ as well, so you can use that to quickly display the
+ output.
+
+ If unspecified, captured output is discarded.
+ """
+ )
+ @magic_arguments.argument('--no-stderr', action="store_true",
+ help="""Don't capture stderr."""
+ )
+ @magic_arguments.argument('--no-stdout', action="store_true",
+ help="""Don't capture stdout."""
+ )
+ @magic_arguments.argument('--no-display', action="store_true",
+ help="""Don't capture IPython's rich display."""
+ )
+ @cell_magic
+ def capture(self, line, cell):
+ """run the cell, capturing stdout, stderr, and IPython's rich display() calls."""
+ args = magic_arguments.parse_argstring(self.capture, line)
+ out = not args.no_stdout
+ err = not args.no_stderr
+ disp = not args.no_display
+ with capture_output(out, err, disp) as io:
+ self.shell.run_cell(cell)
+ if args.output:
+ self.shell.user_ns[args.output] = io
+
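The cell magic above is a thin wrapper around capture_output from IPython.utils.capture; used directly it looks roughly like this (a sketch, with the captured text chosen for illustration)::

    from IPython.utils.capture import capture_output

    with capture_output(stdout=True, stderr=True, display=True) as io:
        print("hello")

    # io is the CapturedIO object described in the argument help above.
    assert io.stdout == "hello\n"
    io.show()   # replay the captured output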
+def parse_breakpoint(text, current_file):
+ '''Returns (file, line) for file:line and (current_file, line) for line'''
+ colon = text.find(':')
+ if colon == -1:
+ return current_file, int(text)
+ else:
+ return text[:colon], int(text[colon+1:])
+
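A quick illustration of the two accepted forms, assuming parse_breakpoint (defined above) is in scope::

    assert parse_breakpoint("script.py:12", "current.py") == ("script.py", 12)
    assert parse_breakpoint("7", "current.py") == ("current.py", 7)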
+def _format_time(timespan, precision=3):
+ """Formats the timespan in a human readable form"""
+ import math
+
+ if timespan >= 60.0:
+ # we have more than a minute, format that in a human readable form
+ # Idea from http://snipplr.com/view/5713/
+ parts = [("d", 60*60*24),("h", 60*60),("min", 60), ("s", 1)]
+ time = []
+ leftover = timespan
+ for suffix, length in parts:
+ value = int(leftover / length)
+ if value > 0:
+ leftover = leftover % length
+ time.append(u'%s%s' % (str(value), suffix))
+ if leftover < 1:
+ break
+ return " ".join(time)
+
+
+ # Unfortunately the unicode 'micro' symbol can cause problems in
+ # certain terminals.
+ # See bug: https://bugs.launchpad.net/ipython/+bug/348466
+    # Try to prevent crashes by being more conservative than strictly necessary.
+    # E.g. Eclipse is able to print a µ, but has no sys.stdout.encoding set.
+    units = [u"s", u"ms", u'us', "ns"]  # the safe fallback
+ if hasattr(sys.stdout, 'encoding') and sys.stdout.encoding:
+ try:
+ u'\xb5'.encode(sys.stdout.encoding)
+ units = [u"s", u"ms",u'\xb5s',"ns"]
+ except:
+ pass
+ scaling = [1, 1e3, 1e6, 1e9]
+
+ if timespan > 0.0:
+ order = min(-int(math.floor(math.log10(timespan)) // 3), 3)
+ else:
+ order = 3
+ return u"%.*g %s" % (precision, timespan * scaling[order], units[order])
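Representative outputs of _format_time (a sketch; as noted above, the exact micro-sign depends on sys.stdout.encoding)::

    from IPython.core.magics.execution import _format_time

    print(_format_time(2e-6))   # '2 us'  (or u'2 \xb5s' on a unicode-capable stdout)
    print(_format_time(1.5))    # '1.5 s'
    print(_format_time(75))     # '1min 15s'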
diff --git a/contrib/python/ipython/py2/IPython/core/magics/extension.py b/contrib/python/ipython/py2/IPython/core/magics/extension.py
index cf9a9ab9d1..2991d55ca4 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/extension.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/extension.py
@@ -1,67 +1,67 @@
-"""Implementation of magic functions for the extension machinery.
-"""
-from __future__ import print_function
-#-----------------------------------------------------------------------------
-# Copyright (c) 2012 The IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-# Stdlib
-import os
-
-# Our own packages
-from IPython.core.error import UsageError
-from IPython.core.magic import Magics, magics_class, line_magic
-from warnings import warn
-
-#-----------------------------------------------------------------------------
-# Magic implementation classes
-#-----------------------------------------------------------------------------
-
-@magics_class
-class ExtensionMagics(Magics):
- """Magics to manage the IPython extensions system."""
-
- @line_magic
- def load_ext(self, module_str):
- """Load an IPython extension by its module name."""
- if not module_str:
- raise UsageError('Missing module name.')
- res = self.shell.extension_manager.load_extension(module_str)
-
- if res == 'already loaded':
- print("The %s extension is already loaded. To reload it, use:" % module_str)
- print(" %reload_ext", module_str)
- elif res == 'no load function':
- print("The %s module is not an IPython extension." % module_str)
-
- @line_magic
- def unload_ext(self, module_str):
- """Unload an IPython extension by its module name.
-
- Not all extensions can be unloaded, only those which define an
- ``unload_ipython_extension`` function.
- """
- if not module_str:
- raise UsageError('Missing module name.')
-
- res = self.shell.extension_manager.unload_extension(module_str)
-
- if res == 'no unload function':
- print("The %s extension doesn't define how to unload it." % module_str)
- elif res == "not loaded":
- print("The %s extension is not loaded." % module_str)
-
- @line_magic
- def reload_ext(self, module_str):
- """Reload an IPython extension by its module name."""
- if not module_str:
- raise UsageError('Missing module name.')
- self.shell.extension_manager.reload_extension(module_str)
+"""Implementation of magic functions for the extension machinery.
+"""
+from __future__ import print_function
+#-----------------------------------------------------------------------------
+# Copyright (c) 2012 The IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+# Stdlib
+import os
+
+# Our own packages
+from IPython.core.error import UsageError
+from IPython.core.magic import Magics, magics_class, line_magic
+from warnings import warn
+
+#-----------------------------------------------------------------------------
+# Magic implementation classes
+#-----------------------------------------------------------------------------
+
+@magics_class
+class ExtensionMagics(Magics):
+ """Magics to manage the IPython extensions system."""
+
+ @line_magic
+ def load_ext(self, module_str):
+ """Load an IPython extension by its module name."""
+ if not module_str:
+ raise UsageError('Missing module name.')
+ res = self.shell.extension_manager.load_extension(module_str)
+
+ if res == 'already loaded':
+ print("The %s extension is already loaded. To reload it, use:" % module_str)
+ print(" %reload_ext", module_str)
+ elif res == 'no load function':
+ print("The %s module is not an IPython extension." % module_str)
+
+ @line_magic
+ def unload_ext(self, module_str):
+ """Unload an IPython extension by its module name.
+
+ Not all extensions can be unloaded, only those which define an
+ ``unload_ipython_extension`` function.
+ """
+ if not module_str:
+ raise UsageError('Missing module name.')
+
+ res = self.shell.extension_manager.unload_extension(module_str)
+
+ if res == 'no unload function':
+ print("The %s extension doesn't define how to unload it." % module_str)
+ elif res == "not loaded":
+ print("The %s extension is not loaded." % module_str)
+
+ @line_magic
+ def reload_ext(self, module_str):
+ """Reload an IPython extension by its module name."""
+ if not module_str:
+ raise UsageError('Missing module name.')
+ self.shell.extension_manager.reload_extension(module_str)
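For context, the hooks that %load_ext and %unload_ext look for are plain module-level functions. A minimal extension module might look like the following sketch (the file name and messages are invented)::

    # Save as my_ext.py somewhere importable, then run:  %load_ext my_ext

    def load_ipython_extension(ipython):
        # `ipython` is the active InteractiveShell; register magics, hooks, etc. here.
        print("my_ext loaded")

    def unload_ipython_extension(ipython):
        # Optional: without this, %unload_ext reports that the extension
        # doesn't define how to unload itself.
        print("my_ext unloaded")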
diff --git a/contrib/python/ipython/py2/IPython/core/magics/history.py b/contrib/python/ipython/py2/IPython/core/magics/history.py
index 342e641d48..5967591394 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/history.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/history.py
@@ -1,320 +1,320 @@
-"""Implementation of magic functions related to History.
-"""
-#-----------------------------------------------------------------------------
-# Copyright (c) 2012, IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-from __future__ import print_function
-
-# Stdlib
-import os
+"""Implementation of magic functions related to History.
+"""
+#-----------------------------------------------------------------------------
+# Copyright (c) 2012, IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+from __future__ import print_function
+
+# Stdlib
+import os
import sys
-from io import open as io_open
-
-# Our own packages
-from IPython.core.error import StdinNotImplementedError
-from IPython.core.magic import Magics, magics_class, line_magic
-from IPython.core.magic_arguments import (argument, magic_arguments,
- parse_argstring)
-from IPython.testing.skipdoctest import skip_doctest
-from IPython.utils import io
-from IPython.utils.py3compat import cast_unicode_py2
-
-#-----------------------------------------------------------------------------
-# Magics class implementation
-#-----------------------------------------------------------------------------
-
-
-_unspecified = object()
-
-
-@magics_class
-class HistoryMagics(Magics):
-
- @magic_arguments()
- @argument(
- '-n', dest='print_nums', action='store_true', default=False,
- help="""
- print line numbers for each input.
- This feature is only available if numbered prompts are in use.
- """)
- @argument(
- '-o', dest='get_output', action='store_true', default=False,
- help="also print outputs for each input.")
- @argument(
- '-p', dest='pyprompts', action='store_true', default=False,
- help="""
- print classic '>>>' python prompts before each input.
- This is useful for making documentation, and in conjunction
- with -o, for producing doctest-ready output.
- """)
- @argument(
- '-t', dest='raw', action='store_false', default=True,
- help="""
- print the 'translated' history, as IPython understands it.
- IPython filters your input and converts it all into valid Python
- source before executing it (things like magics or aliases are turned
- into function calls, for example). With this option, you'll see the
- native history instead of the user-entered version: '%%cd /' will be
- seen as 'get_ipython().magic("%%cd /")' instead of '%%cd /'.
- """)
- @argument(
- '-f', dest='filename',
- help="""
- FILENAME: instead of printing the output to the screen, redirect
- it to the given file. The file is always overwritten, though *when
- it can*, IPython asks for confirmation first. In particular, running
- the command 'history -f FILENAME' from the IPython Notebook
- interface will replace FILENAME even if it already exists *without*
- confirmation.
- """)
- @argument(
- '-g', dest='pattern', nargs='*', default=None,
- help="""
- treat the arg as a glob pattern to search for in (full) history.
- This includes the saved history (almost all commands ever written).
- The pattern may contain '?' to match one unknown character and '*'
- to match any number of unknown characters. Use '%%hist -g' to show
- full saved history (may be very long).
- """)
- @argument(
- '-l', dest='limit', type=int, nargs='?', default=_unspecified,
- help="""
- get the last n lines from all sessions. Specify n as a single
- arg, or the default is the last 10 lines.
- """)
- @argument(
- '-u', dest='unique', action='store_true',
- help="""
- when searching history using `-g`, show only unique history.
- """)
- @argument('range', nargs='*')
- @skip_doctest
- @line_magic
- def history(self, parameter_s = ''):
- """Print input history (_i<n> variables), with most recent last.
-
- By default, input history is printed without line numbers so it can be
- directly pasted into an editor. Use -n to show them.
-
- By default, all input history from the current session is displayed.
- Ranges of history can be indicated using the syntax:
-
- ``4``
- Line 4, current session
- ``4-6``
- Lines 4-6, current session
- ``243/1-5``
- Lines 1-5, session 243
- ``~2/7``
- Line 7, session 2 before current
- ``~8/1-~6/5``
- From the first line of 8 sessions ago, to the fifth line of 6
- sessions ago.
-
- Multiple ranges can be entered, separated by spaces
-
- The same syntax is used by %macro, %save, %edit, %rerun
-
- Examples
- --------
- ::
-
- In [6]: %history -n 4-6
- 4:a = 12
- 5:print a**2
- 6:%history -n 4-6
-
- """
-
- args = parse_argstring(self.history, parameter_s)
-
- # For brevity
- history_manager = self.shell.history_manager
-
- def _format_lineno(session, line):
- """Helper function to format line numbers properly."""
- if session in (0, history_manager.session_number):
- return str(line)
- return "%s/%s" % (session, line)
-
- # Check if output to specific file was requested.
- outfname = args.filename
- if not outfname:
+from io import open as io_open
+
+# Our own packages
+from IPython.core.error import StdinNotImplementedError
+from IPython.core.magic import Magics, magics_class, line_magic
+from IPython.core.magic_arguments import (argument, magic_arguments,
+ parse_argstring)
+from IPython.testing.skipdoctest import skip_doctest
+from IPython.utils import io
+from IPython.utils.py3compat import cast_unicode_py2
+
+#-----------------------------------------------------------------------------
+# Magics class implementation
+#-----------------------------------------------------------------------------
+
+
+_unspecified = object()
+
+
+@magics_class
+class HistoryMagics(Magics):
+
+ @magic_arguments()
+ @argument(
+ '-n', dest='print_nums', action='store_true', default=False,
+ help="""
+ print line numbers for each input.
+ This feature is only available if numbered prompts are in use.
+ """)
+ @argument(
+ '-o', dest='get_output', action='store_true', default=False,
+ help="also print outputs for each input.")
+ @argument(
+ '-p', dest='pyprompts', action='store_true', default=False,
+ help="""
+ print classic '>>>' python prompts before each input.
+ This is useful for making documentation, and in conjunction
+ with -o, for producing doctest-ready output.
+ """)
+ @argument(
+ '-t', dest='raw', action='store_false', default=True,
+ help="""
+ print the 'translated' history, as IPython understands it.
+ IPython filters your input and converts it all into valid Python
+ source before executing it (things like magics or aliases are turned
+ into function calls, for example). With this option, you'll see the
+ native history instead of the user-entered version: '%%cd /' will be
+ seen as 'get_ipython().magic("%%cd /")' instead of '%%cd /'.
+ """)
+ @argument(
+ '-f', dest='filename',
+ help="""
+ FILENAME: instead of printing the output to the screen, redirect
+ it to the given file. The file is always overwritten, though *when
+ it can*, IPython asks for confirmation first. In particular, running
+ the command 'history -f FILENAME' from the IPython Notebook
+ interface will replace FILENAME even if it already exists *without*
+ confirmation.
+ """)
+ @argument(
+ '-g', dest='pattern', nargs='*', default=None,
+ help="""
+ treat the arg as a glob pattern to search for in (full) history.
+ This includes the saved history (almost all commands ever written).
+ The pattern may contain '?' to match one unknown character and '*'
+ to match any number of unknown characters. Use '%%hist -g' to show
+ full saved history (may be very long).
+ """)
+ @argument(
+ '-l', dest='limit', type=int, nargs='?', default=_unspecified,
+ help="""
+ get the last n lines from all sessions. Specify n as a single
+ arg, or the default is the last 10 lines.
+ """)
+ @argument(
+ '-u', dest='unique', action='store_true',
+ help="""
+ when searching history using `-g`, show only unique history.
+ """)
+ @argument('range', nargs='*')
+ @skip_doctest
+ @line_magic
+ def history(self, parameter_s = ''):
+ """Print input history (_i<n> variables), with most recent last.
+
+ By default, input history is printed without line numbers so it can be
+ directly pasted into an editor. Use -n to show them.
+
+ By default, all input history from the current session is displayed.
+ Ranges of history can be indicated using the syntax:
+
+ ``4``
+ Line 4, current session
+ ``4-6``
+ Lines 4-6, current session
+ ``243/1-5``
+ Lines 1-5, session 243
+ ``~2/7``
+ Line 7, session 2 before current
+ ``~8/1-~6/5``
+ From the first line of 8 sessions ago, to the fifth line of 6
+ sessions ago.
+
+ Multiple ranges can be entered, separated by spaces
+
+ The same syntax is used by %macro, %save, %edit, %rerun
+
+ Examples
+ --------
+ ::
+
+ In [6]: %history -n 4-6
+ 4:a = 12
+ 5:print a**2
+ 6:%history -n 4-6
+
+ """
+
+ args = parse_argstring(self.history, parameter_s)
+
+ # For brevity
+ history_manager = self.shell.history_manager
+
+ def _format_lineno(session, line):
+ """Helper function to format line numbers properly."""
+ if session in (0, history_manager.session_number):
+ return str(line)
+ return "%s/%s" % (session, line)
+
+ # Check if output to specific file was requested.
+ outfname = args.filename
+ if not outfname:
outfile = sys.stdout # default
- # We don't want to close stdout at the end!
- close_at_end = False
- else:
- if os.path.exists(outfname):
- try:
- ans = io.ask_yes_no("File %r exists. Overwrite?" % outfname)
- except StdinNotImplementedError:
- ans = True
- if not ans:
- print('Aborting.')
- return
- print("Overwriting file.")
- outfile = io_open(outfname, 'w', encoding='utf-8')
- close_at_end = True
-
- print_nums = args.print_nums
- get_output = args.get_output
- pyprompts = args.pyprompts
- raw = args.raw
-
- pattern = None
- limit = None if args.limit is _unspecified else args.limit
-
- if args.pattern is not None:
- if args.pattern:
- pattern = "*" + " ".join(args.pattern) + "*"
- else:
- pattern = "*"
- hist = history_manager.search(pattern, raw=raw, output=get_output,
- n=limit, unique=args.unique)
- print_nums = True
- elif args.limit is not _unspecified:
- n = 10 if limit is None else limit
- hist = history_manager.get_tail(n, raw=raw, output=get_output)
- else:
- if args.range: # Get history by ranges
- hist = history_manager.get_range_by_str(" ".join(args.range),
- raw, get_output)
- else: # Just get history for the current session
- hist = history_manager.get_range(raw=raw, output=get_output)
-
- # We could be displaying the entire history, so let's not try to pull
- # it into a list in memory. Anything that needs more space will just
- # misalign.
- width = 4
-
- for session, lineno, inline in hist:
- # Print user history with tabs expanded to 4 spaces. The GUI
- # clients use hard tabs for easier usability in auto-indented code,
- # but we want to produce PEP-8 compliant history for safe pasting
- # into an editor.
- if get_output:
- inline, output = inline
- inline = inline.expandtabs(4).rstrip()
-
- multiline = "\n" in inline
- line_sep = '\n' if multiline else ' '
- if print_nums:
- print(u'%s:%s' % (_format_lineno(session, lineno).rjust(width),
- line_sep), file=outfile, end=u'')
- if pyprompts:
- print(u">>> ", end=u"", file=outfile)
- if multiline:
- inline = "\n... ".join(inline.splitlines()) + "\n..."
- print(inline, file=outfile)
- if get_output and output:
- print(cast_unicode_py2(output), file=outfile)
-
- if close_at_end:
- outfile.close()
-
- @line_magic
- def recall(self, arg):
- r"""Repeat a command, or get command to input line for editing.
-
- %recall and %rep are equivalent.
-
- - %recall (no arguments):
-
- Place a string version of last computation result (stored in the
- special '_' variable) to the next input prompt. Allows you to create
- elaborate command lines without using copy-paste::
-
- In[1]: l = ["hei", "vaan"]
- In[2]: "".join(l)
- Out[2]: heivaan
- In[3]: %recall
- In[4]: heivaan_ <== cursor blinking
-
- %recall 45
-
- Place history line 45 on the next input prompt. Use %hist to find
- out the number.
-
- %recall 1-4
-
- Combine the specified lines into one cell, and place it on the next
- input prompt. See %history for the slice syntax.
-
- %recall foo+bar
-
- If foo+bar can be evaluated in the user namespace, the result is
- placed at the next input prompt. Otherwise, the history is searched
- for lines which contain that substring, and the most recent one is
- placed at the next input prompt.
- """
- if not arg: # Last output
- self.shell.set_next_input(str(self.shell.user_ns["_"]))
- return
- # Get history range
- histlines = self.shell.history_manager.get_range_by_str(arg)
- cmd = "\n".join(x[2] for x in histlines)
- if cmd:
- self.shell.set_next_input(cmd.rstrip())
- return
-
- try: # Variable in user namespace
- cmd = str(eval(arg, self.shell.user_ns))
- except Exception: # Search for term in history
- histlines = self.shell.history_manager.search("*"+arg+"*")
- for h in reversed([x[2] for x in histlines]):
- if 'recall' in h or 'rep' in h:
- continue
- self.shell.set_next_input(h.rstrip())
- return
- else:
- self.shell.set_next_input(cmd.rstrip())
- print("Couldn't evaluate or find in history:", arg)
-
- @line_magic
- def rerun(self, parameter_s=''):
- """Re-run previous input
-
- By default, you can specify ranges of input history to be repeated
- (as with %history). With no arguments, it will repeat the last line.
-
- Options:
-
- -l <n> : Repeat the last n lines of input, not including the
- current command.
-
- -g foo : Repeat the most recent line which contains foo
- """
- opts, args = self.parse_options(parameter_s, 'l:g:', mode='string')
- if "l" in opts: # Last n lines
- n = int(opts['l'])
- hist = self.shell.history_manager.get_tail(n)
- elif "g" in opts: # Search
- p = "*"+opts['g']+"*"
- hist = list(self.shell.history_manager.search(p))
- for l in reversed(hist):
- if "rerun" not in l[2]:
- hist = [l] # The last match which isn't a %rerun
- break
- else:
- hist = [] # No matches except %rerun
- elif args: # Specify history ranges
- hist = self.shell.history_manager.get_range_by_str(args)
- else: # Last line
- hist = self.shell.history_manager.get_tail(1)
- hist = [x[2] for x in hist]
- if not hist:
- print("No lines in history match specification")
- return
- histlines = "\n".join(hist)
- print("=== Executing: ===")
- print(histlines)
- print("=== Output: ===")
- self.shell.run_cell("\n".join(hist), store_history=False)
+ # We don't want to close stdout at the end!
+ close_at_end = False
+ else:
+ if os.path.exists(outfname):
+ try:
+ ans = io.ask_yes_no("File %r exists. Overwrite?" % outfname)
+ except StdinNotImplementedError:
+ ans = True
+ if not ans:
+ print('Aborting.')
+ return
+ print("Overwriting file.")
+ outfile = io_open(outfname, 'w', encoding='utf-8')
+ close_at_end = True
+
+ print_nums = args.print_nums
+ get_output = args.get_output
+ pyprompts = args.pyprompts
+ raw = args.raw
+
+ pattern = None
+ limit = None if args.limit is _unspecified else args.limit
+
+ if args.pattern is not None:
+ if args.pattern:
+ pattern = "*" + " ".join(args.pattern) + "*"
+ else:
+ pattern = "*"
+ hist = history_manager.search(pattern, raw=raw, output=get_output,
+ n=limit, unique=args.unique)
+ print_nums = True
+ elif args.limit is not _unspecified:
+ n = 10 if limit is None else limit
+ hist = history_manager.get_tail(n, raw=raw, output=get_output)
+ else:
+ if args.range: # Get history by ranges
+ hist = history_manager.get_range_by_str(" ".join(args.range),
+ raw, get_output)
+ else: # Just get history for the current session
+ hist = history_manager.get_range(raw=raw, output=get_output)
+
+ # We could be displaying the entire history, so let's not try to pull
+ # it into a list in memory. Anything that needs more space will just
+ # misalign.
+ width = 4
+
+ for session, lineno, inline in hist:
+ # Print user history with tabs expanded to 4 spaces. The GUI
+ # clients use hard tabs for easier usability in auto-indented code,
+ # but we want to produce PEP-8 compliant history for safe pasting
+ # into an editor.
+ if get_output:
+ inline, output = inline
+ inline = inline.expandtabs(4).rstrip()
+
+ multiline = "\n" in inline
+ line_sep = '\n' if multiline else ' '
+ if print_nums:
+ print(u'%s:%s' % (_format_lineno(session, lineno).rjust(width),
+ line_sep), file=outfile, end=u'')
+ if pyprompts:
+ print(u">>> ", end=u"", file=outfile)
+ if multiline:
+ inline = "\n... ".join(inline.splitlines()) + "\n..."
+ print(inline, file=outfile)
+ if get_output and output:
+ print(cast_unicode_py2(output), file=outfile)
+
+ if close_at_end:
+ outfile.close()
+
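The range syntax documented above can also be used programmatically through the history manager; a hedged sketch (the range string is just an example)::

    from IPython import get_ipython

    ip = get_ipython()
    if ip is not None:
        hm = ip.history_manager
        # Lines 1-5 of the previous session, same syntax as "%history ~1/1-5".
        for session, lineno, source in hm.get_range_by_str("~1/1-5"):
            print(session, lineno, source)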
+ @line_magic
+ def recall(self, arg):
+ r"""Repeat a command, or get command to input line for editing.
+
+ %recall and %rep are equivalent.
+
+ - %recall (no arguments):
+
+ Place a string version of last computation result (stored in the
+ special '_' variable) to the next input prompt. Allows you to create
+ elaborate command lines without using copy-paste::
+
+ In[1]: l = ["hei", "vaan"]
+ In[2]: "".join(l)
+ Out[2]: heivaan
+ In[3]: %recall
+ In[4]: heivaan_ <== cursor blinking
+
+ %recall 45
+
+ Place history line 45 on the next input prompt. Use %hist to find
+ out the number.
+
+ %recall 1-4
+
+ Combine the specified lines into one cell, and place it on the next
+ input prompt. See %history for the slice syntax.
+
+ %recall foo+bar
+
+ If foo+bar can be evaluated in the user namespace, the result is
+ placed at the next input prompt. Otherwise, the history is searched
+ for lines which contain that substring, and the most recent one is
+ placed at the next input prompt.
+ """
+ if not arg: # Last output
+ self.shell.set_next_input(str(self.shell.user_ns["_"]))
+ return
+ # Get history range
+ histlines = self.shell.history_manager.get_range_by_str(arg)
+ cmd = "\n".join(x[2] for x in histlines)
+ if cmd:
+ self.shell.set_next_input(cmd.rstrip())
+ return
+
+ try: # Variable in user namespace
+ cmd = str(eval(arg, self.shell.user_ns))
+ except Exception: # Search for term in history
+ histlines = self.shell.history_manager.search("*"+arg+"*")
+ for h in reversed([x[2] for x in histlines]):
+ if 'recall' in h or 'rep' in h:
+ continue
+ self.shell.set_next_input(h.rstrip())
+ return
+ else:
+ self.shell.set_next_input(cmd.rstrip())
+ print("Couldn't evaluate or find in history:", arg)
+
+ @line_magic
+ def rerun(self, parameter_s=''):
+ """Re-run previous input
+
+ By default, you can specify ranges of input history to be repeated
+ (as with %history). With no arguments, it will repeat the last line.
+
+ Options:
+
+ -l <n> : Repeat the last n lines of input, not including the
+ current command.
+
+ -g foo : Repeat the most recent line which contains foo
+ """
+ opts, args = self.parse_options(parameter_s, 'l:g:', mode='string')
+ if "l" in opts: # Last n lines
+ n = int(opts['l'])
+ hist = self.shell.history_manager.get_tail(n)
+ elif "g" in opts: # Search
+ p = "*"+opts['g']+"*"
+ hist = list(self.shell.history_manager.search(p))
+ for l in reversed(hist):
+ if "rerun" not in l[2]:
+ hist = [l] # The last match which isn't a %rerun
+ break
+ else:
+ hist = [] # No matches except %rerun
+ elif args: # Specify history ranges
+ hist = self.shell.history_manager.get_range_by_str(args)
+ else: # Last line
+ hist = self.shell.history_manager.get_tail(1)
+ hist = [x[2] for x in hist]
+ if not hist:
+ print("No lines in history match specification")
+ return
+ histlines = "\n".join(hist)
+ print("=== Executing: ===")
+ print(histlines)
+ print("=== Output: ===")
+ self.shell.run_cell("\n".join(hist), store_history=False)
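Roughly what the -g branch above does, written out by hand (the search term is invented, and this simplified version does not skip lines that themselves contain 'rerun')::

    from IPython import get_ipython

    ip = get_ipython()
    if ip is not None:
        matches = list(ip.history_manager.search("*plot*"))
        if matches:
            # Each match is a (session, lineno, source) tuple; replay the newest.
            ip.run_cell(matches[-1][2], store_history=False)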
diff --git a/contrib/python/ipython/py2/IPython/core/magics/logging.py b/contrib/python/ipython/py2/IPython/core/magics/logging.py
index 0fafdeff6b..90214ab54a 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/logging.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/logging.py
@@ -1,184 +1,184 @@
-"""Implementation of magic functions for IPython's own logging.
-"""
-#-----------------------------------------------------------------------------
-# Copyright (c) 2012 The IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-# Stdlib
-import os
-import sys
-
-# Our own packages
-from IPython.core.magic import Magics, magics_class, line_magic
+"""Implementation of magic functions for IPython's own logging.
+"""
+#-----------------------------------------------------------------------------
+# Copyright (c) 2012 The IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+# Stdlib
+import os
+import sys
+
+# Our own packages
+from IPython.core.magic import Magics, magics_class, line_magic
from warnings import warn
-from IPython.utils.py3compat import str_to_unicode
-
-#-----------------------------------------------------------------------------
-# Magic implementation classes
-#-----------------------------------------------------------------------------
-
-@magics_class
-class LoggingMagics(Magics):
- """Magics related to all logging machinery."""
-
- @line_magic
- def logstart(self, parameter_s=''):
- """Start logging anywhere in a session.
-
- %logstart [-o|-r|-t] [log_name [log_mode]]
-
- If no name is given, it defaults to a file named 'ipython_log.py' in your
- current directory, in 'rotate' mode (see below).
-
- '%logstart name' saves to file 'name' in 'backup' mode. It saves your
- history up to that point and then continues logging.
-
- %logstart takes a second optional parameter: logging mode. This can be one
- of (note that the modes are given unquoted):
-
- append
- Keep logging at the end of any existing file.
-
- backup
- Rename any existing file to name~ and start name.
-
- global
- Append to a single logfile in your home directory.
-
- over
- Overwrite any existing log.
-
- rotate
- Create rotating logs: name.1~, name.2~, etc.
-
- Options:
-
- -o
- log also IPython's output. In this mode, all commands which
- generate an Out[NN] prompt are recorded to the logfile, right after
- their corresponding input line. The output lines are always
- prepended with a '#[Out]# ' marker, so that the log remains valid
- Python code.
-
- Since this marker is always the same, filtering only the output from
- a log is very easy, using for example a simple awk call::
-
- awk -F'#\\[Out\\]# ' '{if($2) {print $2}}' ipython_log.py
-
- -r
- log 'raw' input. Normally, IPython's logs contain the processed
- input, so that user lines are logged in their final form, converted
- into valid Python. For example, %Exit is logged as
- _ip.magic("Exit"). If the -r flag is given, all input is logged
- exactly as typed, with no transformations applied.
-
- -t
- put timestamps before each input line logged (these are put in
- comments).
- """
-
- opts,par = self.parse_options(parameter_s,'ort')
- log_output = 'o' in opts
- log_raw_input = 'r' in opts
- timestamp = 't' in opts
-
- logger = self.shell.logger
-
- # if no args are given, the defaults set in the logger constructor by
- # ipython remain valid
- if par:
- try:
- logfname,logmode = par.split()
- except:
- logfname = par
- logmode = 'backup'
- else:
- logfname = logger.logfname
- logmode = logger.logmode
- # put logfname into rc struct as if it had been called on the command
-        # line, so it ends up saved in the log header. Save it in case we need
- # to restore it...
- old_logfile = self.shell.logfile
- if logfname:
- logfname = os.path.expanduser(logfname)
- self.shell.logfile = logfname
-
- loghead = u'# IPython log file\n\n'
- try:
- logger.logstart(logfname, loghead, logmode, log_output, timestamp,
- log_raw_input)
- except:
- self.shell.logfile = old_logfile
- warn("Couldn't start log: %s" % sys.exc_info()[1])
- else:
- # log input history up to this point, optionally interleaving
- # output if requested
-
- if timestamp:
- # disable timestamping for the previous history, since we've
- # lost those already (no time machine here).
- logger.timestamp = False
-
- if log_raw_input:
- input_hist = self.shell.history_manager.input_hist_raw
- else:
- input_hist = self.shell.history_manager.input_hist_parsed
-
- if log_output:
- log_write = logger.log_write
- output_hist = self.shell.history_manager.output_hist
- for n in range(1,len(input_hist)-1):
- log_write(input_hist[n].rstrip() + u'\n')
- if n in output_hist:
- log_write(str_to_unicode(repr(output_hist[n])),'output')
- else:
- logger.log_write(u'\n'.join(input_hist[1:]))
- logger.log_write(u'\n')
- if timestamp:
- # re-enable timestamping
- logger.timestamp = True
-
- print ('Activating auto-logging. '
- 'Current session state plus future input saved.')
- logger.logstate()
-
- @line_magic
- def logstop(self, parameter_s=''):
- """Fully stop logging and close log file.
-
- In order to start logging again, a new %logstart call needs to be made,
- possibly (though not necessarily) with a new filename, mode and other
- options."""
- self.shell.logger.logstop()
-
- @line_magic
- def logoff(self, parameter_s=''):
- """Temporarily stop logging.
-
- You must have previously started logging."""
- self.shell.logger.switch_log(0)
-
- @line_magic
- def logon(self, parameter_s=''):
- """Restart logging.
-
- This function is for restarting logging which you've temporarily
- stopped with %logoff. For starting logging for the first time, you
- must use the %logstart function, which allows you to specify an
- optional log filename."""
-
- self.shell.logger.switch_log(1)
-
- @line_magic
- def logstate(self, parameter_s=''):
- """Print the status of the logging system."""
-
- self.shell.logger.logstate()
+from IPython.utils.py3compat import str_to_unicode
+
+#-----------------------------------------------------------------------------
+# Magic implementation classes
+#-----------------------------------------------------------------------------
+
+@magics_class
+class LoggingMagics(Magics):
+ """Magics related to all logging machinery."""
+
+ @line_magic
+ def logstart(self, parameter_s=''):
+ """Start logging anywhere in a session.
+
+ %logstart [-o|-r|-t] [log_name [log_mode]]
+
+ If no name is given, it defaults to a file named 'ipython_log.py' in your
+ current directory, in 'rotate' mode (see below).
+
+ '%logstart name' saves to file 'name' in 'backup' mode. It saves your
+ history up to that point and then continues logging.
+
+ %logstart takes a second optional parameter: logging mode. This can be one
+ of (note that the modes are given unquoted):
+
+ append
+ Keep logging at the end of any existing file.
+
+ backup
+ Rename any existing file to name~ and start name.
+
+ global
+ Append to a single logfile in your home directory.
+
+ over
+ Overwrite any existing log.
+
+ rotate
+ Create rotating logs: name.1~, name.2~, etc.
+
+ Options:
+
+ -o
+ log also IPython's output. In this mode, all commands which
+ generate an Out[NN] prompt are recorded to the logfile, right after
+ their corresponding input line. The output lines are always
+ prepended with a '#[Out]# ' marker, so that the log remains valid
+ Python code.
+
+ Since this marker is always the same, filtering only the output from
+ a log is very easy, using for example a simple awk call::
+
+ awk -F'#\\[Out\\]# ' '{if($2) {print $2}}' ipython_log.py
+
+ -r
+ log 'raw' input. Normally, IPython's logs contain the processed
+ input, so that user lines are logged in their final form, converted
+ into valid Python. For example, %Exit is logged as
+ _ip.magic("Exit"). If the -r flag is given, all input is logged
+ exactly as typed, with no transformations applied.
+
+ -t
+ put timestamps before each input line logged (these are put in
+ comments).
+ """
+
+ opts,par = self.parse_options(parameter_s,'ort')
+ log_output = 'o' in opts
+ log_raw_input = 'r' in opts
+ timestamp = 't' in opts
+
+ logger = self.shell.logger
+
+ # if no args are given, the defaults set in the logger constructor by
+ # ipython remain valid
+ if par:
+ try:
+ logfname,logmode = par.split()
+ except:
+ logfname = par
+ logmode = 'backup'
+ else:
+ logfname = logger.logfname
+ logmode = logger.logmode
+ # put logfname into rc struct as if it had been called on the command
+        # line, so it ends up saved in the log header. Save it in case we need
+ # to restore it...
+ old_logfile = self.shell.logfile
+ if logfname:
+ logfname = os.path.expanduser(logfname)
+ self.shell.logfile = logfname
+
+ loghead = u'# IPython log file\n\n'
+ try:
+ logger.logstart(logfname, loghead, logmode, log_output, timestamp,
+ log_raw_input)
+ except:
+ self.shell.logfile = old_logfile
+ warn("Couldn't start log: %s" % sys.exc_info()[1])
+ else:
+ # log input history up to this point, optionally interleaving
+ # output if requested
+
+ if timestamp:
+ # disable timestamping for the previous history, since we've
+ # lost those already (no time machine here).
+ logger.timestamp = False
+
+ if log_raw_input:
+ input_hist = self.shell.history_manager.input_hist_raw
+ else:
+ input_hist = self.shell.history_manager.input_hist_parsed
+
+ if log_output:
+ log_write = logger.log_write
+ output_hist = self.shell.history_manager.output_hist
+ for n in range(1,len(input_hist)-1):
+ log_write(input_hist[n].rstrip() + u'\n')
+ if n in output_hist:
+ log_write(str_to_unicode(repr(output_hist[n])),'output')
+ else:
+ logger.log_write(u'\n'.join(input_hist[1:]))
+ logger.log_write(u'\n')
+ if timestamp:
+ # re-enable timestamping
+ logger.timestamp = True
+
+ print ('Activating auto-logging. '
+ 'Current session state plus future input saved.')
+ logger.logstate()
+
+ @line_magic
+ def logstop(self, parameter_s=''):
+ """Fully stop logging and close log file.
+
+ In order to start logging again, a new %logstart call needs to be made,
+ possibly (though not necessarily) with a new filename, mode and other
+ options."""
+ self.shell.logger.logstop()
+
+ @line_magic
+ def logoff(self, parameter_s=''):
+ """Temporarily stop logging.
+
+ You must have previously started logging."""
+ self.shell.logger.switch_log(0)
+
+ @line_magic
+ def logon(self, parameter_s=''):
+ """Restart logging.
+
+ This function is for restarting logging which you've temporarily
+ stopped with %logoff. For starting logging for the first time, you
+ must use the %logstart function, which allows you to specify an
+ optional log filename."""
+
+ self.shell.logger.switch_log(1)
+
+ @line_magic
+ def logstate(self, parameter_s=''):
+ """Print the status of the logging system."""
+
+ self.shell.logger.logstate()
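A short usage sketch driving the magics above programmatically (the log file name, mode and options are chosen for illustration)::

    from IPython import get_ipython

    ip = get_ipython()
    if ip is not None:
        ip.run_line_magic('logstart', '-o -t mylog.py rotate')  # log output, add timestamps
        ip.run_line_magic('logstate', '')                       # show logging status
        ip.run_line_magic('logstop', '')                        # close the log file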
diff --git a/contrib/python/ipython/py2/IPython/core/magics/namespace.py b/contrib/python/ipython/py2/IPython/core/magics/namespace.py
index fafecb191b..c02b38716b 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/namespace.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/namespace.py
@@ -1,704 +1,704 @@
-"""Implementation of namespace-related magic functions.
-"""
-from __future__ import print_function
-#-----------------------------------------------------------------------------
-# Copyright (c) 2012 The IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-# Stdlib
-import gc
-import re
-import sys
-
-# Our own packages
-from IPython.core import page
-from IPython.core.error import StdinNotImplementedError, UsageError
-from IPython.core.magic import Magics, magics_class, line_magic
-from IPython.testing.skipdoctest import skip_doctest
-from IPython.utils.encoding import DEFAULT_ENCODING
-from IPython.utils.openpy import read_py_file
-from IPython.utils.path import get_py_filename
-from IPython.utils.py3compat import unicode_type
-
-#-----------------------------------------------------------------------------
-# Magic implementation classes
-#-----------------------------------------------------------------------------
-
-@magics_class
-class NamespaceMagics(Magics):
- """Magics to manage various aspects of the user's namespace.
-
- These include listing variables, introspecting into them, etc.
- """
-
- @line_magic
- def pinfo(self, parameter_s='', namespaces=None):
- """Provide detailed information about an object.
-
- '%pinfo object' is just a synonym for object? or ?object."""
-
- #print 'pinfo par: <%s>' % parameter_s # dbg
- # detail_level: 0 -> obj? , 1 -> obj??
- detail_level = 0
- # We need to detect if we got called as 'pinfo pinfo foo', which can
- # happen if the user types 'pinfo foo?' at the cmd line.
- pinfo,qmark1,oname,qmark2 = \
- re.match('(pinfo )?(\?*)(.*?)(\??$)',parameter_s).groups()
- if pinfo or qmark1 or qmark2:
- detail_level = 1
- if "*" in oname:
- self.psearch(oname)
- else:
- self.shell._inspect('pinfo', oname, detail_level=detail_level,
- namespaces=namespaces)
-
- @line_magic
- def pinfo2(self, parameter_s='', namespaces=None):
- """Provide extra detailed information about an object.
-
- '%pinfo2 object' is just a synonym for object?? or ??object."""
- self.shell._inspect('pinfo', parameter_s, detail_level=1,
- namespaces=namespaces)
-
- @skip_doctest
- @line_magic
- def pdef(self, parameter_s='', namespaces=None):
- """Print the call signature for any callable object.
-
- If the object is a class, print the constructor information.
-
- Examples
- --------
- ::
-
- In [3]: %pdef urllib.urlopen
- urllib.urlopen(url, data=None, proxies=None)
- """
- self.shell._inspect('pdef',parameter_s, namespaces)
-
- @line_magic
- def pdoc(self, parameter_s='', namespaces=None):
- """Print the docstring for an object.
-
- If the given object is a class, it will print both the class and the
- constructor docstrings."""
- self.shell._inspect('pdoc',parameter_s, namespaces)
-
- @line_magic
- def psource(self, parameter_s='', namespaces=None):
- """Print (or run through pager) the source code for an object."""
- if not parameter_s:
- raise UsageError('Missing object name.')
- self.shell._inspect('psource',parameter_s, namespaces)
-
- @line_magic
- def pfile(self, parameter_s='', namespaces=None):
- """Print (or run through pager) the file where an object is defined.
-
- The file opens at the line where the object definition begins. IPython
- will honor the environment variable PAGER if set, and otherwise will
- do its best to print the file in a convenient form.
-
- If the given argument is not an object currently defined, IPython will
- try to interpret it as a filename (automatically adding a .py extension
- if needed). You can thus use %pfile as a syntax highlighting code
- viewer."""
-
- # first interpret argument as an object name
- out = self.shell._inspect('pfile',parameter_s, namespaces)
- # if not, try the input as a filename
- if out == 'not found':
- try:
- filename = get_py_filename(parameter_s)
- except IOError as msg:
- print(msg)
- return
- page.page(self.shell.pycolorize(read_py_file(filename, skip_encoding_cookie=False)))
-
- @line_magic
- def psearch(self, parameter_s=''):
- """Search for object in namespaces by wildcard.
-
- %psearch [options] PATTERN [OBJECT TYPE]
-
- Note: ? can be used as a synonym for %psearch, at the beginning or at
- the end: both a*? and ?a* are equivalent to '%psearch a*'. Still, the
- rest of the command line must be unchanged (options come first), so
- for example the following forms are equivalent
-
- %psearch -i a* function
- -i a* function?
- ?-i a* function
-
- Arguments:
-
- PATTERN
-
- where PATTERN is a string containing * as a wildcard similar to its
- use in a shell. The pattern is matched in all namespaces on the
- search path. By default objects starting with a single _ are not
-          search path. By default, objects starting with a single _ are not
-          matched; many IPython-generated objects have a single
-          underscore. The default is case-insensitive matching. Matching is
- in a module.
-
- [OBJECT TYPE]
-
-          Is the name of a python type from the types module. The name is
-          given in lowercase without the trailing "Type", e.g. StringType is
-          written as string. By adding a type here, only objects matching the
- given type are matched. Using all here makes the pattern match all
- types (this is the default).
-
- Options:
-
- -a: makes the pattern match even objects whose names start with a
- single underscore. These names are normally omitted from the
- search.
-
- -i/-c: make the pattern case insensitive/sensitive. If neither of
- these options are given, the default is read from your configuration
- file, with the option ``InteractiveShell.wildcards_case_sensitive``.
- If this option is not specified in your configuration file, IPython's
- internal default is to do a case sensitive search.
-
- -e/-s NAMESPACE: exclude/search a given namespace. The pattern you
- specify can be searched in any of the following namespaces:
- 'builtin', 'user', 'user_global','internal', 'alias', where
- 'builtin' and 'user' are the search defaults. Note that you should
- not use quotes when specifying namespaces.
-
- 'Builtin' contains the python module builtin, 'user' contains all
-        user data, 'alias' only contains the shell aliases and no python
- objects, 'internal' contains objects used by IPython. The
- 'user_global' namespace is only used by embedded IPython instances,
- and it contains module-level globals. You can add namespaces to the
- search with -s or exclude them with -e (these options can be given
- more than once).
-
- Examples
- --------
- ::
-
- %psearch a* -> objects beginning with an a
- %psearch -e builtin a* -> objects NOT in the builtin space starting in a
- %psearch a* function -> all functions beginning with an a
- %psearch re.e* -> objects beginning with an e in module re
- %psearch r*.e* -> objects that start with e in modules starting in r
- %psearch r*.* string -> all strings in modules beginning with r
-
- Case sensitive search::
-
-            %psearch -c a* list all objects beginning with lowercase a
-
- Show objects beginning with a single _::
-
- %psearch -a _* list objects beginning with a single underscore
- """
- try:
- parameter_s.encode('ascii')
- except UnicodeEncodeError:
- print('Python identifiers can only contain ascii characters.')
- return
-
- # default namespaces to be searched
- def_search = ['user_local', 'user_global', 'builtin']
-
- # Process options/args
- opts,args = self.parse_options(parameter_s,'cias:e:',list_all=True)
- opt = opts.get
- shell = self.shell
- psearch = shell.inspector.psearch
-
- # select case options
- if 'i' in opts:
- ignore_case = True
- elif 'c' in opts:
- ignore_case = False
- else:
- ignore_case = not shell.wildcards_case_sensitive
-
- # Build list of namespaces to search from user options
- def_search.extend(opt('s',[]))
-        ns_exclude = opt('e',[])
- ns_search = [nm for nm in def_search if nm not in ns_exclude]
-
- # Call the actual search
- try:
- psearch(args,shell.ns_table,ns_search,
- show_all=opt('a'),ignore_case=ignore_case)
- except:
- shell.showtraceback()
-
- @skip_doctest
- @line_magic
- def who_ls(self, parameter_s=''):
- """Return a sorted list of all interactive variables.
-
- If arguments are given, only variables of types matching these
- arguments are returned.
-
- Examples
- --------
-
- Define two variables and list them with who_ls::
-
- In [1]: alpha = 123
-
- In [2]: beta = 'test'
-
- In [3]: %who_ls
- Out[3]: ['alpha', 'beta']
-
- In [4]: %who_ls int
- Out[4]: ['alpha']
-
- In [5]: %who_ls str
- Out[5]: ['beta']
- """
-
- user_ns = self.shell.user_ns
- user_ns_hidden = self.shell.user_ns_hidden
- nonmatching = object() # This can never be in user_ns
- out = [ i for i in user_ns
- if not i.startswith('_') \
- and (user_ns[i] is not user_ns_hidden.get(i, nonmatching)) ]
-
- typelist = parameter_s.split()
- if typelist:
- typeset = set(typelist)
- out = [i for i in out if type(user_ns[i]).__name__ in typeset]
-
- out.sort()
- return out
-
- @skip_doctest
- @line_magic
- def who(self, parameter_s=''):
- """Print all interactive variables, with some minimal formatting.
-
- If any arguments are given, only variables whose type matches one of
- these are printed. For example::
-
- %who function str
-
- will only list functions and strings, excluding all other types of
- variables. To find the proper type names, simply use type(var) at a
- command line to see how python prints type names. For example:
-
- ::
-
- In [1]: type('hello')\\
- Out[1]: <type 'str'>
-
- indicates that the type name for strings is 'str'.
-
- ``%who`` always excludes executed names loaded through your configuration
- file and things which are internal to IPython.
-
- This is deliberate, as typically you may load many modules and the
- purpose of %who is to show you only what you've manually defined.
-
- Examples
- --------
-
- Define two variables and list them with who::
-
- In [1]: alpha = 123
-
- In [2]: beta = 'test'
-
- In [3]: %who
- alpha beta
-
- In [4]: %who int
- alpha
-
- In [5]: %who str
- beta
- """
-
- varlist = self.who_ls(parameter_s)
- if not varlist:
- if parameter_s:
- print('No variables match your requested type.')
- else:
- print('Interactive namespace is empty.')
- return
-
- # if we have variables, move on...
- count = 0
- for i in varlist:
- print(i+'\t', end=' ')
- count += 1
- if count > 8:
- count = 0
- print()
- print()
-
- @skip_doctest
- @line_magic
- def whos(self, parameter_s=''):
- """Like %who, but gives some extra information about each variable.
-
- The same type filtering of %who can be applied here.
-
- For all variables, the type is printed. Additionally it prints:
-
- - For {},[],(): their length.
-
- - For numpy arrays, a summary with shape, number of
- elements, typecode and size in memory.
-
- - Everything else: a string representation, snipping their middle if
- too long.
-
- Examples
- --------
-
- Define two variables and list them with whos::
-
- In [1]: alpha = 123
-
- In [2]: beta = 'test'
-
- In [3]: %whos
- Variable Type Data/Info
- --------------------------------
- alpha int 123
- beta str test
- """
-
- varnames = self.who_ls(parameter_s)
- if not varnames:
- if parameter_s:
- print('No variables match your requested type.')
- else:
- print('Interactive namespace is empty.')
- return
-
- # if we have variables, move on...
-
- # for these types, show len() instead of data:
- seq_types = ['dict', 'list', 'tuple']
-
- # for numpy arrays, display summary info
- ndarray_type = None
- if 'numpy' in sys.modules:
- try:
- from numpy import ndarray
- except ImportError:
- pass
- else:
- ndarray_type = ndarray.__name__
-
- # Find all variable names and types so we can figure out column sizes
-
- # some types are well known and can be shorter
- abbrevs = {'IPython.core.macro.Macro' : 'Macro'}
- def type_name(v):
- tn = type(v).__name__
- return abbrevs.get(tn,tn)
-
- varlist = [self.shell.user_ns[n] for n in varnames]
-
- typelist = []
- for vv in varlist:
- tt = type_name(vv)
-
- if tt=='instance':
- typelist.append( abbrevs.get(str(vv.__class__),
- str(vv.__class__)))
- else:
- typelist.append(tt)
-
- # column labels and # of spaces as separator
- varlabel = 'Variable'
- typelabel = 'Type'
- datalabel = 'Data/Info'
- colsep = 3
- # variable format strings
- vformat = "{0:<{varwidth}}{1:<{typewidth}}"
- aformat = "%s: %s elems, type `%s`, %s bytes"
- # find the size of the columns to format the output nicely
- varwidth = max(max(map(len,varnames)), len(varlabel)) + colsep
- typewidth = max(max(map(len,typelist)), len(typelabel)) + colsep
- # table header
- print(varlabel.ljust(varwidth) + typelabel.ljust(typewidth) + \
- ' '+datalabel+'\n' + '-'*(varwidth+typewidth+len(datalabel)+1))
- # and the table itself
- kb = 1024
- Mb = 1048576 # kb**2
- for vname,var,vtype in zip(varnames,varlist,typelist):
- print(vformat.format(vname, vtype, varwidth=varwidth, typewidth=typewidth), end=' ')
- if vtype in seq_types:
- print("n="+str(len(var)))
- elif vtype == ndarray_type:
- vshape = str(var.shape).replace(',','').replace(' ','x')[1:-1]
- if vtype==ndarray_type:
- # numpy
- vsize = var.size
- vbytes = vsize*var.itemsize
- vdtype = var.dtype
-
- if vbytes < 100000:
- print(aformat % (vshape, vsize, vdtype, vbytes))
- else:
- print(aformat % (vshape, vsize, vdtype, vbytes), end=' ')
- if vbytes < Mb:
- print('(%s kb)' % (vbytes/kb,))
- else:
- print('(%s Mb)' % (vbytes/Mb,))
- else:
- try:
- vstr = str(var)
- except UnicodeEncodeError:
- vstr = unicode_type(var).encode(DEFAULT_ENCODING,
- 'backslashreplace')
- except:
- vstr = "<object with id %d (str() failed)>" % id(var)
- vstr = vstr.replace('\n', '\\n')
- if len(vstr) < 50:
- print(vstr)
- else:
- print(vstr[:25] + "<...>" + vstr[-25:])
-
- @line_magic
- def reset(self, parameter_s=''):
- """Resets the namespace by removing all names defined by the user, if
- called without arguments, or by removing some types of objects, such
- as everything currently in IPython's In[] and Out[] containers (see
- the parameters for details).
-
- Parameters
- ----------
- -f : force reset without asking for confirmation.
-
- -s : 'Soft' reset: Only clears your namespace, leaving history intact.
- References to objects may be kept. By default (without this option),
- we do a 'hard' reset, giving you a new session and removing all
- references to objects from the current session.
-
- in : reset input history
-
- out : reset output history
-
- dhist : reset directory history
-
- array : reset only variables that are NumPy arrays
-
- See Also
- --------
- reset_selective : invoked as ``%reset_selective``
-
- Examples
- --------
- ::
-
- In [6]: a = 1
-
- In [7]: a
- Out[7]: 1
-
- In [8]: 'a' in _ip.user_ns
- Out[8]: True
-
- In [9]: %reset -f
-
- In [1]: 'a' in _ip.user_ns
- Out[1]: False
-
- In [2]: %reset -f in
- Flushing input history
-
- In [3]: %reset -f dhist in
- Flushing directory history
- Flushing input history
-
- Notes
- -----
- Calling this magic from clients that do not implement standard input,
- such as the ipython notebook interface, will reset the namespace
- without confirmation.
- """
- opts, args = self.parse_options(parameter_s,'sf', mode='list')
- if 'f' in opts:
- ans = True
- else:
- try:
- ans = self.shell.ask_yes_no(
- "Once deleted, variables cannot be recovered. Proceed (y/[n])?",
- default='n')
- except StdinNotImplementedError:
- ans = True
- if not ans:
- print('Nothing done.')
- return
-
- if 's' in opts: # Soft reset
- user_ns = self.shell.user_ns
- for i in self.who_ls():
- del(user_ns[i])
- elif len(args) == 0: # Hard reset
- self.shell.reset(new_session = False)
-
-        # reset in/out/dhist/array: previously extensions/clearcmd.py
- ip = self.shell
- user_ns = self.shell.user_ns # local lookup, heavily used
-
- for target in args:
- target = target.lower() # make matches case insensitive
- if target == 'out':
- print("Flushing output cache (%d entries)" % len(user_ns['_oh']))
- self.shell.displayhook.flush()
-
- elif target == 'in':
- print("Flushing input history")
- pc = self.shell.displayhook.prompt_count + 1
- for n in range(1, pc):
- key = '_i'+repr(n)
- user_ns.pop(key,None)
- user_ns.update(dict(_i=u'',_ii=u'',_iii=u''))
- hm = ip.history_manager
-                # don't delete these, as %save and %macro depend on the
-                # length of these lists being preserved
- hm.input_hist_parsed[:] = [''] * pc
- hm.input_hist_raw[:] = [''] * pc
- # hm has internal machinery for _i,_ii,_iii, clear it out
- hm._i = hm._ii = hm._iii = hm._i00 = u''
-
- elif target == 'array':
- # Support cleaning up numpy arrays
- try:
- from numpy import ndarray
- # This must be done with items and not iteritems because
- # we're going to modify the dict in-place.
- for x,val in list(user_ns.items()):
- if isinstance(val,ndarray):
- del user_ns[x]
- except ImportError:
- print("reset array only works if Numpy is available.")
-
- elif target == 'dhist':
- print("Flushing directory history")
- del user_ns['_dh'][:]
-
- else:
- print("Don't know how to reset ", end=' ')
- print(target + ", please run `%reset?` for details")
-
- gc.collect()
-
- @line_magic
- def reset_selective(self, parameter_s=''):
- """Resets the namespace by removing names defined by the user.
-
- Input/Output history are left around in case you need them.
-
- %reset_selective [-f] regex
-
-        No action is taken if a regex is not included.
-
- Options
- -f : force reset without asking for confirmation.
-
- See Also
- --------
- reset : invoked as ``%reset``
-
- Examples
- --------
-
- We first fully reset the namespace so your output looks identical to
- this example for pedagogical reasons; in practice you do not need a
- full reset::
-
- In [1]: %reset -f
-
- Now, with a clean namespace we can make a few variables and use
- ``%reset_selective`` to only delete names that match our regexp::
-
- In [2]: a=1; b=2; c=3; b1m=4; b2m=5; b3m=6; b4m=7; b2s=8
-
- In [3]: who_ls
- Out[3]: ['a', 'b', 'b1m', 'b2m', 'b2s', 'b3m', 'b4m', 'c']
-
- In [4]: %reset_selective -f b[2-3]m
-
- In [5]: who_ls
- Out[5]: ['a', 'b', 'b1m', 'b2s', 'b4m', 'c']
-
- In [6]: %reset_selective -f d
-
- In [7]: who_ls
- Out[7]: ['a', 'b', 'b1m', 'b2s', 'b4m', 'c']
-
- In [8]: %reset_selective -f c
-
- In [9]: who_ls
- Out[9]: ['a', 'b', 'b1m', 'b2s', 'b4m']
-
- In [10]: %reset_selective -f b
-
- In [11]: who_ls
- Out[11]: ['a']
-
- Notes
- -----
- Calling this magic from clients that do not implement standard input,
- such as the ipython notebook interface, will reset the namespace
- without confirmation.
- """
-
- opts, regex = self.parse_options(parameter_s,'f')
-
- if 'f' in opts:
- ans = True
- else:
- try:
- ans = self.shell.ask_yes_no(
- "Once deleted, variables cannot be recovered. Proceed (y/[n])? ",
- default='n')
- except StdinNotImplementedError:
- ans = True
- if not ans:
- print('Nothing done.')
- return
- user_ns = self.shell.user_ns
- if not regex:
- print('No regex pattern specified. Nothing done.')
- return
- else:
- try:
- m = re.compile(regex)
- except TypeError:
- raise TypeError('regex must be a string or compiled pattern')
- for i in self.who_ls():
- if m.search(i):
- del(user_ns[i])
-
- @line_magic
- def xdel(self, parameter_s=''):
- """Delete a variable, trying to clear it from anywhere that
- IPython's machinery has references to it. By default, this uses
- the identity of the named object in the user namespace to remove
- references held under other names. The object is also removed
- from the output history.
-
- Options
- -n : Delete the specified name from all namespaces, without
- checking their identity.
- """
- opts, varname = self.parse_options(parameter_s,'n')
- try:
- self.shell.del_var(varname, ('n' in opts))
- except (NameError, ValueError) as e:
- print(type(e).__name__ +": "+ str(e))
+"""Implementation of namespace-related magic functions.
+"""
+from __future__ import print_function
+#-----------------------------------------------------------------------------
+# Copyright (c) 2012 The IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+# Stdlib
+import gc
+import re
+import sys
+
+# Our own packages
+from IPython.core import page
+from IPython.core.error import StdinNotImplementedError, UsageError
+from IPython.core.magic import Magics, magics_class, line_magic
+from IPython.testing.skipdoctest import skip_doctest
+from IPython.utils.encoding import DEFAULT_ENCODING
+from IPython.utils.openpy import read_py_file
+from IPython.utils.path import get_py_filename
+from IPython.utils.py3compat import unicode_type
+
+#-----------------------------------------------------------------------------
+# Magic implementation classes
+#-----------------------------------------------------------------------------
+
+@magics_class
+class NamespaceMagics(Magics):
+ """Magics to manage various aspects of the user's namespace.
+
+ These include listing variables, introspecting into them, etc.
+ """
+
+ @line_magic
+ def pinfo(self, parameter_s='', namespaces=None):
+ """Provide detailed information about an object.
+
+ '%pinfo object' is just a synonym for object? or ?object."""
+
+ #print 'pinfo par: <%s>' % parameter_s # dbg
+ # detail_level: 0 -> obj? , 1 -> obj??
+ detail_level = 0
+ # We need to detect if we got called as 'pinfo pinfo foo', which can
+ # happen if the user types 'pinfo foo?' at the cmd line.
+ pinfo,qmark1,oname,qmark2 = \
+ re.match('(pinfo )?(\?*)(.*?)(\??$)',parameter_s).groups()
+ if pinfo or qmark1 or qmark2:
+ detail_level = 1
+ if "*" in oname:
+ self.psearch(oname)
+ else:
+ self.shell._inspect('pinfo', oname, detail_level=detail_level,
+ namespaces=namespaces)
+
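
The detail-level handling above hinges on one regular expression that splits an input like 'pinfo foo?' into the object name and a 0/1 detail level. A minimal standalone sketch of that parsing, outside of IPython (parse_pinfo_request is an illustrative name, not IPython API):

    import re

    def parse_pinfo_request(line):
        # Same pattern as above: optional 'pinfo ' prefix, leading '?'s,
        # the object name, and an optional trailing '?'.
        pinfo, qmark1, oname, qmark2 = re.match(
            r'(pinfo )?(\?*)(.*?)(\??$)', line).groups()
        detail_level = 1 if (pinfo or qmark1 or qmark2) else 0
        return oname, detail_level

    print(parse_pinfo_request('foo'))    # ('foo', 0)
    print(parse_pinfo_request('foo?'))   # ('foo', 1)
    print(parse_pinfo_request('??foo'))  # ('foo', 1)
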
+ @line_magic
+ def pinfo2(self, parameter_s='', namespaces=None):
+ """Provide extra detailed information about an object.
+
+ '%pinfo2 object' is just a synonym for object?? or ??object."""
+ self.shell._inspect('pinfo', parameter_s, detail_level=1,
+ namespaces=namespaces)
+
+ @skip_doctest
+ @line_magic
+ def pdef(self, parameter_s='', namespaces=None):
+ """Print the call signature for any callable object.
+
+ If the object is a class, print the constructor information.
+
+ Examples
+ --------
+ ::
+
+ In [3]: %pdef urllib.urlopen
+ urllib.urlopen(url, data=None, proxies=None)
+ """
+ self.shell._inspect('pdef',parameter_s, namespaces)
+
+ @line_magic
+ def pdoc(self, parameter_s='', namespaces=None):
+ """Print the docstring for an object.
+
+ If the given object is a class, it will print both the class and the
+ constructor docstrings."""
+ self.shell._inspect('pdoc',parameter_s, namespaces)
+
+ @line_magic
+ def psource(self, parameter_s='', namespaces=None):
+ """Print (or run through pager) the source code for an object."""
+ if not parameter_s:
+ raise UsageError('Missing object name.')
+ self.shell._inspect('psource',parameter_s, namespaces)
+
+ @line_magic
+ def pfile(self, parameter_s='', namespaces=None):
+ """Print (or run through pager) the file where an object is defined.
+
+ The file opens at the line where the object definition begins. IPython
+ will honor the environment variable PAGER if set, and otherwise will
+ do its best to print the file in a convenient form.
+
+ If the given argument is not an object currently defined, IPython will
+ try to interpret it as a filename (automatically adding a .py extension
+ if needed). You can thus use %pfile as a syntax highlighting code
+ viewer."""
+
+ # first interpret argument as an object name
+ out = self.shell._inspect('pfile',parameter_s, namespaces)
+ # if not, try the input as a filename
+ if out == 'not found':
+ try:
+ filename = get_py_filename(parameter_s)
+ except IOError as msg:
+ print(msg)
+ return
+ page.page(self.shell.pycolorize(read_py_file(filename, skip_encoding_cookie=False)))
+
+ @line_magic
+ def psearch(self, parameter_s=''):
+ """Search for object in namespaces by wildcard.
+
+ %psearch [options] PATTERN [OBJECT TYPE]
+
+ Note: ? can be used as a synonym for %psearch, at the beginning or at
+ the end: both a*? and ?a* are equivalent to '%psearch a*'. Still, the
+ rest of the command line must be unchanged (options come first), so
+ for example the following forms are equivalent
+
+ %psearch -i a* function
+ -i a* function?
+ ?-i a* function
+
+ Arguments:
+
+ PATTERN
+
+ where PATTERN is a string containing * as a wildcard similar to its
+ use in a shell. The pattern is matched in all namespaces on the
+ search path. By default objects starting with a single _ are not
+        matched; many IPython-generated objects have a single
+ underscore. The default is case insensitive matching. Matching is
+ also done on the attributes of objects and not only on the objects
+ in a module.
+
+ [OBJECT TYPE]
+
+ Is the name of a python type from the types module. The name is
+ given in lowercase without the ending type, ex. StringType is
+ written string. By adding a type here only objects matching the
+ given type are matched. Using all here makes the pattern match all
+ types (this is the default).
+
+ Options:
+
+ -a: makes the pattern match even objects whose names start with a
+ single underscore. These names are normally omitted from the
+ search.
+
+ -i/-c: make the pattern case insensitive/sensitive. If neither of
+        these options is given, the default is read from your configuration
+ file, with the option ``InteractiveShell.wildcards_case_sensitive``.
+ If this option is not specified in your configuration file, IPython's
+ internal default is to do a case sensitive search.
+
+ -e/-s NAMESPACE: exclude/search a given namespace. The pattern you
+ specify can be searched in any of the following namespaces:
+ 'builtin', 'user', 'user_global','internal', 'alias', where
+ 'builtin' and 'user' are the search defaults. Note that you should
+ not use quotes when specifying namespaces.
+
+ 'Builtin' contains the python module builtin, 'user' contains all
+        user data, 'alias' only contains the shell aliases and no python
+ objects, 'internal' contains objects used by IPython. The
+ 'user_global' namespace is only used by embedded IPython instances,
+ and it contains module-level globals. You can add namespaces to the
+ search with -s or exclude them with -e (these options can be given
+ more than once).
+
+ Examples
+ --------
+ ::
+
+ %psearch a* -> objects beginning with an a
+ %psearch -e builtin a* -> objects NOT in the builtin space starting in a
+ %psearch a* function -> all functions beginning with an a
+ %psearch re.e* -> objects beginning with an e in module re
+ %psearch r*.e* -> objects that start with e in modules starting in r
+ %psearch r*.* string -> all strings in modules beginning with r
+
+ Case sensitive search::
+
+          %psearch -c a*   list all objects beginning with lower case a
+
+ Show objects beginning with a single _::
+
+ %psearch -a _* list objects beginning with a single underscore
+ """
+ try:
+ parameter_s.encode('ascii')
+ except UnicodeEncodeError:
+ print('Python identifiers can only contain ascii characters.')
+ return
+
+ # default namespaces to be searched
+ def_search = ['user_local', 'user_global', 'builtin']
+
+ # Process options/args
+ opts,args = self.parse_options(parameter_s,'cias:e:',list_all=True)
+ opt = opts.get
+ shell = self.shell
+ psearch = shell.inspector.psearch
+
+ # select case options
+ if 'i' in opts:
+ ignore_case = True
+ elif 'c' in opts:
+ ignore_case = False
+ else:
+ ignore_case = not shell.wildcards_case_sensitive
+
+ # Build list of namespaces to search from user options
+ def_search.extend(opt('s',[]))
+        ns_exclude = opt('e',[])
+ ns_search = [nm for nm in def_search if nm not in ns_exclude]
+
+ # Call the actual search
+ try:
+ psearch(args,shell.ns_table,ns_search,
+ show_all=opt('a'),ignore_case=ignore_case)
+ except:
+ shell.showtraceback()
+
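
A minimal, dependency-free sketch of the -s/-e namespace selection performed above: start from the default search list, extend it with -s values, and drop anything named by -e. The helper and argument names are illustrative only:

    def select_namespaces(include=(), exclude=()):
        # Defaults mirror def_search in the magic above.
        def_search = ['user_local', 'user_global', 'builtin']
        def_search.extend(include)
        return [ns for ns in def_search if ns not in exclude]

    print(select_namespaces())
    # ['user_local', 'user_global', 'builtin']
    print(select_namespaces(include=['internal'], exclude=['builtin']))
    # ['user_local', 'user_global', 'internal']
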
+ @skip_doctest
+ @line_magic
+ def who_ls(self, parameter_s=''):
+ """Return a sorted list of all interactive variables.
+
+ If arguments are given, only variables of types matching these
+ arguments are returned.
+
+ Examples
+ --------
+
+ Define two variables and list them with who_ls::
+
+ In [1]: alpha = 123
+
+ In [2]: beta = 'test'
+
+ In [3]: %who_ls
+ Out[3]: ['alpha', 'beta']
+
+ In [4]: %who_ls int
+ Out[4]: ['alpha']
+
+ In [5]: %who_ls str
+ Out[5]: ['beta']
+ """
+
+ user_ns = self.shell.user_ns
+ user_ns_hidden = self.shell.user_ns_hidden
+ nonmatching = object() # This can never be in user_ns
+ out = [ i for i in user_ns
+ if not i.startswith('_') \
+ and (user_ns[i] is not user_ns_hidden.get(i, nonmatching)) ]
+
+ typelist = parameter_s.split()
+ if typelist:
+ typeset = set(typelist)
+ out = [i for i in out if type(user_ns[i]).__name__ in typeset]
+
+ out.sort()
+ return out
+
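
The filtering above can be reproduced with plain dictionaries. A standalone sketch, where list_interactive is a hypothetical helper and the empty 'hidden' mapping stands in for shell.user_ns_hidden:

    def list_interactive(user_ns, hidden, type_names=()):
        # Skip names starting with '_', skip names whose value is the one
        # the shell itself injected, then optionally filter by type name.
        missing = object()  # sentinel that can never be a user value
        out = [n for n in user_ns
               if not n.startswith('_')
               and user_ns[n] is not hidden.get(n, missing)]
        if type_names:
            wanted = set(type_names)
            out = [n for n in out if type(user_ns[n]).__name__ in wanted]
        return sorted(out)

    ns = {'alpha': 123, 'beta': 'test', '_tmp': 0}
    print(list_interactive(ns, hidden={}))                      # ['alpha', 'beta']
    print(list_interactive(ns, hidden={}, type_names=['int']))  # ['alpha']
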
+ @skip_doctest
+ @line_magic
+ def who(self, parameter_s=''):
+ """Print all interactive variables, with some minimal formatting.
+
+ If any arguments are given, only variables whose type matches one of
+ these are printed. For example::
+
+ %who function str
+
+ will only list functions and strings, excluding all other types of
+ variables. To find the proper type names, simply use type(var) at a
+ command line to see how python prints type names. For example:
+
+ ::
+
+ In [1]: type('hello')\\
+ Out[1]: <type 'str'>
+
+ indicates that the type name for strings is 'str'.
+
+ ``%who`` always excludes executed names loaded through your configuration
+ file and things which are internal to IPython.
+
+ This is deliberate, as typically you may load many modules and the
+ purpose of %who is to show you only what you've manually defined.
+
+ Examples
+ --------
+
+ Define two variables and list them with who::
+
+ In [1]: alpha = 123
+
+ In [2]: beta = 'test'
+
+ In [3]: %who
+ alpha beta
+
+ In [4]: %who int
+ alpha
+
+ In [5]: %who str
+ beta
+ """
+
+ varlist = self.who_ls(parameter_s)
+ if not varlist:
+ if parameter_s:
+ print('No variables match your requested type.')
+ else:
+ print('Interactive namespace is empty.')
+ return
+
+ # if we have variables, move on...
+ count = 0
+ for i in varlist:
+ print(i+'\t', end=' ')
+ count += 1
+ if count > 8:
+ count = 0
+ print()
+ print()
+
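
A short sketch of the column layout used by %who above, assuming nothing beyond the standard library (print_columns is a made-up name): names are printed tab-separated with a line break after every ninth name.

    def print_columns(names, per_row=9):
        for i, name in enumerate(names, 1):
            print(name + '\t', end=' ')
            if i % per_row == 0:
                print()
        print()

    print_columns(['alpha', 'beta', 'gamma'])
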
+ @skip_doctest
+ @line_magic
+ def whos(self, parameter_s=''):
+ """Like %who, but gives some extra information about each variable.
+
+ The same type filtering of %who can be applied here.
+
+ For all variables, the type is printed. Additionally it prints:
+
+ - For {},[],(): their length.
+
+ - For numpy arrays, a summary with shape, number of
+ elements, typecode and size in memory.
+
+ - Everything else: a string representation, snipping their middle if
+ too long.
+
+ Examples
+ --------
+
+ Define two variables and list them with whos::
+
+ In [1]: alpha = 123
+
+ In [2]: beta = 'test'
+
+ In [3]: %whos
+ Variable Type Data/Info
+ --------------------------------
+ alpha int 123
+ beta str test
+ """
+
+ varnames = self.who_ls(parameter_s)
+ if not varnames:
+ if parameter_s:
+ print('No variables match your requested type.')
+ else:
+ print('Interactive namespace is empty.')
+ return
+
+ # if we have variables, move on...
+
+ # for these types, show len() instead of data:
+ seq_types = ['dict', 'list', 'tuple']
+
+ # for numpy arrays, display summary info
+ ndarray_type = None
+ if 'numpy' in sys.modules:
+ try:
+ from numpy import ndarray
+ except ImportError:
+ pass
+ else:
+ ndarray_type = ndarray.__name__
+
+ # Find all variable names and types so we can figure out column sizes
+
+ # some types are well known and can be shorter
+ abbrevs = {'IPython.core.macro.Macro' : 'Macro'}
+ def type_name(v):
+ tn = type(v).__name__
+ return abbrevs.get(tn,tn)
+
+ varlist = [self.shell.user_ns[n] for n in varnames]
+
+ typelist = []
+ for vv in varlist:
+ tt = type_name(vv)
+
+ if tt=='instance':
+ typelist.append( abbrevs.get(str(vv.__class__),
+ str(vv.__class__)))
+ else:
+ typelist.append(tt)
+
+ # column labels and # of spaces as separator
+ varlabel = 'Variable'
+ typelabel = 'Type'
+ datalabel = 'Data/Info'
+ colsep = 3
+ # variable format strings
+ vformat = "{0:<{varwidth}}{1:<{typewidth}}"
+ aformat = "%s: %s elems, type `%s`, %s bytes"
+ # find the size of the columns to format the output nicely
+ varwidth = max(max(map(len,varnames)), len(varlabel)) + colsep
+ typewidth = max(max(map(len,typelist)), len(typelabel)) + colsep
+ # table header
+ print(varlabel.ljust(varwidth) + typelabel.ljust(typewidth) + \
+ ' '+datalabel+'\n' + '-'*(varwidth+typewidth+len(datalabel)+1))
+ # and the table itself
+ kb = 1024
+ Mb = 1048576 # kb**2
+ for vname,var,vtype in zip(varnames,varlist,typelist):
+ print(vformat.format(vname, vtype, varwidth=varwidth, typewidth=typewidth), end=' ')
+ if vtype in seq_types:
+ print("n="+str(len(var)))
+ elif vtype == ndarray_type:
+ vshape = str(var.shape).replace(',','').replace(' ','x')[1:-1]
+ if vtype==ndarray_type:
+ # numpy
+ vsize = var.size
+ vbytes = vsize*var.itemsize
+ vdtype = var.dtype
+
+ if vbytes < 100000:
+ print(aformat % (vshape, vsize, vdtype, vbytes))
+ else:
+ print(aformat % (vshape, vsize, vdtype, vbytes), end=' ')
+ if vbytes < Mb:
+ print('(%s kb)' % (vbytes/kb,))
+ else:
+ print('(%s Mb)' % (vbytes/Mb,))
+ else:
+ try:
+ vstr = str(var)
+ except UnicodeEncodeError:
+ vstr = unicode_type(var).encode(DEFAULT_ENCODING,
+ 'backslashreplace')
+ except:
+ vstr = "<object with id %d (str() failed)>" % id(var)
+ vstr = vstr.replace('\n', '\\n')
+ if len(vstr) < 50:
+ print(vstr)
+ else:
+ print(vstr[:25] + "<...>" + vstr[-25:])
+
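
The table layout above boils down to sizing each column from its longest entry plus a fixed separator, then reusing one format string for the header and every row. A rough sketch with hard-coded sample rows (the data is illustrative):

    rows = [('alpha', 'int', '123'), ('beta', 'str', 'test')]
    labels = ('Variable', 'Type', 'Data/Info')
    colsep = 3
    varwidth = max(max(len(r[0]) for r in rows), len(labels[0])) + colsep
    typewidth = max(max(len(r[1]) for r in rows), len(labels[1])) + colsep
    fmt = "{0:<{vw}}{1:<{tw}}{2}"
    print(fmt.format(labels[0], labels[1], labels[2], vw=varwidth, tw=typewidth))
    print('-' * (varwidth + typewidth + len(labels[2])))
    for name, tname, data in rows:
        print(fmt.format(name, tname, data, vw=varwidth, tw=typewidth))
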
+ @line_magic
+ def reset(self, parameter_s=''):
+ """Resets the namespace by removing all names defined by the user, if
+ called without arguments, or by removing some types of objects, such
+ as everything currently in IPython's In[] and Out[] containers (see
+ the parameters for details).
+
+ Parameters
+ ----------
+ -f : force reset without asking for confirmation.
+
+ -s : 'Soft' reset: Only clears your namespace, leaving history intact.
+ References to objects may be kept. By default (without this option),
+ we do a 'hard' reset, giving you a new session and removing all
+ references to objects from the current session.
+
+ in : reset input history
+
+ out : reset output history
+
+ dhist : reset directory history
+
+ array : reset only variables that are NumPy arrays
+
+ See Also
+ --------
+ reset_selective : invoked as ``%reset_selective``
+
+ Examples
+ --------
+ ::
+
+ In [6]: a = 1
+
+ In [7]: a
+ Out[7]: 1
+
+ In [8]: 'a' in _ip.user_ns
+ Out[8]: True
+
+ In [9]: %reset -f
+
+ In [1]: 'a' in _ip.user_ns
+ Out[1]: False
+
+ In [2]: %reset -f in
+ Flushing input history
+
+ In [3]: %reset -f dhist in
+ Flushing directory history
+ Flushing input history
+
+ Notes
+ -----
+ Calling this magic from clients that do not implement standard input,
+ such as the ipython notebook interface, will reset the namespace
+ without confirmation.
+ """
+ opts, args = self.parse_options(parameter_s,'sf', mode='list')
+ if 'f' in opts:
+ ans = True
+ else:
+ try:
+ ans = self.shell.ask_yes_no(
+ "Once deleted, variables cannot be recovered. Proceed (y/[n])?",
+ default='n')
+ except StdinNotImplementedError:
+ ans = True
+ if not ans:
+ print('Nothing done.')
+ return
+
+ if 's' in opts: # Soft reset
+ user_ns = self.shell.user_ns
+ for i in self.who_ls():
+ del(user_ns[i])
+ elif len(args) == 0: # Hard reset
+ self.shell.reset(new_session = False)
+
+        # reset in/out/dhist/array: previously extensions/clearcmd.py
+ ip = self.shell
+ user_ns = self.shell.user_ns # local lookup, heavily used
+
+ for target in args:
+ target = target.lower() # make matches case insensitive
+ if target == 'out':
+ print("Flushing output cache (%d entries)" % len(user_ns['_oh']))
+ self.shell.displayhook.flush()
+
+ elif target == 'in':
+ print("Flushing input history")
+ pc = self.shell.displayhook.prompt_count + 1
+ for n in range(1, pc):
+ key = '_i'+repr(n)
+ user_ns.pop(key,None)
+ user_ns.update(dict(_i=u'',_ii=u'',_iii=u''))
+ hm = ip.history_manager
+                # don't delete these, as %save and %macro depend on the
+                # length of these lists being preserved
+ hm.input_hist_parsed[:] = [''] * pc
+ hm.input_hist_raw[:] = [''] * pc
+ # hm has internal machinery for _i,_ii,_iii, clear it out
+ hm._i = hm._ii = hm._iii = hm._i00 = u''
+
+ elif target == 'array':
+ # Support cleaning up numpy arrays
+ try:
+ from numpy import ndarray
+ # This must be done with items and not iteritems because
+ # we're going to modify the dict in-place.
+ for x,val in list(user_ns.items()):
+ if isinstance(val,ndarray):
+ del user_ns[x]
+ except ImportError:
+ print("reset array only works if Numpy is available.")
+
+ elif target == 'dhist':
+ print("Flushing directory history")
+ del user_ns['_dh'][:]
+
+ else:
+ print("Don't know how to reset ", end=' ')
+ print(target + ", please run `%reset?` for details")
+
+ gc.collect()
+
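
The "%reset in" branch above clears the numbered input caches by key. A tiny sketch of just that bookkeeping, using an ordinary dict in place of the user namespace: entries '_i1', '_i2', ... are popped, while '_i', '_ii' and '_iii' are blanked rather than deleted.

    user_ns = {'_i1': 'a = 1', '_i2': 'a', '_i': 'a', '_ii': 'a = 1', '_iii': ''}
    prompt_count = 2
    for n in range(1, prompt_count + 1):
        user_ns.pop('_i' + repr(n), None)
    user_ns.update(dict(_i=u'', _ii=u'', _iii=u''))
    print(user_ns)   # {'_i': '', '_ii': '', '_iii': ''}
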
+ @line_magic
+ def reset_selective(self, parameter_s=''):
+ """Resets the namespace by removing names defined by the user.
+
+ Input/Output history are left around in case you need them.
+
+ %reset_selective [-f] regex
+
+        No action is taken if a regex is not included.
+
+ Options
+ -f : force reset without asking for confirmation.
+
+ See Also
+ --------
+ reset : invoked as ``%reset``
+
+ Examples
+ --------
+
+ We first fully reset the namespace so your output looks identical to
+ this example for pedagogical reasons; in practice you do not need a
+ full reset::
+
+ In [1]: %reset -f
+
+ Now, with a clean namespace we can make a few variables and use
+ ``%reset_selective`` to only delete names that match our regexp::
+
+ In [2]: a=1; b=2; c=3; b1m=4; b2m=5; b3m=6; b4m=7; b2s=8
+
+ In [3]: who_ls
+ Out[3]: ['a', 'b', 'b1m', 'b2m', 'b2s', 'b3m', 'b4m', 'c']
+
+ In [4]: %reset_selective -f b[2-3]m
+
+ In [5]: who_ls
+ Out[5]: ['a', 'b', 'b1m', 'b2s', 'b4m', 'c']
+
+ In [6]: %reset_selective -f d
+
+ In [7]: who_ls
+ Out[7]: ['a', 'b', 'b1m', 'b2s', 'b4m', 'c']
+
+ In [8]: %reset_selective -f c
+
+ In [9]: who_ls
+ Out[9]: ['a', 'b', 'b1m', 'b2s', 'b4m']
+
+ In [10]: %reset_selective -f b
+
+ In [11]: who_ls
+ Out[11]: ['a']
+
+ Notes
+ -----
+ Calling this magic from clients that do not implement standard input,
+ such as the ipython notebook interface, will reset the namespace
+ without confirmation.
+ """
+
+ opts, regex = self.parse_options(parameter_s,'f')
+
+ if 'f' in opts:
+ ans = True
+ else:
+ try:
+ ans = self.shell.ask_yes_no(
+ "Once deleted, variables cannot be recovered. Proceed (y/[n])? ",
+ default='n')
+ except StdinNotImplementedError:
+ ans = True
+ if not ans:
+ print('Nothing done.')
+ return
+ user_ns = self.shell.user_ns
+ if not regex:
+ print('No regex pattern specified. Nothing done.')
+ return
+ else:
+ try:
+ m = re.compile(regex)
+ except TypeError:
+ raise TypeError('regex must be a string or compiled pattern')
+ for i in self.who_ls():
+ if m.search(i):
+ del(user_ns[i])
+
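
A minimal sketch of the selective deletion above, using a plain dict for the namespace. Note that the magic uses re.search, so the pattern may match anywhere in the name:

    import re

    user_ns = {'a': 1, 'b': 2, 'b1m': 4, 'b2m': 5, 'b2s': 8}
    pattern = re.compile(r'b[2-3]m')
    # Collect the matching names first so the dict is not mutated while
    # iterating over it.
    for name in [n for n in user_ns if pattern.search(n)]:
        del user_ns[name]
    print(sorted(user_ns))   # ['a', 'b', 'b1m', 'b2s']
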
+ @line_magic
+ def xdel(self, parameter_s=''):
+ """Delete a variable, trying to clear it from anywhere that
+ IPython's machinery has references to it. By default, this uses
+ the identity of the named object in the user namespace to remove
+ references held under other names. The object is also removed
+ from the output history.
+
+ Options
+ -n : Delete the specified name from all namespaces, without
+ checking their identity.
+ """
+ opts, varname = self.parse_options(parameter_s,'n')
+ try:
+ self.shell.del_var(varname, ('n' in opts))
+ except (NameError, ValueError) as e:
+ print(type(e).__name__ +": "+ str(e))
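
For context, magics classes like the one above are normally registered on the running shell. A hedged usage sketch, assuming a live IPython session; outside IPython, get_ipython() returns None and nothing happens:

    from IPython import get_ipython
    from IPython.core.magics.namespace import NamespaceMagics

    ip = get_ipython()
    if ip is not None:
        # Attach the %who/%whos/%reset family to this shell instance.
        ip.register_magics(NamespaceMagics)
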
diff --git a/contrib/python/ipython/py2/IPython/core/magics/osm.py b/contrib/python/ipython/py2/IPython/core/magics/osm.py
index e4c3bbc7b6..352cf2d451 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/osm.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/osm.py
@@ -1,790 +1,790 @@
-"""Implementation of magic functions for interaction with the OS.
-
-Note: this module is named 'osm' instead of 'os' to avoid a collision with the
-builtin.
-"""
-from __future__ import print_function
-#-----------------------------------------------------------------------------
-# Copyright (c) 2012 The IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-# Stdlib
-import io
-import os
-import re
-import sys
-from pprint import pformat
-
-# Our own packages
-from IPython.core import magic_arguments
-from IPython.core import oinspect
-from IPython.core import page
-from IPython.core.alias import AliasError, Alias
-from IPython.core.error import UsageError
-from IPython.core.magic import (
- Magics, compress_dhist, magics_class, line_magic, cell_magic, line_cell_magic
-)
-from IPython.testing.skipdoctest import skip_doctest
-from IPython.utils.openpy import source_to_unicode
-from IPython.utils.process import abbrev_cwd
-from IPython.utils import py3compat
-from IPython.utils.py3compat import unicode_type
-from IPython.utils.terminal import set_term_title
-
-#-----------------------------------------------------------------------------
-# Magic implementation classes
-#-----------------------------------------------------------------------------
-@magics_class
-class OSMagics(Magics):
- """Magics to interact with the underlying OS (shell-type functionality).
- """
-
- @skip_doctest
- @line_magic
- def alias(self, parameter_s=''):
- """Define an alias for a system command.
-
- '%alias alias_name cmd' defines 'alias_name' as an alias for 'cmd'
-
- Then, typing 'alias_name params' will execute the system command 'cmd
- params' (from your underlying operating system).
-
- Aliases have lower precedence than magic functions and Python normal
- variables, so if 'foo' is both a Python variable and an alias, the
- alias can not be executed until 'del foo' removes the Python variable.
-
- You can use the %l specifier in an alias definition to represent the
- whole line when the alias is called. For example::
-
- In [2]: alias bracket echo "Input in brackets: <%l>"
- In [3]: bracket hello world
- Input in brackets: <hello world>
-
- You can also define aliases with parameters using %s specifiers (one
- per parameter)::
-
- In [1]: alias parts echo first %s second %s
- In [2]: %parts A B
- first A second B
- In [3]: %parts A
- Incorrect number of arguments: 2 expected.
- parts is an alias to: 'echo first %s second %s'
-
- Note that %l and %s are mutually exclusive. You can only use one or
- the other in your aliases.
-
- Aliases expand Python variables just like system calls using ! or !!
- do: all expressions prefixed with '$' get expanded. For details of
- the semantic rules, see PEP-215:
- http://www.python.org/peps/pep-0215.html. This is the library used by
- IPython for variable expansion. If you want to access a true shell
- variable, an extra $ is necessary to prevent its expansion by
- IPython::
-
- In [6]: alias show echo
- In [7]: PATH='A Python string'
- In [8]: show $PATH
- A Python string
- In [9]: show $$PATH
- /usr/local/lf9560/bin:/usr/local/intel/compiler70/ia32/bin:...
-
-        You can use the alias facility to access all of $PATH. See the %rehashx
- function, which automatically creates aliases for the contents of your
- $PATH.
-
- If called with no parameters, %alias prints the current alias table."""
-
- par = parameter_s.strip()
- if not par:
- aliases = sorted(self.shell.alias_manager.aliases)
- # stored = self.shell.db.get('stored_aliases', {} )
- # for k, v in stored:
- # atab.append(k, v[0])
-
- print("Total number of aliases:", len(aliases))
- sys.stdout.flush()
- return aliases
-
- # Now try to define a new one
- try:
- alias,cmd = par.split(None, 1)
- except TypeError:
- print(oinspect.getdoc(self.alias))
- return
-
- try:
- self.shell.alias_manager.define_alias(alias, cmd)
- except AliasError as e:
- print(e)
- # end magic_alias
-
- @line_magic
- def unalias(self, parameter_s=''):
- """Remove an alias"""
-
- aname = parameter_s.strip()
- try:
- self.shell.alias_manager.undefine_alias(aname)
- except ValueError as e:
- print(e)
- return
-
- stored = self.shell.db.get('stored_aliases', {} )
- if aname in stored:
-            print("Removing stored alias", aname)
- del stored[aname]
- self.shell.db['stored_aliases'] = stored
-
- @line_magic
- def rehashx(self, parameter_s=''):
- """Update the alias table with all executable files in $PATH.
-
- rehashx explicitly checks that every entry in $PATH is a file
- with execute access (os.X_OK).
-
- Under Windows, it checks executability as a match against a
- '|'-separated string of extensions, stored in the IPython config
- variable win_exec_ext. This defaults to 'exe|com|bat'.
-
-        This function also resets the module completer's root module cache,
-        which is used on slow filesystems.
- """
- from IPython.core.alias import InvalidAliasError
-
- # for the benefit of module completer in ipy_completers.py
- del self.shell.db['rootmodules_cache']
-
- path = [os.path.abspath(os.path.expanduser(p)) for p in
- os.environ.get('PATH','').split(os.pathsep)]
-
- syscmdlist = []
- # Now define isexec in a cross platform manner.
- if os.name == 'posix':
- isexec = lambda fname:os.path.isfile(fname) and \
- os.access(fname,os.X_OK)
- else:
- try:
- winext = os.environ['pathext'].replace(';','|').replace('.','')
- except KeyError:
- winext = 'exe|com|bat|py'
- if 'py' not in winext:
- winext += '|py'
- execre = re.compile(r'(.*)\.(%s)$' % winext,re.IGNORECASE)
- isexec = lambda fname:os.path.isfile(fname) and execre.match(fname)
- savedir = py3compat.getcwd()
-
- # Now walk the paths looking for executables to alias.
- try:
- # write the whole loop for posix/Windows so we don't have an if in
- # the innermost part
- if os.name == 'posix':
- for pdir in path:
- try:
- os.chdir(pdir)
- dirlist = os.listdir(pdir)
- except OSError:
- continue
- for ff in dirlist:
- if isexec(ff):
- try:
- # Removes dots from the name since ipython
- # will assume names with dots to be python.
- if not self.shell.alias_manager.is_alias(ff):
- self.shell.alias_manager.define_alias(
- ff.replace('.',''), ff)
- except InvalidAliasError:
- pass
- else:
- syscmdlist.append(ff)
- else:
- no_alias = Alias.blacklist
- for pdir in path:
- try:
- os.chdir(pdir)
- dirlist = os.listdir(pdir)
- except OSError:
- continue
- for ff in dirlist:
- base, ext = os.path.splitext(ff)
- if isexec(ff) and base.lower() not in no_alias:
- if ext.lower() == '.exe':
- ff = base
- try:
- # Removes dots from the name since ipython
- # will assume names with dots to be python.
- self.shell.alias_manager.define_alias(
- base.lower().replace('.',''), ff)
- except InvalidAliasError:
- pass
- syscmdlist.append(ff)
- self.shell.db['syscmdlist'] = syscmdlist
- finally:
- os.chdir(savedir)
-
- @skip_doctest
- @line_magic
- def pwd(self, parameter_s=''):
- """Return the current working directory path.
-
- Examples
- --------
- ::
-
- In [9]: pwd
- Out[9]: '/home/tsuser/sprint/ipython'
- """
- return py3compat.getcwd()
-
- @skip_doctest
- @line_magic
- def cd(self, parameter_s=''):
- """Change the current working directory.
-
- This command automatically maintains an internal list of directories
- you visit during your IPython session, in the variable _dh. The
- command %dhist shows this history nicely formatted. You can also
- do 'cd -<tab>' to see directory history conveniently.
-
- Usage:
-
- cd 'dir': changes to directory 'dir'.
-
- cd -: changes to the last visited directory.
-
- cd -<n>: changes to the n-th directory in the directory history.
-
- cd --foo: change to directory that matches 'foo' in history
-
- cd -b <bookmark_name>: jump to a bookmark set by %bookmark
- (note: cd <bookmark_name> is enough if there is no
-            directory <bookmark_name>, but a bookmark with that name exists.)
- 'cd -b <tab>' allows you to tab-complete bookmark names.
-
- Options:
-
- -q: quiet. Do not print the working directory after the cd command is
- executed. By default IPython's cd command does print this directory,
- since the default prompts do not display path information.
-
- Note that !cd doesn't work for this purpose because the shell where
- !command runs is immediately discarded after executing 'command'.
-
- Examples
- --------
- ::
-
- In [10]: cd parent/child
- /home/tsuser/parent/child
- """
-
- oldcwd = py3compat.getcwd()
- numcd = re.match(r'(-)(\d+)$',parameter_s)
- # jump in directory history by number
- if numcd:
- nn = int(numcd.group(2))
- try:
- ps = self.shell.user_ns['_dh'][nn]
- except IndexError:
- print('The requested directory does not exist in history.')
- return
- else:
- opts = {}
- elif parameter_s.startswith('--'):
- ps = None
- fallback = None
- pat = parameter_s[2:]
- dh = self.shell.user_ns['_dh']
- # first search only by basename (last component)
- for ent in reversed(dh):
- if pat in os.path.basename(ent) and os.path.isdir(ent):
- ps = ent
- break
-
- if fallback is None and pat in ent and os.path.isdir(ent):
- fallback = ent
-
- # if we have no last part match, pick the first full path match
- if ps is None:
- ps = fallback
-
- if ps is None:
- print("No matching entry in directory history")
- return
- else:
- opts = {}
-
-
- else:
+"""Implementation of magic functions for interaction with the OS.
+
+Note: this module is named 'osm' instead of 'os' to avoid a collision with the
+builtin.
+"""
+from __future__ import print_function
+#-----------------------------------------------------------------------------
+# Copyright (c) 2012 The IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+# Stdlib
+import io
+import os
+import re
+import sys
+from pprint import pformat
+
+# Our own packages
+from IPython.core import magic_arguments
+from IPython.core import oinspect
+from IPython.core import page
+from IPython.core.alias import AliasError, Alias
+from IPython.core.error import UsageError
+from IPython.core.magic import (
+ Magics, compress_dhist, magics_class, line_magic, cell_magic, line_cell_magic
+)
+from IPython.testing.skipdoctest import skip_doctest
+from IPython.utils.openpy import source_to_unicode
+from IPython.utils.process import abbrev_cwd
+from IPython.utils import py3compat
+from IPython.utils.py3compat import unicode_type
+from IPython.utils.terminal import set_term_title
+
+#-----------------------------------------------------------------------------
+# Magic implementation classes
+#-----------------------------------------------------------------------------
+@magics_class
+class OSMagics(Magics):
+ """Magics to interact with the underlying OS (shell-type functionality).
+ """
+
+ @skip_doctest
+ @line_magic
+ def alias(self, parameter_s=''):
+ """Define an alias for a system command.
+
+ '%alias alias_name cmd' defines 'alias_name' as an alias for 'cmd'
+
+ Then, typing 'alias_name params' will execute the system command 'cmd
+ params' (from your underlying operating system).
+
+ Aliases have lower precedence than magic functions and Python normal
+ variables, so if 'foo' is both a Python variable and an alias, the
+ alias can not be executed until 'del foo' removes the Python variable.
+
+ You can use the %l specifier in an alias definition to represent the
+ whole line when the alias is called. For example::
+
+ In [2]: alias bracket echo "Input in brackets: <%l>"
+ In [3]: bracket hello world
+ Input in brackets: <hello world>
+
+ You can also define aliases with parameters using %s specifiers (one
+ per parameter)::
+
+ In [1]: alias parts echo first %s second %s
+ In [2]: %parts A B
+ first A second B
+ In [3]: %parts A
+ Incorrect number of arguments: 2 expected.
+ parts is an alias to: 'echo first %s second %s'
+
+ Note that %l and %s are mutually exclusive. You can only use one or
+ the other in your aliases.
+
+ Aliases expand Python variables just like system calls using ! or !!
+ do: all expressions prefixed with '$' get expanded. For details of
+ the semantic rules, see PEP-215:
+ http://www.python.org/peps/pep-0215.html. This is the library used by
+ IPython for variable expansion. If you want to access a true shell
+ variable, an extra $ is necessary to prevent its expansion by
+ IPython::
+
+ In [6]: alias show echo
+ In [7]: PATH='A Python string'
+ In [8]: show $PATH
+ A Python string
+ In [9]: show $$PATH
+ /usr/local/lf9560/bin:/usr/local/intel/compiler70/ia32/bin:...
+
+        You can use the alias facility to access all of $PATH. See the %rehashx
+ function, which automatically creates aliases for the contents of your
+ $PATH.
+
+ If called with no parameters, %alias prints the current alias table."""
+
+ par = parameter_s.strip()
+ if not par:
+ aliases = sorted(self.shell.alias_manager.aliases)
+ # stored = self.shell.db.get('stored_aliases', {} )
+ # for k, v in stored:
+ # atab.append(k, v[0])
+
+ print("Total number of aliases:", len(aliases))
+ sys.stdout.flush()
+ return aliases
+
+ # Now try to define a new one
+ try:
+ alias,cmd = par.split(None, 1)
+ except TypeError:
+ print(oinspect.getdoc(self.alias))
+ return
+
+ try:
+ self.shell.alias_manager.define_alias(alias, cmd)
+ except AliasError as e:
+ print(e)
+ # end magic_alias
+
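
A standalone sketch of the '%s' parameter handling described in the docstring above, mirroring the 'parts' example without touching IPython's AliasManager (the variable names are illustrative):

    template = 'echo first %s second %s'
    nargs = template.count('%s')
    args = ['A', 'B']
    if len(args) != nargs:
        print('Incorrect number of arguments: %d expected.' % nargs)
    else:
        # Splice the arguments into the command template.
        print(template % tuple(args))   # echo first A second B
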
+ @line_magic
+ def unalias(self, parameter_s=''):
+ """Remove an alias"""
+
+ aname = parameter_s.strip()
+ try:
+ self.shell.alias_manager.undefine_alias(aname)
+ except ValueError as e:
+ print(e)
+ return
+
+ stored = self.shell.db.get('stored_aliases', {} )
+ if aname in stored:
+            print("Removing stored alias", aname)
+ del stored[aname]
+ self.shell.db['stored_aliases'] = stored
+
+ @line_magic
+ def rehashx(self, parameter_s=''):
+ """Update the alias table with all executable files in $PATH.
+
+ rehashx explicitly checks that every entry in $PATH is a file
+ with execute access (os.X_OK).
+
+ Under Windows, it checks executability as a match against a
+ '|'-separated string of extensions, stored in the IPython config
+ variable win_exec_ext. This defaults to 'exe|com|bat'.
+
+        This function also resets the module completer's root module cache,
+        which is used on slow filesystems.
+ """
+ from IPython.core.alias import InvalidAliasError
+
+ # for the benefit of module completer in ipy_completers.py
+ del self.shell.db['rootmodules_cache']
+
+ path = [os.path.abspath(os.path.expanduser(p)) for p in
+ os.environ.get('PATH','').split(os.pathsep)]
+
+ syscmdlist = []
+ # Now define isexec in a cross platform manner.
+ if os.name == 'posix':
+ isexec = lambda fname:os.path.isfile(fname) and \
+ os.access(fname,os.X_OK)
+ else:
+ try:
+ winext = os.environ['pathext'].replace(';','|').replace('.','')
+ except KeyError:
+ winext = 'exe|com|bat|py'
+ if 'py' not in winext:
+ winext += '|py'
+ execre = re.compile(r'(.*)\.(%s)$' % winext,re.IGNORECASE)
+ isexec = lambda fname:os.path.isfile(fname) and execre.match(fname)
+ savedir = py3compat.getcwd()
+
+ # Now walk the paths looking for executables to alias.
+ try:
+ # write the whole loop for posix/Windows so we don't have an if in
+ # the innermost part
+ if os.name == 'posix':
+ for pdir in path:
+ try:
+ os.chdir(pdir)
+ dirlist = os.listdir(pdir)
+ except OSError:
+ continue
+ for ff in dirlist:
+ if isexec(ff):
+ try:
+ # Removes dots from the name since ipython
+ # will assume names with dots to be python.
+ if not self.shell.alias_manager.is_alias(ff):
+ self.shell.alias_manager.define_alias(
+ ff.replace('.',''), ff)
+ except InvalidAliasError:
+ pass
+ else:
+ syscmdlist.append(ff)
+ else:
+ no_alias = Alias.blacklist
+ for pdir in path:
+ try:
+ os.chdir(pdir)
+ dirlist = os.listdir(pdir)
+ except OSError:
+ continue
+ for ff in dirlist:
+ base, ext = os.path.splitext(ff)
+ if isexec(ff) and base.lower() not in no_alias:
+ if ext.lower() == '.exe':
+ ff = base
+ try:
+ # Removes dots from the name since ipython
+ # will assume names with dots to be python.
+ self.shell.alias_manager.define_alias(
+ base.lower().replace('.',''), ff)
+ except InvalidAliasError:
+ pass
+ syscmdlist.append(ff)
+ self.shell.db['syscmdlist'] = syscmdlist
+ finally:
+ os.chdir(savedir)
+
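
A sketch of the cross-platform executability test built above: on POSIX a file is executable if it has the X_OK bit, otherwise the extension is checked against a '|'-separated list. The PATHEXT default used here is an assumption for illustration; the real code reads os.environ['pathext'] with its own fallback as shown in the diff.

    import os
    import re

    if os.name == 'posix':
        def isexec(fname):
            return os.path.isfile(fname) and os.access(fname, os.X_OK)
    else:
        winext = os.environ.get('PATHEXT', '.EXE;.COM;.BAT;.PY')
        winext = winext.replace(';', '|').replace('.', '')
        execre = re.compile(r'(.*)\.(%s)$' % winext, re.IGNORECASE)
        def isexec(fname):
            return os.path.isfile(fname) and execre.match(fname) is not None

    print(isexec('/bin/ls'))   # True on most POSIX systems, False elsewhere
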
+ @skip_doctest
+ @line_magic
+ def pwd(self, parameter_s=''):
+ """Return the current working directory path.
+
+ Examples
+ --------
+ ::
+
+ In [9]: pwd
+ Out[9]: '/home/tsuser/sprint/ipython'
+ """
+ return py3compat.getcwd()
+
+ @skip_doctest
+ @line_magic
+ def cd(self, parameter_s=''):
+ """Change the current working directory.
+
+ This command automatically maintains an internal list of directories
+ you visit during your IPython session, in the variable _dh. The
+ command %dhist shows this history nicely formatted. You can also
+ do 'cd -<tab>' to see directory history conveniently.
+
+ Usage:
+
+ cd 'dir': changes to directory 'dir'.
+
+ cd -: changes to the last visited directory.
+
+ cd -<n>: changes to the n-th directory in the directory history.
+
+ cd --foo: change to directory that matches 'foo' in history
+
+ cd -b <bookmark_name>: jump to a bookmark set by %bookmark
+ (note: cd <bookmark_name> is enough if there is no
+            directory <bookmark_name>, but a bookmark with that name exists.)
+ 'cd -b <tab>' allows you to tab-complete bookmark names.
+
+ Options:
+
+ -q: quiet. Do not print the working directory after the cd command is
+ executed. By default IPython's cd command does print this directory,
+ since the default prompts do not display path information.
+
+ Note that !cd doesn't work for this purpose because the shell where
+ !command runs is immediately discarded after executing 'command'.
+
+ Examples
+ --------
+ ::
+
+ In [10]: cd parent/child
+ /home/tsuser/parent/child
+ """
+
+ oldcwd = py3compat.getcwd()
+ numcd = re.match(r'(-)(\d+)$',parameter_s)
+ # jump in directory history by number
+ if numcd:
+ nn = int(numcd.group(2))
+ try:
+ ps = self.shell.user_ns['_dh'][nn]
+ except IndexError:
+ print('The requested directory does not exist in history.')
+ return
+ else:
+ opts = {}
+ elif parameter_s.startswith('--'):
+ ps = None
+ fallback = None
+ pat = parameter_s[2:]
+ dh = self.shell.user_ns['_dh']
+ # first search only by basename (last component)
+ for ent in reversed(dh):
+ if pat in os.path.basename(ent) and os.path.isdir(ent):
+ ps = ent
+ break
+
+ if fallback is None and pat in ent and os.path.isdir(ent):
+ fallback = ent
+
+ # if we have no last part match, pick the first full path match
+ if ps is None:
+ ps = fallback
+
+ if ps is None:
+ print("No matching entry in directory history")
+ return
+ else:
+ opts = {}
+
+
+ else:
opts, ps = self.parse_options(parameter_s, 'qb', mode='string')
- # jump to previous
- if ps == '-':
- try:
- ps = self.shell.user_ns['_dh'][-2]
- except IndexError:
- raise UsageError('%cd -: No previous directory to change to.')
- # jump to bookmark if needed
- else:
- if not os.path.isdir(ps) or 'b' in opts:
- bkms = self.shell.db.get('bookmarks', {})
-
- if ps in bkms:
- target = bkms[ps]
- print('(bookmark:%s) -> %s' % (ps, target))
- ps = target
- else:
- if 'b' in opts:
- raise UsageError("Bookmark '%s' not found. "
- "Use '%%bookmark -l' to see your bookmarks." % ps)
-
- # at this point ps should point to the target dir
- if ps:
- try:
- os.chdir(os.path.expanduser(ps))
- if hasattr(self.shell, 'term_title') and self.shell.term_title:
- set_term_title('IPython: ' + abbrev_cwd())
- except OSError:
- print(sys.exc_info()[1])
- else:
- cwd = py3compat.getcwd()
- dhist = self.shell.user_ns['_dh']
- if oldcwd != cwd:
- dhist.append(cwd)
- self.shell.db['dhist'] = compress_dhist(dhist)[-100:]
-
- else:
- os.chdir(self.shell.home_dir)
- if hasattr(self.shell, 'term_title') and self.shell.term_title:
- set_term_title('IPython: ' + '~')
- cwd = py3compat.getcwd()
- dhist = self.shell.user_ns['_dh']
-
- if oldcwd != cwd:
- dhist.append(cwd)
- self.shell.db['dhist'] = compress_dhist(dhist)[-100:]
- if not 'q' in opts and self.shell.user_ns['_dh']:
- print(self.shell.user_ns['_dh'][-1])
-
- @line_magic
- def env(self, parameter_s=''):
- """Get, set, or list environment variables.
-
- Usage:\\
-
- %env: lists all environment variables/values
- %env var: get value for var
- %env var val: set value for var
- %env var=val: set value for var
- %env var=$val: set value for var, using python expansion if possible
- """
- if parameter_s.strip():
- split = '=' if '=' in parameter_s else ' '
- bits = parameter_s.split(split)
- if len(bits) == 1:
- key = parameter_s.strip()
- if key in os.environ:
- return os.environ[key]
- else:
- err = "Environment does not have key: {0}".format(key)
- raise UsageError(err)
- if len(bits) > 1:
- return self.set_env(parameter_s)
- return dict(os.environ)
-
- @line_magic
- def set_env(self, parameter_s):
- """Set environment variables. Assumptions are that either "val" is a
- name in the user namespace, or val is something that evaluates to a
- string.
-
- Usage:\\
- %set_env var val: set value for var
- %set_env var=val: set value for var
- %set_env var=$val: set value for var, using python expansion if possible
- """
- split = '=' if '=' in parameter_s else ' '
- bits = parameter_s.split(split, 1)
- if not parameter_s.strip() or len(bits)<2:
- raise UsageError("usage is 'set_env var=val'")
- var = bits[0].strip()
- val = bits[1].strip()
- if re.match(r'.*\s.*', var):
- # an environment variable with whitespace is almost certainly
- # not what the user intended. what's more likely is the wrong
- # split was chosen, ie for "set_env cmd_args A=B", we chose
- # '=' for the split and should have chosen ' '. to get around
- # this, users should just assign directly to os.environ or use
- # standard magic {var} expansion.
- err = "refusing to set env var with whitespace: '{0}'"
- err = err.format(val)
- raise UsageError(err)
- os.environ[py3compat.cast_bytes_py2(var)] = py3compat.cast_bytes_py2(val)
- print('env: {0}={1}'.format(var,val))
-
- @line_magic
- def pushd(self, parameter_s=''):
- """Place the current dir on stack and change directory.
-
- Usage:\\
- %pushd ['dirname']
- """
-
- dir_s = self.shell.dir_stack
+ # jump to previous
+ if ps == '-':
+ try:
+ ps = self.shell.user_ns['_dh'][-2]
+ except IndexError:
+ raise UsageError('%cd -: No previous directory to change to.')
+ # jump to bookmark if needed
+ else:
+ if not os.path.isdir(ps) or 'b' in opts:
+ bkms = self.shell.db.get('bookmarks', {})
+
+ if ps in bkms:
+ target = bkms[ps]
+ print('(bookmark:%s) -> %s' % (ps, target))
+ ps = target
+ else:
+ if 'b' in opts:
+ raise UsageError("Bookmark '%s' not found. "
+ "Use '%%bookmark -l' to see your bookmarks." % ps)
+
+ # at this point ps should point to the target dir
+ if ps:
+ try:
+ os.chdir(os.path.expanduser(ps))
+ if hasattr(self.shell, 'term_title') and self.shell.term_title:
+ set_term_title('IPython: ' + abbrev_cwd())
+ except OSError:
+ print(sys.exc_info()[1])
+ else:
+ cwd = py3compat.getcwd()
+ dhist = self.shell.user_ns['_dh']
+ if oldcwd != cwd:
+ dhist.append(cwd)
+ self.shell.db['dhist'] = compress_dhist(dhist)[-100:]
+
+ else:
+ os.chdir(self.shell.home_dir)
+ if hasattr(self.shell, 'term_title') and self.shell.term_title:
+ set_term_title('IPython: ' + '~')
+ cwd = py3compat.getcwd()
+ dhist = self.shell.user_ns['_dh']
+
+ if oldcwd != cwd:
+ dhist.append(cwd)
+ self.shell.db['dhist'] = compress_dhist(dhist)[-100:]
+ if not 'q' in opts and self.shell.user_ns['_dh']:
+ print(self.shell.user_ns['_dh'][-1])
+
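
A sketch of the 'cd --foo' history lookup above: scan the directory history from newest to oldest, prefer a match on the last path component, and fall back to the first substring match on the full path. The os.path.isdir() check from the real code is omitted here so the example runs with made-up paths:

    import os

    def find_in_history(pat, dhist):
        fallback = None
        for ent in reversed(dhist):
            if pat in os.path.basename(ent):
                return ent
            if fallback is None and pat in ent:
                fallback = ent
        return fallback

    history = ['/home/user/projects/ipython', '/tmp/build', '/home/user/notes']
    print(find_in_history('ipython', history))   # /home/user/projects/ipython
    print(find_in_history('user', history))      # /home/user/notes
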
+ @line_magic
+ def env(self, parameter_s=''):
+ """Get, set, or list environment variables.
+
+ Usage:\\
+
+ %env: lists all environment variables/values
+ %env var: get value for var
+ %env var val: set value for var
+ %env var=val: set value for var
+ %env var=$val: set value for var, using python expansion if possible
+ """
+ if parameter_s.strip():
+ split = '=' if '=' in parameter_s else ' '
+ bits = parameter_s.split(split)
+ if len(bits) == 1:
+ key = parameter_s.strip()
+ if key in os.environ:
+ return os.environ[key]
+ else:
+ err = "Environment does not have key: {0}".format(key)
+ raise UsageError(err)
+ if len(bits) > 1:
+ return self.set_env(parameter_s)
+ return dict(os.environ)
+
+ @line_magic
+ def set_env(self, parameter_s):
+        """Set environment variables. This assumes that "val" is either a
+        name in the user namespace, or something that evaluates to a
+        string.
+
+ Usage:\\
+ %set_env var val: set value for var
+ %set_env var=val: set value for var
+ %set_env var=$val: set value for var, using python expansion if possible
+ """
+ split = '=' if '=' in parameter_s else ' '
+ bits = parameter_s.split(split, 1)
+ if not parameter_s.strip() or len(bits)<2:
+ raise UsageError("usage is 'set_env var=val'")
+ var = bits[0].strip()
+ val = bits[1].strip()
+ if re.match(r'.*\s.*', var):
+            # An environment variable with whitespace is almost certainly
+            # not what the user intended. What's more likely is that the
+            # wrong split was chosen, i.e. for "set_env cmd_args A=B", we
+            # chose '=' for the split and should have chosen ' '. To get
+            # around this, users should just assign directly to os.environ
+            # or use standard magic {var} expansion.
+ err = "refusing to set env var with whitespace: '{0}'"
+ err = err.format(val)
+ raise UsageError(err)
+ os.environ[py3compat.cast_bytes_py2(var)] = py3compat.cast_bytes_py2(val)
+ print('env: {0}={1}'.format(var,val))
+
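
A short sketch of the three %env forms dispatched above plus %set_env with $-expansion; the variable names and values are hypothetical::

    In [1]: %env MY_VAR=demo          # set form, delegates to %set_env
    env: MY_VAR=demo

    In [2]: %env MY_VAR               # get form, returns the value (or raises UsageError)
    Out[2]: 'demo'

    In [3]: tools = '/opt/tools/bin'

    In [4]: %set_env TOOLS=$tools     # $-expansion pulls the value from the user namespace
    env: TOOLS=/opt/tools/bin
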
+ @line_magic
+ def pushd(self, parameter_s=''):
+        """Place the current dir on the stack and change directory.
+
+ Usage:\\
+ %pushd ['dirname']
+ """
+
+ dir_s = self.shell.dir_stack
tgt = os.path.expanduser(parameter_s)
- cwd = py3compat.getcwd().replace(self.shell.home_dir,'~')
- if tgt:
- self.cd(parameter_s)
- dir_s.insert(0,cwd)
- return self.shell.magic('dirs')
-
- @line_magic
- def popd(self, parameter_s=''):
- """Change to directory popped off the top of the stack.
- """
- if not self.shell.dir_stack:
- raise UsageError("%popd on empty stack")
- top = self.shell.dir_stack.pop(0)
- self.cd(top)
- print("popd ->",top)
-
- @line_magic
- def dirs(self, parameter_s=''):
- """Return the current directory stack."""
-
- return self.shell.dir_stack
-
- @line_magic
- def dhist(self, parameter_s=''):
- """Print your history of visited directories.
-
- %dhist -> print full history\\
- %dhist n -> print last n entries only\\
- %dhist n1 n2 -> print entries between n1 and n2 (n2 not included)\\
-
- This history is automatically maintained by the %cd command, and
- always available as the global list variable _dh. You can use %cd -<n>
- to go to directory number <n>.
-
-        Note that most of the time, you should view directory history by
-        entering cd -<TAB>.
-
- """
-
- dh = self.shell.user_ns['_dh']
- if parameter_s:
- try:
- args = map(int,parameter_s.split())
- except:
- self.arg_err(self.dhist)
- return
- if len(args) == 1:
- ini,fin = max(len(dh)-(args[0]),0),len(dh)
- elif len(args) == 2:
- ini,fin = args
- fin = min(fin, len(dh))
- else:
- self.arg_err(self.dhist)
- return
- else:
- ini,fin = 0,len(dh)
- print('Directory history (kept in _dh)')
- for i in range(ini, fin):
- print("%d: %s" % (i, dh[i]))
-
- @skip_doctest
- @line_magic
- def sc(self, parameter_s=''):
-        """Shell capture - run shell command and capture output (DEPRECATED, use !).
-
- DEPRECATED. Suboptimal, retained for backwards compatibility.
-
- You should use the form 'var = !command' instead. Example:
-
- "%sc -l myfiles = ls ~" should now be written as
-
- "myfiles = !ls ~"
-
- myfiles.s, myfiles.l and myfiles.n still apply as documented
- below.
-
- --
- %sc [options] varname=command
-
- IPython will run the given command using commands.getoutput(), and
- will then update the user's interactive namespace with a variable
- called varname, containing the value of the call. Your command can
- contain shell wildcards, pipes, etc.
-
- The '=' sign in the syntax is mandatory, and the variable name you
- supply must follow Python's standard conventions for valid names.
-
- (A special format without variable name exists for internal use)
-
- Options:
-
- -l: list output. Split the output on newlines into a list before
- assigning it to the given variable. By default the output is stored
- as a single string.
-
- -v: verbose. Print the contents of the variable.
-
- In most cases you should not need to split as a list, because the
- returned value is a special type of string which can automatically
- provide its contents either as a list (split on newlines) or as a
- space-separated string. These are convenient, respectively, either
- for sequential processing or to be passed to a shell command.
-
- For example::
-
- # Capture into variable a
- In [1]: sc a=ls *py
-
- # a is a string with embedded newlines
- In [2]: a
- Out[2]: 'setup.py\\nwin32_manual_post_install.py'
-
- # which can be seen as a list:
- In [3]: a.l
- Out[3]: ['setup.py', 'win32_manual_post_install.py']
-
- # or as a whitespace-separated string:
- In [4]: a.s
- Out[4]: 'setup.py win32_manual_post_install.py'
-
- # a.s is useful to pass as a single command line:
- In [5]: !wc -l $a.s
- 146 setup.py
- 130 win32_manual_post_install.py
- 276 total
-
- # while the list form is useful to loop over:
- In [6]: for f in a.l:
- ...: !wc -l $f
- ...:
- 146 setup.py
- 130 win32_manual_post_install.py
-
- Similarly, the lists returned by the -l option are also special, in
- the sense that you can equally invoke the .s attribute on them to
- automatically get a whitespace-separated string from their contents::
-
- In [7]: sc -l b=ls *py
-
- In [8]: b
- Out[8]: ['setup.py', 'win32_manual_post_install.py']
-
- In [9]: b.s
- Out[9]: 'setup.py win32_manual_post_install.py'
-
- In summary, both the lists and strings used for output capture have
- the following special attributes::
-
- .l (or .list) : value as list.
- .n (or .nlstr): value as newline-separated string.
- .s (or .spstr): value as space-separated string.
- """
-
- opts,args = self.parse_options(parameter_s, 'lv')
- # Try to get a variable name and command to run
- try:
- # the variable name must be obtained from the parse_options
- # output, which uses shlex.split to strip options out.
- var,_ = args.split('=', 1)
- var = var.strip()
- # But the command has to be extracted from the original input
- # parameter_s, not on what parse_options returns, to avoid the
- # quote stripping which shlex.split performs on it.
- _,cmd = parameter_s.split('=', 1)
- except ValueError:
- var,cmd = '',''
- # If all looks ok, proceed
- split = 'l' in opts
- out = self.shell.getoutput(cmd, split=split)
- if 'v' in opts:
- print('%s ==\n%s' % (var, pformat(out)))
- if var:
- self.shell.user_ns.update({var:out})
- else:
- return out
-
- @line_cell_magic
- def sx(self, line='', cell=None):
- """Shell execute - run shell command and capture output (!! is short-hand).
-
- %sx command
-
- IPython will run the given command using commands.getoutput(), and
- return the result formatted as a list (split on '\\n'). Since the
- output is _returned_, it will be stored in ipython's regular output
- cache Out[N] and in the '_N' automatic variables.
-
- Notes:
-
- 1) If an input line begins with '!!', then %sx is automatically
- invoked. That is, while::
-
- !ls
-
- causes ipython to simply issue system('ls'), typing::
-
- !!ls
-
- is a shorthand equivalent to::
-
- %sx ls
-
- 2) %sx differs from %sc in that %sx automatically splits into a list,
- like '%sc -l'. The reason for this is to make it as easy as possible
- to process line-oriented shell output via further python commands.
- %sc is meant to provide much finer control, but requires more
- typing.
-
- 3) Just like %sc -l, this is a list with special attributes:
- ::
-
- .l (or .list) : value as list.
- .n (or .nlstr): value as newline-separated string.
- .s (or .spstr): value as whitespace-separated string.
-
- This is very useful when trying to use such lists as arguments to
- system commands."""
-
- if cell is None:
- # line magic
- return self.shell.getoutput(line)
- else:
- opts,args = self.parse_options(line, '', 'out=')
- output = self.shell.getoutput(cell)
- out_name = opts.get('out', opts.get('o'))
- if out_name:
- self.shell.user_ns[out_name] = output
- else:
- return output
-
- system = line_cell_magic('system')(sx)
- bang = cell_magic('!')(sx)
-
- @line_magic
- def bookmark(self, parameter_s=''):
- """Manage IPython's bookmark system.
-
- %bookmark <name> - set bookmark to current dir
- %bookmark <name> <dir> - set bookmark to <dir>
- %bookmark -l - list all bookmarks
- %bookmark -d <name> - remove bookmark
- %bookmark -r - remove all bookmarks
-
- You can later on access a bookmarked folder with::
-
- %cd -b <name>
-
- or simply '%cd <name>' if there is no directory called <name> AND
- there is such a bookmark defined.
-
- Your bookmarks persist through IPython sessions, but they are
- associated with each profile."""
-
- opts,args = self.parse_options(parameter_s,'drl',mode='list')
- if len(args) > 2:
- raise UsageError("%bookmark: too many arguments")
-
- bkms = self.shell.db.get('bookmarks',{})
-
- if 'd' in opts:
- try:
- todel = args[0]
- except IndexError:
- raise UsageError(
- "%bookmark -d: must provide a bookmark to delete")
- else:
- try:
- del bkms[todel]
- except KeyError:
- raise UsageError(
- "%%bookmark -d: Can't delete bookmark '%s'" % todel)
-
- elif 'r' in opts:
- bkms = {}
- elif 'l' in opts:
- bks = sorted(bkms)
- if bks:
- size = max(map(len, bks))
- else:
- size = 0
- fmt = '%-'+str(size)+'s -> %s'
- print('Current bookmarks:')
- for bk in bks:
- print(fmt % (bk, bkms[bk]))
- else:
- if not args:
- raise UsageError("%bookmark: You must specify the bookmark name")
- elif len(args)==1:
- bkms[args[0]] = py3compat.getcwd()
- elif len(args)==2:
- bkms[args[0]] = args[1]
- self.shell.db['bookmarks'] = bkms
-
- @line_magic
- def pycat(self, parameter_s=''):
- """Show a syntax-highlighted file through a pager.
-
- This magic is similar to the cat utility, but it will assume the file
- to be Python source and will show it with syntax highlighting.
-
-        This magic command can take a local filename, a URL, a history
-        range (see %history) or a macro as argument ::
-
- %pycat myscript.py
- %pycat 7-27
- %pycat myMacro
- %pycat http://www.example.com/myscript.py
- """
- if not parameter_s:
- raise UsageError('Missing filename, URL, input history range, '
- 'or macro.')
-
- try :
- cont = self.shell.find_user_code(parameter_s, skip_encoding_cookie=False)
- except (ValueError, IOError):
- print("Error: no such file, variable, URL, history range or macro")
- return
-
- page.page(self.shell.pycolorize(source_to_unicode(cont)))
-
- @magic_arguments.magic_arguments()
- @magic_arguments.argument(
- '-a', '--append', action='store_true', default=False,
- help='Append contents of the cell to an existing file. '
- 'The file will be created if it does not exist.'
- )
- @magic_arguments.argument(
- 'filename', type=unicode_type,
- help='file to write'
- )
- @cell_magic
- def writefile(self, line, cell):
- """Write the contents of the cell to a file.
-
- The file will be overwritten unless the -a (--append) flag is specified.
- """
- args = magic_arguments.parse_argstring(self.writefile, line)
+ cwd = py3compat.getcwd().replace(self.shell.home_dir,'~')
+ if tgt:
+ self.cd(parameter_s)
+ dir_s.insert(0,cwd)
+ return self.shell.magic('dirs')
+
+ @line_magic
+ def popd(self, parameter_s=''):
+ """Change to directory popped off the top of the stack.
+ """
+ if not self.shell.dir_stack:
+ raise UsageError("%popd on empty stack")
+ top = self.shell.dir_stack.pop(0)
+ self.cd(top)
+ print("popd ->",top)
+
+ @line_magic
+ def dirs(self, parameter_s=''):
+ """Return the current directory stack."""
+
+ return self.shell.dir_stack
+
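
The directory-stack round trip implemented by %pushd/%popd/%dirs above, assuming the session starts in the home directory (paths illustrative)::

    In [1]: %pushd /tmp               # push the old cwd, then cd to the target
    /tmp
    Out[1]: ['~']

    In [2]: %dirs                     # the stack itself, most recent first
    Out[2]: ['~']

    In [3]: %popd                     # pop '~' off the stack and cd back to it
    /home/user
    popd -> ~
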
+ @line_magic
+ def dhist(self, parameter_s=''):
+ """Print your history of visited directories.
+
+ %dhist -> print full history\\
+ %dhist n -> print last n entries only\\
+ %dhist n1 n2 -> print entries between n1 and n2 (n2 not included)\\
+
+ This history is automatically maintained by the %cd command, and
+ always available as the global list variable _dh. You can use %cd -<n>
+ to go to directory number <n>.
+
+        Note that most of the time, you should view directory history by
+        entering cd -<TAB>.
+
+ """
+
+ dh = self.shell.user_ns['_dh']
+ if parameter_s:
+ try:
+ args = map(int,parameter_s.split())
+ except:
+ self.arg_err(self.dhist)
+ return
+ if len(args) == 1:
+ ini,fin = max(len(dh)-(args[0]),0),len(dh)
+ elif len(args) == 2:
+ ini,fin = args
+ fin = min(fin, len(dh))
+ else:
+ self.arg_err(self.dhist)
+ return
+ else:
+ ini,fin = 0,len(dh)
+ print('Directory history (kept in _dh)')
+ for i in range(ini, fin):
+ print("%d: %s" % (i, dh[i]))
+
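
The three argument forms accepted by %dhist above, sketched against a hypothetical three-entry history::

    In [1]: %dhist                    # full history
    Directory history (kept in _dh)
    0: /home/user
    1: /tmp
    2: /home/user/project

    In [2]: %dhist 2                  # last two entries only
    Directory history (kept in _dh)
    1: /tmp
    2: /home/user/project

    In [3]: %dhist 1 3                # entries 1 <= i < 3
    Directory history (kept in _dh)
    1: /tmp
    2: /home/user/project
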
+ @skip_doctest
+ @line_magic
+ def sc(self, parameter_s=''):
+        """Shell capture - run shell command and capture output (DEPRECATED, use !).
+
+ DEPRECATED. Suboptimal, retained for backwards compatibility.
+
+ You should use the form 'var = !command' instead. Example:
+
+ "%sc -l myfiles = ls ~" should now be written as
+
+ "myfiles = !ls ~"
+
+ myfiles.s, myfiles.l and myfiles.n still apply as documented
+ below.
+
+ --
+ %sc [options] varname=command
+
+ IPython will run the given command using commands.getoutput(), and
+ will then update the user's interactive namespace with a variable
+ called varname, containing the value of the call. Your command can
+ contain shell wildcards, pipes, etc.
+
+ The '=' sign in the syntax is mandatory, and the variable name you
+ supply must follow Python's standard conventions for valid names.
+
+ (A special format without variable name exists for internal use)
+
+ Options:
+
+ -l: list output. Split the output on newlines into a list before
+ assigning it to the given variable. By default the output is stored
+ as a single string.
+
+ -v: verbose. Print the contents of the variable.
+
+ In most cases you should not need to split as a list, because the
+ returned value is a special type of string which can automatically
+ provide its contents either as a list (split on newlines) or as a
+ space-separated string. These are convenient, respectively, either
+ for sequential processing or to be passed to a shell command.
+
+ For example::
+
+ # Capture into variable a
+ In [1]: sc a=ls *py
+
+ # a is a string with embedded newlines
+ In [2]: a
+ Out[2]: 'setup.py\\nwin32_manual_post_install.py'
+
+ # which can be seen as a list:
+ In [3]: a.l
+ Out[3]: ['setup.py', 'win32_manual_post_install.py']
+
+ # or as a whitespace-separated string:
+ In [4]: a.s
+ Out[4]: 'setup.py win32_manual_post_install.py'
+
+ # a.s is useful to pass as a single command line:
+ In [5]: !wc -l $a.s
+ 146 setup.py
+ 130 win32_manual_post_install.py
+ 276 total
+
+ # while the list form is useful to loop over:
+ In [6]: for f in a.l:
+ ...: !wc -l $f
+ ...:
+ 146 setup.py
+ 130 win32_manual_post_install.py
+
+ Similarly, the lists returned by the -l option are also special, in
+ the sense that you can equally invoke the .s attribute on them to
+ automatically get a whitespace-separated string from their contents::
+
+ In [7]: sc -l b=ls *py
+
+ In [8]: b
+ Out[8]: ['setup.py', 'win32_manual_post_install.py']
+
+ In [9]: b.s
+ Out[9]: 'setup.py win32_manual_post_install.py'
+
+ In summary, both the lists and strings used for output capture have
+ the following special attributes::
+
+ .l (or .list) : value as list.
+ .n (or .nlstr): value as newline-separated string.
+ .s (or .spstr): value as space-separated string.
+ """
+
+ opts,args = self.parse_options(parameter_s, 'lv')
+ # Try to get a variable name and command to run
+ try:
+ # the variable name must be obtained from the parse_options
+ # output, which uses shlex.split to strip options out.
+ var,_ = args.split('=', 1)
+ var = var.strip()
+ # But the command has to be extracted from the original input
+ # parameter_s, not on what parse_options returns, to avoid the
+ # quote stripping which shlex.split performs on it.
+ _,cmd = parameter_s.split('=', 1)
+ except ValueError:
+ var,cmd = '',''
+ # If all looks ok, proceed
+ split = 'l' in opts
+ out = self.shell.getoutput(cmd, split=split)
+ if 'v' in opts:
+ print('%s ==\n%s' % (var, pformat(out)))
+ if var:
+ self.shell.user_ns.update({var:out})
+ else:
+ return out
+
+ @line_cell_magic
+ def sx(self, line='', cell=None):
+ """Shell execute - run shell command and capture output (!! is short-hand).
+
+ %sx command
+
+ IPython will run the given command using commands.getoutput(), and
+ return the result formatted as a list (split on '\\n'). Since the
+ output is _returned_, it will be stored in ipython's regular output
+ cache Out[N] and in the '_N' automatic variables.
+
+ Notes:
+
+ 1) If an input line begins with '!!', then %sx is automatically
+ invoked. That is, while::
+
+ !ls
+
+ causes ipython to simply issue system('ls'), typing::
+
+ !!ls
+
+ is a shorthand equivalent to::
+
+ %sx ls
+
+ 2) %sx differs from %sc in that %sx automatically splits into a list,
+ like '%sc -l'. The reason for this is to make it as easy as possible
+ to process line-oriented shell output via further python commands.
+ %sc is meant to provide much finer control, but requires more
+ typing.
+
+ 3) Just like %sc -l, this is a list with special attributes:
+ ::
+
+ .l (or .list) : value as list.
+ .n (or .nlstr): value as newline-separated string.
+ .s (or .spstr): value as whitespace-separated string.
+
+ This is very useful when trying to use such lists as arguments to
+ system commands."""
+
+ if cell is None:
+ # line magic
+ return self.shell.getoutput(line)
+ else:
+ opts,args = self.parse_options(line, '', 'out=')
+ output = self.shell.getoutput(cell)
+ out_name = opts.get('out', opts.get('o'))
+ if out_name:
+ self.shell.user_ns[out_name] = output
+ else:
+ return output
+
+ system = line_cell_magic('system')(sx)
+ bang = cell_magic('!')(sx)
+
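
A sketch of the %sx capture path above, in both line and cell form; the file names are illustrative::

    In [1]: %sx ls *.py               # same as: !!ls *.py
    Out[1]: ['setup.py', 'utils.py']

    In [2]: _.s                       # the result is an SList with .l/.n/.s views
    Out[2]: 'setup.py utils.py'

    In [3]: %%sx --out listing
       ...: ls *.py
       ...:

    In [4]: listing                   # stored via --out instead of being returned
    Out[4]: ['setup.py', 'utils.py']
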
+ @line_magic
+ def bookmark(self, parameter_s=''):
+ """Manage IPython's bookmark system.
+
+ %bookmark <name> - set bookmark to current dir
+ %bookmark <name> <dir> - set bookmark to <dir>
+ %bookmark -l - list all bookmarks
+ %bookmark -d <name> - remove bookmark
+ %bookmark -r - remove all bookmarks
+
+ You can later on access a bookmarked folder with::
+
+ %cd -b <name>
+
+ or simply '%cd <name>' if there is no directory called <name> AND
+ there is such a bookmark defined.
+
+ Your bookmarks persist through IPython sessions, but they are
+ associated with each profile."""
+
+ opts,args = self.parse_options(parameter_s,'drl',mode='list')
+ if len(args) > 2:
+ raise UsageError("%bookmark: too many arguments")
+
+ bkms = self.shell.db.get('bookmarks',{})
+
+ if 'd' in opts:
+ try:
+ todel = args[0]
+ except IndexError:
+ raise UsageError(
+ "%bookmark -d: must provide a bookmark to delete")
+ else:
+ try:
+ del bkms[todel]
+ except KeyError:
+ raise UsageError(
+ "%%bookmark -d: Can't delete bookmark '%s'" % todel)
+
+ elif 'r' in opts:
+ bkms = {}
+ elif 'l' in opts:
+ bks = sorted(bkms)
+ if bks:
+ size = max(map(len, bks))
+ else:
+ size = 0
+ fmt = '%-'+str(size)+'s -> %s'
+ print('Current bookmarks:')
+ for bk in bks:
+ print(fmt % (bk, bkms[bk]))
+ else:
+ if not args:
+ raise UsageError("%bookmark: You must specify the bookmark name")
+ elif len(args)==1:
+ bkms[args[0]] = py3compat.getcwd()
+ elif len(args)==2:
+ bkms[args[0]] = args[1]
+ self.shell.db['bookmarks'] = bkms
+
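
The bookmark life cycle handled above, in a short session; names and paths are hypothetical::

    In [1]: %bookmark proj ~/work/project   # explicit target

    In [2]: %bookmark here                  # bookmark the current directory

    In [3]: %bookmark -l
    Current bookmarks:
    here -> /home/user
    proj -> ~/work/project

    In [4]: %cd -b proj
    (bookmark:proj) -> ~/work/project
    /home/user/work/project

    In [5]: %bookmark -d here               # delete one; -r would remove all
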
+ @line_magic
+ def pycat(self, parameter_s=''):
+ """Show a syntax-highlighted file through a pager.
+
+ This magic is similar to the cat utility, but it will assume the file
+ to be Python source and will show it with syntax highlighting.
+
+        This magic command can take a local filename, a URL, a history
+        range (see %history) or a macro as argument ::
+
+ %pycat myscript.py
+ %pycat 7-27
+ %pycat myMacro
+ %pycat http://www.example.com/myscript.py
+ """
+ if not parameter_s:
+ raise UsageError('Missing filename, URL, input history range, '
+ 'or macro.')
+
+ try :
+ cont = self.shell.find_user_code(parameter_s, skip_encoding_cookie=False)
+ except (ValueError, IOError):
+ print("Error: no such file, variable, URL, history range or macro")
+ return
+
+ page.page(self.shell.pycolorize(source_to_unicode(cont)))
+
+ @magic_arguments.magic_arguments()
+ @magic_arguments.argument(
+ '-a', '--append', action='store_true', default=False,
+ help='Append contents of the cell to an existing file. '
+ 'The file will be created if it does not exist.'
+ )
+ @magic_arguments.argument(
+ 'filename', type=unicode_type,
+ help='file to write'
+ )
+ @cell_magic
+ def writefile(self, line, cell):
+ """Write the contents of the cell to a file.
+
+ The file will be overwritten unless the -a (--append) flag is specified.
+ """
+ args = magic_arguments.parse_argstring(self.writefile, line)
filename = os.path.expanduser(args.filename)
- if os.path.exists(filename):
- if args.append:
- print("Appending to %s" % filename)
- else:
- print("Overwriting %s" % filename)
- else:
- print("Writing %s" % filename)
-
- mode = 'a' if args.append else 'w'
- with io.open(filename, mode, encoding='utf-8') as f:
- f.write(cell)
+ if os.path.exists(filename):
+ if args.append:
+ print("Appending to %s" % filename)
+ else:
+ print("Overwriting %s" % filename)
+ else:
+ print("Writing %s" % filename)
+
+ mode = 'a' if args.append else 'w'
+ with io.open(filename, mode, encoding='utf-8') as f:
+ f.write(cell)
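
A minimal sketch of the %%writefile cell magic defined above; 'notes.txt' is an arbitrary file name::

    In [1]: %%writefile notes.txt
       ...: first line
       ...: second line
       ...:
    Writing notes.txt

    In [2]: %%writefile -a notes.txt
       ...: appended line
       ...:
    Appending to notes.txt
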
diff --git a/contrib/python/ipython/py2/IPython/core/magics/pylab.py b/contrib/python/ipython/py2/IPython/core/magics/pylab.py
index deec14cb7c..6c5cd68a59 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/pylab.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/pylab.py
@@ -1,167 +1,167 @@
-"""Implementation of magic functions for matplotlib/pylab support.
-"""
-from __future__ import print_function
-#-----------------------------------------------------------------------------
-# Copyright (c) 2012 The IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-# Our own packages
-from traitlets.config.application import Application
-from IPython.core import magic_arguments
-from IPython.core.magic import Magics, magics_class, line_magic
-from IPython.testing.skipdoctest import skip_doctest
+"""Implementation of magic functions for matplotlib/pylab support.
+"""
+from __future__ import print_function
+#-----------------------------------------------------------------------------
+# Copyright (c) 2012 The IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+# Our own packages
+from traitlets.config.application import Application
+from IPython.core import magic_arguments
+from IPython.core.magic import Magics, magics_class, line_magic
+from IPython.testing.skipdoctest import skip_doctest
from warnings import warn
-from IPython.core.pylabtools import backends
-
-#-----------------------------------------------------------------------------
-# Magic implementation classes
-#-----------------------------------------------------------------------------
-
-magic_gui_arg = magic_arguments.argument(
- 'gui', nargs='?',
- help="""Name of the matplotlib backend to use %s.
- If given, the corresponding matplotlib backend is used,
- otherwise it will be matplotlib's default
- (which you can set in your matplotlib config file).
- """ % str(tuple(sorted(backends.keys())))
-)
-
-
-@magics_class
-class PylabMagics(Magics):
- """Magics related to matplotlib's pylab support"""
-
- @skip_doctest
- @line_magic
- @magic_arguments.magic_arguments()
- @magic_arguments.argument('-l', '--list', action='store_true',
- help='Show available matplotlib backends')
- @magic_gui_arg
- def matplotlib(self, line=''):
- """Set up matplotlib to work interactively.
-
- This function lets you activate matplotlib interactive support
- at any point during an IPython session. It does not import anything
- into the interactive namespace.
-
- If you are using the inline matplotlib backend in the IPython Notebook
- you can set which figure formats are enabled using the following::
-
- In [1]: from IPython.display import set_matplotlib_formats
-
- In [2]: set_matplotlib_formats('pdf', 'svg')
-
- The default for inline figures sets `bbox_inches` to 'tight'. This can
- cause discrepancies between the displayed image and the identical
- image created using `savefig`. This behavior can be disabled using the
- `%config` magic::
-
- In [3]: %config InlineBackend.print_figure_kwargs = {'bbox_inches':None}
-
- In addition, see the docstring of
- `IPython.display.set_matplotlib_formats` and
- `IPython.display.set_matplotlib_close` for more information on
- changing additional behaviors of the inline backend.
-
- Examples
- --------
- To enable the inline backend for usage with the IPython Notebook::
-
- In [1]: %matplotlib inline
-
- In this case, where the matplotlib default is TkAgg::
-
- In [2]: %matplotlib
- Using matplotlib backend: TkAgg
-
- But you can explicitly request a different GUI backend::
-
- In [3]: %matplotlib qt
-
- You can list the available backends using the -l/--list option::
-
- In [4]: %matplotlib --list
- Available matplotlib backends: ['osx', 'qt4', 'qt5', 'gtk3', 'notebook', 'wx', 'qt', 'nbagg',
- 'gtk', 'tk', 'inline']
- """
- args = magic_arguments.parse_argstring(self.matplotlib, line)
- if args.list:
- backends_list = list(backends.keys())
- print("Available matplotlib backends: %s" % backends_list)
- else:
- gui, backend = self.shell.enable_matplotlib(args.gui)
- self._show_matplotlib_backend(args.gui, backend)
-
- @skip_doctest
- @line_magic
- @magic_arguments.magic_arguments()
- @magic_arguments.argument(
- '--no-import-all', action='store_true', default=None,
- help="""Prevent IPython from performing ``import *`` into the interactive namespace.
-
- You can govern the default behavior of this flag with the
- InteractiveShellApp.pylab_import_all configurable.
- """
- )
- @magic_gui_arg
- def pylab(self, line=''):
- """Load numpy and matplotlib to work interactively.
-
- This function lets you activate pylab (matplotlib, numpy and
- interactive support) at any point during an IPython session.
-
- %pylab makes the following imports::
-
- import numpy
- import matplotlib
- from matplotlib import pylab, mlab, pyplot
- np = numpy
- plt = pyplot
-
- from IPython.display import display
- from IPython.core.pylabtools import figsize, getfigs
-
- from pylab import *
- from numpy import *
-
- If you pass `--no-import-all`, the last two `*` imports will be excluded.
-
- See the %matplotlib magic for more details about activating matplotlib
- without affecting the interactive namespace.
- """
- args = magic_arguments.parse_argstring(self.pylab, line)
- if args.no_import_all is None:
- # get default from Application
- if Application.initialized():
- app = Application.instance()
- try:
- import_all = app.pylab_import_all
- except AttributeError:
- import_all = True
- else:
- # nothing specified, no app - default True
- import_all = True
- else:
- # invert no-import flag
- import_all = not args.no_import_all
-
- gui, backend, clobbered = self.shell.enable_pylab(args.gui, import_all=import_all)
- self._show_matplotlib_backend(args.gui, backend)
- print ("Populating the interactive namespace from numpy and matplotlib")
- if clobbered:
- warn("pylab import has clobbered these variables: %s" % clobbered +
- "\n`%matplotlib` prevents importing * from pylab and numpy"
- )
-
- def _show_matplotlib_backend(self, gui, backend):
-        """show the matplotlib backend message"""
- if not gui or gui == 'auto':
- print("Using matplotlib backend: %s" % backend)
+from IPython.core.pylabtools import backends
+
+#-----------------------------------------------------------------------------
+# Magic implementation classes
+#-----------------------------------------------------------------------------
+
+magic_gui_arg = magic_arguments.argument(
+ 'gui', nargs='?',
+ help="""Name of the matplotlib backend to use %s.
+ If given, the corresponding matplotlib backend is used,
+ otherwise it will be matplotlib's default
+ (which you can set in your matplotlib config file).
+ """ % str(tuple(sorted(backends.keys())))
+)
+
+
+@magics_class
+class PylabMagics(Magics):
+ """Magics related to matplotlib's pylab support"""
+
+ @skip_doctest
+ @line_magic
+ @magic_arguments.magic_arguments()
+ @magic_arguments.argument('-l', '--list', action='store_true',
+ help='Show available matplotlib backends')
+ @magic_gui_arg
+ def matplotlib(self, line=''):
+ """Set up matplotlib to work interactively.
+
+ This function lets you activate matplotlib interactive support
+ at any point during an IPython session. It does not import anything
+ into the interactive namespace.
+
+ If you are using the inline matplotlib backend in the IPython Notebook
+ you can set which figure formats are enabled using the following::
+
+ In [1]: from IPython.display import set_matplotlib_formats
+
+ In [2]: set_matplotlib_formats('pdf', 'svg')
+
+ The default for inline figures sets `bbox_inches` to 'tight'. This can
+ cause discrepancies between the displayed image and the identical
+ image created using `savefig`. This behavior can be disabled using the
+ `%config` magic::
+
+ In [3]: %config InlineBackend.print_figure_kwargs = {'bbox_inches':None}
+
+ In addition, see the docstring of
+ `IPython.display.set_matplotlib_formats` and
+ `IPython.display.set_matplotlib_close` for more information on
+ changing additional behaviors of the inline backend.
+
+ Examples
+ --------
+ To enable the inline backend for usage with the IPython Notebook::
+
+ In [1]: %matplotlib inline
+
+ In this case, where the matplotlib default is TkAgg::
+
+ In [2]: %matplotlib
+ Using matplotlib backend: TkAgg
+
+ But you can explicitly request a different GUI backend::
+
+ In [3]: %matplotlib qt
+
+ You can list the available backends using the -l/--list option::
+
+ In [4]: %matplotlib --list
+ Available matplotlib backends: ['osx', 'qt4', 'qt5', 'gtk3', 'notebook', 'wx', 'qt', 'nbagg',
+ 'gtk', 'tk', 'inline']
+ """
+ args = magic_arguments.parse_argstring(self.matplotlib, line)
+ if args.list:
+ backends_list = list(backends.keys())
+ print("Available matplotlib backends: %s" % backends_list)
+ else:
+ gui, backend = self.shell.enable_matplotlib(args.gui)
+ self._show_matplotlib_backend(args.gui, backend)
+
+ @skip_doctest
+ @line_magic
+ @magic_arguments.magic_arguments()
+ @magic_arguments.argument(
+ '--no-import-all', action='store_true', default=None,
+ help="""Prevent IPython from performing ``import *`` into the interactive namespace.
+
+ You can govern the default behavior of this flag with the
+ InteractiveShellApp.pylab_import_all configurable.
+ """
+ )
+ @magic_gui_arg
+ def pylab(self, line=''):
+ """Load numpy and matplotlib to work interactively.
+
+ This function lets you activate pylab (matplotlib, numpy and
+ interactive support) at any point during an IPython session.
+
+ %pylab makes the following imports::
+
+ import numpy
+ import matplotlib
+ from matplotlib import pylab, mlab, pyplot
+ np = numpy
+ plt = pyplot
+
+ from IPython.display import display
+ from IPython.core.pylabtools import figsize, getfigs
+
+ from pylab import *
+ from numpy import *
+
+ If you pass `--no-import-all`, the last two `*` imports will be excluded.
+
+ See the %matplotlib magic for more details about activating matplotlib
+ without affecting the interactive namespace.
+ """
+ args = magic_arguments.parse_argstring(self.pylab, line)
+ if args.no_import_all is None:
+ # get default from Application
+ if Application.initialized():
+ app = Application.instance()
+ try:
+ import_all = app.pylab_import_all
+ except AttributeError:
+ import_all = True
+ else:
+ # nothing specified, no app - default True
+ import_all = True
+ else:
+ # invert no-import flag
+ import_all = not args.no_import_all
+
+ gui, backend, clobbered = self.shell.enable_pylab(args.gui, import_all=import_all)
+ self._show_matplotlib_backend(args.gui, backend)
+ print ("Populating the interactive namespace from numpy and matplotlib")
+ if clobbered:
+ warn("pylab import has clobbered these variables: %s" % clobbered +
+ "\n`%matplotlib` prevents importing * from pylab and numpy"
+ )
+
+ def _show_matplotlib_backend(self, gui, backend):
+        """show the matplotlib backend message"""
+ if not gui or gui == 'auto':
+ print("Using matplotlib backend: %s" % backend)
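
A sketch of how the two magics above are typically combined; it assumes a Qt-capable matplotlib install, and the backend name shown is illustrative::

    In [1]: %matplotlib qt            # GUI event-loop support only, nothing is imported

    In [2]: %pylab --no-import-all    # additionally binds numpy/pyplot as np/plt
    Using matplotlib backend: Qt5Agg
    Populating the interactive namespace from numpy and matplotlib

    In [3]: plt.plot(np.arange(10));  # names provided by %pylab
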
diff --git a/contrib/python/ipython/py2/IPython/core/magics/script.py b/contrib/python/ipython/py2/IPython/core/magics/script.py
index d381d97234..3fbddc38a8 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/script.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/script.py
@@ -1,280 +1,280 @@
-"""Magic functions for running cells in various scripts."""
-from __future__ import print_function
-
+"""Magic functions for running cells in various scripts."""
+from __future__ import print_function
+
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
-
-import errno
-import os
-import sys
-import signal
-import time
-from subprocess import Popen, PIPE
-import atexit
-
-from IPython.core import magic_arguments
-from IPython.core.magic import (
- Magics, magics_class, line_magic, cell_magic
-)
-from IPython.lib.backgroundjobs import BackgroundJobManager
-from IPython.utils import py3compat
-from IPython.utils.process import arg_split
+
+import errno
+import os
+import sys
+import signal
+import time
+from subprocess import Popen, PIPE
+import atexit
+
+from IPython.core import magic_arguments
+from IPython.core.magic import (
+ Magics, magics_class, line_magic, cell_magic
+)
+from IPython.lib.backgroundjobs import BackgroundJobManager
+from IPython.utils import py3compat
+from IPython.utils.process import arg_split
from traitlets import List, Dict, default
-
-#-----------------------------------------------------------------------------
-# Magic implementation classes
-#-----------------------------------------------------------------------------
-
-def script_args(f):
- """single decorator for adding script args"""
- args = [
- magic_arguments.argument(
- '--out', type=str,
- help="""The variable in which to store stdout from the script.
- If the script is backgrounded, this will be the stdout *pipe*,
- instead of the stderr text itself.
-            instead of the stdout text itself.
- ),
- magic_arguments.argument(
- '--err', type=str,
- help="""The variable in which to store stderr from the script.
- If the script is backgrounded, this will be the stderr *pipe*,
- instead of the stderr text itself.
- """
- ),
- magic_arguments.argument(
- '--bg', action="store_true",
- help="""Whether to run the script in the background.
- If given, the only way to see the output of the command is
- with --out/err.
- """
- ),
- magic_arguments.argument(
- '--proc', type=str,
- help="""The variable in which to store Popen instance.
- This is used only when --bg option is given.
- """
- ),
- ]
- for arg in args:
- f = arg(f)
- return f
-
-@magics_class
-class ScriptMagics(Magics):
- """Magics for talking to scripts
-
- This defines a base `%%script` cell magic for running a cell
- with a program in a subprocess, and registers a few top-level
- magics that call %%script with common interpreters.
- """
+
+#-----------------------------------------------------------------------------
+# Magic implementation classes
+#-----------------------------------------------------------------------------
+
+def script_args(f):
+ """single decorator for adding script args"""
+ args = [
+ magic_arguments.argument(
+ '--out', type=str,
+ help="""The variable in which to store stdout from the script.
+ If the script is backgrounded, this will be the stdout *pipe*,
+            instead of the stdout text itself.
+ """
+ ),
+ magic_arguments.argument(
+ '--err', type=str,
+ help="""The variable in which to store stderr from the script.
+ If the script is backgrounded, this will be the stderr *pipe*,
+ instead of the stderr text itself.
+ """
+ ),
+ magic_arguments.argument(
+ '--bg', action="store_true",
+ help="""Whether to run the script in the background.
+ If given, the only way to see the output of the command is
+ with --out/err.
+ """
+ ),
+ magic_arguments.argument(
+ '--proc', type=str,
+ help="""The variable in which to store Popen instance.
+ This is used only when --bg option is given.
+ """
+ ),
+ ]
+ for arg in args:
+ f = arg(f)
+ return f
+
+@magics_class
+class ScriptMagics(Magics):
+ """Magics for talking to scripts
+
+ This defines a base `%%script` cell magic for running a cell
+ with a program in a subprocess, and registers a few top-level
+ magics that call %%script with common interpreters.
+ """
script_magics = List(
- help="""Extra script cell magics to define
-
- This generates simple wrappers of `%%script foo` as `%%foo`.
-
- If you want to add script magics that aren't on your path,
- specify them in script_paths
- """,
+ help="""Extra script cell magics to define
+
+ This generates simple wrappers of `%%script foo` as `%%foo`.
+
+ If you want to add script magics that aren't on your path,
+ specify them in script_paths
+ """,
).tag(config=True)
@default('script_magics')
- def _script_magics_default(self):
- """default to a common list of programs"""
-
- defaults = [
- 'sh',
- 'bash',
- 'perl',
- 'ruby',
- 'python',
- 'python2',
- 'python3',
- 'pypy',
- ]
- if os.name == 'nt':
- defaults.extend([
- 'cmd',
- ])
-
- return defaults
-
+ def _script_magics_default(self):
+ """default to a common list of programs"""
+
+ defaults = [
+ 'sh',
+ 'bash',
+ 'perl',
+ 'ruby',
+ 'python',
+ 'python2',
+ 'python3',
+ 'pypy',
+ ]
+ if os.name == 'nt':
+ defaults.extend([
+ 'cmd',
+ ])
+
+ return defaults
+
script_paths = Dict(
- help="""Dict mapping short 'ruby' names to full paths, such as '/opt/secret/bin/ruby'
-
- Only necessary for items in script_magics where the default path will not
- find the right interpreter.
- """
+ help="""Dict mapping short 'ruby' names to full paths, such as '/opt/secret/bin/ruby'
+
+ Only necessary for items in script_magics where the default path will not
+ find the right interpreter.
+ """
).tag(config=True)
-
- def __init__(self, shell=None):
- super(ScriptMagics, self).__init__(shell=shell)
- self._generate_script_magics()
- self.job_manager = BackgroundJobManager()
- self.bg_processes = []
- atexit.register(self.kill_bg_processes)
-
- def __del__(self):
- self.kill_bg_processes()
-
- def _generate_script_magics(self):
- cell_magics = self.magics['cell']
- for name in self.script_magics:
- cell_magics[name] = self._make_script_magic(name)
-
- def _make_script_magic(self, name):
-        """make a named magic that calls %%script with a particular program"""
- # expand to explicit path if necessary:
- script = self.script_paths.get(name, name)
-
- @magic_arguments.magic_arguments()
- @script_args
- def named_script_magic(line, cell):
- # if line, add it as cl-flags
- if line:
- line = "%s %s" % (script, line)
- else:
- line = script
- return self.shebang(line, cell)
-
- # write a basic docstring:
- named_script_magic.__doc__ = \
- """%%{name} script magic
-
- Run cells with {script} in a subprocess.
-
- This is a shortcut for `%%script {script}`
- """.format(**locals())
-
- return named_script_magic
-
- @magic_arguments.magic_arguments()
- @script_args
- @cell_magic("script")
- def shebang(self, line, cell):
- """Run a cell via a shell command
-
-        The `%%script` line is like the #! line of a script,
- specifying a program (bash, perl, ruby, etc.) with which to run.
-
- The rest of the cell is run by that program.
-
- Examples
- --------
- ::
-
- In [1]: %%script bash
- ...: for i in 1 2 3; do
- ...: echo $i
- ...: done
- 1
- 2
- 3
- """
- argv = arg_split(line, posix = not sys.platform.startswith('win'))
- args, cmd = self.shebang.parser.parse_known_args(argv)
-
- try:
- p = Popen(cmd, stdout=PIPE, stderr=PIPE, stdin=PIPE)
- except OSError as e:
- if e.errno == errno.ENOENT:
- print("Couldn't find program: %r" % cmd[0])
- return
- else:
- raise
-
- if not cell.endswith('\n'):
- cell += '\n'
- cell = cell.encode('utf8', 'replace')
- if args.bg:
- self.bg_processes.append(p)
- self._gc_bg_processes()
- if args.out:
- self.shell.user_ns[args.out] = p.stdout
- if args.err:
- self.shell.user_ns[args.err] = p.stderr
- self.job_manager.new(self._run_script, p, cell, daemon=True)
- if args.proc:
- self.shell.user_ns[args.proc] = p
- return
-
- try:
- out, err = p.communicate(cell)
- except KeyboardInterrupt:
- try:
- p.send_signal(signal.SIGINT)
- time.sleep(0.1)
- if p.poll() is not None:
- print("Process is interrupted.")
- return
- p.terminate()
- time.sleep(0.1)
- if p.poll() is not None:
- print("Process is terminated.")
- return
- p.kill()
- print("Process is killed.")
- except OSError:
- pass
- except Exception as e:
- print("Error while terminating subprocess (pid=%i): %s" \
- % (p.pid, e))
- return
- out = py3compat.bytes_to_str(out)
- err = py3compat.bytes_to_str(err)
- if args.out:
- self.shell.user_ns[args.out] = out
- else:
- sys.stdout.write(out)
- sys.stdout.flush()
- if args.err:
- self.shell.user_ns[args.err] = err
- else:
- sys.stderr.write(err)
- sys.stderr.flush()
-
- def _run_script(self, p, cell):
- """callback for running the script in the background"""
- p.stdin.write(cell)
- p.stdin.close()
- p.wait()
-
- @line_magic("killbgscripts")
- def killbgscripts(self, _nouse_=''):
- """Kill all BG processes started by %%script and its family."""
- self.kill_bg_processes()
- print("All background processes were killed.")
-
- def kill_bg_processes(self):
- """Kill all BG processes which are still running."""
+
+ def __init__(self, shell=None):
+ super(ScriptMagics, self).__init__(shell=shell)
+ self._generate_script_magics()
+ self.job_manager = BackgroundJobManager()
+ self.bg_processes = []
+ atexit.register(self.kill_bg_processes)
+
+ def __del__(self):
+ self.kill_bg_processes()
+
+ def _generate_script_magics(self):
+ cell_magics = self.magics['cell']
+ for name in self.script_magics:
+ cell_magics[name] = self._make_script_magic(name)
+
+ def _make_script_magic(self, name):
+        """make a named magic that calls %%script with a particular program"""
+ # expand to explicit path if necessary:
+ script = self.script_paths.get(name, name)
+
+ @magic_arguments.magic_arguments()
+ @script_args
+ def named_script_magic(line, cell):
+ # if line, add it as cl-flags
+ if line:
+ line = "%s %s" % (script, line)
+ else:
+ line = script
+ return self.shebang(line, cell)
+
+ # write a basic docstring:
+ named_script_magic.__doc__ = \
+ """%%{name} script magic
+
+ Run cells with {script} in a subprocess.
+
+ This is a shortcut for `%%script {script}`
+ """.format(**locals())
+
+ return named_script_magic
+
+ @magic_arguments.magic_arguments()
+ @script_args
+ @cell_magic("script")
+ def shebang(self, line, cell):
+ """Run a cell via a shell command
+
+        The `%%script` line is like the #! line of a script,
+ specifying a program (bash, perl, ruby, etc.) with which to run.
+
+ The rest of the cell is run by that program.
+
+ Examples
+ --------
+ ::
+
+ In [1]: %%script bash
+ ...: for i in 1 2 3; do
+ ...: echo $i
+ ...: done
+ 1
+ 2
+ 3
+ """
+ argv = arg_split(line, posix = not sys.platform.startswith('win'))
+ args, cmd = self.shebang.parser.parse_known_args(argv)
+
+ try:
+ p = Popen(cmd, stdout=PIPE, stderr=PIPE, stdin=PIPE)
+ except OSError as e:
+ if e.errno == errno.ENOENT:
+ print("Couldn't find program: %r" % cmd[0])
+ return
+ else:
+ raise
+
+ if not cell.endswith('\n'):
+ cell += '\n'
+ cell = cell.encode('utf8', 'replace')
+ if args.bg:
+ self.bg_processes.append(p)
+ self._gc_bg_processes()
+ if args.out:
+ self.shell.user_ns[args.out] = p.stdout
+ if args.err:
+ self.shell.user_ns[args.err] = p.stderr
+ self.job_manager.new(self._run_script, p, cell, daemon=True)
+ if args.proc:
+ self.shell.user_ns[args.proc] = p
+ return
+
+ try:
+ out, err = p.communicate(cell)
+ except KeyboardInterrupt:
+ try:
+ p.send_signal(signal.SIGINT)
+ time.sleep(0.1)
+ if p.poll() is not None:
+ print("Process is interrupted.")
+ return
+ p.terminate()
+ time.sleep(0.1)
+ if p.poll() is not None:
+ print("Process is terminated.")
+ return
+ p.kill()
+ print("Process is killed.")
+ except OSError:
+ pass
+ except Exception as e:
+ print("Error while terminating subprocess (pid=%i): %s" \
+ % (p.pid, e))
+ return
+ out = py3compat.bytes_to_str(out)
+ err = py3compat.bytes_to_str(err)
+ if args.out:
+ self.shell.user_ns[args.out] = out
+ else:
+ sys.stdout.write(out)
+ sys.stdout.flush()
+ if args.err:
+ self.shell.user_ns[args.err] = err
+ else:
+ sys.stderr.write(err)
+ sys.stderr.flush()
+
+ def _run_script(self, p, cell):
+ """callback for running the script in the background"""
+ p.stdin.write(cell)
+ p.stdin.close()
+ p.wait()
+
+ @line_magic("killbgscripts")
+ def killbgscripts(self, _nouse_=''):
+ """Kill all BG processes started by %%script and its family."""
+ self.kill_bg_processes()
+ print("All background processes were killed.")
+
+ def kill_bg_processes(self):
+ """Kill all BG processes which are still running."""
if not self.bg_processes:
return
- for p in self.bg_processes:
- if p.poll() is None:
- try:
- p.send_signal(signal.SIGINT)
- except:
- pass
- time.sleep(0.1)
+ for p in self.bg_processes:
+ if p.poll() is None:
+ try:
+ p.send_signal(signal.SIGINT)
+ except:
+ pass
+ time.sleep(0.1)
self._gc_bg_processes()
if not self.bg_processes:
return
- for p in self.bg_processes:
- if p.poll() is None:
- try:
- p.terminate()
- except:
- pass
- time.sleep(0.1)
+ for p in self.bg_processes:
+ if p.poll() is None:
+ try:
+ p.terminate()
+ except:
+ pass
+ time.sleep(0.1)
self._gc_bg_processes()
if not self.bg_processes:
return
- for p in self.bg_processes:
- if p.poll() is None:
- try:
- p.kill()
- except:
- pass
- self._gc_bg_processes()
-
- def _gc_bg_processes(self):
- self.bg_processes = [p for p in self.bg_processes if p.poll() is None]
+ for p in self.bg_processes:
+ if p.poll() is None:
+ try:
+ p.kill()
+ except:
+ pass
+ self._gc_bg_processes()
+
+ def _gc_bg_processes(self):
+ self.bg_processes = [p for p in self.bg_processes if p.poll() is None]
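
A sketch of the %%script plumbing above: a foreground run, then a backgrounded run whose pipes are captured with --out/--proc. It assumes bash is on $PATH; the job number and timings are illustrative::

    In [1]: %%script bash
       ...: for i in 1 2 3; do echo $i; done
       ...:
    1
    2
    3

    In [2]: %%script --bg --out job_out --proc job bash
       ...: sleep 1; echo done
       ...:
    Starting job # 0 in a separate thread.

    In [3]: job.wait()                # the Popen object stored via --proc
    Out[3]: 0

    In [4]: job_out.read()            # the stdout *pipe* stored via --out
    Out[4]: 'done\n'

Extra interpreters can be exposed as their own cell magics through the traitlets above, e.g. in ipython_config.py (illustrative)::

    c.ScriptMagics.script_magics = ['node']
    c.ScriptMagics.script_paths = {'node': '/usr/local/bin/node'}
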
diff --git a/contrib/python/ipython/py2/IPython/core/oinspect.py b/contrib/python/ipython/py2/IPython/core/oinspect.py
index 6849412528..55a4efe8c0 100644
--- a/contrib/python/ipython/py2/IPython/core/oinspect.py
+++ b/contrib/python/ipython/py2/IPython/core/oinspect.py
@@ -1,49 +1,49 @@
-# -*- coding: utf-8 -*-
-"""Tools for inspecting Python objects.
-
-Uses syntax highlighting for presenting the various information elements.
-
-Similar in spirit to the inspect module, but all calls take a name argument to
-reference the name under which an object is being read.
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import print_function
-
-__all__ = ['Inspector','InspectColors']
-
-# stdlib modules
-import inspect
-import linecache
+# -*- coding: utf-8 -*-
+"""Tools for inspecting Python objects.
+
+Uses syntax highlighting for presenting the various information elements.
+
+Similar in spirit to the inspect module, but all calls take a name argument to
+reference the name under which an object is being read.
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import print_function
+
+__all__ = ['Inspector','InspectColors']
+
+# stdlib modules
+import inspect
+import linecache
import warnings
-import os
-from textwrap import dedent
-import types
-import io as stdlib_io
-
-try:
- from itertools import izip_longest
-except ImportError:
- from itertools import zip_longest as izip_longest
-
-# IPython's own
-from IPython.core import page
-from IPython.lib.pretty import pretty
-from IPython.testing.skipdoctest import skip_doctest_py3
-from IPython.utils import PyColorize
-from IPython.utils import openpy
-from IPython.utils import py3compat
-from IPython.utils.dir2 import safe_hasattr
-from IPython.utils.path import compress_user
-from IPython.utils.text import indent
-from IPython.utils.wildcard import list_namespace
-from IPython.utils.coloransi import TermColors, ColorScheme, ColorSchemeTable
-from IPython.utils.py3compat import cast_unicode, string_types, PY3
-from IPython.utils.signatures import signature
+import os
+from textwrap import dedent
+import types
+import io as stdlib_io
+
+try:
+ from itertools import izip_longest
+except ImportError:
+ from itertools import zip_longest as izip_longest
+
+# IPython's own
+from IPython.core import page
+from IPython.lib.pretty import pretty
+from IPython.testing.skipdoctest import skip_doctest_py3
+from IPython.utils import PyColorize
+from IPython.utils import openpy
+from IPython.utils import py3compat
+from IPython.utils.dir2 import safe_hasattr
+from IPython.utils.path import compress_user
+from IPython.utils.text import indent
+from IPython.utils.wildcard import list_namespace
+from IPython.utils.coloransi import TermColors, ColorScheme, ColorSchemeTable
+from IPython.utils.py3compat import cast_unicode, string_types, PY3
+from IPython.utils.signatures import signature
from IPython.utils.colorable import Colorable
-
+
from pygments import highlight
try:
# PythonLexer was renamed to Python2Lexer in pygments 2.5
@@ -55,510 +55,510 @@ from pygments.formatters import HtmlFormatter
def pylight(code):
return highlight(code, Python2Lexer(), HtmlFormatter(noclasses=True))
-# builtin docstrings to ignore
-_func_call_docstring = types.FunctionType.__call__.__doc__
-_object_init_docstring = object.__init__.__doc__
-_builtin_type_docstrings = {
- inspect.getdoc(t) for t in (types.ModuleType, types.MethodType,
- types.FunctionType, property)
-}
-
-_builtin_func_type = type(all)
-_builtin_meth_type = type(str.upper) # Bound methods have the same type as builtin functions
-#****************************************************************************
-# Builtin color schemes
-
-Colors = TermColors # just a shorthand
-
-InspectColors = PyColorize.ANSICodeColors
-
-#****************************************************************************
-# Auxiliary functions and objects
-
-# See the messaging spec for the definition of all these fields. This list
-# effectively defines the order of display
-info_fields = ['type_name', 'base_class', 'string_form', 'namespace',
- 'length', 'file', 'definition', 'docstring', 'source',
- 'init_definition', 'class_docstring', 'init_docstring',
- 'call_def', 'call_docstring',
- # These won't be printed but will be used to determine how to
- # format the object
- 'ismagic', 'isalias', 'isclass', 'argspec', 'found', 'name'
- ]
-
-
-def object_info(**kw):
- """Make an object info dict with all fields present."""
- infodict = dict(izip_longest(info_fields, [None]))
- infodict.update(kw)
- return infodict
-
-
-def get_encoding(obj):
- """Get encoding for python source file defining obj
-
- Returns None if obj is not defined in a sourcefile.
- """
- ofile = find_file(obj)
- # run contents of file through pager starting at line where the object
- # is defined, as long as the file isn't binary and is actually on the
- # filesystem.
- if ofile is None:
- return None
- elif ofile.endswith(('.so', '.dll', '.pyd')):
- return None
- elif not os.path.isfile(ofile):
- return None
- else:
- # Print only text files, not extension binaries. Note that
- # getsourcelines returns lineno with 1-offset and page() uses
- # 0-offset, so we must adjust.
- with stdlib_io.open(ofile, 'rb') as buffer: # Tweaked to use io.open for Python 2
- encoding, lines = openpy.detect_encoding(buffer.readline)
- return encoding
-
-def getdoc(obj):
- """Stable wrapper around inspect.getdoc.
-
- This can't crash because of attribute problems.
-
- It also attempts to call a getdoc() method on the given object. This
- allows objects which provide their docstrings via non-standard mechanisms
+# builtin docstrings to ignore
+_func_call_docstring = types.FunctionType.__call__.__doc__
+_object_init_docstring = object.__init__.__doc__
+_builtin_type_docstrings = {
+ inspect.getdoc(t) for t in (types.ModuleType, types.MethodType,
+ types.FunctionType, property)
+}
+
+_builtin_func_type = type(all)
+_builtin_meth_type = type(str.upper) # Bound methods have the same type as builtin functions
+#****************************************************************************
+# Builtin color schemes
+
+Colors = TermColors # just a shorthand
+
+InspectColors = PyColorize.ANSICodeColors
+
+#****************************************************************************
+# Auxiliary functions and objects
+
+# See the messaging spec for the definition of all these fields. This list
+# effectively defines the order of display
+info_fields = ['type_name', 'base_class', 'string_form', 'namespace',
+ 'length', 'file', 'definition', 'docstring', 'source',
+ 'init_definition', 'class_docstring', 'init_docstring',
+ 'call_def', 'call_docstring',
+ # These won't be printed but will be used to determine how to
+ # format the object
+ 'ismagic', 'isalias', 'isclass', 'argspec', 'found', 'name'
+ ]
+
+
+def object_info(**kw):
+ """Make an object info dict with all fields present."""
+ infodict = dict(izip_longest(info_fields, [None]))
+ infodict.update(kw)
+ return infodict
+
+
+def get_encoding(obj):
+ """Get encoding for python source file defining obj
+
+ Returns None if obj is not defined in a sourcefile.
+ """
+ ofile = find_file(obj)
+ # run contents of file through pager starting at line where the object
+ # is defined, as long as the file isn't binary and is actually on the
+ # filesystem.
+ if ofile is None:
+ return None
+ elif ofile.endswith(('.so', '.dll', '.pyd')):
+ return None
+ elif not os.path.isfile(ofile):
+ return None
+ else:
+ # Print only text files, not extension binaries. Note that
+ # getsourcelines returns lineno with 1-offset and page() uses
+ # 0-offset, so we must adjust.
+ with stdlib_io.open(ofile, 'rb') as buffer: # Tweaked to use io.open for Python 2
+ encoding, lines = openpy.detect_encoding(buffer.readline)
+ return encoding
+
+def getdoc(obj):
+ """Stable wrapper around inspect.getdoc.
+
+ This can't crash because of attribute problems.
+
+ It also attempts to call a getdoc() method on the given object. This
+ allows objects which provide their docstrings via non-standard mechanisms
(like Pyro proxies) to still be inspected by ipython's ? system.
"""
- # Allow objects to offer customized documentation via a getdoc method:
- try:
- ds = obj.getdoc()
- except Exception:
- pass
- else:
- # if we get extra info, we add it to the normal docstring.
- if isinstance(ds, string_types):
- return inspect.cleandoc(ds)
- try:
- docstr = inspect.getdoc(obj)
- encoding = get_encoding(obj)
- return py3compat.cast_unicode(docstr, encoding=encoding)
- except Exception:
- # Harden against an inspect failure, which can occur with
+ # Allow objects to offer customized documentation via a getdoc method:
+ try:
+ ds = obj.getdoc()
+ except Exception:
+ pass
+ else:
+ # if we get extra info, we add it to the normal docstring.
+ if isinstance(ds, string_types):
+ return inspect.cleandoc(ds)
+ try:
+ docstr = inspect.getdoc(obj)
+ encoding = get_encoding(obj)
+ return py3compat.cast_unicode(docstr, encoding=encoding)
+ except Exception:
+ # Harden against an inspect failure, which can occur with
         # extension modules.
- raise
- return None
-
-
-def getsource(obj, oname=''):
- """Wrapper around inspect.getsource.
-
- This can be modified by other projects to provide customized source
- extraction.
-
- Parameters
- ----------
- obj : object
- an object whose source code we will attempt to extract
- oname : str
- (optional) a name under which the object is known
-
- Returns
- -------
- src : unicode or None
-
- """
-
- if isinstance(obj, property):
- sources = []
- for attrname in ['fget', 'fset', 'fdel']:
- fn = getattr(obj, attrname)
- if fn is not None:
- encoding = get_encoding(fn)
- oname_prefix = ('%s.' % oname) if oname else ''
- sources.append(cast_unicode(
- ''.join(('# ', oname_prefix, attrname)),
- encoding=encoding))
- if inspect.isfunction(fn):
- sources.append(dedent(getsource(fn)))
- else:
- # Default str/repr only prints function name,
- # pretty.pretty prints module name too.
- sources.append(cast_unicode(
- '%s%s = %s\n' % (
- oname_prefix, attrname, pretty(fn)),
- encoding=encoding))
- if sources:
- return '\n'.join(sources)
- else:
- return None
-
- else:
- # Get source for non-property objects.
-
- obj = _get_wrapped(obj)
-
- try:
- src = inspect.getsource(obj)
- except TypeError:
- # The object itself provided no meaningful source, try looking for
- # its class definition instead.
- if hasattr(obj, '__class__'):
- try:
- src = inspect.getsource(obj.__class__)
- except TypeError:
- return None
-
- encoding = get_encoding(obj)
- return cast_unicode(src, encoding=encoding)
-
-
-def is_simple_callable(obj):
- """True if obj is a function ()"""
- return (inspect.isfunction(obj) or inspect.ismethod(obj) or \
- isinstance(obj, _builtin_func_type) or isinstance(obj, _builtin_meth_type))
-
-
-def getargspec(obj):
- """Wrapper around :func:`inspect.getfullargspec` on Python 3, and
- :func:`inspect.getargspec` on Python 2.
-
- In addition to functions and methods, this can also handle objects with a
- ``__call__`` attribute.
- """
- if safe_hasattr(obj, '__call__') and not is_simple_callable(obj):
- obj = obj.__call__
-
- return inspect.getfullargspec(obj) if PY3 else inspect.getargspec(obj)
-
-
-def format_argspec(argspec):
- """Format argspect, convenience wrapper around inspect's.
-
- This takes a dict instead of ordered arguments and calls
- inspect.format_argspec with the arguments in the necessary order.
- """
- return inspect.formatargspec(argspec['args'], argspec['varargs'],
- argspec['varkw'], argspec['defaults'])
-
-
-def call_tip(oinfo, format_call=True):
- """Extract call tip data from an oinfo dict.
-
- Parameters
- ----------
- oinfo : dict
-
- format_call : bool, optional
- If True, the call line is formatted and returned as a string. If not, a
- tuple of (name, argspec) is returned.
-
- Returns
- -------
- call_info : None, str or (str, dict) tuple.
- When format_call is True, the whole call information is formatted as a
- single string. Otherwise, the object's name and its argspec dict are
- returned. If no call information is available, None is returned.
-
- docstring : str or None
- The most relevant docstring for calling purposes is returned, if
- available. The priority is: call docstring for callable instances, then
- constructor docstring for classes, then main object's docstring otherwise
- (regular functions).
- """
- # Get call definition
- argspec = oinfo.get('argspec')
- if argspec is None:
- call_line = None
- else:
- # Callable objects will have 'self' as their first argument, prune
- # it out if it's there for clarity (since users do *not* pass an
- # extra first argument explicitly).
- try:
- has_self = argspec['args'][0] == 'self'
- except (KeyError, IndexError):
- pass
- else:
- if has_self:
- argspec['args'] = argspec['args'][1:]
-
- call_line = oinfo['name']+format_argspec(argspec)
-
- # Now get docstring.
- # The priority is: call docstring, constructor docstring, main one.
- doc = oinfo.get('call_docstring')
- if doc is None:
- doc = oinfo.get('init_docstring')
- if doc is None:
- doc = oinfo.get('docstring','')
-
- return call_line, doc
-
-
-def _get_wrapped(obj):
- """Get the original object if wrapped in one or more @decorators
-
- Some objects automatically construct similar objects on any unrecognised
- attribute access (e.g. unittest.mock.call). To protect against infinite loops,
- this will arbitrarily cut off after 100 levels of obj.__wrapped__
- attribute access. --TK, Jan 2016
- """
- orig_obj = obj
- i = 0
- while safe_hasattr(obj, '__wrapped__'):
- obj = obj.__wrapped__
- i += 1
- if i > 100:
- # __wrapped__ is probably a lie, so return the thing we started with
- return orig_obj
- return obj
-
-def find_file(obj):
- """Find the absolute path to the file where an object was defined.
-
- This is essentially a robust wrapper around `inspect.getabsfile`.
-
- Returns None if no file can be found.
-
- Parameters
- ----------
- obj : any Python object
-
- Returns
- -------
- fname : str
- The absolute path to the file where the object was defined.
- """
- obj = _get_wrapped(obj)
-
- fname = None
- try:
- fname = inspect.getabsfile(obj)
- except TypeError:
- # For an instance, the file that matters is where its class was
- # declared.
- if hasattr(obj, '__class__'):
- try:
- fname = inspect.getabsfile(obj.__class__)
- except TypeError:
- # Can happen for builtins
- pass
- except:
- pass
- return cast_unicode(fname)
-
-
-def find_source_lines(obj):
- """Find the line number in a file where an object was defined.
-
- This is essentially a robust wrapper around `inspect.getsourcelines`.
-
- Returns None if no file can be found.
-
- Parameters
- ----------
- obj : any Python object
-
- Returns
- -------
- lineno : int
- The line number where the object definition starts.
- """
- obj = _get_wrapped(obj)
-
- try:
- try:
- lineno = inspect.getsourcelines(obj)[1]
- except TypeError:
- # For instances, try the class object like getsource() does
- if hasattr(obj, '__class__'):
- lineno = inspect.getsourcelines(obj.__class__)[1]
- else:
- lineno = None
- except:
- return None
-
- return lineno
-
+ raise
+ return None
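A short sketch of the getdoc() hook described in the docstring (illustrative; Proxy is a made-up class): an object that exposes a getdoc() method has that result preferred over its static __doc__.

from IPython.core.oinspect import getdoc

class Proxy(object):
    """Static docstring."""
    def getdoc(self):
        # e.g. fetched from a remote object at runtime
        return "Documentation generated on demand."

print(getdoc(Proxy()))   # "Documentation generated on demand."
print(getdoc(len))       # falls back to inspect.getdoc(len)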
+
+
+def getsource(obj, oname=''):
+ """Wrapper around inspect.getsource.
+
+ This can be modified by other projects to provide customized source
+ extraction.
+
+ Parameters
+ ----------
+ obj : object
+ an object whose source code we will attempt to extract
+ oname : str
+ (optional) a name under which the object is known
+
+ Returns
+ -------
+ src : unicode or None
+
+ """
+
+ if isinstance(obj, property):
+ sources = []
+ for attrname in ['fget', 'fset', 'fdel']:
+ fn = getattr(obj, attrname)
+ if fn is not None:
+ encoding = get_encoding(fn)
+ oname_prefix = ('%s.' % oname) if oname else ''
+ sources.append(cast_unicode(
+ ''.join(('# ', oname_prefix, attrname)),
+ encoding=encoding))
+ if inspect.isfunction(fn):
+ sources.append(dedent(getsource(fn)))
+ else:
+ # Default str/repr only prints function name,
+ # pretty.pretty prints module name too.
+ sources.append(cast_unicode(
+ '%s%s = %s\n' % (
+ oname_prefix, attrname, pretty(fn)),
+ encoding=encoding))
+ if sources:
+ return '\n'.join(sources)
+ else:
+ return None
+
+ else:
+ # Get source for non-property objects.
+
+ obj = _get_wrapped(obj)
+
+ try:
+ src = inspect.getsource(obj)
+ except TypeError:
+ # The object itself provided no meaningful source, try looking for
+ # its class definition instead.
+ if hasattr(obj, '__class__'):
+ try:
+ src = inspect.getsource(obj.__class__)
+ except TypeError:
+ return None
+
+ encoding = get_encoding(obj)
+ return cast_unicode(src, encoding=encoding)
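To illustrate the property branch above, a hedged sketch (Point is a hypothetical class; run it from a file so inspect can find source): for a property the result is a commented header per accessor plus that accessor's source, while other objects fall back to inspect.getsource, trying the class when the instance itself has none.

from IPython.core.oinspect import getsource

class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

    @property
    def norm(self):
        """Euclidean norm of the point."""
        return (self.x ** 2 + self.y ** 2) ** 0.5

print(getsource(Point.norm, oname='Point.norm'))  # "# Point.norm.fget" header + fget source
print(getsource(Point(0, 0)))                     # instance: falls back to the class source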
+
+
+def is_simple_callable(obj):
+ """True if obj is a function ()"""
+ return (inspect.isfunction(obj) or inspect.ismethod(obj) or \
+ isinstance(obj, _builtin_func_type) or isinstance(obj, _builtin_meth_type))
+
+
+def getargspec(obj):
+ """Wrapper around :func:`inspect.getfullargspec` on Python 3, and
+ :func:`inspect.getargspec` on Python 2.
+
+ In addition to functions and methods, this can also handle objects with a
+ ``__call__`` attribute.
+ """
+ if safe_hasattr(obj, '__call__') and not is_simple_callable(obj):
+ obj = obj.__call__
+
+ return inspect.getfullargspec(obj) if PY3 else inspect.getargspec(obj)
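A small illustrative sketch (Scaler is a hypothetical class): for a callable instance the spec is taken from its __call__ method, which is what makes call tips work for such objects.

from IPython.core.oinspect import getargspec

class Scaler(object):
    def __call__(self, value, factor=2):
        return value * factor

spec = getargspec(Scaler())   # inspects Scaler.__call__, not the instance itself
print(spec.args)              # ['self', 'value', 'factor']
print(spec.defaults)          # (2,)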
+
+
+def format_argspec(argspec):
+ """Format argspect, convenience wrapper around inspect's.
+
+ This takes a dict instead of ordered arguments and calls
+ inspect.format_argspec with the arguments in the necessary order.
+ """
+ return inspect.formatargspec(argspec['args'], argspec['varargs'],
+ argspec['varkw'], argspec['defaults'])
+
+
+def call_tip(oinfo, format_call=True):
+ """Extract call tip data from an oinfo dict.
+
+ Parameters
+ ----------
+ oinfo : dict
+
+ format_call : bool, optional
+ If True, the call line is formatted and returned as a string. If not, a
+ tuple of (name, argspec) is returned.
+
+ Returns
+ -------
+ call_info : None, str or (str, dict) tuple.
+ When format_call is True, the whole call information is formatted as a
+ single string. Otherwise, the object's name and its argspec dict are
+ returned. If no call information is available, None is returned.
+
+ docstring : str or None
+ The most relevant docstring for calling purposes is returned, if
+ available. The priority is: call docstring for callable instances, then
+ constructor docstring for classes, then main object's docstring otherwise
+ (regular functions).
+ """
+ # Get call definition
+ argspec = oinfo.get('argspec')
+ if argspec is None:
+ call_line = None
+ else:
+ # Callable objects will have 'self' as their first argument, prune
+ # it out if it's there for clarity (since users do *not* pass an
+ # extra first argument explicitly).
+ try:
+ has_self = argspec['args'][0] == 'self'
+ except (KeyError, IndexError):
+ pass
+ else:
+ if has_self:
+ argspec['args'] = argspec['args'][1:]
+
+ call_line = oinfo['name']+format_argspec(argspec)
+
+ # Now get docstring.
+ # The priority is: call docstring, constructor docstring, main one.
+ doc = oinfo.get('call_docstring')
+ if doc is None:
+ doc = oinfo.get('init_docstring')
+ if doc is None:
+ doc = oinfo.get('docstring','')
+
+ return call_line, doc
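As a hedged end-to-end sketch (greet is a made-up function, and it assumes an Inspector can be constructed standalone to produce the oinfo dict): the 'self'-free call line and the most relevant docstring come back as a pair.

from IPython.core.oinspect import Inspector, call_tip

def greet(name, punctuation='!'):
    """Return a greeting string."""
    return 'Hello, %s%s' % (name, punctuation)

oinfo = Inspector().info(greet, oname='greet')  # dict with 'name', 'argspec', 'docstring', ...
call_line, doc = call_tip(oinfo)
print(call_line)   # greet(name, punctuation='!')
print(doc)         # Return a greeting string.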
+
+
+def _get_wrapped(obj):
+ """Get the original object if wrapped in one or more @decorators
+
+ Some objects automatically construct similar objects on any unrecognised
+ attribute access (e.g. unittest.mock.call). To protect against infinite loops,
+ this will arbitrarily cut off after 100 levels of obj.__wrapped__
+ attribute access. --TK, Jan 2016
+ """
+ orig_obj = obj
+ i = 0
+ while safe_hasattr(obj, '__wrapped__'):
+ obj = obj.__wrapped__
+ i += 1
+ if i > 100:
+ # __wrapped__ is probably a lie, so return the thing we started with
+ return orig_obj
+ return obj
+
+def find_file(obj):
+ """Find the absolute path to the file where an object was defined.
+
+ This is essentially a robust wrapper around `inspect.getabsfile`.
+
+ Returns None if no file can be found.
+
+ Parameters
+ ----------
+ obj : any Python object
+
+ Returns
+ -------
+ fname : str
+ The absolute path to the file where the object was defined.
+ """
+ obj = _get_wrapped(obj)
+
+ fname = None
+ try:
+ fname = inspect.getabsfile(obj)
+ except TypeError:
+ # For an instance, the file that matters is where its class was
+ # declared.
+ if hasattr(obj, '__class__'):
+ try:
+ fname = inspect.getabsfile(obj.__class__)
+ except TypeError:
+ # Can happen for builtins
+ pass
+ except:
+ pass
+ return cast_unicode(fname)
+
+
+def find_source_lines(obj):
+ """Find the line number in a file where an object was defined.
+
+ This is essentially a robust wrapper around `inspect.getsourcelines`.
+
+ Returns None if no file can be found.
+
+ Parameters
+ ----------
+ obj : any Python object
+
+ Returns
+ -------
+ lineno : int
+ The line number where the object definition starts.
+ """
+ obj = _get_wrapped(obj)
+
+ try:
+ try:
+ lineno = inspect.getsourcelines(obj)[1]
+ except TypeError:
+ # For instances, try the class object like getsource() does
+ if hasattr(obj, '__class__'):
+ lineno = inspect.getsourcelines(obj.__class__)[1]
+ else:
+ lineno = None
+ except:
+ return None
+
+ return lineno
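For illustration (logged and compute are hypothetical, and on Python 3 functools.wraps would set __wrapped__ for you; run this from a file so a source path exists): both helpers unwrap decorated objects via __wrapped__ before locating the defining file and line, and return None for builtins.

from IPython.core.oinspect import find_file, find_source_lines

def logged(fn):
    def wrapper(*args, **kwargs):
        return fn(*args, **kwargs)
    wrapper.__wrapped__ = fn   # what functools.wraps does on Python 3
    return wrapper

@logged
def compute(x):
    return x + 1

print(find_file(compute))          # absolute path of this script (resolved through __wrapped__)
print(find_source_lines(compute))  # line where compute's definition starts
print(find_file(len))              # None: builtins have no source file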
+
class Inspector(Colorable):
-
- def __init__(self, color_table=InspectColors,
- code_color_table=PyColorize.ANSICodeColors,
- scheme='NoColor',
+
+ def __init__(self, color_table=InspectColors,
+ code_color_table=PyColorize.ANSICodeColors,
+ scheme='NoColor',
str_detail_level=0,
parent=None, config=None):
super(Inspector, self).__init__(parent=parent, config=config)
- self.color_table = color_table
+ self.color_table = color_table
self.parser = PyColorize.Parser(out='str', parent=self, style=scheme)
- self.format = self.parser.format
- self.str_detail_level = str_detail_level
- self.set_active_scheme(scheme)
-
- def _getdef(self,obj,oname=''):
- """Return the call signature for any callable object.
-
- If any exception is generated, None is returned instead and the
- exception is suppressed."""
- try:
- hdef = oname + str(signature(obj))
- return cast_unicode(hdef)
- except:
- return None
-
- def __head(self,h):
- """Return a header string with proper colors."""
- return '%s%s%s' % (self.color_table.active_colors.header,h,
- self.color_table.active_colors.normal)
-
- def set_active_scheme(self, scheme):
- self.color_table.set_active_scheme(scheme)
- self.parser.color_table.set_active_scheme(scheme)
-
- def noinfo(self, msg, oname):
- """Generic message when no information is found."""
- print('No %s found' % msg, end=' ')
- if oname:
- print('for %s' % oname)
- else:
- print()
-
- def pdef(self, obj, oname=''):
- """Print the call signature for any callable object.
-
- If the object is a class, print the constructor information."""
-
- if not callable(obj):
- print('Object is not callable.')
- return
-
- header = ''
-
- if inspect.isclass(obj):
- header = self.__head('Class constructor information:\n')
- elif (not py3compat.PY3) and type(obj) is types.InstanceType:
- obj = obj.__call__
-
- output = self._getdef(obj,oname)
- if output is None:
- self.noinfo('definition header',oname)
- else:
+ self.format = self.parser.format
+ self.str_detail_level = str_detail_level
+ self.set_active_scheme(scheme)
+
+ def _getdef(self,obj,oname=''):
+ """Return the call signature for any callable object.
+
+ If any exception is generated, None is returned instead and the
+ exception is suppressed."""
+ try:
+ hdef = oname + str(signature(obj))
+ return cast_unicode(hdef)
+ except:
+ return None
+
+ def __head(self,h):
+ """Return a header string with proper colors."""
+ return '%s%s%s' % (self.color_table.active_colors.header,h,
+ self.color_table.active_colors.normal)
+
+ def set_active_scheme(self, scheme):
+ self.color_table.set_active_scheme(scheme)
+ self.parser.color_table.set_active_scheme(scheme)
+
+ def noinfo(self, msg, oname):
+ """Generic message when no information is found."""
+ print('No %s found' % msg, end=' ')
+ if oname:
+ print('for %s' % oname)
+ else:
+ print()
+
+ def pdef(self, obj, oname=''):
+ """Print the call signature for any callable object.
+
+ If the object is a class, print the constructor information."""
+
+ if not callable(obj):
+ print('Object is not callable.')
+ return
+
+ header = ''
+
+ if inspect.isclass(obj):
+ header = self.__head('Class constructor information:\n')
+ elif (not py3compat.PY3) and type(obj) is types.InstanceType:
+ obj = obj.__call__
+
+ output = self._getdef(obj,oname)
+ if output is None:
+ self.noinfo('definition header',oname)
+ else:
print(header,self.format(output), end=' ')
-
- # In Python 3, all classes are new-style, so they all have __init__.
- @skip_doctest_py3
+
+ # In Python 3, all classes are new-style, so they all have __init__.
+ @skip_doctest_py3
def pdoc(self, obj, oname='', formatter=None):
- """Print the docstring for any object.
-
- Optional:
- -formatter: a function to run the docstring through for specially
- formatted docstrings.
-
- Examples
- --------
-
- In [1]: class NoInit:
- ...: pass
-
- In [2]: class NoDoc:
- ...: def __init__(self):
- ...: pass
-
- In [3]: %pdoc NoDoc
- No documentation found for NoDoc
-
- In [4]: %pdoc NoInit
- No documentation found for NoInit
-
- In [5]: obj = NoInit()
-
- In [6]: %pdoc obj
- No documentation found for obj
-
- In [5]: obj2 = NoDoc()
-
- In [6]: %pdoc obj2
- No documentation found for obj2
- """
-
- head = self.__head # For convenience
- lines = []
- ds = getdoc(obj)
- if formatter:
+ """Print the docstring for any object.
+
+ Optional:
+ -formatter: a function to run the docstring through for specially
+ formatted docstrings.
+
+ Examples
+ --------
+
+ In [1]: class NoInit:
+ ...: pass
+
+ In [2]: class NoDoc:
+ ...: def __init__(self):
+ ...: pass
+
+ In [3]: %pdoc NoDoc
+ No documentation found for NoDoc
+
+ In [4]: %pdoc NoInit
+ No documentation found for NoInit
+
+ In [5]: obj = NoInit()
+
+ In [6]: %pdoc obj
+ No documentation found for obj
+
+ In [5]: obj2 = NoDoc()
+
+ In [6]: %pdoc obj2
+ No documentation found for obj2
+ """
+
+ head = self.__head # For convenience
+ lines = []
+ ds = getdoc(obj)
+ if formatter:
ds = formatter(ds).get('plain/text', ds)
- if ds:
- lines.append(head("Class docstring:"))
- lines.append(indent(ds))
- if inspect.isclass(obj) and hasattr(obj, '__init__'):
- init_ds = getdoc(obj.__init__)
- if init_ds is not None:
- lines.append(head("Init docstring:"))
- lines.append(indent(init_ds))
- elif hasattr(obj,'__call__'):
- call_ds = getdoc(obj.__call__)
- if call_ds:
- lines.append(head("Call docstring:"))
- lines.append(indent(call_ds))
-
- if not lines:
- self.noinfo('documentation',oname)
- else:
- page.page('\n'.join(lines))
-
- def psource(self, obj, oname=''):
- """Print the source code for an object."""
-
- # Flush the source cache because inspect can return out-of-date source
- linecache.checkcache()
- try:
- src = getsource(obj, oname=oname)
- except Exception:
- src = None
-
- if src is None:
- self.noinfo('source', oname)
- else:
- page.page(self.format(src))
-
- def pfile(self, obj, oname=''):
- """Show the whole file where an object was defined."""
-
- lineno = find_source_lines(obj)
- if lineno is None:
- self.noinfo('file', oname)
- return
-
- ofile = find_file(obj)
- # run contents of file through pager starting at line where the object
- # is defined, as long as the file isn't binary and is actually on the
- # filesystem.
- if ofile.endswith(('.so', '.dll', '.pyd')):
- print('File %r is binary, not printing.' % ofile)
- elif not os.path.isfile(ofile):
- print('File %r does not exist, not printing.' % ofile)
- else:
- # Print only text files, not extension binaries. Note that
- # getsourcelines returns lineno with 1-offset and page() uses
- # 0-offset, so we must adjust.
- page.page(self.format(openpy.read_py_file(ofile, skip_encoding_cookie=False)), lineno - 1)
-
- def _format_fields(self, fields, title_width=0):
- """Formats a list of fields for display.
-
- Parameters
- ----------
- fields : list
- A list of 2-tuples: (field_title, field_content)
- title_width : int
- How many characters to pad titles to. Defaults to the longest title.
- """
- out = []
- header = self.__head
- if title_width == 0:
- title_width = max(len(title) + 2 for title, _ in fields)
- for title, content in fields:
- if len(content.splitlines()) > 1:
+ if ds:
+ lines.append(head("Class docstring:"))
+ lines.append(indent(ds))
+ if inspect.isclass(obj) and hasattr(obj, '__init__'):
+ init_ds = getdoc(obj.__init__)
+ if init_ds is not None:
+ lines.append(head("Init docstring:"))
+ lines.append(indent(init_ds))
+ elif hasattr(obj,'__call__'):
+ call_ds = getdoc(obj.__call__)
+ if call_ds:
+ lines.append(head("Call docstring:"))
+ lines.append(indent(call_ds))
+
+ if not lines:
+ self.noinfo('documentation',oname)
+ else:
+ page.page('\n'.join(lines))
+
+ def psource(self, obj, oname=''):
+ """Print the source code for an object."""
+
+ # Flush the source cache because inspect can return out-of-date source
+ linecache.checkcache()
+ try:
+ src = getsource(obj, oname=oname)
+ except Exception:
+ src = None
+
+ if src is None:
+ self.noinfo('source', oname)
+ else:
+ page.page(self.format(src))
+
+ def pfile(self, obj, oname=''):
+ """Show the whole file where an object was defined."""
+
+ lineno = find_source_lines(obj)
+ if lineno is None:
+ self.noinfo('file', oname)
+ return
+
+ ofile = find_file(obj)
+ # run contents of file through pager starting at line where the object
+ # is defined, as long as the file isn't binary and is actually on the
+ # filesystem.
+ if ofile.endswith(('.so', '.dll', '.pyd')):
+ print('File %r is binary, not printing.' % ofile)
+ elif not os.path.isfile(ofile):
+ print('File %r does not exist, not printing.' % ofile)
+ else:
+ # Print only text files, not extension binaries. Note that
+ # getsourcelines returns lineno with 1-offset and page() uses
+ # 0-offset, so we must adjust.
+ page.page(self.format(openpy.read_py_file(ofile, skip_encoding_cookie=False)), lineno - 1)
+
+ def _format_fields(self, fields, title_width=0):
+ """Formats a list of fields for display.
+
+ Parameters
+ ----------
+ fields : list
+ A list of 2-tuples: (field_title, field_content)
+ title_width : int
+ How many characters to pad titles to. Defaults to the longest title.
+ """
+ out = []
+ header = self.__head
+ if title_width == 0:
+ title_width = max(len(title) + 2 for title, _ in fields)
+ for title, content in fields:
+ if len(content.splitlines()) > 1:
title = header(title + ':') + '\n'
- else:
+ else:
title = header((title + ':').ljust(title_width))
- out.append(cast_unicode(title) + cast_unicode(content))
- return "\n".join(out)
-
+ out.append(cast_unicode(title) + cast_unicode(content))
+ return "\n".join(out)
+
def _mime_format(self, text, formatter=None):
"""Return a mime bundle representation of the input text.
@@ -637,56 +637,56 @@ class Inspector(Colorable):
'text/html': pylight(text)
}
- if info['isalias']:
+ if info['isalias']:
append_field(_mime, 'Repr', 'string_form')
-
- elif info['ismagic']:
+
+ elif info['ismagic']:
if detail_level > 0:
append_field(_mime, 'Source', 'source', code_formatter)
- else:
+ else:
append_field(_mime, 'Docstring', 'docstring', formatter)
append_field(_mime, 'File', 'file')
-
- elif info['isclass'] or is_simple_callable(obj):
- # Functions, methods, classes
+
+ elif info['isclass'] or is_simple_callable(obj):
+ # Functions, methods, classes
append_field(_mime, 'Signature', 'definition', code_formatter)
append_field(_mime, 'Init signature', 'init_definition', code_formatter)
if detail_level > 0 and info['source']:
append_field(_mime, 'Source', 'source', code_formatter)
- else:
+ else:
append_field(_mime, 'Docstring', 'docstring', formatter)
append_field(_mime, 'Init docstring', 'init_docstring', formatter)
-
+
append_field(_mime, 'File', 'file')
append_field(_mime, 'Type', 'type_name')
-
- else:
- # General Python objects
+
+ else:
+ # General Python objects
append_field(_mime, 'Signature', 'definition', code_formatter)
append_field(_mime, 'Call signature', 'call_def', code_formatter)
append_field(_mime, 'Type', 'type_name')
-
- # Base class for old-style instances
- if (not py3compat.PY3) and isinstance(obj, types.InstanceType) and info['base_class']:
+
+ # Base class for old-style instances
+ if (not py3compat.PY3) and isinstance(obj, types.InstanceType) and info['base_class']:
append_field(_mime, 'Base Class', 'base_class')
-
+
append_field(_mime, 'String form', 'string_form')
-
- # Namespace
- if info['namespace'] != 'Interactive':
+
+ # Namespace
+ if info['namespace'] != 'Interactive':
append_field(_mime, 'Namespace', 'namespace')
-
+
append_field(_mime, 'Length', 'length')
append_field(_mime, 'File', 'file')
- # Source or docstring, depending on detail level and whether
- # source found.
+ # Source or docstring, depending on detail level and whether
+ # source found.
if detail_level > 0:
append_field(_mime, 'Source', 'source', code_formatter)
else:
append_field(_mime, 'Docstring', 'docstring', formatter)
-
+
append_field(_mime, 'Class docstring', 'class_docstring', formatter)
append_field(_mime, 'Init docstring', 'init_docstring', formatter)
append_field(_mime, 'Call docstring', 'call_docstring', formatter)
@@ -695,15 +695,15 @@ class Inspector(Colorable):
return self.format_mime(_mime)
def pinfo(self, obj, oname='', formatter=None, info=None, detail_level=0, enable_html_pager=True):
- """Show detailed information about an object.
-
- Optional arguments:
-
- - oname: name of the variable pointing to the object.
-
+ """Show detailed information about an object.
+
+ Optional arguments:
+
+ - oname: name of the variable pointing to the object.
+
- formatter: callable (optional)
A special formatter for docstrings.
-
+
The formatter is a callable that takes a string as an input
and returns either a formatted string or a mime type bundle
in the form of a dictionary.
@@ -711,17 +711,17 @@ class Inspector(Colorable):
Although the support of custom formatter returning a string
instead of a mime type bundle is deprecated.
- - info: a structure with some information fields which may have been
- precomputed already.
-
- - detail_level: if set to 1, more information is given.
- """
+ - info: a structure with some information fields which may have been
+ precomputed already.
+
+ - detail_level: if set to 1, more information is given.
+ """
info = self._get_info(obj, oname, formatter, info, detail_level)
if not enable_html_pager:
del info['text/html']
page.page(info)
- def info(self, obj, oname='', formatter=None, info=None, detail_level=0):
+ def info(self, obj, oname='', formatter=None, info=None, detail_level=0):
"""DEPRECATED. Compute a dict with detailed information about an object.
"""
if formatter is not None:
@@ -731,126 +731,126 @@ class Inspector(Colorable):
return self._info(obj, oname=oname, info=info, detail_level=detail_level)
def _info(self, obj, oname='', info=None, detail_level=0):
- """Compute a dict with detailed information about an object.
-
- Optional arguments:
-
- - oname: name of the variable pointing to the object.
-
- - info: a structure with some information fields which may have been
- precomputed already.
-
- - detail_level: if set to 1, more information is given.
- """
-
- obj_type = type(obj)
-
- if info is None:
- ismagic = 0
- isalias = 0
- ospace = ''
- else:
- ismagic = info.ismagic
- isalias = info.isalias
- ospace = info.namespace
-
- # Get docstring, special-casing aliases:
- if isalias:
- if not callable(obj):
- try:
- ds = "Alias to the system command:\n %s" % obj[1]
- except:
- ds = "Alias: " + str(obj)
- else:
- ds = "Alias to " + str(obj)
- if obj.__doc__:
- ds += "\nDocstring:\n" + obj.__doc__
- else:
- ds = getdoc(obj)
- if ds is None:
- ds = '<no docstring>'
-
- # store output in a dict, we initialize it here and fill it as we go
- out = dict(name=oname, found=True, isalias=isalias, ismagic=ismagic)
-
- string_max = 200 # max size of strings to show (snipped if longer)
+ """Compute a dict with detailed information about an object.
+
+ Optional arguments:
+
+ - oname: name of the variable pointing to the object.
+
+ - info: a structure with some information fields which may have been
+ precomputed already.
+
+ - detail_level: if set to 1, more information is given.
+ """
+
+ obj_type = type(obj)
+
+ if info is None:
+ ismagic = 0
+ isalias = 0
+ ospace = ''
+ else:
+ ismagic = info.ismagic
+ isalias = info.isalias
+ ospace = info.namespace
+
+ # Get docstring, special-casing aliases:
+ if isalias:
+ if not callable(obj):
+ try:
+ ds = "Alias to the system command:\n %s" % obj[1]
+ except:
+ ds = "Alias: " + str(obj)
+ else:
+ ds = "Alias to " + str(obj)
+ if obj.__doc__:
+ ds += "\nDocstring:\n" + obj.__doc__
+ else:
+ ds = getdoc(obj)
+ if ds is None:
+ ds = '<no docstring>'
+
+ # store output in a dict, we initialize it here and fill it as we go
+ out = dict(name=oname, found=True, isalias=isalias, ismagic=ismagic)
+
+ string_max = 200 # max size of strings to show (snipped if longer)
shalf = int((string_max - 5) / 2)
-
- if ismagic:
- obj_type_name = 'Magic function'
- elif isalias:
- obj_type_name = 'System alias'
- else:
- obj_type_name = obj_type.__name__
- out['type_name'] = obj_type_name
-
- try:
- bclass = obj.__class__
- out['base_class'] = str(bclass)
- except: pass
-
- # String form, but snip if too long in ? form (full in ??)
- if detail_level >= self.str_detail_level:
- try:
- ostr = str(obj)
- str_head = 'string_form'
- if not detail_level and len(ostr)>string_max:
- ostr = ostr[:shalf] + ' <...> ' + ostr[-shalf:]
- ostr = ("\n" + " " * len(str_head.expandtabs())).\
- join(q.strip() for q in ostr.split("\n"))
- out[str_head] = ostr
- except:
- pass
-
- if ospace:
- out['namespace'] = ospace
-
- # Length (for strings and lists)
- try:
- out['length'] = str(len(obj))
- except: pass
-
- # Filename where object was defined
- binary_file = False
- fname = find_file(obj)
- if fname is None:
- # if anything goes wrong, we don't want to show source, so it's as
- # if the file was binary
- binary_file = True
- else:
- if fname.endswith(('.so', '.dll', '.pyd')):
- binary_file = True
- elif fname.endswith('<string>'):
- fname = 'Dynamically generated function. No source code available.'
- out['file'] = compress_user(fname)
-
- # Original source code for a callable, class or property.
- if detail_level:
- # Flush the source cache because inspect can return out-of-date
- # source
- linecache.checkcache()
- try:
- if isinstance(obj, property) or not binary_file:
- src = getsource(obj, oname)
- if src is not None:
- src = src.rstrip()
- out['source'] = src
-
- except Exception:
- pass
-
- # Add docstring only if no source is to be shown (avoid repetitions).
- if ds and out.get('source', None) is None:
- out['docstring'] = ds
-
- # Constructor docstring for classes
- if inspect.isclass(obj):
- out['isclass'] = True
+
+ if ismagic:
+ obj_type_name = 'Magic function'
+ elif isalias:
+ obj_type_name = 'System alias'
+ else:
+ obj_type_name = obj_type.__name__
+ out['type_name'] = obj_type_name
+
+ try:
+ bclass = obj.__class__
+ out['base_class'] = str(bclass)
+ except: pass
+
+ # String form, but snip if too long in ? form (full in ??)
+ if detail_level >= self.str_detail_level:
+ try:
+ ostr = str(obj)
+ str_head = 'string_form'
+ if not detail_level and len(ostr)>string_max:
+ ostr = ostr[:shalf] + ' <...> ' + ostr[-shalf:]
+ ostr = ("\n" + " " * len(str_head.expandtabs())).\
+ join(q.strip() for q in ostr.split("\n"))
+ out[str_head] = ostr
+ except:
+ pass
+
+ if ospace:
+ out['namespace'] = ospace
+
+ # Length (for strings and lists)
+ try:
+ out['length'] = str(len(obj))
+ except: pass
+
+ # Filename where object was defined
+ binary_file = False
+ fname = find_file(obj)
+ if fname is None:
+ # if anything goes wrong, we don't want to show source, so it's as
+ # if the file was binary
+ binary_file = True
+ else:
+ if fname.endswith(('.so', '.dll', '.pyd')):
+ binary_file = True
+ elif fname.endswith('<string>'):
+ fname = 'Dynamically generated function. No source code available.'
+ out['file'] = compress_user(fname)
+
+ # Original source code for a callable, class or property.
+ if detail_level:
+ # Flush the source cache because inspect can return out-of-date
+ # source
+ linecache.checkcache()
+ try:
+ if isinstance(obj, property) or not binary_file:
+ src = getsource(obj, oname)
+ if src is not None:
+ src = src.rstrip()
+ out['source'] = src
+
+ except Exception:
+ pass
+
+ # Add docstring only if no source is to be shown (avoid repetitions).
+ if ds and out.get('source', None) is None:
+ out['docstring'] = ds
+
+ # Constructor docstring for classes
+ if inspect.isclass(obj):
+ out['isclass'] = True
# get the init signature:
- try:
+ try:
init_def = self._getdef(obj, oname)
- except AttributeError:
+ except AttributeError:
init_def = None
# get the __init__ docstring
@@ -858,7 +858,7 @@ class Inspector(Colorable):
obj_init = obj.__init__
except AttributeError:
init_ds = None
- else:
+ else:
if init_def is None:
# Get signature from init if top-level sig failed.
# Can happen for built-in types (list, etc.).
@@ -867,149 +867,149 @@ class Inspector(Colorable):
except AttributeError:
pass
init_ds = getdoc(obj_init)
- # Skip Python's auto-generated docstrings
- if init_ds == _object_init_docstring:
- init_ds = None
-
+ # Skip Python's auto-generated docstrings
+ if init_ds == _object_init_docstring:
+ init_ds = None
+
if init_def:
out['init_definition'] = init_def
-
+
if init_ds:
out['init_docstring'] = init_ds
- # and class docstring for instances:
- else:
- # reconstruct the function definition and print it:
- defln = self._getdef(obj, oname)
- if defln:
+ # and class docstring for instances:
+ else:
+ # reconstruct the function definition and print it:
+ defln = self._getdef(obj, oname)
+ if defln:
out['definition'] = defln
-
- # First, check whether the instance docstring is identical to the
- # class one, and print it separately if they don't coincide. In
- # most cases they will, but it's nice to print all the info for
- # objects which use instance-customized docstrings.
- if ds:
- try:
- cls = getattr(obj,'__class__')
- except:
- class_ds = None
- else:
- class_ds = getdoc(cls)
- # Skip Python's auto-generated docstrings
- if class_ds in _builtin_type_docstrings:
- class_ds = None
- if class_ds and ds != class_ds:
- out['class_docstring'] = class_ds
-
- # Next, try to show constructor docstrings
- try:
- init_ds = getdoc(obj.__init__)
- # Skip Python's auto-generated docstrings
- if init_ds == _object_init_docstring:
- init_ds = None
- except AttributeError:
- init_ds = None
- if init_ds:
- out['init_docstring'] = init_ds
-
- # Call form docstring for callable instances
- if safe_hasattr(obj, '__call__') and not is_simple_callable(obj):
- call_def = self._getdef(obj.__call__, oname)
+
+ # First, check whether the instance docstring is identical to the
+ # class one, and print it separately if they don't coincide. In
+ # most cases they will, but it's nice to print all the info for
+ # objects which use instance-customized docstrings.
+ if ds:
+ try:
+ cls = getattr(obj,'__class__')
+ except:
+ class_ds = None
+ else:
+ class_ds = getdoc(cls)
+ # Skip Python's auto-generated docstrings
+ if class_ds in _builtin_type_docstrings:
+ class_ds = None
+ if class_ds and ds != class_ds:
+ out['class_docstring'] = class_ds
+
+ # Next, try to show constructor docstrings
+ try:
+ init_ds = getdoc(obj.__init__)
+ # Skip Python's auto-generated docstrings
+ if init_ds == _object_init_docstring:
+ init_ds = None
+ except AttributeError:
+ init_ds = None
+ if init_ds:
+ out['init_docstring'] = init_ds
+
+ # Call form docstring for callable instances
+ if safe_hasattr(obj, '__call__') and not is_simple_callable(obj):
+ call_def = self._getdef(obj.__call__, oname)
if call_def and (call_def != out.get('definition')):
- # it may never be the case that call def and definition differ,
- # but don't include the same signature twice
+ # it may never be the case that call def and definition differ,
+ # but don't include the same signature twice
out['call_def'] = call_def
- call_ds = getdoc(obj.__call__)
- # Skip Python's auto-generated docstrings
- if call_ds == _func_call_docstring:
- call_ds = None
- if call_ds:
- out['call_docstring'] = call_ds
-
- # Compute the object's argspec as a callable. The key is to decide
- # whether to pull it from the object itself, from its __init__ or
- # from its __call__ method.
-
- if inspect.isclass(obj):
- # Old-style classes need not have an __init__
- callable_obj = getattr(obj, "__init__", None)
- elif callable(obj):
- callable_obj = obj
- else:
- callable_obj = None
-
- if callable_obj is not None:
- try:
- argspec = getargspec(callable_obj)
- except (TypeError, AttributeError):
- # For extensions/builtins we can't retrieve the argspec
- pass
- else:
- # named tuples' _asdict() method returns an OrderedDict, but we
- # want a plain dict here
- out['argspec'] = argspec_dict = dict(argspec._asdict())
- # We called this varkw before argspec became a named tuple.
- # With getfullargspec it's also called varkw.
- if 'varkw' not in argspec_dict:
- argspec_dict['varkw'] = argspec_dict.pop('keywords')
-
- return object_info(**out)
-
- def psearch(self,pattern,ns_table,ns_search=[],
- ignore_case=False,show_all=False):
- """Search namespaces with wildcards for objects.
-
- Arguments:
-
- - pattern: string containing shell-like wildcards to use in namespace
- searches and optionally a type specification to narrow the search to
- objects of that type.
-
- - ns_table: dict of name->namespaces for search.
-
- Optional arguments:
-
- - ns_search: list of namespace names to include in search.
-
- - ignore_case(False): make the search case-insensitive.
-
- - show_all(False): show all names, including those starting with
- underscores.
- """
- #print 'ps pattern:<%r>' % pattern # dbg
-
- # defaults
- type_pattern = 'all'
- filter = ''
-
- cmds = pattern.split()
- len_cmds = len(cmds)
- if len_cmds == 1:
- # Only filter pattern given
- filter = cmds[0]
- elif len_cmds == 2:
- # Both filter and type specified
- filter,type_pattern = cmds
- else:
- raise ValueError('invalid argument string for psearch: <%s>' %
- pattern)
-
- # filter search namespaces
- for name in ns_search:
- if name not in ns_table:
- raise ValueError('invalid namespace <%s>. Valid names: %s' %
- (name,ns_table.keys()))
-
- #print 'type_pattern:',type_pattern # dbg
- search_result, namespaces_seen = set(), set()
- for ns_name in ns_search:
- ns = ns_table[ns_name]
- # Normally, locals and globals are the same, so we just check one.
- if id(ns) in namespaces_seen:
- continue
- namespaces_seen.add(id(ns))
- tmp_res = list_namespace(ns, type_pattern, filter,
- ignore_case=ignore_case, show_all=show_all)
- search_result.update(tmp_res)
-
- page.page('\n'.join(sorted(search_result)))
+ call_ds = getdoc(obj.__call__)
+ # Skip Python's auto-generated docstrings
+ if call_ds == _func_call_docstring:
+ call_ds = None
+ if call_ds:
+ out['call_docstring'] = call_ds
+
+ # Compute the object's argspec as a callable. The key is to decide
+ # whether to pull it from the object itself, from its __init__ or
+ # from its __call__ method.
+
+ if inspect.isclass(obj):
+ # Old-style classes need not have an __init__
+ callable_obj = getattr(obj, "__init__", None)
+ elif callable(obj):
+ callable_obj = obj
+ else:
+ callable_obj = None
+
+ if callable_obj is not None:
+ try:
+ argspec = getargspec(callable_obj)
+ except (TypeError, AttributeError):
+ # For extensions/builtins we can't retrieve the argspec
+ pass
+ else:
+ # named tuples' _asdict() method returns an OrderedDict, but we
+ # want a plain dict here
+ out['argspec'] = argspec_dict = dict(argspec._asdict())
+ # We called this varkw before argspec became a named tuple.
+ # With getfullargspec it's also called varkw.
+ if 'varkw' not in argspec_dict:
+ argspec_dict['varkw'] = argspec_dict.pop('keywords')
+
+ return object_info(**out)
+
+ def psearch(self,pattern,ns_table,ns_search=[],
+ ignore_case=False,show_all=False):
+ """Search namespaces with wildcards for objects.
+
+ Arguments:
+
+ - pattern: string containing shell-like wildcards to use in namespace
+ searches and optionally a type specification to narrow the search to
+ objects of that type.
+
+ - ns_table: dict of name->namespaces for search.
+
+ Optional arguments:
+
+ - ns_search: list of namespace names to include in search.
+
+ - ignore_case(False): make the search case-insensitive.
+
+ - show_all(False): show all names, including those starting with
+ underscores.
+ """
+ #print 'ps pattern:<%r>' % pattern # dbg
+
+ # defaults
+ type_pattern = 'all'
+ filter = ''
+
+ cmds = pattern.split()
+ len_cmds = len(cmds)
+ if len_cmds == 1:
+ # Only filter pattern given
+ filter = cmds[0]
+ elif len_cmds == 2:
+ # Both filter and type specified
+ filter,type_pattern = cmds
+ else:
+ raise ValueError('invalid argument string for psearch: <%s>' %
+ pattern)
+
+ # filter search namespaces
+ for name in ns_search:
+ if name not in ns_table:
+ raise ValueError('invalid namespace <%s>. Valid names: %s' %
+ (name,ns_table.keys()))
+
+ #print 'type_pattern:',type_pattern # dbg
+ search_result, namespaces_seen = set(), set()
+ for ns_name in ns_search:
+ ns = ns_table[ns_name]
+ # Normally, locals and globals are the same, so we just check one.
+ if id(ns) in namespaces_seen:
+ continue
+ namespaces_seen.add(id(ns))
+ tmp_res = list_namespace(ns, type_pattern, filter,
+ ignore_case=ignore_case, show_all=show_all)
+ search_result.update(tmp_res)
+
+ page.page('\n'.join(sorted(search_result)))
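A hedged usage sketch (the ns dict and its names are made up): the first word of the pattern is a wildcard filter and an optional second word narrows the match to a type name (following the Python 2 types module here), mirroring the %psearch magic; results go through the pager.

from IPython.core.oinspect import Inspector

ns = {'my_list': [1, 2, 3], 'my_dict': {'a': 1}, '_hidden': 0}
insp = Inspector()
insp.psearch('my_*', ns_table={'user': ns}, ns_search=['user'])
# pages: my_dict, my_list  ('_hidden' needs show_all=True)
insp.psearch('my_* list', ns_table={'user': ns}, ns_search=['user'])
# type-restricted: only my_list matches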
diff --git a/contrib/python/ipython/py2/IPython/core/page.py b/contrib/python/ipython/py2/IPython/core/page.py
index ba14901e19..6d213c9f29 100644
--- a/contrib/python/ipython/py2/IPython/core/page.py
+++ b/contrib/python/ipython/py2/IPython/core/page.py
@@ -1,386 +1,386 @@
-# encoding: utf-8
-"""
-Paging capabilities for IPython.core
-
-Notes
------
-
-For now this uses IPython hooks, so it can't be in IPython.utils. If we can get
-rid of that dependency, we could move it there.
------
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import print_function
-
-import os
-import re
-import sys
-import tempfile
-
-from io import UnsupportedOperation
-
-from IPython import get_ipython
-from IPython.core.display import display
-from IPython.core.error import TryNext
-from IPython.utils.data import chop
-from IPython.utils.process import system
-from IPython.utils.terminal import get_terminal_size
-from IPython.utils import py3compat
-
-
-def display_page(strng, start=0, screen_lines=25):
- """Just display, no paging. screen_lines is ignored."""
- if isinstance(strng, dict):
- data = strng
- else:
- if start:
- strng = u'\n'.join(strng.splitlines()[start:])
+# encoding: utf-8
+"""
+Paging capabilities for IPython.core
+
+Notes
+-----
+
+For now this uses IPython hooks, so it can't be in IPython.utils. If we can get
+rid of that dependency, we could move it there.
+-----
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import print_function
+
+import os
+import re
+import sys
+import tempfile
+
+from io import UnsupportedOperation
+
+from IPython import get_ipython
+from IPython.core.display import display
+from IPython.core.error import TryNext
+from IPython.utils.data import chop
+from IPython.utils.process import system
+from IPython.utils.terminal import get_terminal_size
+from IPython.utils import py3compat
+
+
+def display_page(strng, start=0, screen_lines=25):
+ """Just display, no paging. screen_lines is ignored."""
+ if isinstance(strng, dict):
+ data = strng
+ else:
+ if start:
+ strng = u'\n'.join(strng.splitlines()[start:])
data = { 'text/plain': strng }
- display(data, raw=True)
-
-
-def as_hook(page_func):
- """Wrap a pager func to strip the `self` arg
-
- so it can be called as a hook.
- """
- return lambda self, *args, **kwargs: page_func(*args, **kwargs)
-
-
-esc_re = re.compile(r"(\x1b[^m]+m)")
-
-def page_dumb(strng, start=0, screen_lines=25):
- """Very dumb 'pager' in Python, for when nothing else works.
-
- Only moves forward, same interface as page(), except for pager_cmd and
+ display(data, raw=True)
+
+
+def as_hook(page_func):
+ """Wrap a pager func to strip the `self` arg
+
+ so it can be called as a hook.
+ """
+ return lambda self, *args, **kwargs: page_func(*args, **kwargs)
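For illustration, a sketch of how such a wrapped pager function can be registered (assuming a running IPython shell is available): this is one way a frontend can replace the external pager with inline display.

from IPython import get_ipython
from IPython.core import page

ip = get_ipython()
if ip is not None:
    # route pager output straight to the display machinery instead of less
    ip.set_hook('show_in_pager', page.as_hook(page.display_page), 99)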
+
+
+esc_re = re.compile(r"(\x1b[^m]+m)")
+
+def page_dumb(strng, start=0, screen_lines=25):
+ """Very dumb 'pager' in Python, for when nothing else works.
+
+ Only moves forward, same interface as page(), except for pager_cmd and
mode.
"""
if isinstance(strng, dict):
strng = strng.get('text/plain', '')
- out_ln = strng.splitlines()[start:]
- screens = chop(out_ln,screen_lines-1)
- if len(screens) == 1:
+ out_ln = strng.splitlines()[start:]
+ screens = chop(out_ln,screen_lines-1)
+ if len(screens) == 1:
print(os.linesep.join(screens[0]))
- else:
- last_escape = ""
- for scr in screens[0:-1]:
- hunk = os.linesep.join(scr)
+ else:
+ last_escape = ""
+ for scr in screens[0:-1]:
+ hunk = os.linesep.join(scr)
print(last_escape + hunk)
- if not page_more():
- return
- esc_list = esc_re.findall(hunk)
- if len(esc_list) > 0:
- last_escape = esc_list[-1]
+ if not page_more():
+ return
+ esc_list = esc_re.findall(hunk)
+ if len(esc_list) > 0:
+ last_escape = esc_list[-1]
print(last_escape + os.linesep.join(screens[-1]))
-
-def _detect_screen_size(screen_lines_def):
- """Attempt to work out the number of lines on the screen.
-
- This is called by page(). It can raise an error (e.g. when run in the
- test suite), so it's separated out so it can easily be called in a try block.
- """
- TERM = os.environ.get('TERM',None)
- if not((TERM=='xterm' or TERM=='xterm-color') and sys.platform != 'sunos5'):
- # curses causes problems on many terminals other than xterm, and
- # some termios calls lock up on Sun OS5.
- return screen_lines_def
-
- try:
- import termios
- import curses
- except ImportError:
- return screen_lines_def
-
- # There is a bug in curses, where *sometimes* it fails to properly
- # initialize, and then after the endwin() call is made, the
- # terminal is left in an unusable state. Rather than trying to
- # check every time for this (by requesting and comparing termios
- # flags each time), we just save the initial terminal state and
- # unconditionally reset it every time. It's cheaper than making
- # the checks.
- try:
- term_flags = termios.tcgetattr(sys.stdout)
- except termios.error as err:
- # can fail on Linux 2.6, pager_page will catch the TypeError
- raise TypeError('termios error: {0}'.format(err))
-
- # Curses modifies the stdout buffer size by default, which messes
- # up Python's normal stdout buffering. This would manifest itself
- # to IPython users as delayed printing on stdout after having used
- # the pager.
- #
- # We can prevent this by manually setting the NCURSES_NO_SETBUF
- # environment variable. For more details, see:
- # http://bugs.python.org/issue10144
- NCURSES_NO_SETBUF = os.environ.get('NCURSES_NO_SETBUF', None)
- os.environ['NCURSES_NO_SETBUF'] = ''
-
- # Proceed with curses initialization
- try:
- scr = curses.initscr()
- except AttributeError:
- # Curses on Solaris may not be complete, so we can't use it there
- return screen_lines_def
-
- screen_lines_real,screen_cols = scr.getmaxyx()
- curses.endwin()
-
- # Restore environment
- if NCURSES_NO_SETBUF is None:
- del os.environ['NCURSES_NO_SETBUF']
- else:
- os.environ['NCURSES_NO_SETBUF'] = NCURSES_NO_SETBUF
-
- # Restore terminal state in case endwin() didn't.
- termios.tcsetattr(sys.stdout,termios.TCSANOW,term_flags)
- # Now we have what we needed: the screen size in rows/columns
- return screen_lines_real
- #print '***Screen size:',screen_lines_real,'lines x',\
- #screen_cols,'columns.' # dbg
-
-def pager_page(strng, start=0, screen_lines=0, pager_cmd=None):
- """Display a string, piping through a pager after a certain length.
-
- strng can be a mime-bundle dict, supplying multiple representations,
- keyed by mime-type.
-
- The screen_lines parameter specifies the number of *usable* lines of your
- terminal screen (total lines minus lines you need to reserve to show other
- information).
-
- If you set screen_lines to a number <=0, page() will try to auto-determine
- your screen size and will only use up to (screen_size+screen_lines) for
- printing, paging after that. That is, if you want auto-detection but need
- to reserve the bottom 3 lines of the screen, use screen_lines = -3, and for
- auto-detection without any lines reserved simply use screen_lines = 0.
-
- If a string won't fit in the allowed lines, it is sent through the
- specified pager command. If none given, look for PAGER in the environment,
- and ultimately default to less.
-
- If no system pager works, the string is sent through a 'dumb pager'
- written in python, very simplistic.
- """
-
- # for compatibility with mime-bundle form:
- if isinstance(strng, dict):
- strng = strng['text/plain']
-
- # Ugly kludge, but calling curses.initscr() flat out crashes in emacs
- TERM = os.environ.get('TERM','dumb')
- if TERM in ['dumb','emacs'] and os.name != 'nt':
- print(strng)
- return
- # chop off the topmost part of the string we don't want to see
- str_lines = strng.splitlines()[start:]
- str_toprint = os.linesep.join(str_lines)
- num_newlines = len(str_lines)
- len_str = len(str_toprint)
-
- # Dumb heuristics to guesstimate number of on-screen lines the string
- # takes. Very basic, but good enough for docstrings in reasonable
- # terminals. If someone later feels like refining it, it's not hard.
- numlines = max(num_newlines,int(len_str/80)+1)
-
- screen_lines_def = get_terminal_size()[1]
-
- # auto-determine screen size
- if screen_lines <= 0:
- try:
- screen_lines += _detect_screen_size(screen_lines_def)
- except (TypeError, UnsupportedOperation):
+
+def _detect_screen_size(screen_lines_def):
+ """Attempt to work out the number of lines on the screen.
+
+ This is called by page(). It can raise an error (e.g. when run in the
+ test suite), so it's separated out so it can easily be called in a try block.
+ """
+ TERM = os.environ.get('TERM',None)
+ if not((TERM=='xterm' or TERM=='xterm-color') and sys.platform != 'sunos5'):
+ # curses causes problems on many terminals other than xterm, and
+ # some termios calls lock up on Sun OS5.
+ return screen_lines_def
+
+ try:
+ import termios
+ import curses
+ except ImportError:
+ return screen_lines_def
+
+ # There is a bug in curses, where *sometimes* it fails to properly
+ # initialize, and then after the endwin() call is made, the
+ # terminal is left in an unusable state. Rather than trying to
+ # check every time for this (by requesting and comparing termios
+ # flags each time), we just save the initial terminal state and
+ # unconditionally reset it every time. It's cheaper than making
+ # the checks.
+ try:
+ term_flags = termios.tcgetattr(sys.stdout)
+ except termios.error as err:
+ # can fail on Linux 2.6, pager_page will catch the TypeError
+ raise TypeError('termios error: {0}'.format(err))
+
+ # Curses modifies the stdout buffer size by default, which messes
+ # up Python's normal stdout buffering. This would manifest itself
+ # to IPython users as delayed printing on stdout after having used
+ # the pager.
+ #
+ # We can prevent this by manually setting the NCURSES_NO_SETBUF
+ # environment variable. For more details, see:
+ # http://bugs.python.org/issue10144
+ NCURSES_NO_SETBUF = os.environ.get('NCURSES_NO_SETBUF', None)
+ os.environ['NCURSES_NO_SETBUF'] = ''
+
+ # Proceed with curses initialization
+ try:
+ scr = curses.initscr()
+ except AttributeError:
+ # Curses on Solaris may not be complete, so we can't use it there
+ return screen_lines_def
+
+ screen_lines_real,screen_cols = scr.getmaxyx()
+ curses.endwin()
+
+ # Restore environment
+ if NCURSES_NO_SETBUF is None:
+ del os.environ['NCURSES_NO_SETBUF']
+ else:
+ os.environ['NCURSES_NO_SETBUF'] = NCURSES_NO_SETBUF
+
+ # Restore terminal state in case endwin() didn't.
+ termios.tcsetattr(sys.stdout,termios.TCSANOW,term_flags)
+ # Now we have what we needed: the screen size in rows/columns
+ return screen_lines_real
+ #print '***Screen size:',screen_lines_real,'lines x',\
+ #screen_cols,'columns.' # dbg
+
+def pager_page(strng, start=0, screen_lines=0, pager_cmd=None):
+ """Display a string, piping through a pager after a certain length.
+
+ strng can be a mime-bundle dict, supplying multiple representations,
+ keyed by mime-type.
+
+ The screen_lines parameter specifies the number of *usable* lines of your
+ terminal screen (total lines minus lines you need to reserve to show other
+ information).
+
+ If you set screen_lines to a number <=0, page() will try to auto-determine
+ your screen size and will only use up to (screen_size+screen_lines) for
+ printing, paging after that. That is, if you want auto-detection but need
+ to reserve the bottom 3 lines of the screen, use screen_lines = -3, and for
+ auto-detection without any lines reserved simply use screen_lines = 0.
+
+ If a string won't fit in the allowed lines, it is sent through the
+ specified pager command. If none given, look for PAGER in the environment,
+ and ultimately default to less.
+
+ If no system pager works, the string is sent through a 'dumb pager'
+ written in python, very simplistic.
+ """
+
+ # for compatibility with mime-bundle form:
+ if isinstance(strng, dict):
+ strng = strng['text/plain']
+
+ # Ugly kludge, but calling curses.initscr() flat out crashes in emacs
+ TERM = os.environ.get('TERM','dumb')
+ if TERM in ['dumb','emacs'] and os.name != 'nt':
+ print(strng)
+ return
+ # chop off the topmost part of the string we don't want to see
+ str_lines = strng.splitlines()[start:]
+ str_toprint = os.linesep.join(str_lines)
+ num_newlines = len(str_lines)
+ len_str = len(str_toprint)
+
+ # Dumb heuristics to guesstimate number of on-screen lines the string
+ # takes. Very basic, but good enough for docstrings in reasonable
+ # terminals. If someone later feels like refining it, it's not hard.
+ numlines = max(num_newlines,int(len_str/80)+1)
+
+ screen_lines_def = get_terminal_size()[1]
+
+ # auto-determine screen size
+ if screen_lines <= 0:
+ try:
+ screen_lines += _detect_screen_size(screen_lines_def)
+ except (TypeError, UnsupportedOperation):
print(str_toprint)
- return
-
- #print 'numlines',numlines,'screenlines',screen_lines # dbg
- if numlines <= screen_lines :
- #print '*** normal print' # dbg
+ return
+
+ #print 'numlines',numlines,'screenlines',screen_lines # dbg
+ if numlines <= screen_lines :
+ #print '*** normal print' # dbg
print(str_toprint)
- else:
- # Try to open pager and default to internal one if that fails.
- # All failure modes are tagged as 'retval=1', to match the return
- # value of a failed system command. If any intermediate attempt
- # sets retval to 1, at the end we resort to our own page_dumb() pager.
- pager_cmd = get_pager_cmd(pager_cmd)
- pager_cmd += ' ' + get_pager_start(pager_cmd,start)
- if os.name == 'nt':
- if pager_cmd.startswith('type'):
- # The default WinXP 'type' command is failing on complex strings.
- retval = 1
- else:
- fd, tmpname = tempfile.mkstemp('.txt')
- try:
- os.close(fd)
- with open(tmpname, 'wt') as tmpfile:
- tmpfile.write(strng)
- cmd = "%s < %s" % (pager_cmd, tmpname)
- # tmpfile needs to be closed for windows
- if os.system(cmd):
- retval = 1
- else:
- retval = None
- finally:
- os.remove(tmpname)
- else:
- try:
- retval = None
- # if I use popen4, things hang. No idea why.
- #pager,shell_out = os.popen4(pager_cmd)
- pager = os.popen(pager_cmd, 'w')
- try:
- pager_encoding = pager.encoding or sys.stdout.encoding
- pager.write(py3compat.cast_bytes_py2(
- strng, encoding=pager_encoding))
- finally:
- retval = pager.close()
- except IOError as msg: # broken pipe when user quits
- if msg.args == (32, 'Broken pipe'):
- retval = None
- else:
- retval = 1
- except OSError:
- # Other strange problems, sometimes seen in Win2k/cygwin
- retval = 1
- if retval is not None:
- page_dumb(strng,screen_lines=screen_lines)
-
-
-def page(data, start=0, screen_lines=0, pager_cmd=None):
- """Display content in a pager, piping through a pager after a certain length.
-
- data can be a mime-bundle dict, supplying multiple representations,
- keyed by mime-type, or text.
-
- Pager is dispatched via the `show_in_pager` IPython hook.
- If no hook is registered, `pager_page` will be used.
- """
- # Some routines may auto-compute start offsets incorrectly and pass a
- # negative value. Offset to 0 for robustness.
- start = max(0, start)
-
- # first, try the hook
- ip = get_ipython()
- if ip:
- try:
- ip.hooks.show_in_pager(data, start=start, screen_lines=screen_lines)
- return
- except TryNext:
- pass
-
- # fallback on default pager
- return pager_page(data, start, screen_lines, pager_cmd)
-
-
-def page_file(fname, start=0, pager_cmd=None):
- """Page a file, using an optional pager command and starting line.
- """
-
- pager_cmd = get_pager_cmd(pager_cmd)
- pager_cmd += ' ' + get_pager_start(pager_cmd,start)
-
- try:
- if os.environ['TERM'] in ['emacs','dumb']:
- raise EnvironmentError
- system(pager_cmd + ' ' + fname)
- except:
- try:
- if start > 0:
- start -= 1
- page(open(fname).read(),start)
- except:
- print('Unable to show file',repr(fname))
-
-
-def get_pager_cmd(pager_cmd=None):
- """Return a pager command.
-
- Makes some attempts at finding an OS-correct one.
- """
- if os.name == 'posix':
+ else:
+ # Try to open pager and default to internal one if that fails.
+ # All failure modes are tagged as 'retval=1', to match the return
+ # value of a failed system command. If any intermediate attempt
+ # sets retval to 1, at the end we resort to our own page_dumb() pager.
+ pager_cmd = get_pager_cmd(pager_cmd)
+ pager_cmd += ' ' + get_pager_start(pager_cmd,start)
+ if os.name == 'nt':
+ if pager_cmd.startswith('type'):
+ # The default WinXP 'type' command is failing on complex strings.
+ retval = 1
+ else:
+ fd, tmpname = tempfile.mkstemp('.txt')
+ try:
+ os.close(fd)
+ with open(tmpname, 'wt') as tmpfile:
+ tmpfile.write(strng)
+ cmd = "%s < %s" % (pager_cmd, tmpname)
+ # tmpfile needs to be closed for windows
+ if os.system(cmd):
+ retval = 1
+ else:
+ retval = None
+ finally:
+ os.remove(tmpname)
+ else:
+ try:
+ retval = None
+ # if I use popen4, things hang. No idea why.
+ #pager,shell_out = os.popen4(pager_cmd)
+ pager = os.popen(pager_cmd, 'w')
+ try:
+ pager_encoding = pager.encoding or sys.stdout.encoding
+ pager.write(py3compat.cast_bytes_py2(
+ strng, encoding=pager_encoding))
+ finally:
+ retval = pager.close()
+ except IOError as msg: # broken pipe when user quits
+ if msg.args == (32, 'Broken pipe'):
+ retval = None
+ else:
+ retval = 1
+ except OSError:
+ # Other strange problems, sometimes seen in Win2k/cygwin
+ retval = 1
+ if retval is not None:
+ page_dumb(strng,screen_lines=screen_lines)
+
+
+def page(data, start=0, screen_lines=0, pager_cmd=None):
+ """Display content in a pager, piping through a pager after a certain length.
+
+ data can be a mime-bundle dict, supplying multiple representations,
+ keyed by mime-type, or text.
+
+ Pager is dispatched via the `show_in_pager` IPython hook.
+ If no hook is registered, `pager_page` will be used.
+ """
+ # Some routines may auto-compute start offsets incorrectly and pass a
+ # negative value. Offset to 0 for robustness.
+ start = max(0, start)
+
+ # first, try the hook
+ ip = get_ipython()
+ if ip:
+ try:
+ ip.hooks.show_in_pager(data, start=start, screen_lines=screen_lines)
+ return
+ except TryNext:
+ pass
+
+ # fallback on default pager
+ return pager_page(data, start, screen_lines, pager_cmd)
+
+
+def page_file(fname, start=0, pager_cmd=None):
+ """Page a file, using an optional pager command and starting line.
+ """
+
+ pager_cmd = get_pager_cmd(pager_cmd)
+ pager_cmd += ' ' + get_pager_start(pager_cmd,start)
+
+ try:
+ if os.environ['TERM'] in ['emacs','dumb']:
+ raise EnvironmentError
+ system(pager_cmd + ' ' + fname)
+ except:
+ try:
+ if start > 0:
+ start -= 1
+ page(open(fname).read(),start)
+ except:
+ print('Unable to show file',repr(fname))
+
+
+def get_pager_cmd(pager_cmd=None):
+ """Return a pager command.
+
+ Makes some attempts at finding an OS-correct one.
+ """
+ if os.name == 'posix':
default_pager_cmd = 'less -R' # -R for color control sequences
- elif os.name in ['nt','dos']:
- default_pager_cmd = 'type'
-
- if pager_cmd is None:
- try:
- pager_cmd = os.environ['PAGER']
- except:
- pager_cmd = default_pager_cmd
+ elif os.name in ['nt','dos']:
+ default_pager_cmd = 'type'
+
+ if pager_cmd is None:
+ try:
+ pager_cmd = os.environ['PAGER']
+ except:
+ pager_cmd = default_pager_cmd
if pager_cmd == 'less' and '-r' not in os.environ.get('LESS', '').lower():
pager_cmd += ' -R'
- return pager_cmd
-
-
-def get_pager_start(pager, start):
- """Return the string for paging files with an offset.
-
- This is the '+N' argument which less and more (under Unix) accept.
- """
-
- if pager in ['less','more']:
- if start:
- start_string = '+' + str(start)
- else:
- start_string = ''
- else:
- start_string = ''
- return start_string
-
-
-# (X)emacs on win32 doesn't like to be bypassed with msvcrt.getch()
-if os.name == 'nt' and os.environ.get('TERM','dumb') != 'emacs':
- import msvcrt
- def page_more():
- """ Smart pausing between pages
-
- @return: True if need print more lines, False if quit
- """
+ return pager_cmd
+
+
+def get_pager_start(pager, start):
+ """Return the string for paging files with an offset.
+
+ This is the '+N' argument which less and more (under Unix) accept.
+ """
+
+ if pager in ['less','more']:
+ if start:
+ start_string = '+' + str(start)
+ else:
+ start_string = ''
+ else:
+ start_string = ''
+ return start_string
+
+
+# (X)emacs on win32 doesn't like to be bypassed with msvcrt.getch()
+if os.name == 'nt' and os.environ.get('TERM','dumb') != 'emacs':
+ import msvcrt
+ def page_more():
+ """ Smart pausing between pages
+
+ @return: True if more lines need to be printed, False if the user quit
+ """
sys.stdout.write('---Return to continue, q to quit--- ')
- ans = msvcrt.getwch()
- if ans in ("q", "Q"):
- result = False
- else:
- result = True
+ ans = msvcrt.getwch()
+ if ans in ("q", "Q"):
+ result = False
+ else:
+ result = True
sys.stdout.write("\b"*37 + " "*37 + "\b"*37)
- return result
-else:
- def page_more():
- ans = py3compat.input('---Return to continue, q to quit--- ')
- if ans.lower().startswith('q'):
- return False
- else:
- return True
-
-
-def snip_print(str,width = 75,print_full = 0,header = ''):
- """Print a string snipping the midsection to fit in width.
-
- print_full: mode control:
-
- - 0: only snip long strings
- - 1: send to page() directly.
- - 2: snip long strings and ask for full length viewing with page()
-
- Return 1 if snipping was necessary, 0 otherwise."""
-
- if print_full == 1:
- page(header+str)
- return 0
-
- print(header, end=' ')
- if len(str) < width:
- print(str)
- snip = 0
- else:
- whalf = int((width -5)/2)
- print(str[:whalf] + ' <...> ' + str[-whalf:])
- snip = 1
- if snip and print_full == 2:
- if py3compat.input(header+' Snipped. View (y/n)? [N]').lower() == 'y':
- page(str)
- return snip
+ return result
+else:
+ def page_more():
+ ans = py3compat.input('---Return to continue, q to quit--- ')
+ if ans.lower().startswith('q'):
+ return False
+ else:
+ return True
+
+
+def snip_print(str,width = 75,print_full = 0,header = ''):
+ """Print a string snipping the midsection to fit in width.
+
+ print_full: mode control:
+
+ - 0: only snip long strings
+ - 1: send to page() directly.
+ - 2: snip long strings and ask for full length viewing with page()
+
+ Return 1 if snipping was necessary, 0 otherwise."""
+
+ if print_full == 1:
+ page(header+str)
+ return 0
+
+ print(header, end=' ')
+ if len(str) < width:
+ print(str)
+ snip = 0
+ else:
+ whalf = int((width -5)/2)
+ print(str[:whalf] + ' <...> ' + str[-whalf:])
+ snip = 1
+ if snip and print_full == 2:
+ if py3compat.input(header+' Snipped. View (y/n)? [N]').lower() == 'y':
+ page(str)
+ return snip
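
A minimal, hypothetical sketch of using the `show_in_pager` hook that page() above dispatches through (the `plain_pager` name and body are invented; this assumes the usual IPython convention that a registered hook receives the shell as its first argument and may raise TryNext to defer to the next handler):

    # Hypothetical usage sketch -- not part of the patch above.
    from IPython.core.error import TryNext
    from IPython.core.getipython import get_ipython

    def plain_pager(self, data, start=0, screen_lines=0):
        """Print text inline instead of launching an external pager."""
        text = data.get('text/plain', '') if isinstance(data, dict) else data
        if not text:
            raise TryNext  # defer to the default pager_page() fallback
        print(text)

    ip = get_ipython()
    if ip is not None:  # only meaningful inside a running IPython session
        ip.set_hook('show_in_pager', plain_pager)
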
diff --git a/contrib/python/ipython/py2/IPython/core/payload.py b/contrib/python/ipython/py2/IPython/core/payload.py
index caa9268fe2..6818be1537 100644
--- a/contrib/python/ipython/py2/IPython/core/payload.py
+++ b/contrib/python/ipython/py2/IPython/core/payload.py
@@ -1,55 +1,55 @@
-# -*- coding: utf-8 -*-
-"""Payload system for IPython.
-
-Authors:
-
-* Fernando Perez
-* Brian Granger
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-from traitlets.config.configurable import Configurable
-from traitlets import List
-
-#-----------------------------------------------------------------------------
-# Main payload class
-#-----------------------------------------------------------------------------
-
-class PayloadManager(Configurable):
-
- _payload = List([])
-
- def write_payload(self, data, single=True):
- """Include or update the specified `data` payload in the PayloadManager.
-
- If a previous payload with the same source exists and `single` is True,
- it will be overwritten with the new one.
- """
-
- if not isinstance(data, dict):
- raise TypeError('Each payload write must be a dict, got: %r' % data)
-
- if single and 'source' in data:
- source = data['source']
- for i, pl in enumerate(self._payload):
- if 'source' in pl and pl['source'] == source:
- self._payload[i] = data
- return
-
- self._payload.append(data)
-
- def read_payload(self):
- return self._payload
-
- def clear_payload(self):
- self._payload = []
+# -*- coding: utf-8 -*-
+"""Payload system for IPython.
+
+Authors:
+
+* Fernando Perez
+* Brian Granger
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+from traitlets.config.configurable import Configurable
+from traitlets import List
+
+#-----------------------------------------------------------------------------
+# Main payload class
+#-----------------------------------------------------------------------------
+
+class PayloadManager(Configurable):
+
+ _payload = List([])
+
+ def write_payload(self, data, single=True):
+ """Include or update the specified `data` payload in the PayloadManager.
+
+ If a previous payload with the same source exists and `single` is True,
+ it will be overwritten with the new one.
+ """
+
+ if not isinstance(data, dict):
+ raise TypeError('Each payload write must be a dict, got: %r' % data)
+
+ if single and 'source' in data:
+ source = data['source']
+ for i, pl in enumerate(self._payload):
+ if 'source' in pl and pl['source'] == source:
+ self._payload[i] = data
+ return
+
+ self._payload.append(data)
+
+ def read_payload(self):
+ return self._payload
+
+ def clear_payload(self):
+ self._payload = []
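
A short, assumed usage sketch of the PayloadManager API above, showing how `single=True` (the default) replaces an existing payload with the same 'source' while `single=False` appends:

    # Standalone illustration; the payload contents are made up.
    from IPython.core.payload import PayloadManager

    pm = PayloadManager()
    pm.write_payload({'source': 'page', 'data': {'text/plain': 'first'}})
    # Same 'source' with the default single=True replaces the earlier entry.
    pm.write_payload({'source': 'page', 'data': {'text/plain': 'second'}})
    # single=False always appends, even for a duplicate 'source'.
    pm.write_payload({'source': 'page', 'data': {'text/plain': 'third'}}, single=False)

    print(len(pm.read_payload()))  # 2
    pm.clear_payload()
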
diff --git a/contrib/python/ipython/py2/IPython/core/payloadpage.py b/contrib/python/ipython/py2/IPython/core/payloadpage.py
index 43ac441631..eb613445dd 100644
--- a/contrib/python/ipython/py2/IPython/core/payloadpage.py
+++ b/contrib/python/ipython/py2/IPython/core/payloadpage.py
@@ -1,52 +1,52 @@
-# encoding: utf-8
-"""A payload based version of page."""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import warnings
-from IPython.core.getipython import get_ipython
-
-
-def page(strng, start=0, screen_lines=0, pager_cmd=None):
- """Print a string, piping through a pager.
-
- This version ignores the screen_lines and pager_cmd arguments and uses
- IPython's payload system instead.
-
- Parameters
- ----------
- strng : str or mime-dict
- Text to page, or a mime-type keyed dict of already formatted data.
-
- start : int
- Starting line at which to place the display.
- """
-
- # Some routines may auto-compute start offsets incorrectly and pass a
- # negative value. Offset to 0 for robustness.
- start = max(0, start)
- shell = get_ipython()
-
- if isinstance(strng, dict):
- data = strng
- else:
- data = {'text/plain' : strng}
- payload = dict(
- source='page',
- data=data,
- start=start,
- )
- shell.payload_manager.write_payload(payload)
-
-
-def install_payload_page():
- """DEPRECATED, use show_in_pager hook
-
- Install this version of page as IPython.core.page.page.
- """
- warnings.warn("""install_payload_page is deprecated.
- Use `ip.set_hook('show_in_pager, page.as_hook(payloadpage.page))`
- """)
- from IPython.core import page as corepage
- corepage.page = page
+# encoding: utf-8
+"""A payload based version of page."""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import warnings
+from IPython.core.getipython import get_ipython
+
+
+def page(strng, start=0, screen_lines=0, pager_cmd=None):
+ """Print a string, piping through a pager.
+
+ This version ignores the screen_lines and pager_cmd arguments and uses
+ IPython's payload system instead.
+
+ Parameters
+ ----------
+ strng : str or mime-dict
+ Text to page, or a mime-type keyed dict of already formatted data.
+
+ start : int
+ Starting line at which to place the display.
+ """
+
+ # Some routines may auto-compute start offsets incorrectly and pass a
+ # negative value. Offset to 0 for robustness.
+ start = max(0, start)
+ shell = get_ipython()
+
+ if isinstance(strng, dict):
+ data = strng
+ else:
+ data = {'text/plain' : strng}
+ payload = dict(
+ source='page',
+ data=data,
+ start=start,
+ )
+ shell.payload_manager.write_payload(payload)
+
+
+def install_payload_page():
+ """DEPRECATED, use show_in_pager hook
+
+ Install this version of page as IPython.core.page.page.
+ """
+ warnings.warn("""install_payload_page is deprecated.
+ Use `ip.set_hook('show_in_pager', page.as_hook(payloadpage.page))`
+ """)
+ from IPython.core import page as corepage
+ corepage.page = page
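
As the deprecation warning in install_payload_page() suggests, the payload-based pager is meant to be wired in through the `show_in_pager` hook rather than by monkey-patching IPython.core.page. A hypothetical sketch, assuming `IPython.core.page.as_hook` exists to adapt a plain paging function to the hook signature:

    # Assumed wiring, mirroring the deprecation message above.
    from IPython.core import page, payloadpage
    from IPython.core.getipython import get_ipython

    ip = get_ipython()
    if ip is not None:
        # as_hook() is assumed to drop the leading shell argument before
        # delegating to payloadpage.page(strng, start, screen_lines, pager_cmd).
        ip.set_hook('show_in_pager', page.as_hook(payloadpage.page))
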
diff --git a/contrib/python/ipython/py2/IPython/core/prefilter.py b/contrib/python/ipython/py2/IPython/core/prefilter.py
index 953b6d2d43..cbed3fd80a 100644
--- a/contrib/python/ipython/py2/IPython/core/prefilter.py
+++ b/contrib/python/ipython/py2/IPython/core/prefilter.py
@@ -1,700 +1,700 @@
-# encoding: utf-8
-"""
-Prefiltering components.
-
-Prefilters transform user input before it is exec'd by Python. These
-transforms are used to implement additional syntax such as !ls and %magic.
-"""
-
+# encoding: utf-8
+"""
+Prefiltering components.
+
+Prefilters transform user input before it is exec'd by Python. These
+transforms are used to implement additional syntax such as !ls and %magic.
+"""
+
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
-
-from keyword import iskeyword
-import re
-
-from IPython.core.autocall import IPyAutocall
-from traitlets.config.configurable import Configurable
-from IPython.core.inputsplitter import (
- ESC_MAGIC,
- ESC_QUOTE,
- ESC_QUOTE2,
- ESC_PAREN,
-)
-from IPython.core.macro import Macro
-from IPython.core.splitinput import LineInfo
-
-from traitlets import (
+
+from keyword import iskeyword
+import re
+
+from IPython.core.autocall import IPyAutocall
+from traitlets.config.configurable import Configurable
+from IPython.core.inputsplitter import (
+ ESC_MAGIC,
+ ESC_QUOTE,
+ ESC_QUOTE2,
+ ESC_PAREN,
+)
+from IPython.core.macro import Macro
+from IPython.core.splitinput import LineInfo
+
+from traitlets import (
List, Integer, Unicode, Bool, Instance, CRegExp
-)
-
-#-----------------------------------------------------------------------------
-# Global utilities, errors and constants
-#-----------------------------------------------------------------------------
-
-
-class PrefilterError(Exception):
- pass
-
-
-# RegExp to identify potential function names
-re_fun_name = re.compile(r'[a-zA-Z_]([a-zA-Z0-9_.]*) *$')
-
-# RegExp to exclude strings with this start from autocalling. In
-# particular, all binary operators should be excluded, so that if foo is
-# callable, foo OP bar doesn't become foo(OP bar), which is invalid. The
-# characters '!=()' don't need to be checked for, as the checkPythonChars
-# routine explicitely does so, to catch direct calls and rebindings of
-# existing names.
-
-# Warning: the '-' HAS TO BE AT THE END of the first group, otherwise
-# it affects the rest of the group in square brackets.
-re_exclude_auto = re.compile(r'^[,&^\|\*/\+-]'
- r'|^is |^not |^in |^and |^or ')
-
-# try to catch also methods for stuff in lists/tuples/dicts: off
-# (experimental). For this to work, the line_split regexp would need
-# to be modified so it wouldn't break things at '['. That line is
-# nasty enough that I shouldn't change it until I can test it _well_.
-#self.re_fun_name = re.compile (r'[a-zA-Z_]([a-zA-Z0-9_.\[\]]*) ?$')
-
-
-# Handler Check Utilities
-def is_shadowed(identifier, ip):
- """Is the given identifier defined in one of the namespaces which shadow
- the alias and magic namespaces? Note that an identifier is different
- than ifun, because it can not contain a '.' character."""
- # This is much safer than calling ofind, which can change state
- return (identifier in ip.user_ns \
- or identifier in ip.user_global_ns \
- or identifier in ip.ns_table['builtin']\
- or iskeyword(identifier))
-
-
-#-----------------------------------------------------------------------------
-# Main Prefilter manager
-#-----------------------------------------------------------------------------
-
-
-class PrefilterManager(Configurable):
- """Main prefilter component.
-
- The IPython prefilter is run on all user input before it is run. The
- prefilter consumes lines of input and produces transformed lines of
- input.
-
- The iplementation consists of two phases:
-
- 1. Transformers
- 2. Checkers and handlers
-
- Over time, we plan on deprecating the checkers and handlers and doing
- everything in the transformers.
-
- The transformers are instances of :class:`PrefilterTransformer` and have
- a single method :meth:`transform` that takes a line and returns a
- transformed line. The transformation can be accomplished using any
- tool, but our current ones use regular expressions for speed.
-
- After all the transformers have been run, the line is fed to the checkers,
- which are instances of :class:`PrefilterChecker`. The line is passed to
- the :meth:`check` method, which either returns `None` or a
- :class:`PrefilterHandler` instance. If `None` is returned, the other
- checkers are tried. If an :class:`PrefilterHandler` instance is returned,
- the line is passed to the :meth:`handle` method of the returned
- handler and no further checkers are tried.
-
- Both transformers and checkers have a `priority` attribute, that determines
- the order in which they are called. Smaller priorities are tried first.
-
- Both transformers and checkers also have `enabled` attribute, which is
- a boolean that determines if the instance is used.
-
- Users or developers can change the priority or enabled attribute of
- transformers or checkers, but they must call the :meth:`sort_checkers`
- or :meth:`sort_transformers` method after changing the priority.
- """
-
+)
+
+#-----------------------------------------------------------------------------
+# Global utilities, errors and constants
+#-----------------------------------------------------------------------------
+
+
+class PrefilterError(Exception):
+ pass
+
+
+# RegExp to identify potential function names
+re_fun_name = re.compile(r'[a-zA-Z_]([a-zA-Z0-9_.]*) *$')
+
+# RegExp to exclude strings with this start from autocalling. In
+# particular, all binary operators should be excluded, so that if foo is
+# callable, foo OP bar doesn't become foo(OP bar), which is invalid. The
+# characters '!=()' don't need to be checked for, as the checkPythonChars
+# routine explicitly does so, to catch direct calls and rebindings of
+# existing names.
+
+# Warning: the '-' HAS TO BE AT THE END of the first group, otherwise
+# it affects the rest of the group in square brackets.
+re_exclude_auto = re.compile(r'^[,&^\|\*/\+-]'
+ r'|^is |^not |^in |^and |^or ')
+
+# try to catch also methods for stuff in lists/tuples/dicts: off
+# (experimental). For this to work, the line_split regexp would need
+# to be modified so it wouldn't break things at '['. That line is
+# nasty enough that I shouldn't change it until I can test it _well_.
+#self.re_fun_name = re.compile (r'[a-zA-Z_]([a-zA-Z0-9_.\[\]]*) ?$')
+
+
+# Handler Check Utilities
+def is_shadowed(identifier, ip):
+ """Is the given identifier defined in one of the namespaces which shadow
+ the alias and magic namespaces? Note that an identifier is different
+ from ifun, because it cannot contain a '.' character."""
+ # This is much safer than calling ofind, which can change state
+ return (identifier in ip.user_ns \
+ or identifier in ip.user_global_ns \
+ or identifier in ip.ns_table['builtin']\
+ or iskeyword(identifier))
+
+
+#-----------------------------------------------------------------------------
+# Main Prefilter manager
+#-----------------------------------------------------------------------------
+
+
+class PrefilterManager(Configurable):
+ """Main prefilter component.
+
+ The IPython prefilter is run on all user input before it is executed. The
+ prefilter consumes lines of input and produces transformed lines of
+ input.
+
+ The implementation consists of two phases:
+
+ 1. Transformers
+ 2. Checkers and handlers
+
+ Over time, we plan on deprecating the checkers and handlers and doing
+ everything in the transformers.
+
+ The transformers are instances of :class:`PrefilterTransformer` and have
+ a single method :meth:`transform` that takes a line and returns a
+ transformed line. The transformation can be accomplished using any
+ tool, but our current ones use regular expressions for speed.
+
+ After all the transformers have been run, the line is fed to the checkers,
+ which are instances of :class:`PrefilterChecker`. The line is passed to
+ the :meth:`check` method, which either returns `None` or a
+ :class:`PrefilterHandler` instance. If `None` is returned, the other
+ checkers are tried. If a :class:`PrefilterHandler` instance is returned,
+ the line is passed to the :meth:`handle` method of the returned
+ handler and no further checkers are tried.
+
+ Both transformers and checkers have a `priority` attribute that determines
+ the order in which they are called. Smaller priorities are tried first.
+
+ Both transformers and checkers also have an `enabled` attribute, which is
+ a boolean that determines if the instance is used.
+
+ Users or developers can change the priority or enabled attribute of
+ transformers or checkers, but they must call the :meth:`sort_checkers`
+ or :meth:`sort_transformers` method after changing the priority.
+ """
+
multi_line_specials = Bool(True).tag(config=True)
- shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
-
- def __init__(self, shell=None, **kwargs):
- super(PrefilterManager, self).__init__(shell=shell, **kwargs)
- self.shell = shell
- self.init_transformers()
- self.init_handlers()
- self.init_checkers()
-
- #-------------------------------------------------------------------------
- # API for managing transformers
- #-------------------------------------------------------------------------
-
- def init_transformers(self):
- """Create the default transformers."""
- self._transformers = []
- for transformer_cls in _default_transformers:
- transformer_cls(
- shell=self.shell, prefilter_manager=self, parent=self
- )
-
- def sort_transformers(self):
- """Sort the transformers by priority.
-
- This must be called after the priority of a transformer is changed.
- The :meth:`register_transformer` method calls this automatically.
- """
- self._transformers.sort(key=lambda x: x.priority)
-
- @property
- def transformers(self):
- """Return a list of checkers, sorted by priority."""
- return self._transformers
-
- def register_transformer(self, transformer):
- """Register a transformer instance."""
- if transformer not in self._transformers:
- self._transformers.append(transformer)
- self.sort_transformers()
-
- def unregister_transformer(self, transformer):
- """Unregister a transformer instance."""
- if transformer in self._transformers:
- self._transformers.remove(transformer)
-
- #-------------------------------------------------------------------------
- # API for managing checkers
- #-------------------------------------------------------------------------
-
- def init_checkers(self):
- """Create the default checkers."""
- self._checkers = []
- for checker in _default_checkers:
- checker(
- shell=self.shell, prefilter_manager=self, parent=self
- )
-
- def sort_checkers(self):
- """Sort the checkers by priority.
-
- This must be called after the priority of a checker is changed.
- The :meth:`register_checker` method calls this automatically.
- """
- self._checkers.sort(key=lambda x: x.priority)
-
- @property
- def checkers(self):
- """Return a list of checkers, sorted by priority."""
- return self._checkers
-
- def register_checker(self, checker):
- """Register a checker instance."""
- if checker not in self._checkers:
- self._checkers.append(checker)
- self.sort_checkers()
-
- def unregister_checker(self, checker):
- """Unregister a checker instance."""
- if checker in self._checkers:
- self._checkers.remove(checker)
-
- #-------------------------------------------------------------------------
- # API for managing handlers
- #-------------------------------------------------------------------------
-
- def init_handlers(self):
- """Create the default handlers."""
- self._handlers = {}
- self._esc_handlers = {}
- for handler in _default_handlers:
- handler(
- shell=self.shell, prefilter_manager=self, parent=self
- )
-
- @property
- def handlers(self):
- """Return a dict of all the handlers."""
- return self._handlers
-
- def register_handler(self, name, handler, esc_strings):
- """Register a handler instance by name with esc_strings."""
- self._handlers[name] = handler
- for esc_str in esc_strings:
- self._esc_handlers[esc_str] = handler
-
- def unregister_handler(self, name, handler, esc_strings):
- """Unregister a handler instance by name with esc_strings."""
- try:
- del self._handlers[name]
- except KeyError:
- pass
- for esc_str in esc_strings:
- h = self._esc_handlers.get(esc_str)
- if h is handler:
- del self._esc_handlers[esc_str]
-
- def get_handler_by_name(self, name):
- """Get a handler by its name."""
- return self._handlers.get(name)
-
- def get_handler_by_esc(self, esc_str):
- """Get a handler by its escape string."""
- return self._esc_handlers.get(esc_str)
-
- #-------------------------------------------------------------------------
- # Main prefiltering API
- #-------------------------------------------------------------------------
-
- def prefilter_line_info(self, line_info):
- """Prefilter a line that has been converted to a LineInfo object.
-
- This implements the checker/handler part of the prefilter pipe.
- """
- # print "prefilter_line_info: ", line_info
- handler = self.find_handler(line_info)
- return handler.handle(line_info)
-
- def find_handler(self, line_info):
- """Find a handler for the line_info by trying checkers."""
- for checker in self.checkers:
- if checker.enabled:
- handler = checker.check(line_info)
- if handler:
- return handler
- return self.get_handler_by_name('normal')
-
- def transform_line(self, line, continue_prompt):
- """Calls the enabled transformers in order of increasing priority."""
- for transformer in self.transformers:
- if transformer.enabled:
- line = transformer.transform(line, continue_prompt)
- return line
-
- def prefilter_line(self, line, continue_prompt=False):
- """Prefilter a single input line as text.
-
- This method prefilters a single line of text by calling the
- transformers and then the checkers/handlers.
- """
-
- # print "prefilter_line: ", line, continue_prompt
- # All handlers *must* return a value, even if it's blank ('').
-
- # save the line away in case we crash, so the post-mortem handler can
- # record it
- self.shell._last_input_line = line
-
- if not line:
- # Return immediately on purely empty lines, so that if the user
- # previously typed some whitespace that started a continuation
- # prompt, he can break out of that loop with just an empty line.
- # This is how the default python prompt works.
- return ''
-
- # At this point, we invoke our transformers.
- if not continue_prompt or (continue_prompt and self.multi_line_specials):
- line = self.transform_line(line, continue_prompt)
-
- # Now we compute line_info for the checkers and handlers
- line_info = LineInfo(line, continue_prompt)
-
- # the input history needs to track even empty lines
- stripped = line.strip()
-
- normal_handler = self.get_handler_by_name('normal')
- if not stripped:
- return normal_handler.handle(line_info)
-
- # special handlers are only allowed for single line statements
- if continue_prompt and not self.multi_line_specials:
- return normal_handler.handle(line_info)
-
- prefiltered = self.prefilter_line_info(line_info)
- # print "prefiltered line: %r" % prefiltered
- return prefiltered
-
- def prefilter_lines(self, lines, continue_prompt=False):
- """Prefilter multiple input lines of text.
-
- This is the main entry point for prefiltering multiple lines of
- input. This simply calls :meth:`prefilter_line` for each line of
- input.
-
- This covers cases where there are multiple lines in the user entry,
- which is the case when the user goes back to a multiline history
- entry and presses enter.
- """
- llines = lines.rstrip('\n').split('\n')
- # We can get multiple lines in one shot, where multiline input 'blends'
- # into one line, in cases like recalling from the readline history
- # buffer. We need to make sure that in such cases, we correctly
- # communicate downstream which line is first and which are continuation
- # ones.
- if len(llines) > 1:
- out = '\n'.join([self.prefilter_line(line, lnum>0)
- for lnum, line in enumerate(llines) ])
- else:
- out = self.prefilter_line(llines[0], continue_prompt)
-
- return out
-
-#-----------------------------------------------------------------------------
-# Prefilter transformers
-#-----------------------------------------------------------------------------
-
-
-class PrefilterTransformer(Configurable):
- """Transform a line of user input."""
-
+ shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
+
+ def __init__(self, shell=None, **kwargs):
+ super(PrefilterManager, self).__init__(shell=shell, **kwargs)
+ self.shell = shell
+ self.init_transformers()
+ self.init_handlers()
+ self.init_checkers()
+
+ #-------------------------------------------------------------------------
+ # API for managing transformers
+ #-------------------------------------------------------------------------
+
+ def init_transformers(self):
+ """Create the default transformers."""
+ self._transformers = []
+ for transformer_cls in _default_transformers:
+ transformer_cls(
+ shell=self.shell, prefilter_manager=self, parent=self
+ )
+
+ def sort_transformers(self):
+ """Sort the transformers by priority.
+
+ This must be called after the priority of a transformer is changed.
+ The :meth:`register_transformer` method calls this automatically.
+ """
+ self._transformers.sort(key=lambda x: x.priority)
+
+ @property
+ def transformers(self):
+ """Return a list of checkers, sorted by priority."""
+ return self._transformers
+
+ def register_transformer(self, transformer):
+ """Register a transformer instance."""
+ if transformer not in self._transformers:
+ self._transformers.append(transformer)
+ self.sort_transformers()
+
+ def unregister_transformer(self, transformer):
+ """Unregister a transformer instance."""
+ if transformer in self._transformers:
+ self._transformers.remove(transformer)
+
+ #-------------------------------------------------------------------------
+ # API for managing checkers
+ #-------------------------------------------------------------------------
+
+ def init_checkers(self):
+ """Create the default checkers."""
+ self._checkers = []
+ for checker in _default_checkers:
+ checker(
+ shell=self.shell, prefilter_manager=self, parent=self
+ )
+
+ def sort_checkers(self):
+ """Sort the checkers by priority.
+
+ This must be called after the priority of a checker is changed.
+ The :meth:`register_checker` method calls this automatically.
+ """
+ self._checkers.sort(key=lambda x: x.priority)
+
+ @property
+ def checkers(self):
+ """Return a list of checkers, sorted by priority."""
+ return self._checkers
+
+ def register_checker(self, checker):
+ """Register a checker instance."""
+ if checker not in self._checkers:
+ self._checkers.append(checker)
+ self.sort_checkers()
+
+ def unregister_checker(self, checker):
+ """Unregister a checker instance."""
+ if checker in self._checkers:
+ self._checkers.remove(checker)
+
+ #-------------------------------------------------------------------------
+ # API for managing handlers
+ #-------------------------------------------------------------------------
+
+ def init_handlers(self):
+ """Create the default handlers."""
+ self._handlers = {}
+ self._esc_handlers = {}
+ for handler in _default_handlers:
+ handler(
+ shell=self.shell, prefilter_manager=self, parent=self
+ )
+
+ @property
+ def handlers(self):
+ """Return a dict of all the handlers."""
+ return self._handlers
+
+ def register_handler(self, name, handler, esc_strings):
+ """Register a handler instance by name with esc_strings."""
+ self._handlers[name] = handler
+ for esc_str in esc_strings:
+ self._esc_handlers[esc_str] = handler
+
+ def unregister_handler(self, name, handler, esc_strings):
+ """Unregister a handler instance by name with esc_strings."""
+ try:
+ del self._handlers[name]
+ except KeyError:
+ pass
+ for esc_str in esc_strings:
+ h = self._esc_handlers.get(esc_str)
+ if h is handler:
+ del self._esc_handlers[esc_str]
+
+ def get_handler_by_name(self, name):
+ """Get a handler by its name."""
+ return self._handlers.get(name)
+
+ def get_handler_by_esc(self, esc_str):
+ """Get a handler by its escape string."""
+ return self._esc_handlers.get(esc_str)
+
+ #-------------------------------------------------------------------------
+ # Main prefiltering API
+ #-------------------------------------------------------------------------
+
+ def prefilter_line_info(self, line_info):
+ """Prefilter a line that has been converted to a LineInfo object.
+
+ This implements the checker/handler part of the prefilter pipe.
+ """
+ # print "prefilter_line_info: ", line_info
+ handler = self.find_handler(line_info)
+ return handler.handle(line_info)
+
+ def find_handler(self, line_info):
+ """Find a handler for the line_info by trying checkers."""
+ for checker in self.checkers:
+ if checker.enabled:
+ handler = checker.check(line_info)
+ if handler:
+ return handler
+ return self.get_handler_by_name('normal')
+
+ def transform_line(self, line, continue_prompt):
+ """Calls the enabled transformers in order of increasing priority."""
+ for transformer in self.transformers:
+ if transformer.enabled:
+ line = transformer.transform(line, continue_prompt)
+ return line
+
+ def prefilter_line(self, line, continue_prompt=False):
+ """Prefilter a single input line as text.
+
+ This method prefilters a single line of text by calling the
+ transformers and then the checkers/handlers.
+ """
+
+ # print "prefilter_line: ", line, continue_prompt
+ # All handlers *must* return a value, even if it's blank ('').
+
+ # save the line away in case we crash, so the post-mortem handler can
+ # record it
+ self.shell._last_input_line = line
+
+ if not line:
+ # Return immediately on purely empty lines, so that if the user
+ # previously typed some whitespace that started a continuation
+ # prompt, he can break out of that loop with just an empty line.
+ # This is how the default python prompt works.
+ return ''
+
+ # At this point, we invoke our transformers.
+ if not continue_prompt or (continue_prompt and self.multi_line_specials):
+ line = self.transform_line(line, continue_prompt)
+
+ # Now we compute line_info for the checkers and handlers
+ line_info = LineInfo(line, continue_prompt)
+
+ # the input history needs to track even empty lines
+ stripped = line.strip()
+
+ normal_handler = self.get_handler_by_name('normal')
+ if not stripped:
+ return normal_handler.handle(line_info)
+
+ # special handlers are only allowed for single line statements
+ if continue_prompt and not self.multi_line_specials:
+ return normal_handler.handle(line_info)
+
+ prefiltered = self.prefilter_line_info(line_info)
+ # print "prefiltered line: %r" % prefiltered
+ return prefiltered
+
+ def prefilter_lines(self, lines, continue_prompt=False):
+ """Prefilter multiple input lines of text.
+
+ This is the main entry point for prefiltering multiple lines of
+ input. This simply calls :meth:`prefilter_line` for each line of
+ input.
+
+ This covers cases where there are multiple lines in the user entry,
+ which is the case when the user goes back to a multiline history
+ entry and presses enter.
+ """
+ llines = lines.rstrip('\n').split('\n')
+ # We can get multiple lines in one shot, where multiline input 'blends'
+ # into one line, in cases like recalling from the readline history
+ # buffer. We need to make sure that in such cases, we correctly
+ # communicate downstream which line is first and which are continuation
+ # ones.
+ if len(llines) > 1:
+ out = '\n'.join([self.prefilter_line(line, lnum>0)
+ for lnum, line in enumerate(llines) ])
+ else:
+ out = self.prefilter_line(llines[0], continue_prompt)
+
+ return out
+
+#-----------------------------------------------------------------------------
+# Prefilter transformers
+#-----------------------------------------------------------------------------
+
+
+class PrefilterTransformer(Configurable):
+ """Transform a line of user input."""
+
priority = Integer(100).tag(config=True)
- # Transformers don't currently use shell or prefilter_manager, but as we
- # move away from checkers and handlers, they will need them.
- shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
- prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True)
+ # Transformers don't currently use shell or prefilter_manager, but as we
+ # move away from checkers and handlers, they will need them.
+ shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
+ prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True)
enabled = Bool(True).tag(config=True)
-
- def __init__(self, shell=None, prefilter_manager=None, **kwargs):
- super(PrefilterTransformer, self).__init__(
- shell=shell, prefilter_manager=prefilter_manager, **kwargs
- )
- self.prefilter_manager.register_transformer(self)
-
- def transform(self, line, continue_prompt):
- """Transform a line, returning the new one."""
- return None
-
- def __repr__(self):
- return "<%s(priority=%r, enabled=%r)>" % (
- self.__class__.__name__, self.priority, self.enabled)
-
-
-#-----------------------------------------------------------------------------
-# Prefilter checkers
-#-----------------------------------------------------------------------------
-
-
-class PrefilterChecker(Configurable):
- """Inspect an input line and return a handler for that line."""
-
+
+ def __init__(self, shell=None, prefilter_manager=None, **kwargs):
+ super(PrefilterTransformer, self).__init__(
+ shell=shell, prefilter_manager=prefilter_manager, **kwargs
+ )
+ self.prefilter_manager.register_transformer(self)
+
+ def transform(self, line, continue_prompt):
+ """Transform a line, returning the new one."""
+ return None
+
+ def __repr__(self):
+ return "<%s(priority=%r, enabled=%r)>" % (
+ self.__class__.__name__, self.priority, self.enabled)
+
+
+#-----------------------------------------------------------------------------
+# Prefilter checkers
+#-----------------------------------------------------------------------------
+
+
+class PrefilterChecker(Configurable):
+ """Inspect an input line and return a handler for that line."""
+
priority = Integer(100).tag(config=True)
- shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
- prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True)
+ shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
+ prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True)
enabled = Bool(True).tag(config=True)
-
- def __init__(self, shell=None, prefilter_manager=None, **kwargs):
- super(PrefilterChecker, self).__init__(
- shell=shell, prefilter_manager=prefilter_manager, **kwargs
- )
- self.prefilter_manager.register_checker(self)
-
- def check(self, line_info):
- """Inspect line_info and return a handler instance or None."""
- return None
-
- def __repr__(self):
- return "<%s(priority=%r, enabled=%r)>" % (
- self.__class__.__name__, self.priority, self.enabled)
-
-
-class EmacsChecker(PrefilterChecker):
-
+
+ def __init__(self, shell=None, prefilter_manager=None, **kwargs):
+ super(PrefilterChecker, self).__init__(
+ shell=shell, prefilter_manager=prefilter_manager, **kwargs
+ )
+ self.prefilter_manager.register_checker(self)
+
+ def check(self, line_info):
+ """Inspect line_info and return a handler instance or None."""
+ return None
+
+ def __repr__(self):
+ return "<%s(priority=%r, enabled=%r)>" % (
+ self.__class__.__name__, self.priority, self.enabled)
+
+
+class EmacsChecker(PrefilterChecker):
+
priority = Integer(100).tag(config=True)
enabled = Bool(False).tag(config=True)
-
- def check(self, line_info):
- "Emacs ipython-mode tags certain input lines."
- if line_info.line.endswith('# PYTHON-MODE'):
- return self.prefilter_manager.get_handler_by_name('emacs')
- else:
- return None
-
-
-class MacroChecker(PrefilterChecker):
-
+
+ def check(self, line_info):
+ "Emacs ipython-mode tags certain input lines."
+ if line_info.line.endswith('# PYTHON-MODE'):
+ return self.prefilter_manager.get_handler_by_name('emacs')
+ else:
+ return None
+
+
+class MacroChecker(PrefilterChecker):
+
priority = Integer(250).tag(config=True)
-
- def check(self, line_info):
- obj = self.shell.user_ns.get(line_info.ifun)
- if isinstance(obj, Macro):
- return self.prefilter_manager.get_handler_by_name('macro')
- else:
- return None
-
-
-class IPyAutocallChecker(PrefilterChecker):
-
+
+ def check(self, line_info):
+ obj = self.shell.user_ns.get(line_info.ifun)
+ if isinstance(obj, Macro):
+ return self.prefilter_manager.get_handler_by_name('macro')
+ else:
+ return None
+
+
+class IPyAutocallChecker(PrefilterChecker):
+
priority = Integer(300).tag(config=True)
-
- def check(self, line_info):
- "Instances of IPyAutocall in user_ns get autocalled immediately"
- obj = self.shell.user_ns.get(line_info.ifun, None)
- if isinstance(obj, IPyAutocall):
- obj.set_ip(self.shell)
- return self.prefilter_manager.get_handler_by_name('auto')
- else:
- return None
-
-
-class AssignmentChecker(PrefilterChecker):
-
+
+ def check(self, line_info):
+ "Instances of IPyAutocall in user_ns get autocalled immediately"
+ obj = self.shell.user_ns.get(line_info.ifun, None)
+ if isinstance(obj, IPyAutocall):
+ obj.set_ip(self.shell)
+ return self.prefilter_manager.get_handler_by_name('auto')
+ else:
+ return None
+
+
+class AssignmentChecker(PrefilterChecker):
+
priority = Integer(600).tag(config=True)
-
- def check(self, line_info):
- """Check to see if user is assigning to a var for the first time, in
- which case we want to avoid any sort of automagic / autocall games.
-
- This allows users to assign to either alias or magic names true python
- variables (the magic/alias systems always take second seat to true
- python code). E.g. ls='hi', or ls,that=1,2"""
- if line_info.the_rest:
- if line_info.the_rest[0] in '=,':
- return self.prefilter_manager.get_handler_by_name('normal')
- else:
- return None
-
-
-class AutoMagicChecker(PrefilterChecker):
-
+
+ def check(self, line_info):
+ """Check to see if user is assigning to a var for the first time, in
+ which case we want to avoid any sort of automagic / autocall games.
+
+ This allows users to bind true python variables to names that would
+ otherwise be aliases or magics (the magic/alias systems always take
+ second seat to true python code). E.g. ls='hi', or ls,that=1,2"""
+ if line_info.the_rest:
+ if line_info.the_rest[0] in '=,':
+ return self.prefilter_manager.get_handler_by_name('normal')
+ else:
+ return None
+
+
+class AutoMagicChecker(PrefilterChecker):
+
priority = Integer(700).tag(config=True)
-
- def check(self, line_info):
- """If the ifun is magic, and automagic is on, run it. Note: normal,
- non-auto magic would already have been triggered via '%' in
- check_esc_chars. This just checks for automagic. Also, before
- triggering the magic handler, make sure that there is nothing in the
- user namespace which could shadow it."""
- if not self.shell.automagic or not self.shell.find_magic(line_info.ifun):
- return None
-
- # We have a likely magic method. Make sure we should actually call it.
- if line_info.continue_prompt and not self.prefilter_manager.multi_line_specials:
- return None
-
- head = line_info.ifun.split('.',1)[0]
- if is_shadowed(head, self.shell):
- return None
-
- return self.prefilter_manager.get_handler_by_name('magic')
-
-
-class PythonOpsChecker(PrefilterChecker):
-
+
+ def check(self, line_info):
+ """If the ifun is magic, and automagic is on, run it. Note: normal,
+ non-auto magic would already have been triggered via '%' in
+ check_esc_chars. This just checks for automagic. Also, before
+ triggering the magic handler, make sure that there is nothing in the
+ user namespace which could shadow it."""
+ if not self.shell.automagic or not self.shell.find_magic(line_info.ifun):
+ return None
+
+ # We have a likely magic method. Make sure we should actually call it.
+ if line_info.continue_prompt and not self.prefilter_manager.multi_line_specials:
+ return None
+
+ head = line_info.ifun.split('.',1)[0]
+ if is_shadowed(head, self.shell):
+ return None
+
+ return self.prefilter_manager.get_handler_by_name('magic')
+
+
+class PythonOpsChecker(PrefilterChecker):
+
priority = Integer(900).tag(config=True)
-
- def check(self, line_info):
- """If the 'rest' of the line begins with a function call or pretty much
- any python operator, we should simply execute the line (regardless of
- whether or not there's a possible autocall expansion). This avoids
- spurious (and very confusing) geattr() accesses."""
- if line_info.the_rest and line_info.the_rest[0] in '!=()<>,+*/%^&|':
- return self.prefilter_manager.get_handler_by_name('normal')
- else:
- return None
-
-
-class AutocallChecker(PrefilterChecker):
-
+
+ def check(self, line_info):
+ """If the 'rest' of the line begins with a function call or pretty much
+ any python operator, we should simply execute the line (regardless of
+ whether or not there's a possible autocall expansion). This avoids
+ spurious (and very confusing) getattr() accesses."""
+ if line_info.the_rest and line_info.the_rest[0] in '!=()<>,+*/%^&|':
+ return self.prefilter_manager.get_handler_by_name('normal')
+ else:
+ return None
+
+
+class AutocallChecker(PrefilterChecker):
+
priority = Integer(1000).tag(config=True)
-
+
function_name_regexp = CRegExp(re_fun_name,
help="RegExp to identify potential function names."
).tag(config=True)
exclude_regexp = CRegExp(re_exclude_auto,
help="RegExp to exclude strings with this start from autocalling."
).tag(config=True)
-
- def check(self, line_info):
- "Check if the initial word/function is callable and autocall is on."
- if not self.shell.autocall:
- return None
-
- oinfo = line_info.ofind(self.shell) # This can mutate state via getattr
- if not oinfo['found']:
- return None
-
- if callable(oinfo['obj']) \
- and (not self.exclude_regexp.match(line_info.the_rest)) \
- and self.function_name_regexp.match(line_info.ifun):
- return self.prefilter_manager.get_handler_by_name('auto')
- else:
- return None
-
-
-#-----------------------------------------------------------------------------
-# Prefilter handlers
-#-----------------------------------------------------------------------------
-
-
-class PrefilterHandler(Configurable):
-
- handler_name = Unicode('normal')
- esc_strings = List([])
- shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
- prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True)
-
- def __init__(self, shell=None, prefilter_manager=None, **kwargs):
- super(PrefilterHandler, self).__init__(
- shell=shell, prefilter_manager=prefilter_manager, **kwargs
- )
- self.prefilter_manager.register_handler(
- self.handler_name,
- self,
- self.esc_strings
- )
-
- def handle(self, line_info):
- # print "normal: ", line_info
- """Handle normal input lines. Use as a template for handlers."""
-
- # With autoindent on, we need some way to exit the input loop, and I
- # don't want to force the user to have to backspace all the way to
- # clear the line. The rule will be in this case, that either two
- # lines of pure whitespace in a row, or a line of pure whitespace but
- # of a size different to the indent level, will exit the input loop.
- line = line_info.line
- continue_prompt = line_info.continue_prompt
-
- if (continue_prompt and
- self.shell.autoindent and
- line.isspace() and
- 0 < abs(len(line) - self.shell.indent_current_nsp) <= 2):
- line = ''
-
- return line
-
- def __str__(self):
- return "<%s(name=%s)>" % (self.__class__.__name__, self.handler_name)
-
-
-class MacroHandler(PrefilterHandler):
- handler_name = Unicode("macro")
-
- def handle(self, line_info):
- obj = self.shell.user_ns.get(line_info.ifun)
- pre_space = line_info.pre_whitespace
- line_sep = "\n" + pre_space
- return pre_space + line_sep.join(obj.value.splitlines())
-
-
-class MagicHandler(PrefilterHandler):
-
- handler_name = Unicode('magic')
- esc_strings = List([ESC_MAGIC])
-
- def handle(self, line_info):
- """Execute magic functions."""
- ifun = line_info.ifun
- the_rest = line_info.the_rest
- cmd = '%sget_ipython().magic(%r)' % (line_info.pre_whitespace,
- (ifun + " " + the_rest))
- return cmd
-
-
-class AutoHandler(PrefilterHandler):
-
- handler_name = Unicode('auto')
- esc_strings = List([ESC_PAREN, ESC_QUOTE, ESC_QUOTE2])
-
- def handle(self, line_info):
- """Handle lines which can be auto-executed, quoting if requested."""
- line = line_info.line
- ifun = line_info.ifun
- the_rest = line_info.the_rest
- esc = line_info.esc
- continue_prompt = line_info.continue_prompt
- obj = line_info.ofind(self.shell)['obj']
-
- # This should only be active for single-line input!
- if continue_prompt:
- return line
-
- force_auto = isinstance(obj, IPyAutocall)
-
- # User objects sometimes raise exceptions on attribute access other
- # than AttributeError (we've seen it in the past), so it's safest to be
- # ultra-conservative here and catch all.
- try:
- auto_rewrite = obj.rewrite
- except Exception:
- auto_rewrite = True
-
- if esc == ESC_QUOTE:
- # Auto-quote splitting on whitespace
- newcmd = '%s("%s")' % (ifun,'", "'.join(the_rest.split()) )
- elif esc == ESC_QUOTE2:
- # Auto-quote whole string
- newcmd = '%s("%s")' % (ifun,the_rest)
- elif esc == ESC_PAREN:
- newcmd = '%s(%s)' % (ifun,",".join(the_rest.split()))
- else:
- # Auto-paren.
- if force_auto:
- # Don't rewrite if it is already a call.
- do_rewrite = not the_rest.startswith('(')
- else:
- if not the_rest:
- # We only apply it to argument-less calls if the autocall
- # parameter is set to 2.
- do_rewrite = (self.shell.autocall >= 2)
- elif the_rest.startswith('[') and hasattr(obj, '__getitem__'):
- # Don't autocall in this case: item access for an object
- # which is BOTH callable and implements __getitem__.
- do_rewrite = False
- else:
- do_rewrite = True
-
- # Figure out the rewritten command
- if do_rewrite:
- if the_rest.endswith(';'):
- newcmd = '%s(%s);' % (ifun.rstrip(),the_rest[:-1])
- else:
- newcmd = '%s(%s)' % (ifun.rstrip(), the_rest)
- else:
- normal_handler = self.prefilter_manager.get_handler_by_name('normal')
- return normal_handler.handle(line_info)
-
- # Display the rewritten call
- if auto_rewrite:
- self.shell.auto_rewrite_input(newcmd)
-
- return newcmd
-
-
-class EmacsHandler(PrefilterHandler):
-
- handler_name = Unicode('emacs')
- esc_strings = List([])
-
- def handle(self, line_info):
- """Handle input lines marked by python-mode."""
-
- # Currently, nothing is done. Later more functionality can be added
- # here if needed.
-
- # The input cache shouldn't be updated
- return line_info.line
-
-
-#-----------------------------------------------------------------------------
-# Defaults
-#-----------------------------------------------------------------------------
-
-
-_default_transformers = [
-]
-
-_default_checkers = [
- EmacsChecker,
- MacroChecker,
- IPyAutocallChecker,
- AssignmentChecker,
- AutoMagicChecker,
- PythonOpsChecker,
- AutocallChecker
-]
-
-_default_handlers = [
- PrefilterHandler,
- MacroHandler,
- MagicHandler,
- AutoHandler,
- EmacsHandler
-]
+
+ def check(self, line_info):
+ "Check if the initial word/function is callable and autocall is on."
+ if not self.shell.autocall:
+ return None
+
+ oinfo = line_info.ofind(self.shell) # This can mutate state via getattr
+ if not oinfo['found']:
+ return None
+
+ if callable(oinfo['obj']) \
+ and (not self.exclude_regexp.match(line_info.the_rest)) \
+ and self.function_name_regexp.match(line_info.ifun):
+ return self.prefilter_manager.get_handler_by_name('auto')
+ else:
+ return None
+
+
+#-----------------------------------------------------------------------------
+# Prefilter handlers
+#-----------------------------------------------------------------------------
+
+
+class PrefilterHandler(Configurable):
+
+ handler_name = Unicode('normal')
+ esc_strings = List([])
+ shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
+ prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True)
+
+ def __init__(self, shell=None, prefilter_manager=None, **kwargs):
+ super(PrefilterHandler, self).__init__(
+ shell=shell, prefilter_manager=prefilter_manager, **kwargs
+ )
+ self.prefilter_manager.register_handler(
+ self.handler_name,
+ self,
+ self.esc_strings
+ )
+
+ def handle(self, line_info):
+ # print "normal: ", line_info
+ """Handle normal input lines. Use as a template for handlers."""
+
+ # With autoindent on, we need some way to exit the input loop, and I
+ # don't want to force the user to have to backspace all the way to
+ # clear the line. The rule in this case is that either two
+ # lines of pure whitespace in a row, or a line of pure whitespace but
+ # of a size different to the indent level, will exit the input loop.
+ line = line_info.line
+ continue_prompt = line_info.continue_prompt
+
+ if (continue_prompt and
+ self.shell.autoindent and
+ line.isspace() and
+ 0 < abs(len(line) - self.shell.indent_current_nsp) <= 2):
+ line = ''
+
+ return line
+
+ def __str__(self):
+ return "<%s(name=%s)>" % (self.__class__.__name__, self.handler_name)
+
+
+class MacroHandler(PrefilterHandler):
+ handler_name = Unicode("macro")
+
+ def handle(self, line_info):
+ obj = self.shell.user_ns.get(line_info.ifun)
+ pre_space = line_info.pre_whitespace
+ line_sep = "\n" + pre_space
+ return pre_space + line_sep.join(obj.value.splitlines())
+
+
+class MagicHandler(PrefilterHandler):
+
+ handler_name = Unicode('magic')
+ esc_strings = List([ESC_MAGIC])
+
+ def handle(self, line_info):
+ """Execute magic functions."""
+ ifun = line_info.ifun
+ the_rest = line_info.the_rest
+ cmd = '%sget_ipython().magic(%r)' % (line_info.pre_whitespace,
+ (ifun + " " + the_rest))
+ return cmd
+
+
+class AutoHandler(PrefilterHandler):
+
+ handler_name = Unicode('auto')
+ esc_strings = List([ESC_PAREN, ESC_QUOTE, ESC_QUOTE2])
+
+ def handle(self, line_info):
+ """Handle lines which can be auto-executed, quoting if requested."""
+ line = line_info.line
+ ifun = line_info.ifun
+ the_rest = line_info.the_rest
+ esc = line_info.esc
+ continue_prompt = line_info.continue_prompt
+ obj = line_info.ofind(self.shell)['obj']
+
+ # This should only be active for single-line input!
+ if continue_prompt:
+ return line
+
+ force_auto = isinstance(obj, IPyAutocall)
+
+ # User objects sometimes raise exceptions on attribute access other
+ # than AttributeError (we've seen it in the past), so it's safest to be
+ # ultra-conservative here and catch all.
+ try:
+ auto_rewrite = obj.rewrite
+ except Exception:
+ auto_rewrite = True
+
+ if esc == ESC_QUOTE:
+ # Auto-quote splitting on whitespace
+ newcmd = '%s("%s")' % (ifun,'", "'.join(the_rest.split()) )
+ elif esc == ESC_QUOTE2:
+ # Auto-quote whole string
+ newcmd = '%s("%s")' % (ifun,the_rest)
+ elif esc == ESC_PAREN:
+ newcmd = '%s(%s)' % (ifun,",".join(the_rest.split()))
+ else:
+ # Auto-paren.
+ if force_auto:
+ # Don't rewrite if it is already a call.
+ do_rewrite = not the_rest.startswith('(')
+ else:
+ if not the_rest:
+ # We only apply it to argument-less calls if the autocall
+ # parameter is set to 2.
+ do_rewrite = (self.shell.autocall >= 2)
+ elif the_rest.startswith('[') and hasattr(obj, '__getitem__'):
+ # Don't autocall in this case: item access for an object
+ # which is BOTH callable and implements __getitem__.
+ do_rewrite = False
+ else:
+ do_rewrite = True
+
+ # Figure out the rewritten command
+ if do_rewrite:
+ if the_rest.endswith(';'):
+ newcmd = '%s(%s);' % (ifun.rstrip(),the_rest[:-1])
+ else:
+ newcmd = '%s(%s)' % (ifun.rstrip(), the_rest)
+ else:
+ normal_handler = self.prefilter_manager.get_handler_by_name('normal')
+ return normal_handler.handle(line_info)
+
+ # Display the rewritten call
+ if auto_rewrite:
+ self.shell.auto_rewrite_input(newcmd)
+
+ return newcmd
+
+
+class EmacsHandler(PrefilterHandler):
+
+ handler_name = Unicode('emacs')
+ esc_strings = List([])
+
+ def handle(self, line_info):
+ """Handle input lines marked by python-mode."""
+
+ # Currently, nothing is done. Later more functionality can be added
+ # here if needed.
+
+ # The input cache shouldn't be updated
+ return line_info.line
+
+
+#-----------------------------------------------------------------------------
+# Defaults
+#-----------------------------------------------------------------------------
+
+
+_default_transformers = [
+]
+
+_default_checkers = [
+ EmacsChecker,
+ MacroChecker,
+ IPyAutocallChecker,
+ AssignmentChecker,
+ AutoMagicChecker,
+ PythonOpsChecker,
+ AutocallChecker
+]
+
+_default_handlers = [
+ PrefilterHandler,
+ MacroHandler,
+ MagicHandler,
+ AutoHandler,
+ EmacsHandler
+]
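
The PrefilterManager above runs each input line through the enabled transformers in priority order and then through the checkers/handlers. A hypothetical sketch of plugging a custom transformer into that pipeline; the class and its rewrite rule are invented, and it assumes the running shell exposes the manager as `ip.prefilter_manager`:

    # Illustrative only; registration happens in PrefilterTransformer.__init__.
    from IPython.core.getipython import get_ipython
    from IPython.core.prefilter import PrefilterTransformer
    from traitlets import Integer

    class StripPromptTransformer(PrefilterTransformer):
        """Drop a leading '>>> ' so pasted doctest lines run as-is."""
        priority = Integer(50).tag(config=True)  # smaller priority runs first

        def transform(self, line, continue_prompt):
            if line.startswith('>>> '):
                return line[4:]
            return line

    ip = get_ipython()
    if ip is not None:
        StripPromptTransformer(shell=ip,
                               prefilter_manager=ip.prefilter_manager,
                               parent=ip)
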
diff --git a/contrib/python/ipython/py2/IPython/core/profile/README_STARTUP b/contrib/python/ipython/py2/IPython/core/profile/README_STARTUP
index 051134cfc3..61d4700042 100644
--- a/contrib/python/ipython/py2/IPython/core/profile/README_STARTUP
+++ b/contrib/python/ipython/py2/IPython/core/profile/README_STARTUP
@@ -1,11 +1,11 @@
-This is the IPython startup directory
-
-.py and .ipy files in this directory will be run *prior* to any code or files specified
-via the exec_lines or exec_files configurables whenever you load this profile.
-
-Files will be run in lexicographical order, so you can control the execution order of files
-with a prefix, e.g.::
-
- 00-first.py
- 50-middle.py
- 99-last.ipy
+This is the IPython startup directory
+
+.py and .ipy files in this directory will be run *prior* to any code or files specified
+via the exec_lines or exec_files configurables whenever you load this profile.
+
+Files will be run in lexicographical order, so you can control the execution order of files
+with a prefix, e.g.::
+
+ 00-first.py
+ 50-middle.py
+ 99-last.ipy
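
As a concrete illustration of the ordering convention described in this README, a hypothetical startup file might look like the following; the file name and its contents are examples only.

# ~/.ipython/profile_default/startup/00-imports.py  (hypothetical example)
# Runs before any exec_lines/exec_files whenever this profile is loaded;
# the '00-' prefix makes it run before, e.g., 50-middle.py and 99-last.ipy.
import os
import sys

print("startup: 00-imports.py loaded into %s" % os.path.basename(sys.executable))
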
diff --git a/contrib/python/ipython/py2/IPython/core/profileapp.py b/contrib/python/ipython/py2/IPython/core/profileapp.py
index 54e4f3b94c..b8e5fd26ac 100644
--- a/contrib/python/ipython/py2/IPython/core/profileapp.py
+++ b/contrib/python/ipython/py2/IPython/core/profileapp.py
@@ -1,314 +1,314 @@
-# encoding: utf-8
-"""
-An application for managing IPython profiles.
-
-To be invoked as the `ipython profile` subcommand.
-
-Authors:
-
-* Min RK
-
-"""
-from __future__ import print_function
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import os
-
-from traitlets.config.application import Application
-from IPython.core.application import (
- BaseIPythonApplication, base_flags
-)
-from IPython.core.profiledir import ProfileDir
-from IPython.utils.importstring import import_item
-from IPython.paths import get_ipython_dir, get_ipython_package_dir
-from IPython.utils import py3compat
+# encoding: utf-8
+"""
+An application for managing IPython profiles.
+
+To be invoked as the `ipython profile` subcommand.
+
+Authors:
+
+* Min RK
+
+"""
+from __future__ import print_function
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import os
+
+from traitlets.config.application import Application
+from IPython.core.application import (
+ BaseIPythonApplication, base_flags
+)
+from IPython.core.profiledir import ProfileDir
+from IPython.utils.importstring import import_item
+from IPython.paths import get_ipython_dir, get_ipython_package_dir
+from IPython.utils import py3compat
from traitlets import Unicode, Bool, Dict, observe
-
-#-----------------------------------------------------------------------------
-# Constants
-#-----------------------------------------------------------------------------
-
-create_help = """Create an IPython profile by name
-
-Create an ipython profile directory by its name or
-profile directory path. Profile directories contain
-configuration, log and security related files and are named
-using the convention 'profile_<name>'. By default they are
-located in your ipython directory. Once created, you will
-can edit the configuration files in the profile
-directory to configure IPython. Most users will create a
-profile directory by name,
-`ipython profile create myprofile`, which will put the directory
-in `<ipython_dir>/profile_myprofile`.
-"""
-list_help = """List available IPython profiles
-
-List all available profiles, by profile location, that can
-be found in the current working directly or in the ipython
-directory. Profile directories are named using the convention
-'profile_<profile>'.
-"""
-profile_help = """Manage IPython profiles
-
-Profile directories contain
-configuration, log and security related files and are named
-using the convention 'profile_<name>'. By default they are
-located in your ipython directory. You can create profiles
-with `ipython profile create <name>`, or see the profiles you
-already have with `ipython profile list`
-
-To get started configuring IPython, simply do:
-
-$> ipython profile create
-
-and IPython will create the default profile in <ipython_dir>/profile_default,
-where you can edit ipython_config.py to start configuring IPython.
-
-"""
-
-_list_examples = "ipython profile list # list all profiles"
-
-_create_examples = """
-ipython profile create foo # create profile foo w/ default config files
-ipython profile create foo --reset # restage default config files over current
-ipython profile create foo --parallel # also stage parallel config files
-"""
-
-_main_examples = """
-ipython profile create -h # show the help string for the create subcommand
-ipython profile list -h # show the help string for the list subcommand
-
-ipython locate profile foo # print the path to the directory for profile 'foo'
-"""
-
-#-----------------------------------------------------------------------------
-# Profile Application Class (for `ipython profile` subcommand)
-#-----------------------------------------------------------------------------
-
-
-def list_profiles_in(path):
- """list profiles in a given root directory"""
- files = os.listdir(path)
- profiles = []
- for f in files:
- try:
- full_path = os.path.join(path, f)
- except UnicodeError:
- continue
- if os.path.isdir(full_path) and f.startswith('profile_'):
- profiles.append(f.split('_',1)[-1])
- return profiles
-
-
-def list_bundled_profiles():
- """list profiles that are bundled with IPython."""
- path = os.path.join(get_ipython_package_dir(), u'core', u'profile')
- files = os.listdir(path)
- profiles = []
- for profile in files:
- full_path = os.path.join(path, profile)
- if os.path.isdir(full_path) and profile != "__pycache__":
- profiles.append(profile)
- return profiles
-
-
-class ProfileLocate(BaseIPythonApplication):
- description = """print the path to an IPython profile dir"""
-
- def parse_command_line(self, argv=None):
- super(ProfileLocate, self).parse_command_line(argv)
- if self.extra_args:
- self.profile = self.extra_args[0]
-
- def start(self):
- print(self.profile_dir.location)
-
-
-class ProfileList(Application):
- name = u'ipython-profile'
- description = list_help
- examples = _list_examples
-
- aliases = Dict({
- 'ipython-dir' : 'ProfileList.ipython_dir',
- 'log-level' : 'Application.log_level',
- })
- flags = Dict(dict(
- debug = ({'Application' : {'log_level' : 0}},
- "Set Application.log_level to 0, maximizing log output."
- )
- ))
-
+
+#-----------------------------------------------------------------------------
+# Constants
+#-----------------------------------------------------------------------------
+
+create_help = """Create an IPython profile by name
+
+Create an ipython profile directory by its name or
+profile directory path. Profile directories contain
+configuration, log and security related files and are named
+using the convention 'profile_<name>'. By default they are
+located in your ipython directory. Once created, you
+can edit the configuration files in the profile
+directory to configure IPython. Most users will create a
+profile directory by name,
+`ipython profile create myprofile`, which will put the directory
+in `<ipython_dir>/profile_myprofile`.
+"""
+list_help = """List available IPython profiles
+
+List all available profiles, by profile location, that can
+be found in the current working directory or in the ipython
+directory. Profile directories are named using the convention
+'profile_<profile>'.
+"""
+profile_help = """Manage IPython profiles
+
+Profile directories contain
+configuration, log and security related files and are named
+using the convention 'profile_<name>'. By default they are
+located in your ipython directory. You can create profiles
+with `ipython profile create <name>`, or see the profiles you
+already have with `ipython profile list`
+
+To get started configuring IPython, simply do:
+
+$> ipython profile create
+
+and IPython will create the default profile in <ipython_dir>/profile_default,
+where you can edit ipython_config.py to start configuring IPython.
+
+"""
+
+_list_examples = "ipython profile list # list all profiles"
+
+_create_examples = """
+ipython profile create foo # create profile foo w/ default config files
+ipython profile create foo --reset # restage default config files over current
+ipython profile create foo --parallel # also stage parallel config files
+"""
+
+_main_examples = """
+ipython profile create -h # show the help string for the create subcommand
+ipython profile list -h # show the help string for the list subcommand
+
+ipython locate profile foo # print the path to the directory for profile 'foo'
+"""
+
+#-----------------------------------------------------------------------------
+# Profile Application Class (for `ipython profile` subcommand)
+#-----------------------------------------------------------------------------
+
+
+def list_profiles_in(path):
+ """list profiles in a given root directory"""
+ files = os.listdir(path)
+ profiles = []
+ for f in files:
+ try:
+ full_path = os.path.join(path, f)
+ except UnicodeError:
+ continue
+ if os.path.isdir(full_path) and f.startswith('profile_'):
+ profiles.append(f.split('_',1)[-1])
+ return profiles
+
+
+def list_bundled_profiles():
+ """list profiles that are bundled with IPython."""
+ path = os.path.join(get_ipython_package_dir(), u'core', u'profile')
+ files = os.listdir(path)
+ profiles = []
+ for profile in files:
+ full_path = os.path.join(path, profile)
+ if os.path.isdir(full_path) and profile != "__pycache__":
+ profiles.append(profile)
+ return profiles
+
+
+class ProfileLocate(BaseIPythonApplication):
+ description = """print the path to an IPython profile dir"""
+
+ def parse_command_line(self, argv=None):
+ super(ProfileLocate, self).parse_command_line(argv)
+ if self.extra_args:
+ self.profile = self.extra_args[0]
+
+ def start(self):
+ print(self.profile_dir.location)
+
+
+class ProfileList(Application):
+ name = u'ipython-profile'
+ description = list_help
+ examples = _list_examples
+
+ aliases = Dict({
+ 'ipython-dir' : 'ProfileList.ipython_dir',
+ 'log-level' : 'Application.log_level',
+ })
+ flags = Dict(dict(
+ debug = ({'Application' : {'log_level' : 0}},
+ "Set Application.log_level to 0, maximizing log output."
+ )
+ ))
+
ipython_dir = Unicode(get_ipython_dir(),
- help="""
- The name of the IPython directory. This directory is used for logging
- configuration (through profiles), history storage, etc. The default
- is usually $HOME/.ipython. This options can also be specified through
- the environment variable IPYTHONDIR.
- """
+ help="""
+ The name of the IPython directory. This directory is used for logging
+ configuration (through profiles), history storage, etc. The default
+        is usually $HOME/.ipython. This option can also be specified through
+ the environment variable IPYTHONDIR.
+ """
).tag(config=True)
-
-
- def _print_profiles(self, profiles):
- """print list of profiles, indented."""
- for profile in profiles:
- print(' %s' % profile)
-
- def list_profile_dirs(self):
- profiles = list_bundled_profiles()
- if profiles:
- print()
- print("Available profiles in IPython:")
- self._print_profiles(profiles)
- print()
- print(" The first request for a bundled profile will copy it")
- print(" into your IPython directory (%s)," % self.ipython_dir)
- print(" where you can customize it.")
-
- profiles = list_profiles_in(self.ipython_dir)
- if profiles:
- print()
- print("Available profiles in %s:" % self.ipython_dir)
- self._print_profiles(profiles)
-
- profiles = list_profiles_in(py3compat.getcwd())
- if profiles:
- print()
- print("Available profiles in current directory (%s):" % py3compat.getcwd())
- self._print_profiles(profiles)
-
- print()
- print("To use any of the above profiles, start IPython with:")
- print(" ipython --profile=<name>")
- print()
-
- def start(self):
- self.list_profile_dirs()
-
-
-create_flags = {}
-create_flags.update(base_flags)
-# don't include '--init' flag, which implies running profile create in other apps
-create_flags.pop('init')
-create_flags['reset'] = ({'ProfileCreate': {'overwrite' : True}},
- "reset config files in this profile to the defaults.")
-create_flags['parallel'] = ({'ProfileCreate': {'parallel' : True}},
- "Include the config files for parallel "
- "computing apps (ipengine, ipcontroller, etc.)")
-
-
-class ProfileCreate(BaseIPythonApplication):
- name = u'ipython-profile'
- description = create_help
- examples = _create_examples
+
+
+ def _print_profiles(self, profiles):
+ """print list of profiles, indented."""
+ for profile in profiles:
+ print(' %s' % profile)
+
+ def list_profile_dirs(self):
+ profiles = list_bundled_profiles()
+ if profiles:
+ print()
+ print("Available profiles in IPython:")
+ self._print_profiles(profiles)
+ print()
+ print(" The first request for a bundled profile will copy it")
+ print(" into your IPython directory (%s)," % self.ipython_dir)
+ print(" where you can customize it.")
+
+ profiles = list_profiles_in(self.ipython_dir)
+ if profiles:
+ print()
+ print("Available profiles in %s:" % self.ipython_dir)
+ self._print_profiles(profiles)
+
+ profiles = list_profiles_in(py3compat.getcwd())
+ if profiles:
+ print()
+ print("Available profiles in current directory (%s):" % py3compat.getcwd())
+ self._print_profiles(profiles)
+
+ print()
+ print("To use any of the above profiles, start IPython with:")
+ print(" ipython --profile=<name>")
+ print()
+
+ def start(self):
+ self.list_profile_dirs()
+
+
+create_flags = {}
+create_flags.update(base_flags)
+# don't include '--init' flag, which implies running profile create in other apps
+create_flags.pop('init')
+create_flags['reset'] = ({'ProfileCreate': {'overwrite' : True}},
+ "reset config files in this profile to the defaults.")
+create_flags['parallel'] = ({'ProfileCreate': {'parallel' : True}},
+ "Include the config files for parallel "
+ "computing apps (ipengine, ipcontroller, etc.)")
+
+
+class ProfileCreate(BaseIPythonApplication):
+ name = u'ipython-profile'
+ description = create_help
+ examples = _create_examples
auto_create = Bool(True)
- def _log_format_default(self):
- return "[%(name)s] %(message)s"
-
- def _copy_config_files_default(self):
- return True
-
+ def _log_format_default(self):
+ return "[%(name)s] %(message)s"
+
+ def _copy_config_files_default(self):
+ return True
+
parallel = Bool(False,
help="whether to include parallel computing config files"
).tag(config=True)
@observe('parallel')
def _parallel_changed(self, change):
- parallel_files = [ 'ipcontroller_config.py',
- 'ipengine_config.py',
- 'ipcluster_config.py'
- ]
+ parallel_files = [ 'ipcontroller_config.py',
+ 'ipengine_config.py',
+ 'ipcluster_config.py'
+ ]
if change['new']:
- for cf in parallel_files:
- self.config_files.append(cf)
- else:
- for cf in parallel_files:
- if cf in self.config_files:
- self.config_files.remove(cf)
-
- def parse_command_line(self, argv):
- super(ProfileCreate, self).parse_command_line(argv)
- # accept positional arg as profile name
- if self.extra_args:
- self.profile = self.extra_args[0]
-
- flags = Dict(create_flags)
-
- classes = [ProfileDir]
-
- def _import_app(self, app_path):
- """import an app class"""
- app = None
- name = app_path.rsplit('.', 1)[-1]
- try:
- app = import_item(app_path)
- except ImportError:
- self.log.info("Couldn't import %s, config file will be excluded", name)
- except Exception:
- self.log.warning('Unexpected error importing %s', name, exc_info=True)
- return app
-
- def init_config_files(self):
- super(ProfileCreate, self).init_config_files()
- # use local imports, since these classes may import from here
- from IPython.terminal.ipapp import TerminalIPythonApp
- apps = [TerminalIPythonApp]
- for app_path in (
- 'ipykernel.kernelapp.IPKernelApp',
- ):
- app = self._import_app(app_path)
- if app is not None:
- apps.append(app)
- if self.parallel:
- from ipyparallel.apps.ipcontrollerapp import IPControllerApp
- from ipyparallel.apps.ipengineapp import IPEngineApp
- from ipyparallel.apps.ipclusterapp import IPClusterStart
- apps.extend([
- IPControllerApp,
- IPEngineApp,
- IPClusterStart,
- ])
- for App in apps:
- app = App()
- app.config.update(self.config)
- app.log = self.log
- app.overwrite = self.overwrite
- app.copy_config_files=True
- app.ipython_dir=self.ipython_dir
- app.profile_dir=self.profile_dir
- app.init_config_files()
-
- def stage_default_config_file(self):
- pass
-
-
-class ProfileApp(Application):
- name = u'ipython profile'
- description = profile_help
- examples = _main_examples
-
- subcommands = Dict(dict(
- create = (ProfileCreate, ProfileCreate.description.splitlines()[0]),
- list = (ProfileList, ProfileList.description.splitlines()[0]),
- locate = (ProfileLocate, ProfileLocate.description.splitlines()[0]),
- ))
-
- def start(self):
- if self.subapp is None:
- print("No subcommand specified. Must specify one of: %s"%(self.subcommands.keys()))
- print()
- self.print_description()
- self.print_subcommands()
- self.exit(1)
- else:
- return self.subapp.start()
+ for cf in parallel_files:
+ self.config_files.append(cf)
+ else:
+ for cf in parallel_files:
+ if cf in self.config_files:
+ self.config_files.remove(cf)
+
+ def parse_command_line(self, argv):
+ super(ProfileCreate, self).parse_command_line(argv)
+ # accept positional arg as profile name
+ if self.extra_args:
+ self.profile = self.extra_args[0]
+
+ flags = Dict(create_flags)
+
+ classes = [ProfileDir]
+
+ def _import_app(self, app_path):
+ """import an app class"""
+ app = None
+ name = app_path.rsplit('.', 1)[-1]
+ try:
+ app = import_item(app_path)
+ except ImportError:
+ self.log.info("Couldn't import %s, config file will be excluded", name)
+ except Exception:
+ self.log.warning('Unexpected error importing %s', name, exc_info=True)
+ return app
+
+ def init_config_files(self):
+ super(ProfileCreate, self).init_config_files()
+ # use local imports, since these classes may import from here
+ from IPython.terminal.ipapp import TerminalIPythonApp
+ apps = [TerminalIPythonApp]
+ for app_path in (
+ 'ipykernel.kernelapp.IPKernelApp',
+ ):
+ app = self._import_app(app_path)
+ if app is not None:
+ apps.append(app)
+ if self.parallel:
+ from ipyparallel.apps.ipcontrollerapp import IPControllerApp
+ from ipyparallel.apps.ipengineapp import IPEngineApp
+ from ipyparallel.apps.ipclusterapp import IPClusterStart
+ apps.extend([
+ IPControllerApp,
+ IPEngineApp,
+ IPClusterStart,
+ ])
+ for App in apps:
+ app = App()
+ app.config.update(self.config)
+ app.log = self.log
+ app.overwrite = self.overwrite
+ app.copy_config_files=True
+ app.ipython_dir=self.ipython_dir
+ app.profile_dir=self.profile_dir
+ app.init_config_files()
+
+ def stage_default_config_file(self):
+ pass
+
+
+class ProfileApp(Application):
+ name = u'ipython profile'
+ description = profile_help
+ examples = _main_examples
+
+ subcommands = Dict(dict(
+ create = (ProfileCreate, ProfileCreate.description.splitlines()[0]),
+ list = (ProfileList, ProfileList.description.splitlines()[0]),
+ locate = (ProfileLocate, ProfileLocate.description.splitlines()[0]),
+ ))
+
+ def start(self):
+ if self.subapp is None:
+ print("No subcommand specified. Must specify one of: %s"%(self.subcommands.keys()))
+ print()
+ self.print_description()
+ self.print_subcommands()
+ self.exit(1)
+ else:
+ return self.subapp.start()
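
The subcommand classes above can also be driven from Python. A minimal sketch, roughly equivalent to `ipython profile create demo`, follows; the profile name 'demo' is illustrative, and the call pattern assumes the standard traitlets Application lifecycle used by BaseIPythonApplication.

# Sketch: create a profile programmatically instead of via the CLI.
from IPython.core.profileapp import ProfileCreate

app = ProfileCreate()
app.initialize(['demo'])          # positional argument becomes the profile name
print(app.profile_dir.location)   # e.g. <ipython_dir>/profile_demo
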
diff --git a/contrib/python/ipython/py2/IPython/core/profiledir.py b/contrib/python/ipython/py2/IPython/core/profiledir.py
index 4e54f8c68c..b777f13da0 100644
--- a/contrib/python/ipython/py2/IPython/core/profiledir.py
+++ b/contrib/python/ipython/py2/IPython/core/profiledir.py
@@ -1,222 +1,222 @@
-# encoding: utf-8
-"""An object for managing IPython profile directories."""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import os
-import shutil
-import errno
-
-from traitlets.config.configurable import LoggingConfigurable
-from IPython.paths import get_ipython_package_dir
-from IPython.utils.path import expand_path, ensure_dir_exists
-from IPython.utils import py3compat
+# encoding: utf-8
+"""An object for managing IPython profile directories."""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import os
+import shutil
+import errno
+
+from traitlets.config.configurable import LoggingConfigurable
+from IPython.paths import get_ipython_package_dir
+from IPython.utils.path import expand_path, ensure_dir_exists
+from IPython.utils import py3compat
from traitlets import Unicode, Bool, observe
-
-#-----------------------------------------------------------------------------
-# Module errors
-#-----------------------------------------------------------------------------
-
-class ProfileDirError(Exception):
- pass
-
-
-#-----------------------------------------------------------------------------
-# Class for managing profile directories
-#-----------------------------------------------------------------------------
-
-class ProfileDir(LoggingConfigurable):
- """An object to manage the profile directory and its resources.
-
- The profile directory is used by all IPython applications, to manage
- configuration, logging and security.
-
- This object knows how to find, create and manage these directories. This
- should be used by any code that wants to handle profiles.
- """
-
- security_dir_name = Unicode('security')
- log_dir_name = Unicode('log')
- startup_dir_name = Unicode('startup')
- pid_dir_name = Unicode('pid')
- static_dir_name = Unicode('static')
- security_dir = Unicode(u'')
- log_dir = Unicode(u'')
- startup_dir = Unicode(u'')
- pid_dir = Unicode(u'')
- static_dir = Unicode(u'')
-
+
+#-----------------------------------------------------------------------------
+# Module errors
+#-----------------------------------------------------------------------------
+
+class ProfileDirError(Exception):
+ pass
+
+
+#-----------------------------------------------------------------------------
+# Class for managing profile directories
+#-----------------------------------------------------------------------------
+
+class ProfileDir(LoggingConfigurable):
+ """An object to manage the profile directory and its resources.
+
+ The profile directory is used by all IPython applications, to manage
+ configuration, logging and security.
+
+ This object knows how to find, create and manage these directories. This
+ should be used by any code that wants to handle profiles.
+ """
+
+ security_dir_name = Unicode('security')
+ log_dir_name = Unicode('log')
+ startup_dir_name = Unicode('startup')
+ pid_dir_name = Unicode('pid')
+ static_dir_name = Unicode('static')
+ security_dir = Unicode(u'')
+ log_dir = Unicode(u'')
+ startup_dir = Unicode(u'')
+ pid_dir = Unicode(u'')
+ static_dir = Unicode(u'')
+
location = Unicode(u'',
- help="""Set the profile location directly. This overrides the logic used by the
- `profile` option.""",
+ help="""Set the profile location directly. This overrides the logic used by the
+ `profile` option.""",
).tag(config=True)
-
- _location_isset = Bool(False) # flag for detecting multiply set location
+
+ _location_isset = Bool(False) # flag for detecting multiply set location
@observe('location')
def _location_changed(self, change):
- if self._location_isset:
- raise RuntimeError("Cannot set profile location more than once.")
- self._location_isset = True
+ if self._location_isset:
+ raise RuntimeError("Cannot set profile location more than once.")
+ self._location_isset = True
new = change['new']
- ensure_dir_exists(new)
-
- # ensure config files exist:
- self.security_dir = os.path.join(new, self.security_dir_name)
- self.log_dir = os.path.join(new, self.log_dir_name)
- self.startup_dir = os.path.join(new, self.startup_dir_name)
- self.pid_dir = os.path.join(new, self.pid_dir_name)
- self.static_dir = os.path.join(new, self.static_dir_name)
- self.check_dirs()
+ ensure_dir_exists(new)
+
+ # ensure config files exist:
+ self.security_dir = os.path.join(new, self.security_dir_name)
+ self.log_dir = os.path.join(new, self.log_dir_name)
+ self.startup_dir = os.path.join(new, self.startup_dir_name)
+ self.pid_dir = os.path.join(new, self.pid_dir_name)
+ self.static_dir = os.path.join(new, self.static_dir_name)
+ self.check_dirs()
- def _mkdir(self, path, mode=None):
- """ensure a directory exists at a given path
-
- This is a version of os.mkdir, with the following differences:
-
- - returns True if it created the directory, False otherwise
- - ignores EEXIST, protecting against race conditions where
- the dir may have been created in between the check and
- the creation
- - sets permissions if requested and the dir already exists
- """
- if os.path.exists(path):
- if mode and os.stat(path).st_mode != mode:
- try:
- os.chmod(path, mode)
- except OSError:
- self.log.warning(
- "Could not set permissions on %s",
- path
- )
- return False
- try:
- if mode:
- os.mkdir(path, mode)
- else:
- os.mkdir(path)
- except OSError as e:
- if e.errno == errno.EEXIST:
- return False
- else:
- raise
-
- return True
+ def _mkdir(self, path, mode=None):
+ """ensure a directory exists at a given path
+
+ This is a version of os.mkdir, with the following differences:
+
+ - returns True if it created the directory, False otherwise
+ - ignores EEXIST, protecting against race conditions where
+ the dir may have been created in between the check and
+ the creation
+ - sets permissions if requested and the dir already exists
+ """
+ if os.path.exists(path):
+ if mode and os.stat(path).st_mode != mode:
+ try:
+ os.chmod(path, mode)
+ except OSError:
+ self.log.warning(
+ "Could not set permissions on %s",
+ path
+ )
+ return False
+ try:
+ if mode:
+ os.mkdir(path, mode)
+ else:
+ os.mkdir(path)
+ except OSError as e:
+ if e.errno == errno.EEXIST:
+ return False
+ else:
+ raise
+
+ return True
@observe('log_dir')
def check_log_dir(self, change=None):
- self._mkdir(self.log_dir)
+ self._mkdir(self.log_dir)
@observe('startup_dir')
def check_startup_dir(self, change=None):
- self._mkdir(self.startup_dir)
-
- readme = os.path.join(self.startup_dir, 'README')
-
+ self._mkdir(self.startup_dir)
+
+ readme = os.path.join(self.startup_dir, 'README')
+
if not os.path.exists(readme):
import pkgutil
with open(readme, 'wb') as f:
f.write(pkgutil.get_data(__name__, 'profile/README_STARTUP'))
-
+
@observe('security_dir')
def check_security_dir(self, change=None):
- self._mkdir(self.security_dir, 0o40700)
-
+ self._mkdir(self.security_dir, 0o40700)
+
@observe('pid_dir')
def check_pid_dir(self, change=None):
- self._mkdir(self.pid_dir, 0o40700)
-
- def check_dirs(self):
- self.check_security_dir()
- self.check_log_dir()
- self.check_pid_dir()
- self.check_startup_dir()
-
- def copy_config_file(self, config_file, path=None, overwrite=False):
- """Copy a default config file into the active profile directory.
-
- Default configuration files are kept in :mod:`IPython.core.profile`.
- This function moves these from that location to the working profile
- directory.
- """
- dst = os.path.join(self.location, config_file)
- if os.path.isfile(dst) and not overwrite:
- return False
- if path is None:
- path = os.path.join(get_ipython_package_dir(), u'core', u'profile', u'default')
- src = os.path.join(path, config_file)
- shutil.copy(src, dst)
- return True
-
- @classmethod
- def create_profile_dir(cls, profile_dir, config=None):
- """Create a new profile directory given a full path.
-
- Parameters
- ----------
- profile_dir : str
- The full path to the profile directory. If it does exist, it will
- be used. If not, it will be created.
- """
- return cls(location=profile_dir, config=config)
-
- @classmethod
- def create_profile_dir_by_name(cls, path, name=u'default', config=None):
- """Create a profile dir by profile name and path.
-
- Parameters
- ----------
- path : unicode
- The path (directory) to put the profile directory in.
- name : unicode
- The name of the profile. The name of the profile directory will
- be "profile_<profile>".
- """
- if not os.path.isdir(path):
- raise ProfileDirError('Directory not found: %s' % path)
- profile_dir = os.path.join(path, u'profile_' + name)
- return cls(location=profile_dir, config=config)
-
- @classmethod
- def find_profile_dir_by_name(cls, ipython_dir, name=u'default', config=None):
- """Find an existing profile dir by profile name, return its ProfileDir.
-
- This searches through a sequence of paths for a profile dir. If it
- is not found, a :class:`ProfileDirError` exception will be raised.
-
- The search path algorithm is:
- 1. ``py3compat.getcwd()``
- 2. ``ipython_dir``
-
- Parameters
- ----------
- ipython_dir : unicode or str
- The IPython directory to use.
- name : unicode or str
- The name of the profile. The name of the profile directory
- will be "profile_<profile>".
- """
- dirname = u'profile_' + name
- paths = [py3compat.getcwd(), ipython_dir]
- for p in paths:
- profile_dir = os.path.join(p, dirname)
- if os.path.isdir(profile_dir):
- return cls(location=profile_dir, config=config)
- else:
- raise ProfileDirError('Profile directory not found in paths: %s' % dirname)
-
- @classmethod
- def find_profile_dir(cls, profile_dir, config=None):
- """Find/create a profile dir and return its ProfileDir.
-
- This will create the profile directory if it doesn't exist.
-
- Parameters
- ----------
- profile_dir : unicode or str
- The path of the profile directory.
- """
- profile_dir = expand_path(profile_dir)
- if not os.path.isdir(profile_dir):
- raise ProfileDirError('Profile directory not found: %s' % profile_dir)
- return cls(location=profile_dir, config=config)
+ self._mkdir(self.pid_dir, 0o40700)
+
+ def check_dirs(self):
+ self.check_security_dir()
+ self.check_log_dir()
+ self.check_pid_dir()
+ self.check_startup_dir()
+
+ def copy_config_file(self, config_file, path=None, overwrite=False):
+ """Copy a default config file into the active profile directory.
+
+ Default configuration files are kept in :mod:`IPython.core.profile`.
+ This function moves these from that location to the working profile
+ directory.
+ """
+ dst = os.path.join(self.location, config_file)
+ if os.path.isfile(dst) and not overwrite:
+ return False
+ if path is None:
+ path = os.path.join(get_ipython_package_dir(), u'core', u'profile', u'default')
+ src = os.path.join(path, config_file)
+ shutil.copy(src, dst)
+ return True
+
+ @classmethod
+ def create_profile_dir(cls, profile_dir, config=None):
+ """Create a new profile directory given a full path.
+
+ Parameters
+ ----------
+ profile_dir : str
+            The full path to the profile directory. If it already exists, it
+            will be used; if not, it will be created.
+ """
+ return cls(location=profile_dir, config=config)
+
+ @classmethod
+ def create_profile_dir_by_name(cls, path, name=u'default', config=None):
+ """Create a profile dir by profile name and path.
+
+ Parameters
+ ----------
+ path : unicode
+ The path (directory) to put the profile directory in.
+ name : unicode
+ The name of the profile. The name of the profile directory will
+ be "profile_<profile>".
+ """
+ if not os.path.isdir(path):
+ raise ProfileDirError('Directory not found: %s' % path)
+ profile_dir = os.path.join(path, u'profile_' + name)
+ return cls(location=profile_dir, config=config)
+
+ @classmethod
+ def find_profile_dir_by_name(cls, ipython_dir, name=u'default', config=None):
+ """Find an existing profile dir by profile name, return its ProfileDir.
+
+ This searches through a sequence of paths for a profile dir. If it
+ is not found, a :class:`ProfileDirError` exception will be raised.
+
+ The search path algorithm is:
+ 1. ``py3compat.getcwd()``
+ 2. ``ipython_dir``
+
+ Parameters
+ ----------
+ ipython_dir : unicode or str
+ The IPython directory to use.
+ name : unicode or str
+ The name of the profile. The name of the profile directory
+ will be "profile_<profile>".
+ """
+ dirname = u'profile_' + name
+ paths = [py3compat.getcwd(), ipython_dir]
+ for p in paths:
+ profile_dir = os.path.join(p, dirname)
+ if os.path.isdir(profile_dir):
+ return cls(location=profile_dir, config=config)
+ else:
+ raise ProfileDirError('Profile directory not found in paths: %s' % dirname)
+
+ @classmethod
+ def find_profile_dir(cls, profile_dir, config=None):
+ """Find/create a profile dir and return its ProfileDir.
+
+ This will create the profile directory if it doesn't exist.
+
+ Parameters
+ ----------
+ profile_dir : unicode or str
+ The path of the profile directory.
+ """
+ profile_dir = expand_path(profile_dir)
+ if not os.path.isdir(profile_dir):
+ raise ProfileDirError('Profile directory not found: %s' % profile_dir)
+ return cls(location=profile_dir, config=config)
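
A short sketch of how ProfileDir is typically used by the applications above; the paths come from the standard IPython directory, and 'default' is simply the usual profile name.

# Sketch: find an existing profile directory, or create it if it is missing.
import os
from IPython.core.profiledir import ProfileDir, ProfileDirError
from IPython.paths import get_ipython_dir

ipython_dir = get_ipython_dir()
try:
    pd = ProfileDir.find_profile_dir_by_name(ipython_dir, 'default')
except ProfileDirError:
    pd = ProfileDir.create_profile_dir_by_name(ipython_dir, 'default')

# The log/startup/security/pid sub-directories are created by the observers above.
print(pd.location, os.path.isdir(pd.startup_dir))
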
diff --git a/contrib/python/ipython/py2/IPython/core/prompts.py b/contrib/python/ipython/py2/IPython/core/prompts.py
index 126e84fdfd..7802bc5363 100644
--- a/contrib/python/ipython/py2/IPython/core/prompts.py
+++ b/contrib/python/ipython/py2/IPython/core/prompts.py
@@ -1,26 +1,26 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""Being removed
-"""
-
+"""
+
from IPython.utils import py3compat
-
-class LazyEvaluate(object):
- """This is used for formatting strings with values that need to be updated
- at that time, such as the current time or working directory."""
- def __init__(self, func, *args, **kwargs):
- self.func = func
- self.args = args
- self.kwargs = kwargs
-
- def __call__(self, **kwargs):
- self.kwargs.update(kwargs)
- return self.func(*self.args, **self.kwargs)
-
- def __str__(self):
- return str(self())
-
- def __unicode__(self):
- return py3compat.unicode_type(self())
-
- def __format__(self, format_spec):
- return format(self(), format_spec)
+
+class LazyEvaluate(object):
+ """This is used for formatting strings with values that need to be updated
+ at that time, such as the current time or working directory."""
+ def __init__(self, func, *args, **kwargs):
+ self.func = func
+ self.args = args
+ self.kwargs = kwargs
+
+ def __call__(self, **kwargs):
+ self.kwargs.update(kwargs)
+ return self.func(*self.args, **self.kwargs)
+
+ def __str__(self):
+ return str(self())
+
+ def __unicode__(self):
+ return py3compat.unicode_type(self())
+
+ def __format__(self, format_spec):
+ return format(self(), format_spec)
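
LazyEvaluate defers calling `func` until the object is actually formatted, so the value reflects the moment of display rather than the moment of construction. A small sketch:

# Sketch: the wrapped call runs each time the object is formatted.
import time
from IPython.core.prompts import LazyEvaluate

clock = LazyEvaluate(time.strftime, '%H:%M:%S')
print('first:  {0}'.format(clock))   # evaluated now
time.sleep(1)
print('second: {0}'.format(clock))   # evaluated again, one second later
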
diff --git a/contrib/python/ipython/py2/IPython/core/pylabtools.py b/contrib/python/ipython/py2/IPython/core/pylabtools.py
index 79072b41a9..a1932d8c48 100644
--- a/contrib/python/ipython/py2/IPython/core/pylabtools.py
+++ b/contrib/python/ipython/py2/IPython/core/pylabtools.py
@@ -1,28 +1,28 @@
-# -*- coding: utf-8 -*-
-"""Pylab (matplotlib) support utilities."""
-from __future__ import print_function
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from io import BytesIO
-
-from IPython.core.display import _pngxy
-from IPython.utils.decorators import flag_calls
-from IPython.utils import py3compat
-
-# If user specifies a GUI, that dictates the backend, otherwise we read the
-# user's mpl default from the mpl rc structure
-backends = {'tk': 'TkAgg',
- 'gtk': 'GTKAgg',
- 'gtk3': 'GTK3Agg',
- 'wx': 'WXAgg',
- 'qt4': 'Qt4Agg',
- 'qt5': 'Qt5Agg',
+# -*- coding: utf-8 -*-
+"""Pylab (matplotlib) support utilities."""
+from __future__ import print_function
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from io import BytesIO
+
+from IPython.core.display import _pngxy
+from IPython.utils.decorators import flag_calls
+from IPython.utils import py3compat
+
+# If user specifies a GUI, that dictates the backend, otherwise we read the
+# user's mpl default from the mpl rc structure
+backends = {'tk': 'TkAgg',
+ 'gtk': 'GTKAgg',
+ 'gtk3': 'GTK3Agg',
+ 'wx': 'WXAgg',
+ 'qt4': 'Qt4Agg',
+ 'qt5': 'Qt5Agg',
'qt': 'Qt5Agg',
- 'osx': 'MacOSX',
- 'nbagg': 'nbAgg',
- 'notebook': 'nbAgg',
+ 'osx': 'MacOSX',
+ 'nbagg': 'nbAgg',
+ 'notebook': 'nbAgg',
'agg': 'agg',
'svg': 'svg',
'pdf': 'pdf',
@@ -31,20 +31,20 @@ backends = {'tk': 'TkAgg',
'ipympl': 'module://ipympl.backend_nbagg',
'widget': 'module://ipympl.backend_nbagg',
}
-
-# We also need a reverse backends2guis mapping that will properly choose which
-# GUI support to activate based on the desired matplotlib backend. For the
-# most part it's just a reverse of the above dict, but we also need to add a
-# few others that map to the same GUI manually:
-backend2gui = dict(zip(backends.values(), backends.keys()))
-# Our tests expect backend2gui to just return 'qt'
-backend2gui['Qt4Agg'] = 'qt'
-# In the reverse mapping, there are a few extra valid matplotlib backends that
-# map to the same GUI support
-backend2gui['GTK'] = backend2gui['GTKCairo'] = 'gtk'
-backend2gui['GTK3Cairo'] = 'gtk3'
-backend2gui['WX'] = 'wx'
-backend2gui['CocoaAgg'] = 'osx'
+
+# We also need a reverse backends2guis mapping that will properly choose which
+# GUI support to activate based on the desired matplotlib backend. For the
+# most part it's just a reverse of the above dict, but we also need to add a
+# few others that map to the same GUI manually:
+backend2gui = dict(zip(backends.values(), backends.keys()))
+# Our tests expect backend2gui to just return 'qt'
+backend2gui['Qt4Agg'] = 'qt'
+# In the reverse mapping, there are a few extra valid matplotlib backends that
+# map to the same GUI support
+backend2gui['GTK'] = backend2gui['GTKCairo'] = 'gtk'
+backend2gui['GTK3Cairo'] = 'gtk3'
+backend2gui['WX'] = 'wx'
+backend2gui['CocoaAgg'] = 'osx'
# And some backends that don't need GUI integration
del backend2gui['nbAgg']
del backend2gui['agg']
@@ -52,138 +52,138 @@ del backend2gui['svg']
del backend2gui['pdf']
del backend2gui['ps']
del backend2gui['module://ipykernel.pylab.backend_inline']
-
-#-----------------------------------------------------------------------------
-# Matplotlib utilities
-#-----------------------------------------------------------------------------
-
-
-def getfigs(*fig_nums):
- """Get a list of matplotlib figures by figure numbers.
-
- If no arguments are given, all available figures are returned. If the
- argument list contains references to invalid figures, a warning is printed
- but the function continues pasting further figures.
-
- Parameters
- ----------
- figs : tuple
- A tuple of ints giving the figure numbers of the figures to return.
- """
- from matplotlib._pylab_helpers import Gcf
- if not fig_nums:
- fig_managers = Gcf.get_all_fig_managers()
- return [fm.canvas.figure for fm in fig_managers]
- else:
- figs = []
- for num in fig_nums:
- f = Gcf.figs.get(num)
- if f is None:
- print('Warning: figure %s not available.' % num)
- else:
- figs.append(f.canvas.figure)
- return figs
-
-
-def figsize(sizex, sizey):
- """Set the default figure size to be [sizex, sizey].
-
- This is just an easy to remember, convenience wrapper that sets::
-
- matplotlib.rcParams['figure.figsize'] = [sizex, sizey]
- """
- import matplotlib
- matplotlib.rcParams['figure.figsize'] = [sizex, sizey]
-
-
-def print_figure(fig, fmt='png', bbox_inches='tight', **kwargs):
- """Print a figure to an image, and return the resulting file data
-
- Returned data will be bytes unless ``fmt='svg'``,
- in which case it will be unicode.
-
- Any keyword args are passed to fig.canvas.print_figure,
- such as ``quality`` or ``bbox_inches``.
- """
- from matplotlib import rcParams
- # When there's an empty figure, we shouldn't return anything, otherwise we
- # get big blank areas in the qt console.
- if not fig.axes and not fig.lines:
- return
-
+
+#-----------------------------------------------------------------------------
+# Matplotlib utilities
+#-----------------------------------------------------------------------------
+
+
+def getfigs(*fig_nums):
+ """Get a list of matplotlib figures by figure numbers.
+
+ If no arguments are given, all available figures are returned. If the
+ argument list contains references to invalid figures, a warning is printed
+    but the function continues processing further figures.
+
+ Parameters
+ ----------
+ figs : tuple
+ A tuple of ints giving the figure numbers of the figures to return.
+ """
+ from matplotlib._pylab_helpers import Gcf
+ if not fig_nums:
+ fig_managers = Gcf.get_all_fig_managers()
+ return [fm.canvas.figure for fm in fig_managers]
+ else:
+ figs = []
+ for num in fig_nums:
+ f = Gcf.figs.get(num)
+ if f is None:
+ print('Warning: figure %s not available.' % num)
+ else:
+ figs.append(f.canvas.figure)
+ return figs
+
+
+def figsize(sizex, sizey):
+ """Set the default figure size to be [sizex, sizey].
+
+ This is just an easy to remember, convenience wrapper that sets::
+
+ matplotlib.rcParams['figure.figsize'] = [sizex, sizey]
+ """
+ import matplotlib
+ matplotlib.rcParams['figure.figsize'] = [sizex, sizey]
+
+
+def print_figure(fig, fmt='png', bbox_inches='tight', **kwargs):
+ """Print a figure to an image, and return the resulting file data
+
+ Returned data will be bytes unless ``fmt='svg'``,
+ in which case it will be unicode.
+
+ Any keyword args are passed to fig.canvas.print_figure,
+ such as ``quality`` or ``bbox_inches``.
+ """
+ from matplotlib import rcParams
+ # When there's an empty figure, we shouldn't return anything, otherwise we
+ # get big blank areas in the qt console.
+ if not fig.axes and not fig.lines:
+ return
+
dpi = fig.dpi
- if fmt == 'retina':
- dpi = dpi * 2
- fmt = 'png'
-
- # build keyword args
- kw = dict(
- format=fmt,
- facecolor=fig.get_facecolor(),
- edgecolor=fig.get_edgecolor(),
- dpi=dpi,
- bbox_inches=bbox_inches,
- )
- # **kwargs get higher priority
- kw.update(kwargs)
-
- bytes_io = BytesIO()
- fig.canvas.print_figure(bytes_io, **kw)
- data = bytes_io.getvalue()
- if fmt == 'svg':
- data = data.decode('utf-8')
- return data
-
-def retina_figure(fig, **kwargs):
- """format a figure as a pixel-doubled (retina) PNG"""
- pngdata = print_figure(fig, fmt='retina', **kwargs)
- # Make sure that retina_figure acts just like print_figure and returns
- # None when the figure is empty.
- if pngdata is None:
- return
- w, h = _pngxy(pngdata)
- metadata = dict(width=w//2, height=h//2)
- return pngdata, metadata
-
-# We need a little factory function here to create the closure where
-# safe_execfile can live.
-def mpl_runner(safe_execfile):
- """Factory to return a matplotlib-enabled runner for %run.
-
- Parameters
- ----------
- safe_execfile : function
- This must be a function with the same interface as the
- :meth:`safe_execfile` method of IPython.
-
- Returns
- -------
- A function suitable for use as the ``runner`` argument of the %run magic
- function.
- """
-
- def mpl_execfile(fname,*where,**kw):
- """matplotlib-aware wrapper around safe_execfile.
-
- Its interface is identical to that of the :func:`execfile` builtin.
-
- This is ultimately a call to execfile(), but wrapped in safeties to
- properly handle interactive rendering."""
-
- import matplotlib
+ if fmt == 'retina':
+ dpi = dpi * 2
+ fmt = 'png'
+
+ # build keyword args
+ kw = dict(
+ format=fmt,
+ facecolor=fig.get_facecolor(),
+ edgecolor=fig.get_edgecolor(),
+ dpi=dpi,
+ bbox_inches=bbox_inches,
+ )
+ # **kwargs get higher priority
+ kw.update(kwargs)
+
+ bytes_io = BytesIO()
+ fig.canvas.print_figure(bytes_io, **kw)
+ data = bytes_io.getvalue()
+ if fmt == 'svg':
+ data = data.decode('utf-8')
+ return data
+
+def retina_figure(fig, **kwargs):
+ """format a figure as a pixel-doubled (retina) PNG"""
+ pngdata = print_figure(fig, fmt='retina', **kwargs)
+ # Make sure that retina_figure acts just like print_figure and returns
+ # None when the figure is empty.
+ if pngdata is None:
+ return
+ w, h = _pngxy(pngdata)
+ metadata = dict(width=w//2, height=h//2)
+ return pngdata, metadata
+
+# We need a little factory function here to create the closure where
+# safe_execfile can live.
+def mpl_runner(safe_execfile):
+ """Factory to return a matplotlib-enabled runner for %run.
+
+ Parameters
+ ----------
+ safe_execfile : function
+ This must be a function with the same interface as the
+ :meth:`safe_execfile` method of IPython.
+
+ Returns
+ -------
+ A function suitable for use as the ``runner`` argument of the %run magic
+ function.
+ """
+
+ def mpl_execfile(fname,*where,**kw):
+ """matplotlib-aware wrapper around safe_execfile.
+
+ Its interface is identical to that of the :func:`execfile` builtin.
+
+ This is ultimately a call to execfile(), but wrapped in safeties to
+ properly handle interactive rendering."""
+
+ import matplotlib
import matplotlib.pyplot as plt
-
- #print '*** Matplotlib runner ***' # dbg
- # turn off rendering until end of script
- is_interactive = matplotlib.rcParams['interactive']
- matplotlib.interactive(False)
- safe_execfile(fname,*where,**kw)
- matplotlib.interactive(is_interactive)
- # make rendering call now, if the user tried to do it
+
+ #print '*** Matplotlib runner ***' # dbg
+ # turn off rendering until end of script
+ is_interactive = matplotlib.rcParams['interactive']
+ matplotlib.interactive(False)
+ safe_execfile(fname,*where,**kw)
+ matplotlib.interactive(is_interactive)
+ # make rendering call now, if the user tried to do it
if plt.draw_if_interactive.called:
plt.draw()
plt.draw_if_interactive.called = False
-
+
# re-draw everything that is stale
try:
da = plt.draw_all
@@ -192,225 +192,225 @@ def mpl_runner(safe_execfile):
else:
da()
- return mpl_execfile
-
-
-def _reshow_nbagg_figure(fig):
- """reshow an nbagg figure"""
- try:
- reshow = fig.canvas.manager.reshow
- except AttributeError:
- raise NotImplementedError()
- else:
- reshow()
-
-
-def select_figure_formats(shell, formats, **kwargs):
- """Select figure formats for the inline backend.
-
- Parameters
- ==========
- shell : InteractiveShell
- The main IPython instance.
- formats : str or set
- One or a set of figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'.
- **kwargs : any
- Extra keyword arguments to be passed to fig.canvas.print_figure.
- """
- import matplotlib
- from matplotlib.figure import Figure
-
- svg_formatter = shell.display_formatter.formatters['image/svg+xml']
- png_formatter = shell.display_formatter.formatters['image/png']
- jpg_formatter = shell.display_formatter.formatters['image/jpeg']
- pdf_formatter = shell.display_formatter.formatters['application/pdf']
-
- if isinstance(formats, py3compat.string_types):
- formats = {formats}
- # cast in case of list / tuple
- formats = set(formats)
-
- [ f.pop(Figure, None) for f in shell.display_formatter.formatters.values() ]
+ return mpl_execfile
+
+
+def _reshow_nbagg_figure(fig):
+ """reshow an nbagg figure"""
+ try:
+ reshow = fig.canvas.manager.reshow
+ except AttributeError:
+ raise NotImplementedError()
+ else:
+ reshow()
+
+
+def select_figure_formats(shell, formats, **kwargs):
+ """Select figure formats for the inline backend.
+
+ Parameters
+ ==========
+ shell : InteractiveShell
+ The main IPython instance.
+ formats : str or set
+ One or a set of figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'.
+ **kwargs : any
+ Extra keyword arguments to be passed to fig.canvas.print_figure.
+ """
+ import matplotlib
+ from matplotlib.figure import Figure
+
+ svg_formatter = shell.display_formatter.formatters['image/svg+xml']
+ png_formatter = shell.display_formatter.formatters['image/png']
+ jpg_formatter = shell.display_formatter.formatters['image/jpeg']
+ pdf_formatter = shell.display_formatter.formatters['application/pdf']
+
+ if isinstance(formats, py3compat.string_types):
+ formats = {formats}
+ # cast in case of list / tuple
+ formats = set(formats)
+
+ [ f.pop(Figure, None) for f in shell.display_formatter.formatters.values() ]
mplbackend = matplotlib.get_backend().lower()
if mplbackend == 'nbagg' or mplbackend == 'module://ipympl.backend_nbagg':
- formatter = shell.display_formatter.ipython_display_formatter
- formatter.for_type(Figure, _reshow_nbagg_figure)
-
- supported = {'png', 'png2x', 'retina', 'jpg', 'jpeg', 'svg', 'pdf'}
- bad = formats.difference(supported)
- if bad:
- bs = "%s" % ','.join([repr(f) for f in bad])
- gs = "%s" % ','.join([repr(f) for f in supported])
- raise ValueError("supported formats are: %s not %s" % (gs, bs))
-
- if 'png' in formats:
- png_formatter.for_type(Figure, lambda fig: print_figure(fig, 'png', **kwargs))
- if 'retina' in formats or 'png2x' in formats:
- png_formatter.for_type(Figure, lambda fig: retina_figure(fig, **kwargs))
- if 'jpg' in formats or 'jpeg' in formats:
- jpg_formatter.for_type(Figure, lambda fig: print_figure(fig, 'jpg', **kwargs))
- if 'svg' in formats:
- svg_formatter.for_type(Figure, lambda fig: print_figure(fig, 'svg', **kwargs))
- if 'pdf' in formats:
- pdf_formatter.for_type(Figure, lambda fig: print_figure(fig, 'pdf', **kwargs))
-
-#-----------------------------------------------------------------------------
-# Code for initializing matplotlib and importing pylab
-#-----------------------------------------------------------------------------
-
-
-def find_gui_and_backend(gui=None, gui_select=None):
- """Given a gui string return the gui and mpl backend.
-
- Parameters
- ----------
- gui : str
- Can be one of ('tk','gtk','wx','qt','qt4','inline').
- gui_select : str
- Can be one of ('tk','gtk','wx','qt','qt4','inline').
- This is any gui already selected by the shell.
-
- Returns
- -------
- A tuple of (gui, backend) where backend is one of ('TkAgg','GTKAgg',
- 'WXAgg','Qt4Agg','module://ipykernel.pylab.backend_inline').
- """
-
- import matplotlib
-
- if gui and gui != 'auto':
- # select backend based on requested gui
- backend = backends[gui]
- else:
- # We need to read the backend from the original data structure, *not*
- # from mpl.rcParams, since a prior invocation of %matplotlib may have
- # overwritten that.
- # WARNING: this assumes matplotlib 1.1 or newer!!
- backend = matplotlib.rcParamsOrig['backend']
- # In this case, we need to find what the appropriate gui selection call
- # should be for IPython, so we can activate inputhook accordingly
- gui = backend2gui.get(backend, None)
-
- # If we have already had a gui active, we need it and inline are the
- # ones allowed.
- if gui_select and gui != gui_select:
- gui = gui_select
- backend = backends[gui]
-
- return gui, backend
-
-
-def activate_matplotlib(backend):
- """Activate the given backend and set interactive to True."""
-
- import matplotlib
- matplotlib.interactive(True)
-
- # Matplotlib had a bug where even switch_backend could not force
- # the rcParam to update. This needs to be set *before* the module
- # magic of switch_backend().
- matplotlib.rcParams['backend'] = backend
-
- import matplotlib.pyplot
- matplotlib.pyplot.switch_backend(backend)
-
- # This must be imported last in the matplotlib series, after
- # backend/interactivity choices have been made
+ formatter = shell.display_formatter.ipython_display_formatter
+ formatter.for_type(Figure, _reshow_nbagg_figure)
+
+ supported = {'png', 'png2x', 'retina', 'jpg', 'jpeg', 'svg', 'pdf'}
+ bad = formats.difference(supported)
+ if bad:
+ bs = "%s" % ','.join([repr(f) for f in bad])
+ gs = "%s" % ','.join([repr(f) for f in supported])
+ raise ValueError("supported formats are: %s not %s" % (gs, bs))
+
+ if 'png' in formats:
+ png_formatter.for_type(Figure, lambda fig: print_figure(fig, 'png', **kwargs))
+ if 'retina' in formats or 'png2x' in formats:
+ png_formatter.for_type(Figure, lambda fig: retina_figure(fig, **kwargs))
+ if 'jpg' in formats or 'jpeg' in formats:
+ jpg_formatter.for_type(Figure, lambda fig: print_figure(fig, 'jpg', **kwargs))
+ if 'svg' in formats:
+ svg_formatter.for_type(Figure, lambda fig: print_figure(fig, 'svg', **kwargs))
+ if 'pdf' in formats:
+ pdf_formatter.for_type(Figure, lambda fig: print_figure(fig, 'pdf', **kwargs))
+
+#-----------------------------------------------------------------------------
+# Code for initializing matplotlib and importing pylab
+#-----------------------------------------------------------------------------
+
+
+def find_gui_and_backend(gui=None, gui_select=None):
+ """Given a gui string return the gui and mpl backend.
+
+ Parameters
+ ----------
+ gui : str
+ Can be one of ('tk','gtk','wx','qt','qt4','inline').
+ gui_select : str
+ Can be one of ('tk','gtk','wx','qt','qt4','inline').
+ This is any gui already selected by the shell.
+
+ Returns
+ -------
+ A tuple of (gui, backend) where backend is one of ('TkAgg','GTKAgg',
+ 'WXAgg','Qt4Agg','module://ipykernel.pylab.backend_inline').
+ """
+
+ import matplotlib
+
+ if gui and gui != 'auto':
+ # select backend based on requested gui
+ backend = backends[gui]
+ else:
+ # We need to read the backend from the original data structure, *not*
+ # from mpl.rcParams, since a prior invocation of %matplotlib may have
+ # overwritten that.
+ # WARNING: this assumes matplotlib 1.1 or newer!!
+ backend = matplotlib.rcParamsOrig['backend']
+ # In this case, we need to find what the appropriate gui selection call
+ # should be for IPython, so we can activate inputhook accordingly
+ gui = backend2gui.get(backend, None)
+
+ # If we have already had a gui active, we need it and inline are the
+ # ones allowed.
+ if gui_select and gui != gui_select:
+ gui = gui_select
+ backend = backends[gui]
+
+ return gui, backend
+
+
+def activate_matplotlib(backend):
+ """Activate the given backend and set interactive to True."""
+
+ import matplotlib
+ matplotlib.interactive(True)
+
+ # Matplotlib had a bug where even switch_backend could not force
+ # the rcParam to update. This needs to be set *before* the module
+ # magic of switch_backend().
+ matplotlib.rcParams['backend'] = backend
+
+ import matplotlib.pyplot
+ matplotlib.pyplot.switch_backend(backend)
+
+ # This must be imported last in the matplotlib series, after
+ # backend/interactivity choices have been made
import matplotlib.pyplot as plt
-
+
plt.show._needmain = False
- # We need to detect at runtime whether show() is called by the user.
- # For this, we wrap it into a decorator which adds a 'called' flag.
+ # We need to detect at runtime whether show() is called by the user.
+ # For this, we wrap it into a decorator which adds a 'called' flag.
plt.draw_if_interactive = flag_calls(plt.draw_if_interactive)
-
-
-def import_pylab(user_ns, import_all=True):
- """Populate the namespace with pylab-related values.
-
- Imports matplotlib, pylab, numpy, and everything from pylab and numpy.
-
- Also imports a few names from IPython (figsize, display, getfigs)
-
- """
-
- # Import numpy as np/pyplot as plt are conventions we're trying to
- # somewhat standardize on. Making them available to users by default
- # will greatly help this.
- s = ("import numpy\n"
- "import matplotlib\n"
- "from matplotlib import pylab, mlab, pyplot\n"
- "np = numpy\n"
- "plt = pyplot\n"
- )
- exec(s, user_ns)
-
- if import_all:
- s = ("from matplotlib.pylab import *\n"
- "from numpy import *\n")
- exec(s, user_ns)
-
- # IPython symbols to add
- user_ns['figsize'] = figsize
- from IPython.core.display import display
- # Add display and getfigs to the user's namespace
- user_ns['display'] = display
- user_ns['getfigs'] = getfigs
-
-
-def configure_inline_support(shell, backend):
- """Configure an IPython shell object for matplotlib use.
-
- Parameters
- ----------
- shell : InteractiveShell instance
-
- backend : matplotlib backend
- """
- # If using our svg payload backend, register the post-execution
- # function that will pick up the results for display. This can only be
- # done with access to the real shell object.
-
- # Note: if we can't load the inline backend, then there's no point
- # continuing (such as in terminal-only shells in environments without
- # zeromq available).
- try:
- from ipykernel.pylab.backend_inline import InlineBackend
- except ImportError:
- return
+
+
+def import_pylab(user_ns, import_all=True):
+ """Populate the namespace with pylab-related values.
+
+ Imports matplotlib, pylab, numpy, and everything from pylab and numpy.
+
+ Also imports a few names from IPython (figsize, display, getfigs)
+
+ """
+
+ # Import numpy as np/pyplot as plt are conventions we're trying to
+ # somewhat standardize on. Making them available to users by default
+ # will greatly help this.
+ s = ("import numpy\n"
+ "import matplotlib\n"
+ "from matplotlib import pylab, mlab, pyplot\n"
+ "np = numpy\n"
+ "plt = pyplot\n"
+ )
+ exec(s, user_ns)
+
+ if import_all:
+ s = ("from matplotlib.pylab import *\n"
+ "from numpy import *\n")
+ exec(s, user_ns)
+
+ # IPython symbols to add
+ user_ns['figsize'] = figsize
+ from IPython.core.display import display
+ # Add display and getfigs to the user's namespace
+ user_ns['display'] = display
+ user_ns['getfigs'] = getfigs
+
+
+def configure_inline_support(shell, backend):
+ """Configure an IPython shell object for matplotlib use.
+
+ Parameters
+ ----------
+ shell : InteractiveShell instance
+
+ backend : matplotlib backend
+ """
+ # If using our svg payload backend, register the post-execution
+ # function that will pick up the results for display. This can only be
+ # done with access to the real shell object.
+
+ # Note: if we can't load the inline backend, then there's no point
+ # continuing (such as in terminal-only shells in environments without
+ # zeromq available).
+ try:
+ from ipykernel.pylab.backend_inline import InlineBackend
+ except ImportError:
+ return
import matplotlib
-
- cfg = InlineBackend.instance(parent=shell)
- cfg.shell = shell
- if cfg not in shell.configurables:
- shell.configurables.append(cfg)
-
- if backend == backends['inline']:
- from ipykernel.pylab.backend_inline import flush_figures
- shell.events.register('post_execute', flush_figures)
-
- # Save rcParams that will be overwrittern
- shell._saved_rcParams = dict()
- for k in cfg.rc:
+
+ cfg = InlineBackend.instance(parent=shell)
+ cfg.shell = shell
+ if cfg not in shell.configurables:
+ shell.configurables.append(cfg)
+
+ if backend == backends['inline']:
+ from ipykernel.pylab.backend_inline import flush_figures
+ shell.events.register('post_execute', flush_figures)
+
+ # Save rcParams that will be overwritten
+ shell._saved_rcParams = dict()
+ for k in cfg.rc:
shell._saved_rcParams[k] = matplotlib.rcParams[k]
- # load inline_rc
+ # load inline_rc
matplotlib.rcParams.update(cfg.rc)
- new_backend_name = "inline"
- else:
- from ipykernel.pylab.backend_inline import flush_figures
- try:
- shell.events.unregister('post_execute', flush_figures)
- except ValueError:
- pass
- if hasattr(shell, '_saved_rcParams'):
+ new_backend_name = "inline"
+ else:
+ from ipykernel.pylab.backend_inline import flush_figures
+ try:
+ shell.events.unregister('post_execute', flush_figures)
+ except ValueError:
+ pass
+ if hasattr(shell, '_saved_rcParams'):
matplotlib.rcParams.update(shell._saved_rcParams)
- del shell._saved_rcParams
- new_backend_name = "other"
-
- # only enable the formats once -> don't change the enabled formats (which the user may
- # has changed) when getting another "%matplotlib inline" call.
- # See https://github.com/ipython/ipykernel/issues/29
- cur_backend = getattr(configure_inline_support, "current_backend", "unset")
- if new_backend_name != cur_backend:
- # Setup the default figure format
- select_figure_formats(shell, cfg.figure_formats, **cfg.print_figure_kwargs)
- configure_inline_support.current_backend = new_backend_name
+ del shell._saved_rcParams
+ new_backend_name = "other"
+
+ # only enable the formats once -> don't change the enabled formats (which the user may
+ # have changed) when getting another "%matplotlib inline" call.
+ # See https://github.com/ipython/ipykernel/issues/29
+ cur_backend = getattr(configure_inline_support, "current_backend", "unset")
+ if new_backend_name != cur_backend:
+ # Set up the default figure format
+ select_figure_formats(shell, cfg.figure_formats, **cfg.print_figure_kwargs)
+ configure_inline_support.current_backend = new_backend_name
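The helpers in the pylabtools hunk above (the backend-resolution helper returning (gui, backend), presumably find_gui_and_backend in upstream IPython, plus activate_matplotlib and configure_inline_support) are what the %matplotlib magic drives. A minimal usage sketch, assuming upstream IPython's IPython.core.pylabtools API and an installed matplotlib; illustrative only, not part of this diff:

    # Resolve a backend the way '%matplotlib auto' does, then activate it.
    from IPython.core.pylabtools import find_gui_and_backend, activate_matplotlib

    gui, backend = find_gui_and_backend('auto')  # 'auto' falls back to rcParamsOrig['backend']
    activate_matplotlib(backend)                 # sets rcParams['backend'] and calls switch_backend()
    print("gui=%s backend=%s" % (gui, backend))  # e.g. gui=tk backend=TkAgg, depends on the install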
diff --git a/contrib/python/ipython/py2/IPython/core/release.py b/contrib/python/ipython/py2/IPython/core/release.py
index 94d54bb828..94dea1073b 100644
--- a/contrib/python/ipython/py2/IPython/core/release.py
+++ b/contrib/python/ipython/py2/IPython/core/release.py
@@ -1,123 +1,123 @@
-# -*- coding: utf-8 -*-
-"""Release data for the IPython project."""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2008, IPython Development Team.
-# Copyright (c) 2001, Fernando Perez <fernando.perez@colorado.edu>
-# Copyright (c) 2001, Janko Hauser <jhauser@zscout.de>
-# Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-# Name of the package for release purposes. This is the name which labels
-# the tarballs and RPMs made by distutils, so it's best to lowercase it.
-name = 'ipython'
-
-# IPython version information. An empty _version_extra corresponds to a full
-# release. 'dev' as a _version_extra string means this is a development
-# version
+# -*- coding: utf-8 -*-
+"""Release data for the IPython project."""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2008, IPython Development Team.
+# Copyright (c) 2001, Fernando Perez <fernando.perez@colorado.edu>
+# Copyright (c) 2001, Janko Hauser <jhauser@zscout.de>
+# Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+# Name of the package for release purposes. This is the name which labels
+# the tarballs and RPMs made by distutils, so it's best to lowercase it.
+name = 'ipython'
+
+# IPython version information. An empty _version_extra corresponds to a full
+# release. 'dev' as a _version_extra string means this is a development
+# version
_version_major = 5
_version_minor = 9
_version_patch = 0
-_version_extra = '.dev'
+_version_extra = '.dev'
# _version_extra = 'rc1'
-_version_extra = '' # Uncomment this for full releases
-
-# release.codename is deprecated in 2.0, will be removed in 3.0
-codename = ''
-
-# Construct full version string from these.
-_ver = [_version_major, _version_minor, _version_patch]
-
-__version__ = '.'.join(map(str, _ver))
-if _version_extra:
- __version__ = __version__ + _version_extra
-
-version = __version__ # backwards compatibility name
-version_info = (_version_major, _version_minor, _version_patch, _version_extra)
-
-# Change this when incrementing the kernel protocol version
-kernel_protocol_version_info = (5, 0)
-kernel_protocol_version = "%i.%i" % kernel_protocol_version_info
-
-description = "IPython: Productive Interactive Computing"
-
-long_description = \
-"""
-IPython provides a rich toolkit to help you make the most out of using Python
-interactively. Its main components are:
-
-* A powerful interactive Python shell
-* A `Jupyter <http://jupyter.org/>`_ kernel to work with Python code in Jupyter
- notebooks and other interactive frontends.
-
-The enhanced interactive Python shells have the following main features:
-
-* Comprehensive object introspection.
-
-* Input history, persistent across sessions.
-
-* Caching of output results during a session with automatically generated
- references.
-
-* Extensible tab completion, with support by default for completion of python
- variables and keywords, filenames and function keywords.
-
-* Extensible system of 'magic' commands for controlling the environment and
- performing many tasks related either to IPython or the operating system.
-
-* A rich configuration system with easy switching between different setups
- (simpler than changing $PYTHONSTARTUP environment variables every time).
-
-* Session logging and reloading.
-
-* Extensible syntax processing for special purpose situations.
-
-* Access to the system shell with user-extensible alias system.
-
-* Easily embeddable in other Python programs and GUIs.
-
-* Integrated access to the pdb debugger and the Python profiler.
-
-The latest development version is always available from IPython's `GitHub
-site <http://github.com/ipython>`_.
-"""
-
-license = 'BSD'
-
-authors = {'Fernando' : ('Fernando Perez','fperez.net@gmail.com'),
- 'Janko' : ('Janko Hauser','jhauser@zscout.de'),
- 'Nathan' : ('Nathaniel Gray','n8gray@caltech.edu'),
- 'Ville' : ('Ville Vainio','vivainio@gmail.com'),
- 'Brian' : ('Brian E Granger', 'ellisonbg@gmail.com'),
- 'Min' : ('Min Ragan-Kelley', 'benjaminrk@gmail.com'),
- 'Thomas' : ('Thomas A. Kluyver', 'takowl@gmail.com'),
- 'Jorgen' : ('Jorgen Stenarson', 'jorgen.stenarson@bostream.nu'),
- 'Matthias' : ('Matthias Bussonnier', 'bussonniermatthias@gmail.com'),
- }
-
-author = 'The IPython Development Team'
-
+_version_extra = '' # Uncomment this for full releases
+
+# release.codename is deprecated in 2.0, will be removed in 3.0
+codename = ''
+
+# Construct full version string from these.
+_ver = [_version_major, _version_minor, _version_patch]
+
+__version__ = '.'.join(map(str, _ver))
+if _version_extra:
+ __version__ = __version__ + _version_extra
+
+version = __version__ # backwards compatibility name
+version_info = (_version_major, _version_minor, _version_patch, _version_extra)
+
+# Change this when incrementing the kernel protocol version
+kernel_protocol_version_info = (5, 0)
+kernel_protocol_version = "%i.%i" % kernel_protocol_version_info
+
+description = "IPython: Productive Interactive Computing"
+
+long_description = \
+"""
+IPython provides a rich toolkit to help you make the most out of using Python
+interactively. Its main components are:
+
+* A powerful interactive Python shell
+* A `Jupyter <http://jupyter.org/>`_ kernel to work with Python code in Jupyter
+ notebooks and other interactive frontends.
+
+The enhanced interactive Python shells have the following main features:
+
+* Comprehensive object introspection.
+
+* Input history, persistent across sessions.
+
+* Caching of output results during a session with automatically generated
+ references.
+
+* Extensible tab completion, with support by default for completion of python
+ variables and keywords, filenames and function keywords.
+
+* Extensible system of 'magic' commands for controlling the environment and
+ performing many tasks related either to IPython or the operating system.
+
+* A rich configuration system with easy switching between different setups
+ (simpler than changing $PYTHONSTARTUP environment variables every time).
+
+* Session logging and reloading.
+
+* Extensible syntax processing for special purpose situations.
+
+* Access to the system shell with user-extensible alias system.
+
+* Easily embeddable in other Python programs and GUIs.
+
+* Integrated access to the pdb debugger and the Python profiler.
+
+The latest development version is always available from IPython's `GitHub
+site <http://github.com/ipython>`_.
+"""
+
+license = 'BSD'
+
+authors = {'Fernando' : ('Fernando Perez','fperez.net@gmail.com'),
+ 'Janko' : ('Janko Hauser','jhauser@zscout.de'),
+ 'Nathan' : ('Nathaniel Gray','n8gray@caltech.edu'),
+ 'Ville' : ('Ville Vainio','vivainio@gmail.com'),
+ 'Brian' : ('Brian E Granger', 'ellisonbg@gmail.com'),
+ 'Min' : ('Min Ragan-Kelley', 'benjaminrk@gmail.com'),
+ 'Thomas' : ('Thomas A. Kluyver', 'takowl@gmail.com'),
+ 'Jorgen' : ('Jorgen Stenarson', 'jorgen.stenarson@bostream.nu'),
+ 'Matthias' : ('Matthias Bussonnier', 'bussonniermatthias@gmail.com'),
+ }
+
+author = 'The IPython Development Team'
+
author_email = 'ipython-dev@python.org'
-
+
url = 'https://ipython.org'
-
-
-platforms = ['Linux','Mac OSX','Windows']
-
-keywords = ['Interactive','Interpreter','Shell', 'Embedding']
-
-classifiers = [
- 'Framework :: IPython',
- 'Intended Audience :: Developers',
- 'Intended Audience :: Science/Research',
- 'License :: OSI Approved :: BSD License',
- 'Programming Language :: Python',
- 'Programming Language :: Python :: 2',
- 'Programming Language :: Python :: 2.7',
- 'Programming Language :: Python :: 3',
- 'Topic :: System :: Shells'
- ]
+
+
+platforms = ['Linux','Mac OSX','Windows']
+
+keywords = ['Interactive','Interpreter','Shell', 'Embedding']
+
+classifiers = [
+ 'Framework :: IPython',
+ 'Intended Audience :: Developers',
+ 'Intended Audience :: Science/Research',
+ 'License :: OSI Approved :: BSD License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Topic :: System :: Shells'
+ ]
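release.py assembles __version__ and version_info from the individual fields; with the values above (5, 9, 0 and an empty _version_extra) the logic reduces to the following sketch, shown only for illustration:

    # Version assembly as done in release.py, with this file's values.
    _version_major, _version_minor, _version_patch = 5, 9, 0
    _version_extra = ''                      # empty string marks a full release
    _ver = [_version_major, _version_minor, _version_patch]
    __version__ = '.'.join(map(str, _ver))
    if _version_extra:
        __version__ = __version__ + _version_extra
    assert __version__ == '5.9.0'
    version_info = (_version_major, _version_minor, _version_patch, _version_extra)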
diff --git a/contrib/python/ipython/py2/IPython/core/shadowns.py b/contrib/python/ipython/py2/IPython/core/shadowns.py
index c9868ea569..d2d93b61bd 100644
--- a/contrib/python/ipython/py2/IPython/core/shadowns.py
+++ b/contrib/python/ipython/py2/IPython/core/shadowns.py
@@ -1 +1 @@
-""" Shadow namespace """ \ No newline at end of file
+""" Shadow namespace """ \ No newline at end of file
diff --git a/contrib/python/ipython/py2/IPython/core/shellapp.py b/contrib/python/ipython/py2/IPython/core/shellapp.py
index 18ef594527..213648246e 100644
--- a/contrib/python/ipython/py2/IPython/core/shellapp.py
+++ b/contrib/python/ipython/py2/IPython/core/shellapp.py
@@ -1,415 +1,415 @@
-# encoding: utf-8
-"""
-A mixin for :class:`~IPython.core.application.Application` classes that
-launch InteractiveShell instances, load extensions, etc.
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import absolute_import
-from __future__ import print_function
-
-import glob
+# encoding: utf-8
+"""
+A mixin for :class:`~IPython.core.application.Application` classes that
+launch InteractiveShell instances, load extensions, etc.
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import glob
from itertools import chain
-import os
-import sys
-
-from traitlets.config.application import boolean_flag
-from traitlets.config.configurable import Configurable
-from traitlets.config.loader import Config
+import os
+import sys
+
+from traitlets.config.application import boolean_flag
+from traitlets.config.configurable import Configurable
+from traitlets.config.loader import Config
from IPython.core.application import SYSTEM_CONFIG_DIRS, ENV_CONFIG_DIRS
-from IPython.core import pylabtools
-from IPython.utils import py3compat
-from IPython.utils.contexts import preserve_keys
-from IPython.utils.path import filefind
-from traitlets import (
+from IPython.core import pylabtools
+from IPython.utils import py3compat
+from IPython.utils.contexts import preserve_keys
+from IPython.utils.path import filefind
+from traitlets import (
Unicode, Instance, List, Bool, CaselessStrEnum, observe,
-)
+)
from IPython.terminal import pt_inputhooks
-
-#-----------------------------------------------------------------------------
-# Aliases and Flags
-#-----------------------------------------------------------------------------
-
+
+#-----------------------------------------------------------------------------
+# Aliases and Flags
+#-----------------------------------------------------------------------------
+
gui_keys = tuple(sorted(pt_inputhooks.backends) + sorted(pt_inputhooks.aliases))
-
-backend_keys = sorted(pylabtools.backends.keys())
-backend_keys.insert(0, 'auto')
-
-shell_flags = {}
-
-addflag = lambda *args: shell_flags.update(boolean_flag(*args))
-addflag('autoindent', 'InteractiveShell.autoindent',
- 'Turn on autoindenting.', 'Turn off autoindenting.'
-)
-addflag('automagic', 'InteractiveShell.automagic',
- """Turn on the auto calling of magic commands. Type %%magic at the
- IPython prompt for more information.""",
- 'Turn off the auto calling of magic commands.'
-)
-addflag('pdb', 'InteractiveShell.pdb',
- "Enable auto calling the pdb debugger after every exception.",
- "Disable auto calling the pdb debugger after every exception."
-)
-addflag('pprint', 'PlainTextFormatter.pprint',
- "Enable auto pretty printing of results.",
- "Disable auto pretty printing of results."
-)
-addflag('color-info', 'InteractiveShell.color_info',
- """IPython can display information about objects via a set of functions,
- and optionally can use colors for this, syntax highlighting
- source code and various other elements. This is on by default, but can cause
- problems with some pagers. If you see such problems, you can disable the
- colours.""",
- "Disable using colors for info related things."
-)
-nosep_config = Config()
-nosep_config.InteractiveShell.separate_in = ''
-nosep_config.InteractiveShell.separate_out = ''
-nosep_config.InteractiveShell.separate_out2 = ''
-
-shell_flags['nosep']=(nosep_config, "Eliminate all spacing between prompts.")
-shell_flags['pylab'] = (
- {'InteractiveShellApp' : {'pylab' : 'auto'}},
- """Pre-load matplotlib and numpy for interactive use with
- the default matplotlib backend."""
-)
-shell_flags['matplotlib'] = (
- {'InteractiveShellApp' : {'matplotlib' : 'auto'}},
- """Configure matplotlib for interactive use with
- the default matplotlib backend."""
-)
-
-# it's possible we don't want short aliases for *all* of these:
-shell_aliases = dict(
- autocall='InteractiveShell.autocall',
- colors='InteractiveShell.colors',
- logfile='InteractiveShell.logfile',
- logappend='InteractiveShell.logappend',
- c='InteractiveShellApp.code_to_run',
- m='InteractiveShellApp.module_to_run',
- ext='InteractiveShellApp.extra_extension',
- gui='InteractiveShellApp.gui',
- pylab='InteractiveShellApp.pylab',
- matplotlib='InteractiveShellApp.matplotlib',
-)
-shell_aliases['cache-size'] = 'InteractiveShell.cache_size'
-
-#-----------------------------------------------------------------------------
-# Main classes and functions
-#-----------------------------------------------------------------------------
-
-class InteractiveShellApp(Configurable):
- """A Mixin for applications that start InteractiveShell instances.
-
- Provides configurables for loading extensions and executing files
- as part of configuring a Shell environment.
-
- The following methods should be called by the :meth:`initialize` method
- of the subclass:
-
- - :meth:`init_path`
- - :meth:`init_shell` (to be implemented by the subclass)
- - :meth:`init_gui_pylab`
- - :meth:`init_extensions`
- - :meth:`init_code`
- """
+
+backend_keys = sorted(pylabtools.backends.keys())
+backend_keys.insert(0, 'auto')
+
+shell_flags = {}
+
+addflag = lambda *args: shell_flags.update(boolean_flag(*args))
+addflag('autoindent', 'InteractiveShell.autoindent',
+ 'Turn on autoindenting.', 'Turn off autoindenting.'
+)
+addflag('automagic', 'InteractiveShell.automagic',
+ """Turn on the auto calling of magic commands. Type %%magic at the
+ IPython prompt for more information.""",
+ 'Turn off the auto calling of magic commands.'
+)
+addflag('pdb', 'InteractiveShell.pdb',
+ "Enable auto calling the pdb debugger after every exception.",
+ "Disable auto calling the pdb debugger after every exception."
+)
+addflag('pprint', 'PlainTextFormatter.pprint',
+ "Enable auto pretty printing of results.",
+ "Disable auto pretty printing of results."
+)
+addflag('color-info', 'InteractiveShell.color_info',
+ """IPython can display information about objects via a set of functions,
+ and optionally can use colors for this, syntax highlighting
+ source code and various other elements. This is on by default, but can cause
+ problems with some pagers. If you see such problems, you can disable the
+ colours.""",
+ "Disable using colors for info related things."
+)
+nosep_config = Config()
+nosep_config.InteractiveShell.separate_in = ''
+nosep_config.InteractiveShell.separate_out = ''
+nosep_config.InteractiveShell.separate_out2 = ''
+
+shell_flags['nosep']=(nosep_config, "Eliminate all spacing between prompts.")
+shell_flags['pylab'] = (
+ {'InteractiveShellApp' : {'pylab' : 'auto'}},
+ """Pre-load matplotlib and numpy for interactive use with
+ the default matplotlib backend."""
+)
+shell_flags['matplotlib'] = (
+ {'InteractiveShellApp' : {'matplotlib' : 'auto'}},
+ """Configure matplotlib for interactive use with
+ the default matplotlib backend."""
+)
+
+# it's possible we don't want short aliases for *all* of these:
+shell_aliases = dict(
+ autocall='InteractiveShell.autocall',
+ colors='InteractiveShell.colors',
+ logfile='InteractiveShell.logfile',
+ logappend='InteractiveShell.logappend',
+ c='InteractiveShellApp.code_to_run',
+ m='InteractiveShellApp.module_to_run',
+ ext='InteractiveShellApp.extra_extension',
+ gui='InteractiveShellApp.gui',
+ pylab='InteractiveShellApp.pylab',
+ matplotlib='InteractiveShellApp.matplotlib',
+)
+shell_aliases['cache-size'] = 'InteractiveShell.cache_size'
+
+#-----------------------------------------------------------------------------
+# Main classes and functions
+#-----------------------------------------------------------------------------
+
+class InteractiveShellApp(Configurable):
+ """A Mixin for applications that start InteractiveShell instances.
+
+ Provides configurables for loading extensions and executing files
+ as part of configuring a Shell environment.
+
+ The following methods should be called by the :meth:`initialize` method
+ of the subclass:
+
+ - :meth:`init_path`
+ - :meth:`init_shell` (to be implemented by the subclass)
+ - :meth:`init_gui_pylab`
+ - :meth:`init_extensions`
+ - :meth:`init_code`
+ """
extensions = List(Unicode(),
- help="A list of dotted module names of IPython extensions to load."
+ help="A list of dotted module names of IPython extensions to load."
).tag(config=True)
extra_extension = Unicode('',
- help="dotted module name of an IPython extension to load."
+ help="dotted module name of an IPython extension to load."
).tag(config=True)
-
+
reraise_ipython_extension_failures = Bool(False,
- help="Reraise exceptions encountered loading IPython extensions?",
+ help="Reraise exceptions encountered loading IPython extensions?",
).tag(config=True)
-
- # Extensions that are always loaded (not configurable)
+
+ # Extensions that are always loaded (not configurable)
default_extensions = List(Unicode(), [u'storemagic']).tag(config=False)
hide_initial_ns = Bool(True,
- help="""Should variables loaded at startup (by startup files, exec_lines, etc.)
- be hidden from tools like %who?"""
+ help="""Should variables loaded at startup (by startup files, exec_lines, etc.)
+ be hidden from tools like %who?"""
).tag(config=True)
-
+
exec_files = List(Unicode(),
- help="""List of files to run at IPython startup."""
+ help="""List of files to run at IPython startup."""
).tag(config=True)
exec_PYTHONSTARTUP = Bool(True,
- help="""Run the file referenced by the PYTHONSTARTUP environment
- variable at IPython startup."""
+ help="""Run the file referenced by the PYTHONSTARTUP environment
+ variable at IPython startup."""
).tag(config=True)
file_to_run = Unicode('',
help="""A file to be run""").tag(config=True)
-
+
exec_lines = List(Unicode(),
- help="""lines of code to run at IPython startup."""
+ help="""lines of code to run at IPython startup."""
).tag(config=True)
code_to_run = Unicode('',
- help="Execute the given command string."
+ help="Execute the given command string."
).tag(config=True)
module_to_run = Unicode('',
- help="Run the module as a script."
+ help="Run the module as a script."
).tag(config=True)
gui = CaselessStrEnum(gui_keys, allow_none=True,
- help="Enable GUI event loop integration with any of {0}.".format(gui_keys)
+ help="Enable GUI event loop integration with any of {0}.".format(gui_keys)
).tag(config=True)
- matplotlib = CaselessStrEnum(backend_keys, allow_none=True,
- help="""Configure matplotlib for interactive use with
- the default matplotlib backend."""
+ matplotlib = CaselessStrEnum(backend_keys, allow_none=True,
+ help="""Configure matplotlib for interactive use with
+ the default matplotlib backend."""
).tag(config=True)
- pylab = CaselessStrEnum(backend_keys, allow_none=True,
- help="""Pre-load matplotlib and numpy for interactive use,
- selecting a particular matplotlib backend and loop integration.
- """
+ pylab = CaselessStrEnum(backend_keys, allow_none=True,
+ help="""Pre-load matplotlib and numpy for interactive use,
+ selecting a particular matplotlib backend and loop integration.
+ """
).tag(config=True)
pylab_import_all = Bool(True,
- help="""If true, IPython will populate the user namespace with numpy, pylab, etc.
- and an ``import *`` is done from numpy and pylab, when using pylab mode.
+ help="""If true, IPython will populate the user namespace with numpy, pylab, etc.
+ and an ``import *`` is done from numpy and pylab, when using pylab mode.
- When False, pylab mode should not import any names into the user namespace.
- """
+ When False, pylab mode should not import any names into the user namespace.
+ """
).tag(config=True)
- shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
- allow_none=True)
+ shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
+ allow_none=True)
# whether interact-loop should start
interact = Bool(True)
- user_ns = Instance(dict, args=None, allow_none=True)
+ user_ns = Instance(dict, args=None, allow_none=True)
@observe('user_ns')
def _user_ns_changed(self, change):
- if self.shell is not None:
+ if self.shell is not None:
self.shell.user_ns = change['new']
- self.shell.init_user_ns()
-
- def init_path(self):
- """Add current working directory, '', to sys.path"""
- if sys.path[0] != '':
- sys.path.insert(0, '')
-
- def init_shell(self):
- raise NotImplementedError("Override in subclasses")
-
- def init_gui_pylab(self):
- """Enable GUI event loop integration, taking pylab into account."""
- enable = False
- shell = self.shell
- if self.pylab:
- enable = lambda key: shell.enable_pylab(key, import_all=self.pylab_import_all)
- key = self.pylab
- elif self.matplotlib:
- enable = shell.enable_matplotlib
- key = self.matplotlib
- elif self.gui:
- enable = shell.enable_gui
- key = self.gui
-
- if not enable:
- return
-
- try:
- r = enable(key)
- except ImportError:
- self.log.warning("Eventloop or matplotlib integration failed. Is matplotlib installed?")
- self.shell.showtraceback()
- return
- except Exception:
- self.log.warning("GUI event loop or pylab initialization failed")
- self.shell.showtraceback()
- return
-
- if isinstance(r, tuple):
- gui, backend = r[:2]
- self.log.info("Enabling GUI event loop integration, "
- "eventloop=%s, matplotlib=%s", gui, backend)
- if key == "auto":
- print("Using matplotlib backend: %s" % backend)
- else:
- gui = r
- self.log.info("Enabling GUI event loop integration, "
- "eventloop=%s", gui)
-
- def init_extensions(self):
- """Load all IPython extensions in IPythonApp.extensions.
-
- This uses the :meth:`ExtensionManager.load_extensions` to load all
- the extensions listed in ``self.extensions``.
- """
- try:
- self.log.debug("Loading IPython extensions...")
- extensions = self.default_extensions + self.extensions
- if self.extra_extension:
- extensions.append(self.extra_extension)
- for ext in extensions:
- try:
- self.log.info("Loading IPython extension: %s" % ext)
- self.shell.extension_manager.load_extension(ext)
- except:
- if self.reraise_ipython_extension_failures:
- raise
- msg = ("Error in loading extension: {ext}\n"
- "Check your config files in {location}".format(
- ext=ext,
- location=self.profile_dir.location
- ))
- self.log.warning(msg, exc_info=True)
- except:
- if self.reraise_ipython_extension_failures:
- raise
- self.log.warning("Unknown error in loading extensions:", exc_info=True)
-
- def init_code(self):
- """run the pre-flight code, specified via exec_lines"""
- self._run_startup_files()
- self._run_exec_lines()
- self._run_exec_files()
-
- # Hide variables defined here from %who etc.
- if self.hide_initial_ns:
- self.shell.user_ns_hidden.update(self.shell.user_ns)
-
- # command-line execution (ipython -i script.py, ipython -m module)
- # should *not* be excluded from %whos
- self._run_cmd_line_code()
- self._run_module()
-
- # flush output, so itwon't be attached to the first cell
- sys.stdout.flush()
- sys.stderr.flush()
-
- def _run_exec_lines(self):
- """Run lines of code in IPythonApp.exec_lines in the user's namespace."""
- if not self.exec_lines:
- return
- try:
- self.log.debug("Running code from IPythonApp.exec_lines...")
- for line in self.exec_lines:
- try:
- self.log.info("Running code in user namespace: %s" %
- line)
- self.shell.run_cell(line, store_history=False)
- except:
- self.log.warning("Error in executing line in user "
- "namespace: %s" % line)
- self.shell.showtraceback()
- except:
- self.log.warning("Unknown error in handling IPythonApp.exec_lines:")
- self.shell.showtraceback()
-
- def _exec_file(self, fname, shell_futures=False):
- try:
- full_filename = filefind(fname, [u'.', self.ipython_dir])
+ self.shell.init_user_ns()
+
+ def init_path(self):
+ """Add current working directory, '', to sys.path"""
+ if sys.path[0] != '':
+ sys.path.insert(0, '')
+
+ def init_shell(self):
+ raise NotImplementedError("Override in subclasses")
+
+ def init_gui_pylab(self):
+ """Enable GUI event loop integration, taking pylab into account."""
+ enable = False
+ shell = self.shell
+ if self.pylab:
+ enable = lambda key: shell.enable_pylab(key, import_all=self.pylab_import_all)
+ key = self.pylab
+ elif self.matplotlib:
+ enable = shell.enable_matplotlib
+ key = self.matplotlib
+ elif self.gui:
+ enable = shell.enable_gui
+ key = self.gui
+
+ if not enable:
+ return
+
+ try:
+ r = enable(key)
+ except ImportError:
+ self.log.warning("Eventloop or matplotlib integration failed. Is matplotlib installed?")
+ self.shell.showtraceback()
+ return
+ except Exception:
+ self.log.warning("GUI event loop or pylab initialization failed")
+ self.shell.showtraceback()
+ return
+
+ if isinstance(r, tuple):
+ gui, backend = r[:2]
+ self.log.info("Enabling GUI event loop integration, "
+ "eventloop=%s, matplotlib=%s", gui, backend)
+ if key == "auto":
+ print("Using matplotlib backend: %s" % backend)
+ else:
+ gui = r
+ self.log.info("Enabling GUI event loop integration, "
+ "eventloop=%s", gui)
+
+ def init_extensions(self):
+ """Load all IPython extensions in IPythonApp.extensions.
+
+ This uses the :meth:`ExtensionManager.load_extensions` to load all
+ the extensions listed in ``self.extensions``.
+ """
+ try:
+ self.log.debug("Loading IPython extensions...")
+ extensions = self.default_extensions + self.extensions
+ if self.extra_extension:
+ extensions.append(self.extra_extension)
+ for ext in extensions:
+ try:
+ self.log.info("Loading IPython extension: %s" % ext)
+ self.shell.extension_manager.load_extension(ext)
+ except:
+ if self.reraise_ipython_extension_failures:
+ raise
+ msg = ("Error in loading extension: {ext}\n"
+ "Check your config files in {location}".format(
+ ext=ext,
+ location=self.profile_dir.location
+ ))
+ self.log.warning(msg, exc_info=True)
+ except:
+ if self.reraise_ipython_extension_failures:
+ raise
+ self.log.warning("Unknown error in loading extensions:", exc_info=True)
+
+ def init_code(self):
+ """run the pre-flight code, specified via exec_lines"""
+ self._run_startup_files()
+ self._run_exec_lines()
+ self._run_exec_files()
+
+ # Hide variables defined here from %who etc.
+ if self.hide_initial_ns:
+ self.shell.user_ns_hidden.update(self.shell.user_ns)
+
+ # command-line execution (ipython -i script.py, ipython -m module)
+ # should *not* be excluded from %whos
+ self._run_cmd_line_code()
+ self._run_module()
+
+ # flush output, so it won't be attached to the first cell
+ sys.stdout.flush()
+ sys.stderr.flush()
+
+ def _run_exec_lines(self):
+ """Run lines of code in IPythonApp.exec_lines in the user's namespace."""
+ if not self.exec_lines:
+ return
+ try:
+ self.log.debug("Running code from IPythonApp.exec_lines...")
+ for line in self.exec_lines:
+ try:
+ self.log.info("Running code in user namespace: %s" %
+ line)
+ self.shell.run_cell(line, store_history=False)
+ except:
+ self.log.warning("Error in executing line in user "
+ "namespace: %s" % line)
+ self.shell.showtraceback()
+ except:
+ self.log.warning("Unknown error in handling IPythonApp.exec_lines:")
+ self.shell.showtraceback()
+
+ def _exec_file(self, fname, shell_futures=False):
+ try:
+ full_filename = filefind(fname, [u'.', self.ipython_dir])
except IOError:
- self.log.warning("File not found: %r"%fname)
- return
- # Make sure that the running script gets a proper sys.argv as if it
- # were run from a system shell.
- save_argv = sys.argv
- sys.argv = [full_filename] + self.extra_args[1:]
- # protect sys.argv from potential unicode strings on Python 2:
- if not py3compat.PY3:
- sys.argv = [ py3compat.cast_bytes(a) for a in sys.argv ]
- try:
- if os.path.isfile(full_filename):
- self.log.info("Running file in user namespace: %s" %
- full_filename)
- # Ensure that __file__ is always defined to match Python
- # behavior.
- with preserve_keys(self.shell.user_ns, '__file__'):
- self.shell.user_ns['__file__'] = fname
- if full_filename.endswith('.ipy'):
- self.shell.safe_execfile_ipy(full_filename,
- shell_futures=shell_futures)
- else:
- # default to python, even without extension
- self.shell.safe_execfile(full_filename,
- self.shell.user_ns,
- shell_futures=shell_futures,
- raise_exceptions=True)
- finally:
- sys.argv = save_argv
-
- def _run_startup_files(self):
- """Run files from profile startup directory"""
+ self.log.warning("File not found: %r"%fname)
+ return
+ # Make sure that the running script gets a proper sys.argv as if it
+ # were run from a system shell.
+ save_argv = sys.argv
+ sys.argv = [full_filename] + self.extra_args[1:]
+ # protect sys.argv from potential unicode strings on Python 2:
+ if not py3compat.PY3:
+ sys.argv = [ py3compat.cast_bytes(a) for a in sys.argv ]
+ try:
+ if os.path.isfile(full_filename):
+ self.log.info("Running file in user namespace: %s" %
+ full_filename)
+ # Ensure that __file__ is always defined to match Python
+ # behavior.
+ with preserve_keys(self.shell.user_ns, '__file__'):
+ self.shell.user_ns['__file__'] = fname
+ if full_filename.endswith('.ipy'):
+ self.shell.safe_execfile_ipy(full_filename,
+ shell_futures=shell_futures)
+ else:
+ # default to python, even without extension
+ self.shell.safe_execfile(full_filename,
+ self.shell.user_ns,
+ shell_futures=shell_futures,
+ raise_exceptions=True)
+ finally:
+ sys.argv = save_argv
+
+ def _run_startup_files(self):
+ """Run files from profile startup directory"""
startup_dirs = [self.profile_dir.startup_dir] + [
os.path.join(p, 'startup') for p in chain(ENV_CONFIG_DIRS, SYSTEM_CONFIG_DIRS)
]
- startup_files = []
-
- if self.exec_PYTHONSTARTUP and os.environ.get('PYTHONSTARTUP', False) and \
- not (self.file_to_run or self.code_to_run or self.module_to_run):
- python_startup = os.environ['PYTHONSTARTUP']
- self.log.debug("Running PYTHONSTARTUP file %s...", python_startup)
- try:
- self._exec_file(python_startup)
- except:
- self.log.warning("Unknown error in handling PYTHONSTARTUP file %s:", python_startup)
- self.shell.showtraceback()
+ startup_files = []
+
+ if self.exec_PYTHONSTARTUP and os.environ.get('PYTHONSTARTUP', False) and \
+ not (self.file_to_run or self.code_to_run or self.module_to_run):
+ python_startup = os.environ['PYTHONSTARTUP']
+ self.log.debug("Running PYTHONSTARTUP file %s...", python_startup)
+ try:
+ self._exec_file(python_startup)
+ except:
+ self.log.warning("Unknown error in handling PYTHONSTARTUP file %s:", python_startup)
+ self.shell.showtraceback()
for startup_dir in startup_dirs[::-1]:
startup_files += glob.glob(os.path.join(startup_dir, '*.py'))
startup_files += glob.glob(os.path.join(startup_dir, '*.ipy'))
- if not startup_files:
- return
-
- self.log.debug("Running startup files from %s...", startup_dir)
- try:
- for fname in sorted(startup_files):
- self._exec_file(fname)
- except:
- self.log.warning("Unknown error in handling startup files:")
- self.shell.showtraceback()
-
- def _run_exec_files(self):
- """Run files from IPythonApp.exec_files"""
- if not self.exec_files:
- return
-
- self.log.debug("Running files in IPythonApp.exec_files...")
- try:
- for fname in self.exec_files:
- self._exec_file(fname)
- except:
- self.log.warning("Unknown error in handling IPythonApp.exec_files:")
- self.shell.showtraceback()
-
- def _run_cmd_line_code(self):
- """Run code or file specified at the command-line"""
- if self.code_to_run:
- line = self.code_to_run
- try:
- self.log.info("Running code given at command line (c=): %s" %
- line)
- self.shell.run_cell(line, store_history=False)
- except:
- self.log.warning("Error in executing line in user namespace: %s" %
- line)
- self.shell.showtraceback()
+ if not startup_files:
+ return
+
+ self.log.debug("Running startup files from %s...", startup_dir)
+ try:
+ for fname in sorted(startup_files):
+ self._exec_file(fname)
+ except:
+ self.log.warning("Unknown error in handling startup files:")
+ self.shell.showtraceback()
+
+ def _run_exec_files(self):
+ """Run files from IPythonApp.exec_files"""
+ if not self.exec_files:
+ return
+
+ self.log.debug("Running files in IPythonApp.exec_files...")
+ try:
+ for fname in self.exec_files:
+ self._exec_file(fname)
+ except:
+ self.log.warning("Unknown error in handling IPythonApp.exec_files:")
+ self.shell.showtraceback()
+
+ def _run_cmd_line_code(self):
+ """Run code or file specified at the command-line"""
+ if self.code_to_run:
+ line = self.code_to_run
+ try:
+ self.log.info("Running code given at command line (c=): %s" %
+ line)
+ self.shell.run_cell(line, store_history=False)
+ except:
+ self.log.warning("Error in executing line in user namespace: %s" %
+ line)
+ self.shell.showtraceback()
if not self.interact:
self.exit(1)
-
- # Like Python itself, ignore the second if the first of these is present
- elif self.file_to_run:
- fname = self.file_to_run
+
+ # Like Python itself, ignore the second if the first of these is present
+ elif self.file_to_run:
+ fname = self.file_to_run
if os.path.isdir(fname):
fname = os.path.join(fname, "__main__.py")
- try:
- self._exec_file(fname, shell_futures=True)
- except:
- self.shell.showtraceback(tb_offset=4)
+ try:
+ self._exec_file(fname, shell_futures=True)
+ except:
+ self.shell.showtraceback(tb_offset=4)
if not self.interact:
self.exit(1)
-
- def _run_module(self):
- """Run module specified at the command-line."""
- if self.module_to_run:
- # Make sure that the module gets a proper sys.argv as if it were
- # run using `python -m`.
- save_argv = sys.argv
- sys.argv = [sys.executable] + self.extra_args
- try:
- self.shell.safe_run_module(self.module_to_run,
- self.shell.user_ns)
- finally:
- sys.argv = save_argv
+
+ def _run_module(self):
+ """Run module specified at the command-line."""
+ if self.module_to_run:
+ # Make sure that the module gets a proper sys.argv as if it were
+ # run using `python -m`.
+ save_argv = sys.argv
+ sys.argv = [sys.executable] + self.extra_args
+ try:
+ self.shell.safe_run_module(self.module_to_run,
+ self.shell.user_ns)
+ finally:
+ sys.argv = save_argv
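shellapp.py builds its command-line flags from traitlets' boolean_flag and the shell_aliases table; the sketch below shows what a single shell_flags entry expands to (plain traitlets behaviour, shown as a usage illustration rather than as part of the diff):

    # What one boolean_flag entry from shell_flags produces.
    from traitlets.config.application import boolean_flag

    flags = {}
    flags.update(boolean_flag('autoindent', 'InteractiveShell.autoindent',
                              'Turn on autoindenting.', 'Turn off autoindenting.'))
    # Both the positive and the negated form are registered, so
    # 'ipython --autoindent' and 'ipython --no-autoindent' both work.
    assert 'autoindent' in flags and 'no-autoindent' in flags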
diff --git a/contrib/python/ipython/py2/IPython/core/splitinput.py b/contrib/python/ipython/py2/IPython/core/splitinput.py
index 2c54687cb1..7b957726fb 100644
--- a/contrib/python/ipython/py2/IPython/core/splitinput.py
+++ b/contrib/python/ipython/py2/IPython/core/splitinput.py
@@ -1,137 +1,137 @@
-# encoding: utf-8
-"""
-Simple utility for splitting user input. This is used by both inputsplitter and
-prefilter.
-
-Authors:
-
-* Brian Granger
-* Fernando Perez
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import re
-import sys
-
-from IPython.utils import py3compat
-from IPython.utils.encoding import get_stream_enc
-
-#-----------------------------------------------------------------------------
-# Main function
-#-----------------------------------------------------------------------------
-
-# RegExp for splitting line contents into pre-char//first word-method//rest.
-# For clarity, each group in on one line.
-
-# WARNING: update the regexp if the escapes in interactiveshell are changed, as
-# they are hardwired in.
-
-# Although it's not solely driven by the regex, note that:
-# ,;/% only trigger if they are the first character on the line
-# ! and !! trigger if they are first char(s) *or* follow an indent
-# ? triggers as first or last char.
-
-line_split = re.compile("""
- ^(\s*) # any leading space
- ([,;/%]|!!?|\?\??)? # escape character or characters
- \s*(%{0,2}[\w\.\*]*) # function/method, possibly with leading %
- # to correctly treat things like '?%magic'
- (.*?$|$) # rest of line
- """, re.VERBOSE)
-
-
-def split_user_input(line, pattern=None):
- """Split user input into initial whitespace, escape character, function part
- and the rest.
- """
- # We need to ensure that the rest of this routine deals only with unicode
- encoding = get_stream_enc(sys.stdin, 'utf-8')
- line = py3compat.cast_unicode(line, encoding)
-
- if pattern is None:
- pattern = line_split
- match = pattern.match(line)
- if not match:
- # print "match failed for line '%s'" % line
- try:
- ifun, the_rest = line.split(None,1)
- except ValueError:
- # print "split failed for line '%s'" % line
- ifun, the_rest = line, u''
- pre = re.match('^(\s*)(.*)',line).groups()[0]
- esc = ""
- else:
- pre, esc, ifun, the_rest = match.groups()
-
- #print 'line:<%s>' % line # dbg
- #print 'pre <%s> ifun <%s> rest <%s>' % (pre,ifun.strip(),the_rest) # dbg
- return pre, esc or '', ifun.strip(), the_rest.lstrip()
-
-
-class LineInfo(object):
- """A single line of input and associated info.
-
- Includes the following as properties:
-
- line
- The original, raw line
-
- continue_prompt
- Is this line a continuation in a sequence of multiline input?
-
- pre
- Any leading whitespace.
-
- esc
- The escape character(s) in pre or the empty string if there isn't one.
- Note that '!!' and '??' are possible values for esc. Otherwise it will
- always be a single character.
-
- ifun
- The 'function part', which is basically the maximal initial sequence
- of valid python identifiers and the '.' character. This is what is
- checked for alias and magic transformations, used for auto-calling,
- etc. In contrast to Python identifiers, it may start with "%" and contain
- "*".
-
- the_rest
- Everything else on the line.
- """
- def __init__(self, line, continue_prompt=False):
- self.line = line
- self.continue_prompt = continue_prompt
- self.pre, self.esc, self.ifun, self.the_rest = split_user_input(line)
-
- self.pre_char = self.pre.strip()
- if self.pre_char:
- self.pre_whitespace = '' # No whitespace allowd before esc chars
- else:
- self.pre_whitespace = self.pre
-
- def ofind(self, ip):
- """Do a full, attribute-walking lookup of the ifun in the various
- namespaces for the given IPython InteractiveShell instance.
-
- Return a dict with keys: {found, obj, ospace, ismagic}
-
- Note: can cause state changes because of calling getattr, but should
- only be run if autocall is on and if the line hasn't matched any
- other, less dangerous handlers.
-
- Does cache the results of the call, so can be called multiple times
- without worrying about *further* damaging state.
- """
- return ip._ofind(self.ifun)
-
- def __str__(self):
- return "LineInfo [%s|%s|%s|%s]" %(self.pre, self.esc, self.ifun, self.the_rest)
+# encoding: utf-8
+"""
+Simple utility for splitting user input. This is used by both inputsplitter and
+prefilter.
+
+Authors:
+
+* Brian Granger
+* Fernando Perez
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import re
+import sys
+
+from IPython.utils import py3compat
+from IPython.utils.encoding import get_stream_enc
+
+#-----------------------------------------------------------------------------
+# Main function
+#-----------------------------------------------------------------------------
+
+# RegExp for splitting line contents into pre-char//first word-method//rest.
+# For clarity, each group is on one line.
+
+# WARNING: update the regexp if the escapes in interactiveshell are changed, as
+# they are hardwired in.
+
+# Although it's not solely driven by the regex, note that:
+# ,;/% only trigger if they are the first character on the line
+# ! and !! trigger if they are first char(s) *or* follow an indent
+# ? triggers as first or last char.
+
+line_split = re.compile("""
+ ^(\s*) # any leading space
+ ([,;/%]|!!?|\?\??)? # escape character or characters
+ \s*(%{0,2}[\w\.\*]*) # function/method, possibly with leading %
+ # to correctly treat things like '?%magic'
+ (.*?$|$) # rest of line
+ """, re.VERBOSE)
+
+
+def split_user_input(line, pattern=None):
+ """Split user input into initial whitespace, escape character, function part
+ and the rest.
+ """
+ # We need to ensure that the rest of this routine deals only with unicode
+ encoding = get_stream_enc(sys.stdin, 'utf-8')
+ line = py3compat.cast_unicode(line, encoding)
+
+ if pattern is None:
+ pattern = line_split
+ match = pattern.match(line)
+ if not match:
+ # print "match failed for line '%s'" % line
+ try:
+ ifun, the_rest = line.split(None,1)
+ except ValueError:
+ # print "split failed for line '%s'" % line
+ ifun, the_rest = line, u''
+ pre = re.match('^(\s*)(.*)',line).groups()[0]
+ esc = ""
+ else:
+ pre, esc, ifun, the_rest = match.groups()
+
+ #print 'line:<%s>' % line # dbg
+ #print 'pre <%s> ifun <%s> rest <%s>' % (pre,ifun.strip(),the_rest) # dbg
+ return pre, esc or '', ifun.strip(), the_rest.lstrip()
+
+
+class LineInfo(object):
+ """A single line of input and associated info.
+
+ Includes the following as properties:
+
+ line
+ The original, raw line
+
+ continue_prompt
+ Is this line a continuation in a sequence of multiline input?
+
+ pre
+ Any leading whitespace.
+
+ esc
+ The escape character(s) in pre or the empty string if there isn't one.
+ Note that '!!' and '??' are possible values for esc. Otherwise it will
+ always be a single character.
+
+ ifun
+ The 'function part', which is basically the maximal initial sequence
+ of valid python identifiers and the '.' character. This is what is
+ checked for alias and magic transformations, used for auto-calling,
+ etc. In contrast to Python identifiers, it may start with "%" and contain
+ "*".
+
+ the_rest
+ Everything else on the line.
+ """
+ def __init__(self, line, continue_prompt=False):
+ self.line = line
+ self.continue_prompt = continue_prompt
+ self.pre, self.esc, self.ifun, self.the_rest = split_user_input(line)
+
+ self.pre_char = self.pre.strip()
+ if self.pre_char:
+ self.pre_whitespace = '' # No whitespace allowed before esc chars
+ else:
+ self.pre_whitespace = self.pre
+
+ def ofind(self, ip):
+ """Do a full, attribute-walking lookup of the ifun in the various
+ namespaces for the given IPython InteractiveShell instance.
+
+ Return a dict with keys: {found, obj, ospace, ismagic}
+
+ Note: can cause state changes because of calling getattr, but should
+ only be run if autocall is on and if the line hasn't matched any
+ other, less dangerous handlers.
+
+ Does cache the results of the call, so can be called multiple times
+ without worrying about *further* damaging state.
+ """
+ return ip._ofind(self.ifun)
+
+ def __str__(self):
+ return "LineInfo [%s|%s|%s|%s]" %(self.pre, self.esc, self.ifun, self.the_rest)
diff --git a/contrib/python/ipython/py2/IPython/core/ultratb.py b/contrib/python/ipython/py2/IPython/core/ultratb.py
index 4fc1e9574b..a855145825 100644
--- a/contrib/python/ipython/py2/IPython/core/ultratb.py
+++ b/contrib/python/ipython/py2/IPython/core/ultratb.py
@@ -1,1499 +1,1499 @@
-# -*- coding: utf-8 -*-
-"""
-Verbose and colourful traceback formatting.
-
-**ColorTB**
-
-I've always found it a bit hard to visually parse tracebacks in Python. The
-ColorTB class is a solution to that problem. It colors the different parts of a
-traceback in a manner similar to what you would expect from a syntax-highlighting
-text editor.
-
-Installation instructions for ColorTB::
-
- import sys,ultratb
- sys.excepthook = ultratb.ColorTB()
-
-**VerboseTB**
-
-I've also included a port of Ka-Ping Yee's "cgitb.py" that produces all kinds
-of useful info when a traceback occurs. Ping originally had it spit out HTML
-and intended it for CGI programmers, but why should they have all the fun? I
-altered it to spit out colored text to the terminal. It's a bit overwhelming,
-but kind of neat, and maybe useful for long-running programs that you believe
-are bug-free. If a crash *does* occur in that type of program you want details.
-Give it a shot--you'll love it or you'll hate it.
-
-.. note::
-
- The Verbose mode prints the variables currently visible where the exception
- happened (shortening their strings if too long). This can potentially be
- very slow, if you happen to have a huge data structure whose string
- representation is complex to compute. Your computer may appear to freeze for
- a while with cpu usage at 100%. If this occurs, you can cancel the traceback
- with Ctrl-C (maybe hitting it more than once).
-
- If you encounter this kind of situation often, you may want to use the
- Verbose_novars mode instead of the regular Verbose, which avoids formatting
- variables (but otherwise includes the information and context given by
- Verbose).
-
+# -*- coding: utf-8 -*-
+"""
+Verbose and colourful traceback formatting.
+
+**ColorTB**
+
+I've always found it a bit hard to visually parse tracebacks in Python. The
+ColorTB class is a solution to that problem. It colors the different parts of a
+traceback in a manner similar to what you would expect from a syntax-highlighting
+text editor.
+
+Installation instructions for ColorTB::
+
+ import sys,ultratb
+ sys.excepthook = ultratb.ColorTB()
+
+**VerboseTB**
+
+I've also included a port of Ka-Ping Yee's "cgitb.py" that produces all kinds
+of useful info when a traceback occurs. Ping originally had it spit out HTML
+and intended it for CGI programmers, but why should they have all the fun? I
+altered it to spit out colored text to the terminal. It's a bit overwhelming,
+but kind of neat, and maybe useful for long-running programs that you believe
+are bug-free. If a crash *does* occur in that type of program you want details.
+Give it a shot--you'll love it or you'll hate it.
+
+.. note::
+
+ The Verbose mode prints the variables currently visible where the exception
+ happened (shortening their strings if too long). This can potentially be
+ very slow, if you happen to have a huge data structure whose string
+ representation is complex to compute. Your computer may appear to freeze for
+ a while with cpu usage at 100%. If this occurs, you can cancel the traceback
+ with Ctrl-C (maybe hitting it more than once).
+
+ If you encounter this kind of situation often, you may want to use the
+ Verbose_novars mode instead of the regular Verbose, which avoids formatting
+ variables (but otherwise includes the information and context given by
+ Verbose).
+
.. note::
-
+
The verbose mode prints all variables in the stack, which means it can
potentially leak sensitive information like access keys or unencrypted
passwords.
-Installation instructions for VerboseTB::
-
- import sys,ultratb
- sys.excepthook = ultratb.VerboseTB()
-
-Note: Much of the code in this module was lifted verbatim from the standard
-library module 'traceback.py' and Ka-Ping Yee's 'cgitb.py'.
-
-Color schemes
--------------
-
-The colors are defined in the class TBTools through the use of the
-ColorSchemeTable class. Currently the following exist:
-
- - NoColor: allows all of this module to be used in any terminal (the color
- escapes are just dummy blank strings).
-
- - Linux: is meant to look good in a terminal like the Linux console (black
- or very dark background).
-
- - LightBG: similar to Linux but swaps dark/light colors to be more readable
- in light background terminals.
-
+Installation instructions for VerboseTB::
+
+ import sys,ultratb
+ sys.excepthook = ultratb.VerboseTB()
+
+Note: Much of the code in this module was lifted verbatim from the standard
+library module 'traceback.py' and Ka-Ping Yee's 'cgitb.py'.
+
+Color schemes
+-------------
+
+The colors are defined in the class TBTools through the use of the
+ColorSchemeTable class. Currently the following exist:
+
+ - NoColor: allows all of this module to be used in any terminal (the color
+ escapes are just dummy blank strings).
+
+ - Linux: is meant to look good in a terminal like the Linux console (black
+ or very dark background).
+
+ - LightBG: similar to Linux but swaps dark/light colors to be more readable
+ in light background terminals.
+
- Neutral: a neutral color scheme that should be readable on both light and
dark backgrounds.
-You can implement other color schemes easily, the syntax is fairly
-self-explanatory. Please send back new schemes you develop to the author for
-possible inclusion in future releases.
-
-Inheritance diagram:
-
-.. inheritance-diagram:: IPython.core.ultratb
- :parts: 3
-"""
-
-#*****************************************************************************
-# Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu>
-# Copyright (C) 2001-2004 Fernando Perez <fperez@colorado.edu>
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#*****************************************************************************
-
+You can implement other color schemes easily; the syntax is fairly
+self-explanatory. Please send back new schemes you develop to the author for
+possible inclusion in future releases.
+
+Inheritance diagram:
+
+.. inheritance-diagram:: IPython.core.ultratb
+ :parts: 3
+"""
+
+#*****************************************************************************
+# Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu>
+# Copyright (C) 2001-2004 Fernando Perez <fperez@colorado.edu>
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#*****************************************************************************
+
from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import dis
-import inspect
-import keyword
-import linecache
-import os
-import pydoc
-import re
-import sys
-import time
-import tokenize
-import traceback
-import types
-
-try: # Python 2
- generate_tokens = tokenize.generate_tokens
-except AttributeError: # Python 3
- generate_tokens = tokenize.tokenize
-
-# For purposes of monkeypatching inspect to fix a bug in it.
-from inspect import getsourcefile, getfile, getmodule, \
- ismodule, isclass, ismethod, isfunction, istraceback, isframe, iscode
-
-# IPython's own modules
-from IPython import get_ipython
-from IPython.core import debugger
-from IPython.core.display_trap import DisplayTrap
-from IPython.core.excolors import exception_colors
-from IPython.utils import PyColorize
-from IPython.utils import openpy
-from IPython.utils import path as util_path
-from IPython.utils import py3compat
-from IPython.utils import ulinecache
-from IPython.utils.data import uniq_stable
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import dis
+import inspect
+import keyword
+import linecache
+import os
+import pydoc
+import re
+import sys
+import time
+import tokenize
+import traceback
+import types
+
+try: # Python 2
+ generate_tokens = tokenize.generate_tokens
+except AttributeError: # Python 3
+ generate_tokens = tokenize.tokenize
+
+# For purposes of monkeypatching inspect to fix a bug in it.
+from inspect import getsourcefile, getfile, getmodule, \
+ ismodule, isclass, ismethod, isfunction, istraceback, isframe, iscode
+
+# IPython's own modules
+from IPython import get_ipython
+from IPython.core import debugger
+from IPython.core.display_trap import DisplayTrap
+from IPython.core.excolors import exception_colors
+from IPython.utils import PyColorize
+from IPython.utils import openpy
+from IPython.utils import path as util_path
+from IPython.utils import py3compat
+from IPython.utils import ulinecache
+from IPython.utils.data import uniq_stable
from IPython.utils.terminal import get_terminal_size
from logging import info, error
-
+
import IPython.utils.colorable as colorable
-# Globals
-# amount of space to put line numbers before verbose tracebacks
-INDENT_SIZE = 8
-
-# Default color scheme. This is used, for example, by the traceback
-# formatter. When running in an actual IPython instance, the user's rc.colors
-# value is used, but having a module global makes this functionality available
-# to users of ultratb who are NOT running inside ipython.
-DEFAULT_SCHEME = 'NoColor'
-
-# ---------------------------------------------------------------------------
-# Code begins
-
-# Utility functions
-def inspect_error():
- """Print a message about internal inspect errors.
-
- These are unfortunately quite common."""
-
- error('Internal Python error in the inspect module.\n'
- 'Below is the traceback from this internal error.\n')
-
-
-# This function is a monkeypatch we apply to the Python inspect module. We have
-# now found when it's needed (see discussion on issue gh-1456), and we have a
-# test case (IPython.core.tests.test_ultratb.ChangedPyFileTest) that fails if
-# the monkeypatch is not applied. TK, Aug 2012.
-def findsource(object):
- """Return the entire source file and starting line number for an object.
-
- The argument may be a module, class, method, function, traceback, frame,
- or code object. The source code is returned as a list of all the lines
- in the file and the line number indexes a line in that list. An IOError
- is raised if the source code cannot be retrieved.
-
- FIXED version with which we monkeypatch the stdlib to work around a bug."""
-
- file = getsourcefile(object) or getfile(object)
- # If the object is a frame, then trying to get the globals dict from its
- # module won't work. Instead, the frame object itself has the globals
- # dictionary.
- globals_dict = None
- if inspect.isframe(object):
- # XXX: can this ever be false?
- globals_dict = object.f_globals
- else:
- module = getmodule(object, file)
- if module:
- globals_dict = module.__dict__
- lines = linecache.getlines(file, globals_dict)
- if not lines:
- raise IOError('could not get source code')
-
- if ismodule(object):
- return lines, 0
-
- if isclass(object):
- name = object.__name__
- pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
- # make some effort to find the best matching class definition:
- # use the one with the least indentation, which is the one
- # that's most probably not inside a function definition.
- candidates = []
- for i in range(len(lines)):
- match = pat.match(lines[i])
- if match:
- # if it's at toplevel, it's already the best one
- if lines[i][0] == 'c':
- return lines, i
- # else add whitespace to candidate list
- candidates.append((match.group(1), i))
- if candidates:
- # this will sort by whitespace, and by line number,
- # less whitespace first
- candidates.sort()
- return lines, candidates[0][1]
- else:
- raise IOError('could not find class definition')
-
- if ismethod(object):
- object = object.__func__
- if isfunction(object):
- object = object.__code__
- if istraceback(object):
- object = object.tb_frame
- if isframe(object):
- object = object.f_code
- if iscode(object):
- if not hasattr(object, 'co_firstlineno'):
- raise IOError('could not find function definition')
- pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
- pmatch = pat.match
- # fperez - fix: sometimes, co_firstlineno can give a number larger than
- # the length of lines, which causes an error. Safeguard against that.
- lnum = min(object.co_firstlineno, len(lines)) - 1
- while lnum > 0:
+# Globals
+# amount of space to put line numbers before verbose tracebacks
+INDENT_SIZE = 8
+
+# Default color scheme. This is used, for example, by the traceback
+# formatter. When running in an actual IPython instance, the user's rc.colors
+# value is used, but having a module global makes this functionality available
+# to users of ultratb who are NOT running inside ipython.
+DEFAULT_SCHEME = 'NoColor'
+
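+# Illustrative sketch (not exercised anywhere in this module): the
+# traceback classes defined below accept one of the scheme names from
+# the docstring above, and an instance such as VerboseTB can be
+# installed as sys.excepthook by programs running outside IPython,
+# e.g.:
+#
+#     import sys
+#     from IPython.core import ultratb
+#     sys.excepthook = ultratb.VerboseTB(color_scheme='Linux')
+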
+# ---------------------------------------------------------------------------
+# Code begins
+
+# Utility functions
+def inspect_error():
+ """Print a message about internal inspect errors.
+
+ These are unfortunately quite common."""
+
+ error('Internal Python error in the inspect module.\n'
+ 'Below is the traceback from this internal error.\n')
+
+
+# This function is a monkeypatch we apply to the Python inspect module. We have
+# now found when it's needed (see discussion on issue gh-1456), and we have a
+# test case (IPython.core.tests.test_ultratb.ChangedPyFileTest) that fails if
+# the monkeypatch is not applied. TK, Aug 2012.
+def findsource(object):
+ """Return the entire source file and starting line number for an object.
+
+ The argument may be a module, class, method, function, traceback, frame,
+ or code object. The source code is returned as a list of all the lines
+ in the file and the line number indexes a line in that list. An IOError
+ is raised if the source code cannot be retrieved.
+
+ FIXED version with which we monkeypatch the stdlib to work around a bug."""
+
+ file = getsourcefile(object) or getfile(object)
+ # If the object is a frame, then trying to get the globals dict from its
+ # module won't work. Instead, the frame object itself has the globals
+ # dictionary.
+ globals_dict = None
+ if inspect.isframe(object):
+ # XXX: can this ever be false?
+ globals_dict = object.f_globals
+ else:
+ module = getmodule(object, file)
+ if module:
+ globals_dict = module.__dict__
+ lines = linecache.getlines(file, globals_dict)
+ if not lines:
+ raise IOError('could not get source code')
+
+ if ismodule(object):
+ return lines, 0
+
+ if isclass(object):
+ name = object.__name__
+ pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
+ # make some effort to find the best matching class definition:
+ # use the one with the least indentation, which is the one
+ # that's most probably not inside a function definition.
+ candidates = []
+ for i in range(len(lines)):
+ match = pat.match(lines[i])
+ if match:
+ # if it's at toplevel, it's already the best one
+ if lines[i][0] == 'c':
+ return lines, i
+ # else add whitespace to candidate list
+ candidates.append((match.group(1), i))
+ if candidates:
+ # this will sort by whitespace, and by line number,
+ # less whitespace first
+ candidates.sort()
+ return lines, candidates[0][1]
+ else:
+ raise IOError('could not find class definition')
+
+ if ismethod(object):
+ object = object.__func__
+ if isfunction(object):
+ object = object.__code__
+ if istraceback(object):
+ object = object.tb_frame
+ if isframe(object):
+ object = object.f_code
+ if iscode(object):
+ if not hasattr(object, 'co_firstlineno'):
+ raise IOError('could not find function definition')
+ pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
+ pmatch = pat.match
+ # fperez - fix: sometimes, co_firstlineno can give a number larger than
+ # the length of lines, which causes an error. Safeguard against that.
+ lnum = min(object.co_firstlineno, len(lines)) - 1
+ while lnum > 0:
if pmatch(lines[lnum]):
break
- lnum -= 1
-
- return lines, lnum
- raise IOError('could not find code object')
-
-
-# This is a patched version of inspect.getargs that applies the (unmerged)
-# patch for http://bugs.python.org/issue14611 by Stefano Taschini. This fixes
-# https://github.com/ipython/ipython/issues/8205 and
-# https://github.com/ipython/ipython/issues/8293
-def getargs(co):
- """Get information about the arguments accepted by a code object.
-
- Three things are returned: (args, varargs, varkw), where 'args' is
- a list of argument names (possibly containing nested lists), and
- 'varargs' and 'varkw' are the names of the * and ** arguments or None."""
- if not iscode(co):
- raise TypeError('{!r} is not a code object'.format(co))
-
- nargs = co.co_argcount
- names = co.co_varnames
- args = list(names[:nargs])
- step = 0
-
- # The following acrobatics are for anonymous (tuple) arguments.
- for i in range(nargs):
- if args[i][:1] in ('', '.'):
- stack, remain, count = [], [], []
- while step < len(co.co_code):
- op = ord(co.co_code[step])
- step = step + 1
- if op >= dis.HAVE_ARGUMENT:
- opname = dis.opname[op]
- value = ord(co.co_code[step]) + ord(co.co_code[step+1])*256
- step = step + 2
- if opname in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'):
- remain.append(value)
- count.append(value)
- elif opname in ('STORE_FAST', 'STORE_DEREF'):
- if op in dis.haslocal:
- stack.append(co.co_varnames[value])
- elif op in dis.hasfree:
- stack.append((co.co_cellvars + co.co_freevars)[value])
- # Special case for sublists of length 1: def foo((bar))
- # doesn't generate the UNPACK_TUPLE bytecode, so if
- # `remain` is empty here, we have such a sublist.
- if not remain:
- stack[0] = [stack[0]]
- break
- else:
- remain[-1] = remain[-1] - 1
- while remain[-1] == 0:
- remain.pop()
- size = count.pop()
- stack[-size:] = [stack[-size:]]
+ lnum -= 1
+
+ return lines, lnum
+ raise IOError('could not find code object')
+
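+# Commented illustration of the patched findsource (same signature as
+# inspect.findsource): it returns every line of the defining file plus
+# a zero-based index of the first line of the definition, e.g.:
+#
+#     src_lines, start = findsource(findsource)
+#     print(start, src_lines[start])   # -> the 'def findsource(...)' line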
+
+# This is a patched version of inspect.getargs that applies the (unmerged)
+# patch for http://bugs.python.org/issue14611 by Stefano Taschini. This fixes
+# https://github.com/ipython/ipython/issues/8205 and
+# https://github.com/ipython/ipython/issues/8293
+def getargs(co):
+ """Get information about the arguments accepted by a code object.
+
+ Three things are returned: (args, varargs, varkw), where 'args' is
+ a list of argument names (possibly containing nested lists), and
+ 'varargs' and 'varkw' are the names of the * and ** arguments or None."""
+ if not iscode(co):
+ raise TypeError('{!r} is not a code object'.format(co))
+
+ nargs = co.co_argcount
+ names = co.co_varnames
+ args = list(names[:nargs])
+ step = 0
+
+ # The following acrobatics are for anonymous (tuple) arguments.
+ for i in range(nargs):
+ if args[i][:1] in ('', '.'):
+ stack, remain, count = [], [], []
+ while step < len(co.co_code):
+ op = ord(co.co_code[step])
+ step = step + 1
+ if op >= dis.HAVE_ARGUMENT:
+ opname = dis.opname[op]
+ value = ord(co.co_code[step]) + ord(co.co_code[step+1])*256
+ step = step + 2
+ if opname in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'):
+ remain.append(value)
+ count.append(value)
+ elif opname in ('STORE_FAST', 'STORE_DEREF'):
+ if op in dis.haslocal:
+ stack.append(co.co_varnames[value])
+ elif op in dis.hasfree:
+ stack.append((co.co_cellvars + co.co_freevars)[value])
+ # Special case for sublists of length 1: def foo((bar))
+ # doesn't generate the UNPACK_TUPLE bytecode, so if
+ # `remain` is empty here, we have such a sublist.
+ if not remain:
+ stack[0] = [stack[0]]
+ break
+ else:
+ remain[-1] = remain[-1] - 1
+ while remain[-1] == 0:
+ remain.pop()
+ size = count.pop()
+ stack[-size:] = [stack[-size:]]
if not remain:
break
- remain[-1] = remain[-1] - 1
+ remain[-1] = remain[-1] - 1
if not remain:
break
- args[i] = stack[0]
-
- varargs = None
- if co.co_flags & inspect.CO_VARARGS:
- varargs = co.co_varnames[nargs]
- nargs = nargs + 1
- varkw = None
- if co.co_flags & inspect.CO_VARKEYWORDS:
- varkw = co.co_varnames[nargs]
- return inspect.Arguments(args, varargs, varkw)
-
-
-# Monkeypatch inspect to apply our bugfix.
-def with_patch_inspect(f):
- """decorator for monkeypatching inspect.findsource"""
-
- def wrapped(*args, **kwargs):
- save_findsource = inspect.findsource
- save_getargs = inspect.getargs
- inspect.findsource = findsource
- inspect.getargs = getargs
- try:
- return f(*args, **kwargs)
- finally:
- inspect.findsource = save_findsource
- inspect.getargs = save_getargs
-
- return wrapped
-
-
-if py3compat.PY3:
- fixed_getargvalues = inspect.getargvalues
-else:
- # Fixes for https://github.com/ipython/ipython/issues/8293
- # and https://github.com/ipython/ipython/issues/8205.
- # The relevant bug is caused by failure to correctly handle anonymous tuple
- # unpacking, which only exists in Python 2.
- fixed_getargvalues = with_patch_inspect(inspect.getargvalues)
-
-
-def fix_frame_records_filenames(records):
- """Try to fix the filenames in each record from inspect.getinnerframes().
-
- Particularly, modules loaded from within zip files have useless filenames
- attached to their code object, and inspect.getinnerframes() just uses it.
- """
- fixed_records = []
- for frame, filename, line_no, func_name, lines, index in records:
- # Look inside the frame's globals dictionary for __file__,
- # which should be better. However, keep Cython filenames since
- # we prefer the source filenames over the compiled .so file.
- filename = py3compat.cast_unicode_py2(filename, "utf-8")
- if not filename.endswith(('.pyx', '.pxd', '.pxi')):
- better_fn = frame.f_globals.get('__file__', None)
- if isinstance(better_fn, str):
- # Check the type just in case someone did something weird with
- # __file__. It might also be None if the error occurred during
- # import.
- filename = better_fn
- fixed_records.append((frame, filename, line_no, func_name, lines, index))
- return fixed_records
-
-
-@with_patch_inspect
-def _fixed_getinnerframes(etb, context=1, tb_offset=0):
- LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5
-
- records = fix_frame_records_filenames(inspect.getinnerframes(etb, context))
- # If the error is at the console, don't build any context, since it would
- # otherwise produce 5 blank lines printed out (there is no file at the
- # console)
- rec_check = records[tb_offset:]
- try:
- rname = rec_check[0][1]
- if rname == '<ipython console>' or rname.endswith('<string>'):
- return rec_check
- except IndexError:
- pass
-
- aux = traceback.extract_tb(etb)
- assert len(records) == len(aux)
- for i, (file, lnum, _, _) in zip(range(len(records)), aux):
- maybeStart = lnum - 1 - context // 2
- start = max(maybeStart, 0)
- end = start + context
- lines = ulinecache.getlines(file)[start:end]
- buf = list(records[i])
- buf[LNUM_POS] = lnum
- buf[INDEX_POS] = lnum - 1 - start
- buf[LINES_POS] = lines
- records[i] = tuple(buf)
- return records[tb_offset:]
-
-# Helper function -- largely belongs to VerboseTB, but we need the same
-# functionality to produce a pseudo verbose TB for SyntaxErrors, so that they
-# can be recognized properly by ipython.el's py-traceback-line-re
-# (SyntaxErrors have to be treated specially because they have no traceback)
-
-_parser = PyColorize.Parser()
-
-
-def _format_traceback_lines(lnum, index, lines, Colors, lvals=None, scheme=None):
- numbers_width = INDENT_SIZE - 1
- res = []
- i = lnum - index
-
- # This lets us get fully syntax-highlighted tracebacks.
- if scheme is None:
- ipinst = get_ipython()
- if ipinst is not None:
- scheme = ipinst.colors
- else:
- scheme = DEFAULT_SCHEME
-
- _line_format = _parser.format2
-
- for line in lines:
- line = py3compat.cast_unicode(line)
-
- new_line, err = _line_format(line, 'str', scheme)
- if not err: line = new_line
-
- if i == lnum:
- # This is the line with the error
- pad = numbers_width - len(str(i))
+ args[i] = stack[0]
+
+ varargs = None
+ if co.co_flags & inspect.CO_VARARGS:
+ varargs = co.co_varnames[nargs]
+ nargs = nargs + 1
+ varkw = None
+ if co.co_flags & inspect.CO_VARKEYWORDS:
+ varkw = co.co_varnames[nargs]
+ return inspect.Arguments(args, varargs, varkw)
+
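+# Commented illustration of getargs on an ordinary (hypothetical)
+# function's code object; anonymous tuple parameters would additionally
+# show up as nested lists in `args`:
+#
+#     def _demo(a, b, *rest, **opts):
+#         pass
+#     args, varargs, varkw = getargs(_demo.__code__)
+#     # args == ['a', 'b'], varargs == 'rest', varkw == 'opts'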
+
+# Monkeypatch inspect to apply our bugfix.
+def with_patch_inspect(f):
+ """decorator for monkeypatching inspect.findsource"""
+
+ def wrapped(*args, **kwargs):
+ save_findsource = inspect.findsource
+ save_getargs = inspect.getargs
+ inspect.findsource = findsource
+ inspect.getargs = getargs
+ try:
+ return f(*args, **kwargs)
+ finally:
+ inspect.findsource = save_findsource
+ inspect.getargs = save_getargs
+
+ return wrapped
+
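+# The decorator is applied like any other decorator; the module itself
+# uses it just below to build fixed_getargvalues, and a hypothetical
+# helper would look like:
+#
+#     @with_patch_inspect
+#     def _inspect_with_fixes(frame):
+#         # inspect.findsource/inspect.getargs are patched only while
+#         # this body runs, then restored.
+#         return inspect.getargvalues(frame)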
+
+if py3compat.PY3:
+ fixed_getargvalues = inspect.getargvalues
+else:
+ # Fixes for https://github.com/ipython/ipython/issues/8293
+ # and https://github.com/ipython/ipython/issues/8205.
+ # The relevant bug is caused by failure to correctly handle anonymous tuple
+ # unpacking, which only exists in Python 2.
+ fixed_getargvalues = with_patch_inspect(inspect.getargvalues)
+
+
+def fix_frame_records_filenames(records):
+ """Try to fix the filenames in each record from inspect.getinnerframes().
+
+ Particularly, modules loaded from within zip files have useless filenames
+ attached to their code objects, and inspect.getinnerframes() just uses them.
+ """
+ fixed_records = []
+ for frame, filename, line_no, func_name, lines, index in records:
+ # Look inside the frame's globals dictionary for __file__,
+ # which should be better. However, keep Cython filenames since
+ # we prefer the source filenames over the compiled .so file.
+ filename = py3compat.cast_unicode_py2(filename, "utf-8")
+ if not filename.endswith(('.pyx', '.pxd', '.pxi')):
+ better_fn = frame.f_globals.get('__file__', None)
+ if isinstance(better_fn, str):
+ # Check the type just in case someone did something weird with
+ # __file__. It might also be None if the error occurred during
+ # import.
+ filename = better_fn
+ fixed_records.append((frame, filename, line_no, func_name, lines, index))
+ return fixed_records
+
+
+@with_patch_inspect
+def _fixed_getinnerframes(etb, context=1, tb_offset=0):
+ LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5
+
+ records = fix_frame_records_filenames(inspect.getinnerframes(etb, context))
+ # If the error is at the console, don't build any context, since it would
+ # otherwise produce 5 blank lines printed out (there is no file at the
+ # console)
+ rec_check = records[tb_offset:]
+ try:
+ rname = rec_check[0][1]
+ if rname == '<ipython console>' or rname.endswith('<string>'):
+ return rec_check
+ except IndexError:
+ pass
+
+ aux = traceback.extract_tb(etb)
+ assert len(records) == len(aux)
+ for i, (file, lnum, _, _) in zip(range(len(records)), aux):
+ maybeStart = lnum - 1 - context // 2
+ start = max(maybeStart, 0)
+ end = start + context
+ lines = ulinecache.getlines(file)[start:end]
+ buf = list(records[i])
+ buf[LNUM_POS] = lnum
+ buf[INDEX_POS] = lnum - 1 - start
+ buf[LINES_POS] = lines
+ records[i] = tuple(buf)
+ return records[tb_offset:]
+
+# Helper function -- largely belongs to VerboseTB, but we need the same
+# functionality to produce a pseudo verbose TB for SyntaxErrors, so that they
+# can be recognized properly by ipython.el's py-traceback-line-re
+# (SyntaxErrors have to be treated specially because they have no traceback)
+
+_parser = PyColorize.Parser()
+
+
+def _format_traceback_lines(lnum, index, lines, Colors, lvals=None, scheme=None):
+ numbers_width = INDENT_SIZE - 1
+ res = []
+ i = lnum - index
+
+ # This lets us get fully syntax-highlighted tracebacks.
+ if scheme is None:
+ ipinst = get_ipython()
+ if ipinst is not None:
+ scheme = ipinst.colors
+ else:
+ scheme = DEFAULT_SCHEME
+
+ _line_format = _parser.format2
+
+ for line in lines:
+ line = py3compat.cast_unicode(line)
+
+ new_line, err = _line_format(line, 'str', scheme)
+ if not err: line = new_line
+
+ if i == lnum:
+ # This is the line with the error
+ pad = numbers_width - len(str(i))
num = '%s%s' % (debugger.make_arrow(pad), str(lnum))
- line = '%s%s%s %s%s' % (Colors.linenoEm, num,
- Colors.line, line, Colors.Normal)
- else:
- num = '%*s' % (numbers_width, i)
- line = '%s%s%s %s' % (Colors.lineno, num,
- Colors.Normal, line)
-
- res.append(line)
- if lvals and i == lnum:
- res.append(lvals + '\n')
- i = i + 1
- return res
-
-def is_recursion_error(etype, value, records):
- try:
- # RecursionError is new in Python 3.5
- recursion_error_type = RecursionError
- except NameError:
- recursion_error_type = RuntimeError
-
- # The default recursion limit is 1000, but some of that will be taken up
- # by stack frames in IPython itself. >500 frames probably indicates
- # a recursion error.
- return (etype is recursion_error_type) \
+ line = '%s%s%s %s%s' % (Colors.linenoEm, num,
+ Colors.line, line, Colors.Normal)
+ else:
+ num = '%*s' % (numbers_width, i)
+ line = '%s%s%s %s' % (Colors.lineno, num,
+ Colors.Normal, line)
+
+ res.append(line)
+ if lvals and i == lnum:
+ res.append(lvals + '\n')
+ i = i + 1
+ return res
+
+def is_recursion_error(etype, value, records):
+ try:
+ # RecursionError is new in Python 3.5
+ recursion_error_type = RecursionError
+ except NameError:
+ recursion_error_type = RuntimeError
+
+ # The default recursion limit is 1000, but some of that will be taken up
+ # by stack frames in IPython itself. >500 frames probably indicates
+ # a recursion error.
+ return (etype is recursion_error_type) \
and str("recursion") in str(value).lower() \
- and len(records) > 500
-
-def find_recursion(etype, value, records):
- """Identify the repeating stack frames from a RecursionError traceback
-
- 'records' is a list as returned by VerboseTB.get_records()
-
- Returns (last_unique, repeat_length)
- """
- # This involves a bit of guesswork - we want to show enough of the traceback
- # to indicate where the recursion is occurring. We guess that the innermost
- # quarter of the traceback (250 frames by default) is repeats, and find the
- # first frame (from in to out) that looks different.
- if not is_recursion_error(etype, value, records):
- return len(records), 0
-
- # Select filename, lineno, func_name to track frames with
- records = [r[1:4] for r in records]
- inner_frames = records[-(len(records)//4):]
- frames_repeated = set(inner_frames)
-
- last_seen_at = {}
- longest_repeat = 0
- i = len(records)
- for frame in reversed(records):
- i -= 1
- if frame not in frames_repeated:
- last_unique = i
- break
-
- if frame in last_seen_at:
- distance = last_seen_at[frame] - i
- longest_repeat = max(longest_repeat, distance)
-
- last_seen_at[frame] = i
- else:
- last_unique = 0 # The whole traceback was recursion
-
- return last_unique, longest_repeat
-
-#---------------------------------------------------------------------------
-# Module classes
+ and len(records) > 500
+
+def find_recursion(etype, value, records):
+ """Identify the repeating stack frames from a RecursionError traceback
+
+ 'records' is a list as returned by VerboseTB.get_records()
+
+ Returns (last_unique, repeat_length)
+ """
+ # This involves a bit of guesswork - we want to show enough of the traceback
+ # to indicate where the recursion is occurring. We guess that the innermost
+ # quarter of the traceback (250 frames by default) consists of repeats, and find the
+ # first frame (from in to out) that looks different.
+ if not is_recursion_error(etype, value, records):
+ return len(records), 0
+
+ # Select filename, lineno, func_name to track frames with
+ records = [r[1:4] for r in records]
+ inner_frames = records[-(len(records)//4):]
+ frames_repeated = set(inner_frames)
+
+ last_seen_at = {}
+ longest_repeat = 0
+ i = len(records)
+ for frame in reversed(records):
+ i -= 1
+ if frame not in frames_repeated:
+ last_unique = i
+ break
+
+ if frame in last_seen_at:
+ distance = last_seen_at[frame] - i
+ longest_repeat = max(longest_repeat, distance)
+
+ last_seen_at[frame] = i
+ else:
+ last_unique = 0 # The whole traceback was recursion
+
+ return last_unique, longest_repeat
+
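+# Sketch of how the pair returned above is consumed inside VerboseTB
+# (see format_records further down); frames up to last_unique are shown
+# in full, then the repeating tail is summarised. Here verbose_tb
+# stands for a VerboseTB instance:
+#
+#     last_unique, repeat_length = find_recursion(etype, evalue, records)
+#     frames = verbose_tb.format_records(records, last_unique, repeat_length)
+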
+#---------------------------------------------------------------------------
+# Module classes
class TBTools(colorable.Colorable):
- """Basic tools used by all traceback printer classes."""
-
- # Number of frames to skip when reporting tracebacks
- tb_offset = 0
-
+ """Basic tools used by all traceback printer classes."""
+
+ # Number of frames to skip when reporting tracebacks
+ tb_offset = 0
+
def __init__(self, color_scheme='NoColor', call_pdb=False, ostream=None, parent=None, config=None):
- # Whether to call the interactive pdb debugger after printing
- # tracebacks or not
+ # Whether to call the interactive pdb debugger after printing
+ # tracebacks or not
super(TBTools, self).__init__(parent=parent, config=config)
- self.call_pdb = call_pdb
-
- # Output stream to write to. Note that we store the original value in
- # a private attribute and then make the public ostream a property, so
+ self.call_pdb = call_pdb
+
+ # Output stream to write to. Note that we store the original value in
+ # a private attribute and then make the public ostream a property, so
# that we can delay accessing sys.stdout until runtime. The way
# things are written now, the sys.stdout object is dynamically managed
- # so a reference to it should NEVER be stored statically. This
- # property approach confines this detail to a single location, and all
- # subclasses can simply access self.ostream for writing.
- self._ostream = ostream
-
- # Create color table
- self.color_scheme_table = exception_colors()
-
- self.set_colors(color_scheme)
- self.old_scheme = color_scheme # save initial value for toggles
-
- if call_pdb:
+ # so a reference to it should NEVER be stored statically. This
+ # property approach confines this detail to a single location, and all
+ # subclasses can simply access self.ostream for writing.
+ self._ostream = ostream
+
+ # Create color table
+ self.color_scheme_table = exception_colors()
+
+ self.set_colors(color_scheme)
+ self.old_scheme = color_scheme # save initial value for toggles
+
+ if call_pdb:
self.pdb = debugger.Pdb()
- else:
- self.pdb = None
-
- def _get_ostream(self):
- """Output stream that exceptions are written to.
-
- Valid values are:
-
- - None: the default, which means that IPython will dynamically resolve
+ else:
+ self.pdb = None
+
+ def _get_ostream(self):
+ """Output stream that exceptions are written to.
+
+ Valid values are:
+
+ - None: the default, which means that IPython will dynamically resolve
to sys.stdout. This ensures compatibility with most tools, including
- Windows (where plain stdout doesn't recognize ANSI escapes).
-
- - Any object with 'write' and 'flush' attributes.
- """
+ Windows (where plain stdout doesn't recognize ANSI escapes).
+
+ - Any object with 'write' and 'flush' attributes.
+ """
return sys.stdout if self._ostream is None else self._ostream
-
- def _set_ostream(self, val):
- assert val is None or (hasattr(val, 'write') and hasattr(val, 'flush'))
- self._ostream = val
-
- ostream = property(_get_ostream, _set_ostream)
-
- def set_colors(self, *args, **kw):
- """Shorthand access to the color table scheme selector method."""
-
- # Set own color table
- self.color_scheme_table.set_active_scheme(*args, **kw)
- # for convenience, set Colors to the active scheme
- self.Colors = self.color_scheme_table.active_colors
- # Also set colors of debugger
- if hasattr(self, 'pdb') and self.pdb is not None:
- self.pdb.set_colors(*args, **kw)
-
- def color_toggle(self):
- """Toggle between the currently active color scheme and NoColor."""
-
- if self.color_scheme_table.active_scheme_name == 'NoColor':
- self.color_scheme_table.set_active_scheme(self.old_scheme)
- self.Colors = self.color_scheme_table.active_colors
- else:
- self.old_scheme = self.color_scheme_table.active_scheme_name
- self.color_scheme_table.set_active_scheme('NoColor')
- self.Colors = self.color_scheme_table.active_colors
-
- def stb2text(self, stb):
- """Convert a structured traceback (a list) to a string."""
- return '\n'.join(stb)
-
- def text(self, etype, value, tb, tb_offset=None, context=5):
- """Return formatted traceback.
-
- Subclasses may override this if they add extra arguments.
- """
- tb_list = self.structured_traceback(etype, value, tb,
- tb_offset, context)
- return self.stb2text(tb_list)
-
- def structured_traceback(self, etype, evalue, tb, tb_offset=None,
- context=5, mode=None):
- """Return a list of traceback frames.
-
- Must be implemented by each class.
- """
- raise NotImplementedError()
-
-
-#---------------------------------------------------------------------------
-class ListTB(TBTools):
- """Print traceback information from a traceback list, with optional color.
-
- Calling requires 3 arguments: (etype, evalue, elist)
- as would be obtained by::
-
- etype, evalue, tb = sys.exc_info()
- if tb:
- elist = traceback.extract_tb(tb)
- else:
- elist = None
-
- It can thus be used by programs which need to process the traceback before
- printing (such as console replacements based on the code module from the
- standard library).
-
- Because they are meant to be called without a full traceback (only a
- list), instances of this class can't call the interactive pdb debugger."""
-
+
+ def _set_ostream(self, val):
+ assert val is None or (hasattr(val, 'write') and hasattr(val, 'flush'))
+ self._ostream = val
+
+ ostream = property(_get_ostream, _set_ostream)
+
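+ # Because ostream accepts any object with write/flush, tracebacks can
+ # be captured rather than printed. An illustrative sketch, assuming an
+ # io.StringIO buffer is acceptable to the caller:
+ #
+ #     import io
+ #     printer = ListTB(color_scheme='NoColor')
+ #     printer.ostream = io.StringIO()
+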
+ def set_colors(self, *args, **kw):
+ """Shorthand access to the color table scheme selector method."""
+
+ # Set own color table
+ self.color_scheme_table.set_active_scheme(*args, **kw)
+ # for convenience, set Colors to the active scheme
+ self.Colors = self.color_scheme_table.active_colors
+ # Also set colors of debugger
+ if hasattr(self, 'pdb') and self.pdb is not None:
+ self.pdb.set_colors(*args, **kw)
+
+ def color_toggle(self):
+ """Toggle between the currently active color scheme and NoColor."""
+
+ if self.color_scheme_table.active_scheme_name == 'NoColor':
+ self.color_scheme_table.set_active_scheme(self.old_scheme)
+ self.Colors = self.color_scheme_table.active_colors
+ else:
+ self.old_scheme = self.color_scheme_table.active_scheme_name
+ self.color_scheme_table.set_active_scheme('NoColor')
+ self.Colors = self.color_scheme_table.active_colors
+
+ def stb2text(self, stb):
+ """Convert a structured traceback (a list) to a string."""
+ return '\n'.join(stb)
+
+ def text(self, etype, value, tb, tb_offset=None, context=5):
+ """Return formatted traceback.
+
+ Subclasses may override this if they add extra arguments.
+ """
+ tb_list = self.structured_traceback(etype, value, tb,
+ tb_offset, context)
+ return self.stb2text(tb_list)
+
+ def structured_traceback(self, etype, evalue, tb, tb_offset=None,
+ context=5, mode=None):
+ """Return a list of traceback frames.
+
+ Must be implemented by each class.
+ """
+ raise NotImplementedError()
+
+
+#---------------------------------------------------------------------------
+class ListTB(TBTools):
+ """Print traceback information from a traceback list, with optional color.
+
+ Calling requires 3 arguments: (etype, evalue, elist)
+ as would be obtained by::
+
+ etype, evalue, tb = sys.exc_info()
+ if tb:
+ elist = traceback.extract_tb(tb)
+ else:
+ elist = None
+
+ It can thus be used by programs which need to process the traceback before
+ printing (such as console replacements based on the code module from the
+ standard library).
+
+ Because they are meant to be called without a full traceback (only a
+ list), instances of this class can't call the interactive pdb debugger."""
+
def __init__(self, color_scheme='NoColor', call_pdb=False, ostream=None, parent=None):
- TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
+ TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
ostream=ostream, parent=parent)
-
- def __call__(self, etype, value, elist):
- self.ostream.flush()
- self.ostream.write(self.text(etype, value, elist))
- self.ostream.write('\n')
-
- def structured_traceback(self, etype, value, elist, tb_offset=None,
- context=5):
- """Return a color formatted string with the traceback info.
-
- Parameters
- ----------
- etype : exception type
- Type of the exception raised.
-
- value : object
- Data stored in the exception
-
- elist : list
- List of frames, see class docstring for details.
-
- tb_offset : int, optional
- Number of frames in the traceback to skip. If not given, the
- instance value is used (set in constructor).
-
- context : int, optional
- Number of lines of context information to print.
-
- Returns
- -------
- String with formatted exception.
- """
- tb_offset = self.tb_offset if tb_offset is None else tb_offset
- Colors = self.Colors
- out_list = []
- if elist:
-
- if tb_offset and len(elist) > tb_offset:
- elist = elist[tb_offset:]
-
- out_list.append('Traceback %s(most recent call last)%s:' %
- (Colors.normalEm, Colors.Normal) + '\n')
- out_list.extend(self._format_list(elist))
- # The exception info should be a single entry in the list.
- lines = ''.join(self._format_exception_only(etype, value))
- out_list.append(lines)
-
- # Note: this code originally read:
-
- ## for line in lines[:-1]:
- ## out_list.append(" "+line)
- ## out_list.append(lines[-1])
-
- # This means it was indenting everything but the last line by a little
- # bit. I've disabled this for now, but if we see ugliness somewhere we
- # can restore it.
-
- return out_list
-
- def _format_list(self, extracted_list):
- """Format a list of traceback entry tuples for printing.
-
- Given a list of tuples as returned by extract_tb() or
- extract_stack(), return a list of strings ready for printing.
- Each string in the resulting list corresponds to the item with the
- same index in the argument list. Each string ends in a newline;
- the strings may contain internal newlines as well, for those items
- whose source text line is not None.
-
- Lifted almost verbatim from traceback.py
- """
-
- Colors = self.Colors
- list = []
- for filename, lineno, name, line in extracted_list[:-1]:
- item = ' File %s"%s"%s, line %s%d%s, in %s%s%s\n' % \
- (Colors.filename, py3compat.cast_unicode_py2(filename, "utf-8"), Colors.Normal,
- Colors.lineno, lineno, Colors.Normal,
- Colors.name, py3compat.cast_unicode_py2(name, "utf-8"), Colors.Normal)
- if line:
- item += ' %s\n' % line.strip()
- list.append(item)
- # Emphasize the last entry
- filename, lineno, name, line = extracted_list[-1]
- item = '%s File %s"%s"%s, line %s%d%s, in %s%s%s%s\n' % \
- (Colors.normalEm,
- Colors.filenameEm, py3compat.cast_unicode_py2(filename, "utf-8"), Colors.normalEm,
- Colors.linenoEm, lineno, Colors.normalEm,
- Colors.nameEm, py3compat.cast_unicode_py2(name, "utf-8"), Colors.normalEm,
- Colors.Normal)
- if line:
- item += '%s %s%s\n' % (Colors.line, line.strip(),
- Colors.Normal)
- list.append(item)
- return list
-
- def _format_exception_only(self, etype, value):
- """Format the exception part of a traceback.
-
- The arguments are the exception type and value such as given by
- sys.exc_info()[:2]. The return value is a list of strings, each ending
- in a newline. Normally, the list contains a single string; however,
- for SyntaxError exceptions, it contains several lines that (when
- printed) display detailed information about where the syntax error
- occurred. The message indicating which exception occurred is the
- always last string in the list.
-
- Also lifted nearly verbatim from traceback.py
- """
- have_filedata = False
- Colors = self.Colors
- list = []
+
+ def __call__(self, etype, value, elist):
+ self.ostream.flush()
+ self.ostream.write(self.text(etype, value, elist))
+ self.ostream.write('\n')
+
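+ # Typical use, mirroring the class docstring (sketch only):
+ #
+ #     import sys, traceback
+ #     etype, evalue, tb = sys.exc_info()
+ #     ListTB(color_scheme='Linux')(etype, evalue,
+ #                                  traceback.extract_tb(tb) if tb else None)
+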
+ def structured_traceback(self, etype, value, elist, tb_offset=None,
+ context=5):
+ """Return a color formatted string with the traceback info.
+
+ Parameters
+ ----------
+ etype : exception type
+ Type of the exception raised.
+
+ value : object
+ Data stored in the exception
+
+ elist : list
+ List of frames, see class docstring for details.
+
+ tb_offset : int, optional
+ Number of frames in the traceback to skip. If not given, the
+ instance value is used (set in constructor).
+
+ context : int, optional
+ Number of lines of context information to print.
+
+ Returns
+ -------
+ String with formatted exception.
+ """
+ tb_offset = self.tb_offset if tb_offset is None else tb_offset
+ Colors = self.Colors
+ out_list = []
+ if elist:
+
+ if tb_offset and len(elist) > tb_offset:
+ elist = elist[tb_offset:]
+
+ out_list.append('Traceback %s(most recent call last)%s:' %
+ (Colors.normalEm, Colors.Normal) + '\n')
+ out_list.extend(self._format_list(elist))
+ # The exception info should be a single entry in the list.
+ lines = ''.join(self._format_exception_only(etype, value))
+ out_list.append(lines)
+
+ # Note: this code originally read:
+
+ ## for line in lines[:-1]:
+ ## out_list.append(" "+line)
+ ## out_list.append(lines[-1])
+
+ # This means it was indenting everything but the last line by a little
+ # bit. I've disabled this for now, but if we see ugliness somewhere we
+ # can restore it.
+
+ return out_list
+
+ def _format_list(self, extracted_list):
+ """Format a list of traceback entry tuples for printing.
+
+ Given a list of tuples as returned by extract_tb() or
+ extract_stack(), return a list of strings ready for printing.
+ Each string in the resulting list corresponds to the item with the
+ same index in the argument list. Each string ends in a newline;
+ the strings may contain internal newlines as well, for those items
+ whose source text line is not None.
+
+ Lifted almost verbatim from traceback.py
+ """
+
+ Colors = self.Colors
+ list = []
+ for filename, lineno, name, line in extracted_list[:-1]:
+ item = ' File %s"%s"%s, line %s%d%s, in %s%s%s\n' % \
+ (Colors.filename, py3compat.cast_unicode_py2(filename, "utf-8"), Colors.Normal,
+ Colors.lineno, lineno, Colors.Normal,
+ Colors.name, py3compat.cast_unicode_py2(name, "utf-8"), Colors.Normal)
+ if line:
+ item += ' %s\n' % line.strip()
+ list.append(item)
+ # Emphasize the last entry
+ filename, lineno, name, line = extracted_list[-1]
+ item = '%s File %s"%s"%s, line %s%d%s, in %s%s%s%s\n' % \
+ (Colors.normalEm,
+ Colors.filenameEm, py3compat.cast_unicode_py2(filename, "utf-8"), Colors.normalEm,
+ Colors.linenoEm, lineno, Colors.normalEm,
+ Colors.nameEm, py3compat.cast_unicode_py2(name, "utf-8"), Colors.normalEm,
+ Colors.Normal)
+ if line:
+ item += '%s %s%s\n' % (Colors.line, line.strip(),
+ Colors.Normal)
+ list.append(item)
+ return list
+
+ def _format_exception_only(self, etype, value):
+ """Format the exception part of a traceback.
+
+ The arguments are the exception type and value such as given by
+ sys.exc_info()[:2]. The return value is a list of strings, each ending
+ in a newline. Normally, the list contains a single string; however,
+ for SyntaxError exceptions, it contains several lines that (when
+ printed) display detailed information about where the syntax error
+ occurred. The message indicating which exception occurred is the
+ always last string in the list.
+
+ Also lifted nearly verbatim from traceback.py
+ """
+ have_filedata = False
+ Colors = self.Colors
+ list = []
stype = py3compat.cast_unicode(Colors.excName + etype.__name__ + Colors.Normal)
- if value is None:
- # Not sure if this can still happen in Python 2.6 and above
+ if value is None:
+ # Not sure if this can still happen in Python 2.6 and above
list.append(stype + '\n')
- else:
- if issubclass(etype, SyntaxError):
- have_filedata = True
- if not value.filename: value.filename = "<string>"
- if value.lineno:
- lineno = value.lineno
- textline = ulinecache.getline(value.filename, value.lineno)
- else:
- lineno = 'unknown'
- textline = ''
- list.append('%s File %s"%s"%s, line %s%s%s\n' % \
- (Colors.normalEm,
- Colors.filenameEm, py3compat.cast_unicode(value.filename), Colors.normalEm,
- Colors.linenoEm, lineno, Colors.Normal ))
- if textline == '':
- textline = py3compat.cast_unicode(value.text, "utf-8")
-
- if textline is not None:
- i = 0
- while i < len(textline) and textline[i].isspace():
- i += 1
- list.append('%s %s%s\n' % (Colors.line,
- textline.strip(),
- Colors.Normal))
- if value.offset is not None:
- s = ' '
- for c in textline[i:value.offset - 1]:
- if c.isspace():
- s += c
- else:
- s += ' '
- list.append('%s%s^%s\n' % (Colors.caret, s,
- Colors.Normal))
-
- try:
- s = value.msg
- except Exception:
- s = self._some_str(value)
- if s:
+ else:
+ if issubclass(etype, SyntaxError):
+ have_filedata = True
+ if not value.filename: value.filename = "<string>"
+ if value.lineno:
+ lineno = value.lineno
+ textline = ulinecache.getline(value.filename, value.lineno)
+ else:
+ lineno = 'unknown'
+ textline = ''
+ list.append('%s File %s"%s"%s, line %s%s%s\n' % \
+ (Colors.normalEm,
+ Colors.filenameEm, py3compat.cast_unicode(value.filename), Colors.normalEm,
+ Colors.linenoEm, lineno, Colors.Normal ))
+ if textline == '':
+ textline = py3compat.cast_unicode(value.text, "utf-8")
+
+ if textline is not None:
+ i = 0
+ while i < len(textline) and textline[i].isspace():
+ i += 1
+ list.append('%s %s%s\n' % (Colors.line,
+ textline.strip(),
+ Colors.Normal))
+ if value.offset is not None:
+ s = ' '
+ for c in textline[i:value.offset - 1]:
+ if c.isspace():
+ s += c
+ else:
+ s += ' '
+ list.append('%s%s^%s\n' % (Colors.caret, s,
+ Colors.Normal))
+
+ try:
+ s = value.msg
+ except Exception:
+ s = self._some_str(value)
+ if s:
list.append('%s%s:%s %s\n' % (stype, Colors.excName,
- Colors.Normal, s))
- else:
+ Colors.Normal, s))
+ else:
list.append('%s\n' % stype)
-
- # sync with user hooks
- if have_filedata:
- ipinst = get_ipython()
- if ipinst is not None:
- ipinst.hooks.synchronize_with_editor(value.filename, value.lineno, 0)
-
- return list
-
- def get_exception_only(self, etype, value):
- """Only print the exception type and message, without a traceback.
-
- Parameters
- ----------
- etype : exception type
- value : exception value
- """
- return ListTB.structured_traceback(self, etype, value, [])
-
- def show_exception_only(self, etype, evalue):
- """Only print the exception type and message, without a traceback.
-
- Parameters
- ----------
- etype : exception type
- value : exception value
- """
- # This method needs to use __call__ from *this* class, not the one from
- # a subclass whose signature or behavior may be different
- ostream = self.ostream
- ostream.flush()
- ostream.write('\n'.join(self.get_exception_only(etype, evalue)))
- ostream.flush()
-
- def _some_str(self, value):
- # Lifted from traceback.py
- try:
+
+ # sync with user hooks
+ if have_filedata:
+ ipinst = get_ipython()
+ if ipinst is not None:
+ ipinst.hooks.synchronize_with_editor(value.filename, value.lineno, 0)
+
+ return list
+
+ def get_exception_only(self, etype, value):
+ """Only print the exception type and message, without a traceback.
+
+ Parameters
+ ----------
+ etype : exception type
+ value : exception value
+ """
+ return ListTB.structured_traceback(self, etype, value, [])
+
+ def show_exception_only(self, etype, evalue):
+ """Only print the exception type and message, without a traceback.
+
+ Parameters
+ ----------
+ etype : exception type
+ evalue : exception value
+ """
+ # This method needs to use __call__ from *this* class, not the one from
+ # a subclass whose signature or behavior may be different
+ ostream = self.ostream
+ ostream.flush()
+ ostream.write('\n'.join(self.get_exception_only(etype, evalue)))
+ ostream.flush()
+
+ def _some_str(self, value):
+ # Lifted from traceback.py
+ try:
return py3compat.cast_unicode(str(value))
- except:
+ except:
return u'<unprintable %s object>' % type(value).__name__
-
-
-#----------------------------------------------------------------------------
-class VerboseTB(TBTools):
- """A port of Ka-Ping Yee's cgitb.py module that outputs color text instead
- of HTML. Requires inspect and pydoc. Crazy, man.
-
- Modified version which optionally strips the topmost entries from the
- traceback, to be used with alternate interpreters (because their own code
- would appear in the traceback)."""
-
- def __init__(self, color_scheme='Linux', call_pdb=False, ostream=None,
- tb_offset=0, long_header=False, include_vars=True,
+
+
+#----------------------------------------------------------------------------
+class VerboseTB(TBTools):
+ """A port of Ka-Ping Yee's cgitb.py module that outputs color text instead
+ of HTML. Requires inspect and pydoc. Crazy, man.
+
+ Modified version which optionally strips the topmost entries from the
+ traceback, to be used with alternate interpreters (because their own code
+ would appear in the traceback)."""
+
+ def __init__(self, color_scheme='Linux', call_pdb=False, ostream=None,
+ tb_offset=0, long_header=False, include_vars=True,
check_cache=None, debugger_cls = None):
- """Specify traceback offset, headers and color scheme.
-
- Define how many frames to drop from the tracebacks. Calling it with
- tb_offset=1 allows use of this handler in interpreters which will have
- their own code at the top of the traceback (VerboseTB will first
- remove that frame before printing the traceback info)."""
- TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
- ostream=ostream)
- self.tb_offset = tb_offset
- self.long_header = long_header
- self.include_vars = include_vars
- # By default we use linecache.checkcache, but the user can provide a
- # different check_cache implementation. This is used by the IPython
- # kernel to provide tracebacks for interactive code that is cached,
- # by a compiler instance that flushes the linecache but preserves its
- # own code cache.
- if check_cache is None:
- check_cache = linecache.checkcache
- self.check_cache = check_cache
-
+ """Specify traceback offset, headers and color scheme.
+
+ Define how many frames to drop from the tracebacks. Calling it with
+ tb_offset=1 allows use of this handler in interpreters which will have
+ their own code at the top of the traceback (VerboseTB will first
+ remove that frame before printing the traceback info)."""
+ TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
+ ostream=ostream)
+ self.tb_offset = tb_offset
+ self.long_header = long_header
+ self.include_vars = include_vars
+ # By default we use linecache.checkcache, but the user can provide a
+ # different check_cache implementation. This is used by the IPython
+ # kernel to provide tracebacks for interactive code that is cached,
+ # by a compiler instance that flushes the linecache but preserves its
+ # own code cache.
+ if check_cache is None:
+ check_cache = linecache.checkcache
+ self.check_cache = check_cache
+
self.debugger_cls = debugger_cls or debugger.Pdb
+
+ def format_records(self, records, last_unique, recursion_repeat):
- """Format the stack frames of the traceback"""
- frames = []
- for r in records[:last_unique+recursion_repeat+1]:
- #print '*** record:',file,lnum,func,lines,index # dbg
- frames.append(self.format_record(*r))
-
- if recursion_repeat:
- frames.append('... last %d frames repeated, from the frame below ...\n' % recursion_repeat)
- frames.append(self.format_record(*records[last_unique+recursion_repeat+1]))
-
- return frames
-
- def format_record(self, frame, file, lnum, func, lines, index):
- """Format a single stack frame"""
- Colors = self.Colors # just a shorthand + quicker name lookup
- ColorsNormal = Colors.Normal # used a lot
- col_scheme = self.color_scheme_table.active_scheme_name
- indent = ' ' * INDENT_SIZE
- em_normal = '%s\n%s%s' % (Colors.valEm, indent, ColorsNormal)
- undefined = '%sundefined%s' % (Colors.em, ColorsNormal)
- tpl_link = '%s%%s%s' % (Colors.filenameEm, ColorsNormal)
- tpl_call = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm,
- ColorsNormal)
- tpl_call_fail = 'in %s%%s%s(***failed resolving arguments***)%s' % \
- (Colors.vName, Colors.valEm, ColorsNormal)
- tpl_local_var = '%s%%s%s' % (Colors.vName, ColorsNormal)
- tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal,
- Colors.vName, ColorsNormal)
- tpl_name_val = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal)
-
- tpl_line = '%s%%s%s %%s' % (Colors.lineno, ColorsNormal)
- tpl_line_em = '%s%%s%s %%s%s' % (Colors.linenoEm, Colors.line,
- ColorsNormal)
-
- abspath = os.path.abspath
-
-
- if not file:
- file = '?'
- elif file.startswith(str("<")) and file.endswith(str(">")):
- # Not a real filename, no problem...
- pass
- elif not os.path.isabs(file):
- # Try to make the filename absolute by trying all
- # sys.path entries (which is also what linecache does)
- for dirname in sys.path:
- try:
- fullname = os.path.join(dirname, file)
- if os.path.isfile(fullname):
- file = os.path.abspath(fullname)
- break
- except Exception:
- # Just in case that sys.path contains very
- # strange entries...
- pass
-
- file = py3compat.cast_unicode(file, util_path.fs_encoding)
- link = tpl_link % file
- args, varargs, varkw, locals = fixed_getargvalues(frame)
-
- if func == '?':
- call = ''
- else:
- # Decide whether to include variable details or not
- var_repr = self.include_vars and eqrepr or nullrepr
- try:
- call = tpl_call % (func, inspect.formatargvalues(args,
- varargs, varkw,
- locals, formatvalue=var_repr))
- except KeyError:
- # This happens in situations like errors inside generator
- # expressions, where local variables are listed in the
- # line, but can't be extracted from the frame. I'm not
- # 100% sure this isn't actually a bug in inspect itself,
- # but since there's no info for us to compute with, the
- # best we can do is report the failure and move on. Here
- # we must *not* call any traceback construction again,
- # because that would mess up use of %debug later on. So we
- # simply report the failure and move on. The only
- # limitation will be that this frame won't have locals
- # listed in the call signature. Quite subtle problem...
- # I can't think of a good way to validate this in a unit
- # test, but running a script consisting of:
- # dict( (k,v.strip()) for (k,v) in range(10) )
- # will illustrate the error, if this exception catch is
- # disabled.
- call = tpl_call_fail % func
-
- # Don't attempt to tokenize binary files.
- if file.endswith(('.so', '.pyd', '.dll')):
- return '%s %s\n' % (link, call)
-
- elif file.endswith(('.pyc', '.pyo')):
- # Look up the corresponding source file.
+ def format_records(self, records, last_unique, recursion_repeat):
+ """Format the stack frames of the traceback"""
+ frames = []
+ for r in records[:last_unique+recursion_repeat+1]:
+ #print '*** record:',file,lnum,func,lines,index # dbg
+ frames.append(self.format_record(*r))
+
+ if recursion_repeat:
+ frames.append('... last %d frames repeated, from the frame below ...\n' % recursion_repeat)
+ frames.append(self.format_record(*records[last_unique+recursion_repeat+1]))
+
+ return frames
+
+ def format_record(self, frame, file, lnum, func, lines, index):
+ """Format a single stack frame"""
+ Colors = self.Colors # just a shorthand + quicker name lookup
+ ColorsNormal = Colors.Normal # used a lot
+ col_scheme = self.color_scheme_table.active_scheme_name
+ indent = ' ' * INDENT_SIZE
+ em_normal = '%s\n%s%s' % (Colors.valEm, indent, ColorsNormal)
+ undefined = '%sundefined%s' % (Colors.em, ColorsNormal)
+ tpl_link = '%s%%s%s' % (Colors.filenameEm, ColorsNormal)
+ tpl_call = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm,
+ ColorsNormal)
+ tpl_call_fail = 'in %s%%s%s(***failed resolving arguments***)%s' % \
+ (Colors.vName, Colors.valEm, ColorsNormal)
+ tpl_local_var = '%s%%s%s' % (Colors.vName, ColorsNormal)
+ tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal,
+ Colors.vName, ColorsNormal)
+ tpl_name_val = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal)
+
+ tpl_line = '%s%%s%s %%s' % (Colors.lineno, ColorsNormal)
+ tpl_line_em = '%s%%s%s %%s%s' % (Colors.linenoEm, Colors.line,
+ ColorsNormal)
+
+ abspath = os.path.abspath
+
+
+ if not file:
+ file = '?'
+ elif file.startswith(str("<")) and file.endswith(str(">")):
+ # Not a real filename, no problem...
+ pass
+ elif not os.path.isabs(file):
+ # Try to make the filename absolute by trying all
+ # sys.path entries (which is also what linecache does)
+ for dirname in sys.path:
+ try:
+ fullname = os.path.join(dirname, file)
+ if os.path.isfile(fullname):
+ file = os.path.abspath(fullname)
+ break
+ except Exception:
+ # Just in case that sys.path contains very
+ # strange entries...
+ pass
+
+ file = py3compat.cast_unicode(file, util_path.fs_encoding)
+ link = tpl_link % file
+ args, varargs, varkw, locals = fixed_getargvalues(frame)
+
+ if func == '?':
+ call = ''
+ else:
+ # Decide whether to include variable details or not
+ var_repr = self.include_vars and eqrepr or nullrepr
+ try:
+ call = tpl_call % (func, inspect.formatargvalues(args,
+ varargs, varkw,
+ locals, formatvalue=var_repr))
+ except KeyError:
+ # This happens in situations like errors inside generator
+ # expressions, where local variables are listed in the
+ # line, but can't be extracted from the frame. I'm not
+ # 100% sure this isn't actually a bug in inspect itself,
+ # but since there's no info for us to compute with, the
+ # best we can do is report the failure and move on. Here
+ # we must *not* call any traceback construction again,
+ # because that would mess up use of %debug later on. The only
+ # limitation will be that this frame won't have locals
+ # listed in the call signature. Quite subtle problem...
+ # I can't think of a good way to validate this in a unit
+ # test, but running a script consisting of:
+ # dict( (k,v.strip()) for (k,v) in range(10) )
+ # will illustrate the error, if this exception catch is
+ # disabled.
+ call = tpl_call_fail % func
+
+ # Don't attempt to tokenize binary files.
+ if file.endswith(('.so', '.pyd', '.dll')):
+ return '%s %s\n' % (link, call)
+
+ elif file.endswith(('.pyc', '.pyo')):
+ # Look up the corresponding source file.
try:
file = openpy.source_from_cache(file)
except ValueError:
# Failed to get the source file for some reason
# E.g. https://github.com/ipython/ipython/issues/9486
return '%s %s\n' % (link, call)
-
- def linereader(file=file, lnum=[lnum], getline=ulinecache.getline):
- line = getline(file, lnum[0])
- lnum[0] += 1
- return line
-
- # Build the list of names on this line of code where the exception
- # occurred.
- try:
- names = []
- name_cont = False
-
- for token_type, token, start, end, line in generate_tokens(linereader):
- # build composite names
- if token_type == tokenize.NAME and token not in keyword.kwlist:
- if name_cont:
- # Continuation of a dotted name
- try:
- names[-1].append(token)
- except IndexError:
- names.append([token])
- name_cont = False
- else:
- # Regular new names. We append everything, the caller
- # will be responsible for pruning the list later. It's
- # very tricky to try to prune as we go, b/c composite
- # names can fool us. The pruning at the end is easy
- # to do (or the caller can print a list with repeated
- # names if so desired.
- names.append([token])
- elif token == '.':
- name_cont = True
- elif token_type == tokenize.NEWLINE:
- break
-
- except (IndexError, UnicodeDecodeError, SyntaxError):
- # signals exit of tokenizer
- # SyntaxError can occur if the file is not actually Python
- # - see gh-6300
- pass
- except tokenize.TokenError as msg:
- _m = ("An unexpected error occurred while tokenizing input\n"
- "The following traceback may be corrupted or invalid\n"
- "The error message is: %s\n" % msg)
- error(_m)
-
- # Join composite names (e.g. "dict.fromkeys")
- names = ['.'.join(n) for n in names]
- # prune names list of duplicates, but keep the right order
- unique_names = uniq_stable(names)
-
- # Start loop over vars
- lvals = []
- if self.include_vars:
- for name_full in unique_names:
- name_base = name_full.split('.', 1)[0]
- if name_base in frame.f_code.co_varnames:
- if name_base in locals:
- try:
- value = repr(eval(name_full, locals))
- except:
- value = undefined
- else:
- value = undefined
- name = tpl_local_var % name_full
- else:
- if name_base in frame.f_globals:
- try:
- value = repr(eval(name_full, frame.f_globals))
- except:
- value = undefined
- else:
- value = undefined
- name = tpl_global_var % name_full
- lvals.append(tpl_name_val % (name, value))
- if lvals:
- lvals = '%s%s' % (indent, em_normal.join(lvals))
- else:
- lvals = ''
-
- level = '%s %s\n' % (link, call)
-
- if index is None:
- return level
- else:
- return '%s%s' % (level, ''.join(
- _format_traceback_lines(lnum, index, lines, Colors, lvals,
- col_scheme)))
-
- def prepare_chained_exception_message(self, cause):
- direct_cause = "\nThe above exception was the direct cause of the following exception:\n"
- exception_during_handling = "\nDuring handling of the above exception, another exception occurred:\n"
-
- if cause:
- message = [[direct_cause]]
- else:
- message = [[exception_during_handling]]
- return message
-
- def prepare_header(self, etype, long_version=False):
- colors = self.Colors # just a shorthand + quicker name lookup
- colorsnormal = colors.Normal # used a lot
- exc = '%s%s%s' % (colors.excName, etype, colorsnormal)
+
+ def linereader(file=file, lnum=[lnum], getline=ulinecache.getline):
+ line = getline(file, lnum[0])
+ lnum[0] += 1
+ return line
+
+ # Build the list of names on this line of code where the exception
+ # occurred.
+ try:
+ names = []
+ name_cont = False
+
+ for token_type, token, start, end, line in generate_tokens(linereader):
+ # build composite names
+ if token_type == tokenize.NAME and token not in keyword.kwlist:
+ if name_cont:
+ # Continuation of a dotted name
+ try:
+ names[-1].append(token)
+ except IndexError:
+ names.append([token])
+ name_cont = False
+ else:
+ # Regular new names. We append everything, the caller
+ # will be responsible for pruning the list later. It's
+ # very tricky to try to prune as we go, b/c composite
+ # names can fool us. The pruning at the end is easy
+ # to do (or the caller can print a list with repeated
+                        # names if so desired).
+ names.append([token])
+ elif token == '.':
+ name_cont = True
+ elif token_type == tokenize.NEWLINE:
+ break
+
+ except (IndexError, UnicodeDecodeError, SyntaxError):
+ # signals exit of tokenizer
+ # SyntaxError can occur if the file is not actually Python
+ # - see gh-6300
+ pass
+ except tokenize.TokenError as msg:
+ _m = ("An unexpected error occurred while tokenizing input\n"
+ "The following traceback may be corrupted or invalid\n"
+ "The error message is: %s\n" % msg)
+ error(_m)
+
+ # Join composite names (e.g. "dict.fromkeys")
+ names = ['.'.join(n) for n in names]
+ # prune names list of duplicates, but keep the right order
+ unique_names = uniq_stable(names)
+
+ # Start loop over vars
+ lvals = []
+ if self.include_vars:
+ for name_full in unique_names:
+ name_base = name_full.split('.', 1)[0]
+ if name_base in frame.f_code.co_varnames:
+ if name_base in locals:
+ try:
+ value = repr(eval(name_full, locals))
+ except:
+ value = undefined
+ else:
+ value = undefined
+ name = tpl_local_var % name_full
+ else:
+ if name_base in frame.f_globals:
+ try:
+ value = repr(eval(name_full, frame.f_globals))
+ except:
+ value = undefined
+ else:
+ value = undefined
+ name = tpl_global_var % name_full
+ lvals.append(tpl_name_val % (name, value))
+ if lvals:
+ lvals = '%s%s' % (indent, em_normal.join(lvals))
+ else:
+ lvals = ''
+
+ level = '%s %s\n' % (link, call)
+
+ if index is None:
+ return level
+ else:
+ return '%s%s' % (level, ''.join(
+ _format_traceback_lines(lnum, index, lines, Colors, lvals,
+ col_scheme)))
+
+ def prepare_chained_exception_message(self, cause):
+ direct_cause = "\nThe above exception was the direct cause of the following exception:\n"
+ exception_during_handling = "\nDuring handling of the above exception, another exception occurred:\n"
+
+ if cause:
+ message = [[direct_cause]]
+ else:
+ message = [[exception_during_handling]]
+ return message
+
+ def prepare_header(self, etype, long_version=False):
+ colors = self.Colors # just a shorthand + quicker name lookup
+ colorsnormal = colors.Normal # used a lot
+ exc = '%s%s%s' % (colors.excName, etype, colorsnormal)
width = min(75, get_terminal_size()[0])
- if long_version:
- # Header with the exception type, python version, and date
- pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
- date = time.ctime(time.time())
-
+ if long_version:
+ # Header with the exception type, python version, and date
+ pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
+ date = time.ctime(time.time())
+
head = '%s%s%s\n%s%s%s\n%s' % (colors.topline, '-' * width, colorsnormal,
exc, ' ' * (width - len(str(etype)) - len(pyver)),
pyver, date.rjust(width) )
- head += "\nA problem occurred executing Python code. Here is the sequence of function" \
- "\ncalls leading up to the error, with the most recent (innermost) call last."
- else:
- # Simplified header
- head = '%s%s' % (exc, 'Traceback (most recent call last)'. \
+ head += "\nA problem occurred executing Python code. Here is the sequence of function" \
+ "\ncalls leading up to the error, with the most recent (innermost) call last."
+ else:
+ # Simplified header
+ head = '%s%s' % (exc, 'Traceback (most recent call last)'. \
rjust(width - len(str(etype))) )
-
- return head
-
- def format_exception(self, etype, evalue):
- colors = self.Colors # just a shorthand + quicker name lookup
- colorsnormal = colors.Normal # used a lot
- indent = ' ' * INDENT_SIZE
- # Get (safely) a string form of the exception info
- try:
- etype_str, evalue_str = map(str, (etype, evalue))
- except:
- # User exception is improperly defined.
- etype, evalue = str, sys.exc_info()[:2]
- etype_str, evalue_str = map(str, (etype, evalue))
- # ... and format it
- exception = ['%s%s%s: %s' % (colors.excName, etype_str,
- colorsnormal, py3compat.cast_unicode(evalue_str))]
-
- if (not py3compat.PY3) and type(evalue) is types.InstanceType:
- try:
- names = [w for w in dir(evalue) if isinstance(w, py3compat.string_types)]
- except:
- # Every now and then, an object with funny internals blows up
- # when dir() is called on it. We do the best we can to report
- # the problem and continue
- _m = '%sException reporting error (object with broken dir())%s:'
- exception.append(_m % (colors.excName, colorsnormal))
- etype_str, evalue_str = map(str, sys.exc_info()[:2])
- exception.append('%s%s%s: %s' % (colors.excName, etype_str,
- colorsnormal, py3compat.cast_unicode(evalue_str)))
- names = []
- for name in names:
- value = text_repr(getattr(evalue, name))
- exception.append('\n%s%s = %s' % (indent, name, value))
-
- return exception
-
- def format_exception_as_a_whole(self, etype, evalue, etb, number_of_lines_of_context, tb_offset):
- """Formats the header, traceback and exception message for a single exception.
-
- This may be called multiple times by Python 3 exception chaining
- (PEP 3134).
- """
- # some locals
- orig_etype = etype
- try:
- etype = etype.__name__
- except AttributeError:
- pass
-
- tb_offset = self.tb_offset if tb_offset is None else tb_offset
- head = self.prepare_header(etype, self.long_header)
- records = self.get_records(etb, number_of_lines_of_context, tb_offset)
-
- if records is None:
- return ""
-
- last_unique, recursion_repeat = find_recursion(orig_etype, evalue, records)
-
- frames = self.format_records(records, last_unique, recursion_repeat)
-
- formatted_exception = self.format_exception(etype, evalue)
- if records:
- filepath, lnum = records[-1][1:3]
- filepath = os.path.abspath(filepath)
- ipinst = get_ipython()
- if ipinst is not None:
- ipinst.hooks.synchronize_with_editor(filepath, lnum, 0)
-
- return [[head] + frames + [''.join(formatted_exception[0])]]
-
- def get_records(self, etb, number_of_lines_of_context, tb_offset):
- try:
- # Try the default getinnerframes and Alex's: Alex's fixes some
- # problems, but it generates empty tracebacks for console errors
-            # (5 blank lines) where none should be returned.
- return _fixed_getinnerframes(etb, number_of_lines_of_context, tb_offset)
+
+ return head
+
+ def format_exception(self, etype, evalue):
+ colors = self.Colors # just a shorthand + quicker name lookup
+ colorsnormal = colors.Normal # used a lot
+ indent = ' ' * INDENT_SIZE
+ # Get (safely) a string form of the exception info
+ try:
+ etype_str, evalue_str = map(str, (etype, evalue))
+ except:
+ # User exception is improperly defined.
+ etype, evalue = str, sys.exc_info()[:2]
+ etype_str, evalue_str = map(str, (etype, evalue))
+ # ... and format it
+ exception = ['%s%s%s: %s' % (colors.excName, etype_str,
+ colorsnormal, py3compat.cast_unicode(evalue_str))]
+
+ if (not py3compat.PY3) and type(evalue) is types.InstanceType:
+ try:
+ names = [w for w in dir(evalue) if isinstance(w, py3compat.string_types)]
+ except:
+ # Every now and then, an object with funny internals blows up
+ # when dir() is called on it. We do the best we can to report
+ # the problem and continue
+ _m = '%sException reporting error (object with broken dir())%s:'
+ exception.append(_m % (colors.excName, colorsnormal))
+ etype_str, evalue_str = map(str, sys.exc_info()[:2])
+ exception.append('%s%s%s: %s' % (colors.excName, etype_str,
+ colorsnormal, py3compat.cast_unicode(evalue_str)))
+ names = []
+ for name in names:
+ value = text_repr(getattr(evalue, name))
+ exception.append('\n%s%s = %s' % (indent, name, value))
+
+ return exception
+
+ def format_exception_as_a_whole(self, etype, evalue, etb, number_of_lines_of_context, tb_offset):
+ """Formats the header, traceback and exception message for a single exception.
+
+ This may be called multiple times by Python 3 exception chaining
+ (PEP 3134).
+ """
+ # some locals
+ orig_etype = etype
+ try:
+ etype = etype.__name__
+ except AttributeError:
+ pass
+
+ tb_offset = self.tb_offset if tb_offset is None else tb_offset
+ head = self.prepare_header(etype, self.long_header)
+ records = self.get_records(etb, number_of_lines_of_context, tb_offset)
+
+ if records is None:
+ return ""
+
+ last_unique, recursion_repeat = find_recursion(orig_etype, evalue, records)
+
+ frames = self.format_records(records, last_unique, recursion_repeat)
+
+ formatted_exception = self.format_exception(etype, evalue)
+ if records:
+ filepath, lnum = records[-1][1:3]
+ filepath = os.path.abspath(filepath)
+ ipinst = get_ipython()
+ if ipinst is not None:
+ ipinst.hooks.synchronize_with_editor(filepath, lnum, 0)
+
+ return [[head] + frames + [''.join(formatted_exception[0])]]
+
+ def get_records(self, etb, number_of_lines_of_context, tb_offset):
+ try:
+ # Try the default getinnerframes and Alex's: Alex's fixes some
+ # problems, but it generates empty tracebacks for console errors
+            # (5 blank lines) where none should be returned.
+ return _fixed_getinnerframes(etb, number_of_lines_of_context, tb_offset)
except UnicodeDecodeError:
# This can occur if a file's encoding magic comment is wrong.
# I can't see a way to recover without duplicating a bunch of code
# from the stdlib traceback module. --TK
error('\nUnicodeDecodeError while processing traceback.\n')
return None
- except:
- # FIXME: I've been getting many crash reports from python 2.3
- # users, traceable to inspect.py. If I can find a small test-case
- # to reproduce this, I should either write a better workaround or
- # file a bug report against inspect (if that's the real problem).
- # So far, I haven't been able to find an isolated example to
- # reproduce the problem.
- inspect_error()
- traceback.print_exc(file=self.ostream)
- info('\nUnfortunately, your original traceback can not be constructed.\n')
- return None
-
- def get_parts_of_chained_exception(self, evalue):
- def get_chained_exception(exception_value):
- cause = getattr(exception_value, '__cause__', None)
- if cause:
- return cause
- if getattr(exception_value, '__suppress_context__', False):
- return None
- return getattr(exception_value, '__context__', None)
-
- chained_evalue = get_chained_exception(evalue)
-
- if chained_evalue:
- return chained_evalue.__class__, chained_evalue, chained_evalue.__traceback__
-
- def structured_traceback(self, etype, evalue, etb, tb_offset=None,
- number_of_lines_of_context=5):
- """Return a nice text document describing the traceback."""
-
- formatted_exception = self.format_exception_as_a_whole(etype, evalue, etb, number_of_lines_of_context,
- tb_offset)
-
- colors = self.Colors # just a shorthand + quicker name lookup
- colorsnormal = colors.Normal # used a lot
+ except:
+ # FIXME: I've been getting many crash reports from python 2.3
+ # users, traceable to inspect.py. If I can find a small test-case
+ # to reproduce this, I should either write a better workaround or
+ # file a bug report against inspect (if that's the real problem).
+ # So far, I haven't been able to find an isolated example to
+ # reproduce the problem.
+ inspect_error()
+ traceback.print_exc(file=self.ostream)
+ info('\nUnfortunately, your original traceback can not be constructed.\n')
+ return None
+
+ def get_parts_of_chained_exception(self, evalue):
+ def get_chained_exception(exception_value):
+ cause = getattr(exception_value, '__cause__', None)
+ if cause:
+ return cause
+ if getattr(exception_value, '__suppress_context__', False):
+ return None
+ return getattr(exception_value, '__context__', None)
+
+ chained_evalue = get_chained_exception(evalue)
+
+ if chained_evalue:
+ return chained_evalue.__class__, chained_evalue, chained_evalue.__traceback__
+
+ def structured_traceback(self, etype, evalue, etb, tb_offset=None,
+ number_of_lines_of_context=5):
+ """Return a nice text document describing the traceback."""
+
+ formatted_exception = self.format_exception_as_a_whole(etype, evalue, etb, number_of_lines_of_context,
+ tb_offset)
+
+ colors = self.Colors # just a shorthand + quicker name lookup
+ colorsnormal = colors.Normal # used a lot
head = '%s%s%s' % (colors.topline, '-' * min(75, get_terminal_size()[0]), colorsnormal)
- structured_traceback_parts = [head]
- if py3compat.PY3:
- chained_exceptions_tb_offset = 0
- lines_of_context = 3
- formatted_exceptions = formatted_exception
- exception = self.get_parts_of_chained_exception(evalue)
- if exception:
- formatted_exceptions += self.prepare_chained_exception_message(evalue.__cause__)
- etype, evalue, etb = exception
- else:
- evalue = None
- chained_exc_ids = set()
- while evalue:
- formatted_exceptions += self.format_exception_as_a_whole(etype, evalue, etb, lines_of_context,
- chained_exceptions_tb_offset)
- exception = self.get_parts_of_chained_exception(evalue)
-
- if exception and not id(exception[1]) in chained_exc_ids:
- chained_exc_ids.add(id(exception[1])) # trace exception to avoid infinite 'cause' loop
- formatted_exceptions += self.prepare_chained_exception_message(evalue.__cause__)
- etype, evalue, etb = exception
- else:
- evalue = None
-
- # we want to see exceptions in a reversed order:
- # the first exception should be on top
- for formatted_exception in reversed(formatted_exceptions):
- structured_traceback_parts += formatted_exception
- else:
- structured_traceback_parts += formatted_exception[0]
-
- return structured_traceback_parts
-
- def debugger(self, force=False):
- """Call up the pdb debugger if desired, always clean up the tb
- reference.
-
- Keywords:
-
- - force(False): by default, this routine checks the instance call_pdb
- flag and does not actually invoke the debugger if the flag is false.
- The 'force' option forces the debugger to activate even if the flag
- is false.
-
- If the call_pdb flag is set, the pdb interactive debugger is
- invoked. In all cases, the self.tb reference to the current traceback
- is deleted to prevent lingering references which hamper memory
- management.
-
- Note that each call to pdb() does an 'import readline', so if your app
- requires a special setup for the readline completers, you'll have to
- fix that by hand after invoking the exception handler."""
-
- if force or self.call_pdb:
- if self.pdb is None:
+ structured_traceback_parts = [head]
+ if py3compat.PY3:
+ chained_exceptions_tb_offset = 0
+ lines_of_context = 3
+ formatted_exceptions = formatted_exception
+ exception = self.get_parts_of_chained_exception(evalue)
+ if exception:
+ formatted_exceptions += self.prepare_chained_exception_message(evalue.__cause__)
+ etype, evalue, etb = exception
+ else:
+ evalue = None
+ chained_exc_ids = set()
+ while evalue:
+ formatted_exceptions += self.format_exception_as_a_whole(etype, evalue, etb, lines_of_context,
+ chained_exceptions_tb_offset)
+ exception = self.get_parts_of_chained_exception(evalue)
+
+ if exception and not id(exception[1]) in chained_exc_ids:
+ chained_exc_ids.add(id(exception[1])) # trace exception to avoid infinite 'cause' loop
+ formatted_exceptions += self.prepare_chained_exception_message(evalue.__cause__)
+ etype, evalue, etb = exception
+ else:
+ evalue = None
+
+ # we want to see exceptions in a reversed order:
+ # the first exception should be on top
+ for formatted_exception in reversed(formatted_exceptions):
+ structured_traceback_parts += formatted_exception
+ else:
+ structured_traceback_parts += formatted_exception[0]
+
+ return structured_traceback_parts
+
+ def debugger(self, force=False):
+ """Call up the pdb debugger if desired, always clean up the tb
+ reference.
+
+ Keywords:
+
+ - force(False): by default, this routine checks the instance call_pdb
+ flag and does not actually invoke the debugger if the flag is false.
+ The 'force' option forces the debugger to activate even if the flag
+ is false.
+
+ If the call_pdb flag is set, the pdb interactive debugger is
+ invoked. In all cases, the self.tb reference to the current traceback
+ is deleted to prevent lingering references which hamper memory
+ management.
+
+ Note that each call to pdb() does an 'import readline', so if your app
+ requires a special setup for the readline completers, you'll have to
+ fix that by hand after invoking the exception handler."""
+
+ if force or self.call_pdb:
+ if self.pdb is None:
self.pdb = self.debugger_cls()
- # the system displayhook may have changed, restore the original
- # for pdb
- display_trap = DisplayTrap(hook=sys.__displayhook__)
- with display_trap:
- self.pdb.reset()
- # Find the right frame so we don't pop up inside ipython itself
- if hasattr(self, 'tb') and self.tb is not None:
- etb = self.tb
- else:
- etb = self.tb = sys.last_traceback
- while self.tb is not None and self.tb.tb_next is not None:
- self.tb = self.tb.tb_next
- if etb and etb.tb_next:
- etb = etb.tb_next
- self.pdb.botframe = etb.tb_frame
- self.pdb.interaction(self.tb.tb_frame, self.tb)
-
- if hasattr(self, 'tb'):
- del self.tb
-
- def handler(self, info=None):
- (etype, evalue, etb) = info or sys.exc_info()
- self.tb = etb
- ostream = self.ostream
- ostream.flush()
- ostream.write(self.text(etype, evalue, etb))
- ostream.write('\n')
- ostream.flush()
-
- # Changed so an instance can just be called as VerboseTB_inst() and print
- # out the right info on its own.
- def __call__(self, etype=None, evalue=None, etb=None):
- """This hook can replace sys.excepthook (for Python 2.1 or higher)."""
- if etb is None:
- self.handler()
- else:
- self.handler((etype, evalue, etb))
- try:
- self.debugger()
- except KeyboardInterrupt:
- print("\nKeyboardInterrupt")
-
-
-#----------------------------------------------------------------------------
-class FormattedTB(VerboseTB, ListTB):
- """Subclass ListTB but allow calling with a traceback.
-
- It can thus be used as a sys.excepthook for Python > 2.1.
-
- Also adds 'Context' and 'Verbose' modes, not available in ListTB.
-
- Allows a tb_offset to be specified. This is useful for situations where
- one needs to remove a number of topmost frames from the traceback (such as
- occurs with python programs that themselves execute other python code,
- like Python shells). """
-
- def __init__(self, mode='Plain', color_scheme='Linux', call_pdb=False,
- ostream=None,
- tb_offset=0, long_header=False, include_vars=False,
+ # the system displayhook may have changed, restore the original
+ # for pdb
+ display_trap = DisplayTrap(hook=sys.__displayhook__)
+ with display_trap:
+ self.pdb.reset()
+ # Find the right frame so we don't pop up inside ipython itself
+ if hasattr(self, 'tb') and self.tb is not None:
+ etb = self.tb
+ else:
+ etb = self.tb = sys.last_traceback
+ while self.tb is not None and self.tb.tb_next is not None:
+ self.tb = self.tb.tb_next
+ if etb and etb.tb_next:
+ etb = etb.tb_next
+ self.pdb.botframe = etb.tb_frame
+ self.pdb.interaction(self.tb.tb_frame, self.tb)
+
+ if hasattr(self, 'tb'):
+ del self.tb
+
+ def handler(self, info=None):
+ (etype, evalue, etb) = info or sys.exc_info()
+ self.tb = etb
+ ostream = self.ostream
+ ostream.flush()
+ ostream.write(self.text(etype, evalue, etb))
+ ostream.write('\n')
+ ostream.flush()
+
+ # Changed so an instance can just be called as VerboseTB_inst() and print
+ # out the right info on its own.
+ def __call__(self, etype=None, evalue=None, etb=None):
+ """This hook can replace sys.excepthook (for Python 2.1 or higher)."""
+ if etb is None:
+ self.handler()
+ else:
+ self.handler((etype, evalue, etb))
+ try:
+ self.debugger()
+ except KeyboardInterrupt:
+ print("\nKeyboardInterrupt")
+
+
+#----------------------------------------------------------------------------
+class FormattedTB(VerboseTB, ListTB):
+ """Subclass ListTB but allow calling with a traceback.
+
+ It can thus be used as a sys.excepthook for Python > 2.1.
+
+ Also adds 'Context' and 'Verbose' modes, not available in ListTB.
+
+ Allows a tb_offset to be specified. This is useful for situations where
+ one needs to remove a number of topmost frames from the traceback (such as
+ occurs with python programs that themselves execute other python code,
+ like Python shells). """
+
+ def __init__(self, mode='Plain', color_scheme='Linux', call_pdb=False,
+ ostream=None,
+ tb_offset=0, long_header=False, include_vars=False,
check_cache=None, debugger_cls=None):
-
- # NEVER change the order of this list. Put new modes at the end:
- self.valid_modes = ['Plain', 'Context', 'Verbose']
- self.verbose_modes = self.valid_modes[1:3]
-
- VerboseTB.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
- ostream=ostream, tb_offset=tb_offset,
- long_header=long_header, include_vars=include_vars,
+
+ # NEVER change the order of this list. Put new modes at the end:
+ self.valid_modes = ['Plain', 'Context', 'Verbose']
+ self.verbose_modes = self.valid_modes[1:3]
+
+ VerboseTB.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb,
+ ostream=ostream, tb_offset=tb_offset,
+ long_header=long_header, include_vars=include_vars,
check_cache=check_cache, debugger_cls=debugger_cls)
-
- # Different types of tracebacks are joined with different separators to
- # form a single string. They are taken from this dict
- self._join_chars = dict(Plain='', Context='\n', Verbose='\n')
- # set_mode also sets the tb_join_char attribute
- self.set_mode(mode)
-
- def _extract_tb(self, tb):
- if tb:
- return traceback.extract_tb(tb)
- else:
- return None
-
- def structured_traceback(self, etype, value, tb, tb_offset=None, number_of_lines_of_context=5):
- tb_offset = self.tb_offset if tb_offset is None else tb_offset
- mode = self.mode
- if mode in self.verbose_modes:
- # Verbose modes need a full traceback
- return VerboseTB.structured_traceback(
- self, etype, value, tb, tb_offset, number_of_lines_of_context
- )
- else:
- # We must check the source cache because otherwise we can print
- # out-of-date source code.
- self.check_cache()
- # Now we can extract and format the exception
- elist = self._extract_tb(tb)
- return ListTB.structured_traceback(
- self, etype, value, elist, tb_offset, number_of_lines_of_context
- )
-
- def stb2text(self, stb):
- """Convert a structured traceback (a list) to a string."""
- return self.tb_join_char.join(stb)
-
-
- def set_mode(self, mode=None):
- """Switch to the desired mode.
-
- If mode is not specified, cycles through the available modes."""
-
- if not mode:
- new_idx = (self.valid_modes.index(self.mode) + 1 ) % \
- len(self.valid_modes)
- self.mode = self.valid_modes[new_idx]
- elif mode not in self.valid_modes:
- raise ValueError('Unrecognized mode in FormattedTB: <' + mode + '>\n'
- 'Valid modes: ' + str(self.valid_modes))
- else:
- self.mode = mode
- # include variable details only in 'Verbose' mode
- self.include_vars = (self.mode == self.valid_modes[2])
- # Set the join character for generating text tracebacks
- self.tb_join_char = self._join_chars[self.mode]
-
- # some convenient shortcuts
- def plain(self):
- self.set_mode(self.valid_modes[0])
-
- def context(self):
- self.set_mode(self.valid_modes[1])
-
- def verbose(self):
- self.set_mode(self.valid_modes[2])
-
-
-#----------------------------------------------------------------------------
-class AutoFormattedTB(FormattedTB):
- """A traceback printer which can be called on the fly.
-
- It will find out about exceptions by itself.
-
- A brief example::
-
- AutoTB = AutoFormattedTB(mode = 'Verbose',color_scheme='Linux')
- try:
- ...
- except:
- AutoTB() # or AutoTB(out=logfile) where logfile is an open file object
- """
-
- def __call__(self, etype=None, evalue=None, etb=None,
- out=None, tb_offset=None):
- """Print out a formatted exception traceback.
-
- Optional arguments:
- - out: an open file-like object to direct output to.
-
- - tb_offset: the number of frames to skip over in the stack, on a
-            per-call basis (this temporarily overrides the instance's tb_offset
-            given at initialization time). """
-
- if out is None:
- out = self.ostream
- out.flush()
- out.write(self.text(etype, evalue, etb, tb_offset))
- out.write('\n')
- out.flush()
- # FIXME: we should remove the auto pdb behavior from here and leave
- # that to the clients.
- try:
- self.debugger()
- except KeyboardInterrupt:
- print("\nKeyboardInterrupt")
-
- def structured_traceback(self, etype=None, value=None, tb=None,
- tb_offset=None, number_of_lines_of_context=5):
- if etype is None:
- etype, value, tb = sys.exc_info()
- self.tb = tb
- return FormattedTB.structured_traceback(
- self, etype, value, tb, tb_offset, number_of_lines_of_context)
-
-
-#---------------------------------------------------------------------------
-
-# A simple class to preserve Nathan's original functionality.
-class ColorTB(FormattedTB):
- """Shorthand to initialize a FormattedTB in Linux colors mode."""
-
- def __init__(self, color_scheme='Linux', call_pdb=0, **kwargs):
- FormattedTB.__init__(self, color_scheme=color_scheme,
- call_pdb=call_pdb, **kwargs)
-
-
-class SyntaxTB(ListTB):
- """Extension which holds some state: the last exception value"""
-
- def __init__(self, color_scheme='NoColor'):
- ListTB.__init__(self, color_scheme)
- self.last_syntax_error = None
-
- def __call__(self, etype, value, elist):
- self.last_syntax_error = value
-
- ListTB.__call__(self, etype, value, elist)
-
- def structured_traceback(self, etype, value, elist, tb_offset=None,
- context=5):
- # If the source file has been edited, the line in the syntax error can
- # be wrong (retrieved from an outdated cache). This replaces it with
- # the current value.
- if isinstance(value, SyntaxError) \
- and isinstance(value.filename, py3compat.string_types) \
- and isinstance(value.lineno, int):
- linecache.checkcache(value.filename)
- newtext = ulinecache.getline(value.filename, value.lineno)
- if newtext:
- value.text = newtext
+
+ # Different types of tracebacks are joined with different separators to
+ # form a single string. They are taken from this dict
+ self._join_chars = dict(Plain='', Context='\n', Verbose='\n')
+ # set_mode also sets the tb_join_char attribute
+ self.set_mode(mode)
+
+ def _extract_tb(self, tb):
+ if tb:
+ return traceback.extract_tb(tb)
+ else:
+ return None
+
+ def structured_traceback(self, etype, value, tb, tb_offset=None, number_of_lines_of_context=5):
+ tb_offset = self.tb_offset if tb_offset is None else tb_offset
+ mode = self.mode
+ if mode in self.verbose_modes:
+ # Verbose modes need a full traceback
+ return VerboseTB.structured_traceback(
+ self, etype, value, tb, tb_offset, number_of_lines_of_context
+ )
+ else:
+ # We must check the source cache because otherwise we can print
+ # out-of-date source code.
+ self.check_cache()
+ # Now we can extract and format the exception
+ elist = self._extract_tb(tb)
+ return ListTB.structured_traceback(
+ self, etype, value, elist, tb_offset, number_of_lines_of_context
+ )
+
+ def stb2text(self, stb):
+ """Convert a structured traceback (a list) to a string."""
+ return self.tb_join_char.join(stb)
+
+
+ def set_mode(self, mode=None):
+ """Switch to the desired mode.
+
+ If mode is not specified, cycles through the available modes."""
+
+ if not mode:
+ new_idx = (self.valid_modes.index(self.mode) + 1 ) % \
+ len(self.valid_modes)
+ self.mode = self.valid_modes[new_idx]
+ elif mode not in self.valid_modes:
+ raise ValueError('Unrecognized mode in FormattedTB: <' + mode + '>\n'
+ 'Valid modes: ' + str(self.valid_modes))
+ else:
+ self.mode = mode
+ # include variable details only in 'Verbose' mode
+ self.include_vars = (self.mode == self.valid_modes[2])
+ # Set the join character for generating text tracebacks
+ self.tb_join_char = self._join_chars[self.mode]
+
+ # some convenient shortcuts
+ def plain(self):
+ self.set_mode(self.valid_modes[0])
+
+ def context(self):
+ self.set_mode(self.valid_modes[1])
+
+ def verbose(self):
+ self.set_mode(self.valid_modes[2])
+
+
+#----------------------------------------------------------------------------
+class AutoFormattedTB(FormattedTB):
+ """A traceback printer which can be called on the fly.
+
+ It will find out about exceptions by itself.
+
+ A brief example::
+
+ AutoTB = AutoFormattedTB(mode = 'Verbose',color_scheme='Linux')
+ try:
+ ...
+ except:
+ AutoTB() # or AutoTB(out=logfile) where logfile is an open file object
+ """
+
+ def __call__(self, etype=None, evalue=None, etb=None,
+ out=None, tb_offset=None):
+ """Print out a formatted exception traceback.
+
+ Optional arguments:
+ - out: an open file-like object to direct output to.
+
+ - tb_offset: the number of frames to skip over in the stack, on a
+            per-call basis (this temporarily overrides the instance's tb_offset
+            given at initialization time). """
+
+ if out is None:
+ out = self.ostream
+ out.flush()
+ out.write(self.text(etype, evalue, etb, tb_offset))
+ out.write('\n')
+ out.flush()
+ # FIXME: we should remove the auto pdb behavior from here and leave
+ # that to the clients.
+ try:
+ self.debugger()
+ except KeyboardInterrupt:
+ print("\nKeyboardInterrupt")
+
+ def structured_traceback(self, etype=None, value=None, tb=None,
+ tb_offset=None, number_of_lines_of_context=5):
+ if etype is None:
+ etype, value, tb = sys.exc_info()
+ self.tb = tb
+ return FormattedTB.structured_traceback(
+ self, etype, value, tb, tb_offset, number_of_lines_of_context)
+
+
+#---------------------------------------------------------------------------
+
+# A simple class to preserve Nathan's original functionality.
+class ColorTB(FormattedTB):
+ """Shorthand to initialize a FormattedTB in Linux colors mode."""
+
+ def __init__(self, color_scheme='Linux', call_pdb=0, **kwargs):
+ FormattedTB.__init__(self, color_scheme=color_scheme,
+ call_pdb=call_pdb, **kwargs)
+
+
+class SyntaxTB(ListTB):
+ """Extension which holds some state: the last exception value"""
+
+ def __init__(self, color_scheme='NoColor'):
+ ListTB.__init__(self, color_scheme)
+ self.last_syntax_error = None
+
+ def __call__(self, etype, value, elist):
+ self.last_syntax_error = value
+
+ ListTB.__call__(self, etype, value, elist)
+
+ def structured_traceback(self, etype, value, elist, tb_offset=None,
+ context=5):
+ # If the source file has been edited, the line in the syntax error can
+ # be wrong (retrieved from an outdated cache). This replaces it with
+ # the current value.
+ if isinstance(value, SyntaxError) \
+ and isinstance(value.filename, py3compat.string_types) \
+ and isinstance(value.lineno, int):
+ linecache.checkcache(value.filename)
+ newtext = ulinecache.getline(value.filename, value.lineno)
+ if newtext:
+ value.text = newtext
self.last_syntax_error = value
- return super(SyntaxTB, self).structured_traceback(etype, value, elist,
- tb_offset=tb_offset, context=context)
-
- def clear_err_state(self):
- """Return the current error state and clear it"""
- e = self.last_syntax_error
- self.last_syntax_error = None
- return e
-
- def stb2text(self, stb):
- """Convert a structured traceback (a list) to a string."""
- return ''.join(stb)
-
-
-# some internal-use functions
-def text_repr(value):
- """Hopefully pretty robust repr equivalent."""
- # this is pretty horrible but should always return *something*
- try:
- return pydoc.text.repr(value)
- except KeyboardInterrupt:
- raise
- except:
- try:
- return repr(value)
- except KeyboardInterrupt:
- raise
- except:
- try:
- # all still in an except block so we catch
- # getattr raising
- name = getattr(value, '__name__', None)
- if name:
- # ick, recursion
- return text_repr(name)
- klass = getattr(value, '__class__', None)
- if klass:
- return '%s instance' % text_repr(klass)
- except KeyboardInterrupt:
- raise
- except:
- return 'UNRECOVERABLE REPR FAILURE'
-
-
-def eqrepr(value, repr=text_repr):
- return '=%s' % repr(value)
-
-
-def nullrepr(value, repr=text_repr):
- return ''
+ return super(SyntaxTB, self).structured_traceback(etype, value, elist,
+ tb_offset=tb_offset, context=context)
+
+ def clear_err_state(self):
+ """Return the current error state and clear it"""
+ e = self.last_syntax_error
+ self.last_syntax_error = None
+ return e
+
+ def stb2text(self, stb):
+ """Convert a structured traceback (a list) to a string."""
+ return ''.join(stb)
+
+
+# some internal-use functions
+def text_repr(value):
+ """Hopefully pretty robust repr equivalent."""
+ # this is pretty horrible but should always return *something*
+ try:
+ return pydoc.text.repr(value)
+ except KeyboardInterrupt:
+ raise
+ except:
+ try:
+ return repr(value)
+ except KeyboardInterrupt:
+ raise
+ except:
+ try:
+ # all still in an except block so we catch
+ # getattr raising
+ name = getattr(value, '__name__', None)
+ if name:
+ # ick, recursion
+ return text_repr(name)
+ klass = getattr(value, '__class__', None)
+ if klass:
+ return '%s instance' % text_repr(klass)
+ except KeyboardInterrupt:
+ raise
+ except:
+ return 'UNRECOVERABLE REPR FAILURE'
+
+
+def eqrepr(value, repr=text_repr):
+ return '=%s' % repr(value)
+
+
+def nullrepr(value, repr=text_repr):
+ return ''
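 
The classes above are the traceback formatters IPython swaps in for the stock
Python traceback printer; as the VerboseTB and AutoFormattedTB docstrings note,
an instance can stand in for `sys.excepthook`. A minimal sketch of that use,
assuming these classes live at their usual import path `IPython.core.ultratb`
(the import path itself is not shown in this hunk)::

    # Illustrative sketch; assumes IPython is installed and that these classes
    # are importable from IPython.core.ultratb.
    import sys
    from IPython.core.ultratb import AutoFormattedTB

    # 'Verbose' mode includes local variables; 'Plain' mirrors the stock output.
    sys.excepthook = AutoFormattedTB(mode='Verbose', color_scheme='Linux')

    def divide(a, b):
        return a / b

    divide(1, 0)  # uncaught, so the hook prints the colored, annotated traceback

The same instance can also be called directly inside an `except` block, as the
`AutoFormattedTB` docstring example shows.
 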
diff --git a/contrib/python/ipython/py2/IPython/core/usage.py b/contrib/python/ipython/py2/IPython/core/usage.py
index 8a890c7792..c4d3c16eca 100644
--- a/contrib/python/ipython/py2/IPython/core/usage.py
+++ b/contrib/python/ipython/py2/IPython/core/usage.py
@@ -1,344 +1,344 @@
-# -*- coding: utf-8 -*-
-"""Usage information for the main IPython applications.
-"""
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu>
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-import sys
-from IPython.core import release
-
-cl_usage = """\
-=========
- IPython
-=========
-
-Tools for Interactive Computing in Python
-=========================================
-
- A Python shell with automatic history (input and output), dynamic object
- introspection, easier configuration, command completion, access to the
- system shell and more. IPython can also be embedded in running programs.
-
-
-Usage
-
- ipython [subcommand] [options] [-c cmd | -m mod | file] [--] [arg] ...
-
- If invoked with no options, it executes the file and exits, passing the
- remaining arguments to the script, just as if you had specified the same
- command with python. You may need to specify `--` before args to be passed
- to the script, to prevent IPython from attempting to parse them. If you
- specify the option `-i` before the filename, it will enter an interactive
- IPython session after running the script, rather than exiting. Files ending
- in .py will be treated as normal Python, but files ending in .ipy can
- contain special IPython syntax (magic commands, shell expansions, etc.).
-
- Almost all configuration in IPython is available via the command-line. Do
- `ipython --help-all` to see all available options. For persistent
- configuration, look into your `ipython_config.py` configuration file for
- details.
-
- This file is typically installed in the `IPYTHONDIR` directory, and there
- is a separate configuration directory for each profile. The default profile
- directory will be located in $IPYTHONDIR/profile_default. IPYTHONDIR
-    defaults to `$HOME/.ipython`. For Windows users, $HOME resolves to
- C:\\Users\\YourUserName in most instances.
-
- To initialize a profile with the default configuration file, do::
-
- $> ipython profile create
-
- and start editing `IPYTHONDIR/profile_default/ipython_config.py`
-
- In IPython's documentation, we will refer to this directory as
-    `IPYTHONDIR`; you can change its default location by creating an
- environment variable with this name and setting it to the desired path.
-
- For more information, see the manual available in HTML and PDF in your
- installation, or online at http://ipython.org/documentation.html.
-"""
-
-interactive_usage = """
-IPython -- An enhanced Interactive Python
-=========================================
-
+# -*- coding: utf-8 -*-
+"""Usage information for the main IPython applications.
+"""
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu>
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+import sys
+from IPython.core import release
+
+cl_usage = """\
+=========
+ IPython
+=========
+
+Tools for Interactive Computing in Python
+=========================================
+
+ A Python shell with automatic history (input and output), dynamic object
+ introspection, easier configuration, command completion, access to the
+ system shell and more. IPython can also be embedded in running programs.
+
+
+Usage
+
+ ipython [subcommand] [options] [-c cmd | -m mod | file] [--] [arg] ...
+
+ If invoked with no options, it executes the file and exits, passing the
+ remaining arguments to the script, just as if you had specified the same
+ command with python. You may need to specify `--` before args to be passed
+ to the script, to prevent IPython from attempting to parse them. If you
+ specify the option `-i` before the filename, it will enter an interactive
+ IPython session after running the script, rather than exiting. Files ending
+ in .py will be treated as normal Python, but files ending in .ipy can
+ contain special IPython syntax (magic commands, shell expansions, etc.).
+
+ Almost all configuration in IPython is available via the command-line. Do
+ `ipython --help-all` to see all available options. For persistent
+ configuration, look into your `ipython_config.py` configuration file for
+ details.
+
+ This file is typically installed in the `IPYTHONDIR` directory, and there
+ is a separate configuration directory for each profile. The default profile
+ directory will be located in $IPYTHONDIR/profile_default. IPYTHONDIR
+    defaults to `$HOME/.ipython`. For Windows users, $HOME resolves to
+ C:\\Users\\YourUserName in most instances.
+
+ To initialize a profile with the default configuration file, do::
+
+ $> ipython profile create
+
+ and start editing `IPYTHONDIR/profile_default/ipython_config.py`
+
+ In IPython's documentation, we will refer to this directory as
+    `IPYTHONDIR`; you can change its default location by creating an
+ environment variable with this name and setting it to the desired path.
+
+ For more information, see the manual available in HTML and PDF in your
+ installation, or online at http://ipython.org/documentation.html.
+"""
+
+interactive_usage = """
+IPython -- An enhanced Interactive Python
+=========================================
+
IPython offers a fully compatible replacement for the standard Python
interpreter, with convenient shell features, special commands, command
history mechanism and output results caching.
-
-At your system command line, type 'ipython -h' to see the command line
-options available. This document only describes interactive features.
-
-MAIN FEATURES
--------------
-
+
+At your system command line, type 'ipython -h' to see the command line
+options available. This document only describes interactive features.
+
+MAIN FEATURES
+-------------
+
* Access to the standard Python help with object docstrings and the Python
manuals. Simply type 'help' (no quotes) to invoke it.
-
-* Magic commands: type %magic for information on the magic subsystem.
-
-* System command aliases, via the %alias command or the configuration file(s).
-
-* Dynamic object information:
-
+
+* Magic commands: type %magic for information on the magic subsystem.
+
+* System command aliases, via the %alias command or the configuration file(s).
+
+* Dynamic object information:
+
Typing ?word or word? prints detailed information about an object. Certain
long strings (code, etc.) get snipped in the center for brevity.
-
- Typing ??word or word?? gives access to the full information without
+
+ Typing ??word or word?? gives access to the full information without
snipping long strings. Strings that are longer than the screen are printed
through the less pager.
-
- The ?/?? system gives access to the full source code for any object (if
- available), shows function prototypes and other useful information.
-
- If you just want to see an object's docstring, type '%pdoc object' (without
- quotes, and without % if you have automagic on).
-
+
+ The ?/?? system gives access to the full source code for any object (if
+ available), shows function prototypes and other useful information.
+
+ If you just want to see an object's docstring, type '%pdoc object' (without
+ quotes, and without % if you have automagic on).
+
* Tab completion in the local namespace:
-
- At any time, hitting tab will complete any available python commands or
- variable names, and show you a list of the possible completions if there's
- no unambiguous one. It will also complete filenames in the current directory.
-
+
+ At any time, hitting tab will complete any available python commands or
+ variable names, and show you a list of the possible completions if there's
+ no unambiguous one. It will also complete filenames in the current directory.
+
* Search previous command history in multiple ways:
-
+
- Start typing, and then use arrow keys up/down or (Ctrl-p/Ctrl-n) to search
through the history items that match what you've typed so far.
-
- - Hit Ctrl-r: opens a search prompt. Begin typing and the system searches
- your history for lines that match what you've typed so far, completing as
- much as it can.
-
+
+ - Hit Ctrl-r: opens a search prompt. Begin typing and the system searches
+ your history for lines that match what you've typed so far, completing as
+ much as it can.
+
- %hist: search history by index.
-
-* Persistent command history across sessions.
-
-* Logging of input with the ability to save and restore a working session.
-
+
+* Persistent command history across sessions.
+
+* Logging of input with the ability to save and restore a working session.
+
* System shell with !. Typing !ls will run 'ls' in the current directory.
-
-* The reload command does a 'deep' reload of a module: changes made to the
-  module since you imported it will actually be available without having to exit.
-
-* Verbose and colored exception traceback printouts. See the magic xmode and
- xcolor functions for details (just type %magic).
-
-* Input caching system:
-
- IPython offers numbered prompts (In/Out) with input and output caching. All
- input is saved and can be retrieved as variables (besides the usual arrow
- key recall).
-
- The following GLOBAL variables always exist (so don't overwrite them!):
- _i: stores previous input.
- _ii: next previous.
- _iii: next-next previous.
-  _ih : a list of all input; _ih[n] is the input from line n.
-
- Additionally, global variables named _i<n> are dynamically created (<n>
- being the prompt counter), such that _i<n> == _ih[<n>]
-
- For example, what you typed at prompt 14 is available as _i14 and _ih[14].
-
- You can create macros which contain multiple input lines from this history,
- for later re-execution, with the %macro function.
-
- The history function %hist allows you to see any part of your input history
- by printing a range of the _i variables. Note that inputs which contain
- magic functions (%) appear in the history with a prepended comment. This is
- because they aren't really valid Python code, so you can't exec them.
-
-* Output caching system:
-
- For output that is returned from actions, a system similar to the input
- cache exists but using _ instead of _i. Only actions that produce a result
- (NOT assignments, for example) are cached. If you are familiar with
- Mathematica, IPython's _ variables behave exactly like Mathematica's %
- variables.
-
- The following GLOBAL variables always exist (so don't overwrite them!):
- _ (one underscore): previous output.
- __ (two underscores): next previous.
- ___ (three underscores): next-next previous.
-
- Global variables named _<n> are dynamically created (<n> being the prompt
- counter), such that the result of output <n> is always available as _<n>.
-
- Finally, a global dictionary named _oh exists with entries for all lines
- which generated output.
-
-* Directory history:
-
- Your history of visited directories is kept in the global list _dh, and the
- magic %cd command can be used to go to any entry in that list.
-
-* Auto-parentheses and auto-quotes (adapted from Nathan Gray's LazyPython)
-
- 1. Auto-parentheses
-
- Callable objects (i.e. functions, methods, etc) can be invoked like
- this (notice the commas between the arguments)::
+
+* The reload command does a 'deep' reload of a module: changes made to the
+  module since you imported it will actually be available without having to exit.
+
+* Verbose and colored exception traceback printouts. See the magic xmode and
+ xcolor functions for details (just type %magic).
+
+* Input caching system:
+
+ IPython offers numbered prompts (In/Out) with input and output caching. All
+ input is saved and can be retrieved as variables (besides the usual arrow
+ key recall).
+
+ The following GLOBAL variables always exist (so don't overwrite them!):
+ _i: stores previous input.
+ _ii: next previous.
+ _iii: next-next previous.
+  _ih : a list of all input; _ih[n] is the input from line n.
+
+ Additionally, global variables named _i<n> are dynamically created (<n>
+ being the prompt counter), such that _i<n> == _ih[<n>]
+
+ For example, what you typed at prompt 14 is available as _i14 and _ih[14].
+
+ You can create macros which contain multiple input lines from this history,
+ for later re-execution, with the %macro function.
+
+ The history function %hist allows you to see any part of your input history
+ by printing a range of the _i variables. Note that inputs which contain
+ magic functions (%) appear in the history with a prepended comment. This is
+ because they aren't really valid Python code, so you can't exec them.
+
+* Output caching system:
+
+ For output that is returned from actions, a system similar to the input
+ cache exists but using _ instead of _i. Only actions that produce a result
+ (NOT assignments, for example) are cached. If you are familiar with
+ Mathematica, IPython's _ variables behave exactly like Mathematica's %
+ variables.
+
+ The following GLOBAL variables always exist (so don't overwrite them!):
+ _ (one underscore): previous output.
+ __ (two underscores): next previous.
+ ___ (three underscores): next-next previous.
+
+ Global variables named _<n> are dynamically created (<n> being the prompt
+ counter), such that the result of output <n> is always available as _<n>.
+
+ Finally, a global dictionary named _oh exists with entries for all lines
+ which generated output.
+
+* Directory history:
+
+ Your history of visited directories is kept in the global list _dh, and the
+ magic %cd command can be used to go to any entry in that list.
+
+* Auto-parentheses and auto-quotes (adapted from Nathan Gray's LazyPython)
+
+ 1. Auto-parentheses
- In [1]: callable_ob arg1, arg2, arg3
-
- and the input will be translated to this::
-
- callable_ob(arg1, arg2, arg3)
-
- This feature is off by default (in rare cases it can produce
- undesirable side-effects), but you can activate it at the command-line
- by starting IPython with `--autocall 1`, set it permanently in your
- configuration file, or turn on at runtime with `%autocall 1`.
-
- You can force auto-parentheses by using '/' as the first character
- of a line. For example::
-
- In [1]: /globals # becomes 'globals()'
-
- Note that the '/' MUST be the first character on the line! This
- won't work::
-
- In [2]: print /globals # syntax error
-
- In most cases the automatic algorithm should work, so you should
- rarely need to explicitly invoke /. One notable exception is if you
- are trying to call a function with a list of tuples as arguments (the
-     parentheses will confuse IPython)::
-
- In [1]: zip (1,2,3),(4,5,6) # won't work
-
- but this will work::
-
- In [2]: /zip (1,2,3),(4,5,6)
- ------> zip ((1,2,3),(4,5,6))
- Out[2]= [(1, 4), (2, 5), (3, 6)]
-
- IPython tells you that it has altered your command line by
- displaying the new command line preceded by -->. e.g.::
-
- In [18]: callable list
- -------> callable (list)
-
- 2. Auto-Quoting
-
- You can force auto-quoting of a function's arguments by using ',' as
- the first character of a line. For example::
-
- In [1]: ,my_function /home/me # becomes my_function("/home/me")
-
- If you use ';' instead, the whole argument is quoted as a single
- string (while ',' splits on whitespace)::
-
- In [2]: ,my_function a b c # becomes my_function("a","b","c")
- In [3]: ;my_function a b c # becomes my_function("a b c")
-
- Note that the ',' MUST be the first character on the line! This
- won't work::
-
- In [4]: x = ,my_function /home/me # syntax error
-"""
-
-interactive_usage_min = """\
-An enhanced console for Python.
-Some of its features are:
-- Tab completion in the local namespace.
-- Logging of input, see command-line options.
-- System shell escape via ! , eg !ls.
-- Magic commands, starting with a % (like %ls, %pwd, %cd, etc.)
-- Keeps track of locally defined variables via %who, %whos.
-- Show object information with a ? eg ?x or x? (use ?? for more info).
-"""
-
-quick_reference = r"""
-IPython -- An enhanced Interactive Python - Quick Reference Card
-================================================================
-
-obj?, obj?? : Get help, or more help for object (also works as
- ?obj, ??obj).
-?foo.*abc* : List names in 'foo' containing 'abc' in them.
-%magic : Information about IPython's 'magic' % functions.
-
-Magic functions are prefixed by % or %%, and typically take their arguments
-without parentheses, quotes or even commas for convenience. Line magics take a
-single % and cell magics are prefixed with two %%.
-
-Example magic function calls:
-
-%alias d ls -F : 'd' is now an alias for 'ls -F'
-alias d ls -F : Works if 'alias' not a python name
-alist = %alias : Get list of aliases to 'alist'
-cd /usr/share : Obvious. cd -<tab> to choose from visited dirs.
-%cd?? : See help AND source for magic %cd
-%timeit x=10 : time the 'x=10' statement with high precision.
-%%timeit x=2**100
-x**100 : time 'x**100' with a setup of 'x=2**100'; setup code is not
- counted. This is an example of a cell magic.
-
-System commands:
-
-!cp a.txt b/ : System command escape, calls os.system()
-cp a.txt b/ : after %rehashx, most system commands work without !
-cp ${f}.txt $bar : Variable expansion in magics and system commands
-files = !ls /usr  : Capture system command output
-files.s, files.l, files.n: "a b c", ['a','b','c'], 'a\nb\nc'
-
-History:
-
-_i, _ii, _iii : Previous, next previous, next next previous input
-_i4, _ih[2:5] : Input history line 4, lines 2-4
-exec _i81 : Execute input history line #81 again
-%rep 81 : Edit input history line #81
-_, __, ___ : previous, next previous, next next previous output
-_dh : Directory history
-_oh : Output history
-%hist : Command history of current session.
-%hist -g foo : Search command history of (almost) all sessions for 'foo'.
-%hist -g : Command history of (almost) all sessions.
-%hist 1/2-8 : Command history containing lines 2-8 of session 1.
-%hist 1/ ~2/ : Command history of session 1 and 2 sessions before current.
-%hist ~8/1-~6/5 : Command history from line 1 of 8 sessions ago to
- line 5 of 6 sessions ago.
-%edit 0/ : Open editor to execute code with history of current session.
-
-Autocall:
-
-f 1,2 : f(1,2) # Off by default, enable with %autocall magic.
-/f 1,2 : f(1,2) (forced autoparen)
-,f 1 2 : f("1","2")
-;f 1 2 : f("1 2")
-
-Remember: TAB completion works in many contexts, not just file names
-or python names.
-
-The following magic functions are currently available:
-
-"""
-
-quick_guide = """\
-? -> Introduction and overview of IPython's features.
-%quickref -> Quick reference.
-help -> Python's own help system.
-object? -> Details about 'object', use 'object??' for extra details.
-"""
-
-default_banner_parts = [
- 'Python %s\n' % (sys.version.split('\n')[0],),
- 'Type "copyright", "credits" or "license" for more information.\n\n',
- 'IPython {version} -- An enhanced Interactive Python.\n'.format(
- version=release.version,
- ),
- quick_guide
-]
-
-default_banner = ''.join(default_banner_parts)
-
+ Callable objects (i.e. functions, methods, etc) can be invoked like
+ this (notice the commas between the arguments)::
+
+ In [1]: callable_ob arg1, arg2, arg3
+
+ and the input will be translated to this::
+
+ callable_ob(arg1, arg2, arg3)
+
+ This feature is off by default (in rare cases it can produce
+ undesirable side-effects), but you can activate it at the command-line
+ by starting IPython with `--autocall 1`, set it permanently in your
+ configuration file, or turn on at runtime with `%autocall 1`.
+
+ You can force auto-parentheses by using '/' as the first character
+ of a line. For example::
+
+ In [1]: /globals # becomes 'globals()'
+
+ Note that the '/' MUST be the first character on the line! This
+ won't work::
+
+ In [2]: print /globals # syntax error
+
+ In most cases the automatic algorithm should work, so you should
+ rarely need to explicitly invoke /. One notable exception is if you
+ are trying to call a function with a list of tuples as arguments (the
+     parentheses will confuse IPython)::
+
+ In [1]: zip (1,2,3),(4,5,6) # won't work
+
+ but this will work::
+
+ In [2]: /zip (1,2,3),(4,5,6)
+ ------> zip ((1,2,3),(4,5,6))
+ Out[2]= [(1, 4), (2, 5), (3, 6)]
+
+ IPython tells you that it has altered your command line by
+ displaying the new command line preceded by -->. e.g.::
+
+ In [18]: callable list
+ -------> callable (list)
+
+ 2. Auto-Quoting
+
+ You can force auto-quoting of a function's arguments by using ',' as
+ the first character of a line. For example::
+
+ In [1]: ,my_function /home/me # becomes my_function("/home/me")
+
+ If you use ';' instead, the whole argument is quoted as a single
+ string (while ',' splits on whitespace)::
+
+ In [2]: ,my_function a b c # becomes my_function("a","b","c")
+ In [3]: ;my_function a b c # becomes my_function("a b c")
+
+ Note that the ',' MUST be the first character on the line! This
+ won't work::
+
+ In [4]: x = ,my_function /home/me # syntax error
+"""
+
+interactive_usage_min = """\
+An enhanced console for Python.
+Some of its features are:
+- Tab completion in the local namespace.
+- Logging of input, see command-line options.
+- System shell escape via ! , eg !ls.
+- Magic commands, starting with a % (like %ls, %pwd, %cd, etc.)
+- Keeps track of locally defined variables via %who, %whos.
+- Show object information with a ? eg ?x or x? (use ?? for more info).
+"""
+
+quick_reference = r"""
+IPython -- An enhanced Interactive Python - Quick Reference Card
+================================================================
+
+obj?, obj?? : Get help, or more help for object (also works as
+ ?obj, ??obj).
+?foo.*abc* : List names in 'foo' containing 'abc' in them.
+%magic : Information about IPython's 'magic' % functions.
+
+Magic functions are prefixed by % or %%, and typically take their arguments
+without parentheses, quotes or even commas for convenience. Line magics take a
+single % and cell magics are prefixed with two %%.
+
+Example magic function calls:
+
+%alias d ls -F : 'd' is now an alias for 'ls -F'
+alias d ls -F : Works if 'alias' not a python name
+alist = %alias : Get list of aliases to 'alist'
+cd /usr/share : Obvious. cd -<tab> to choose from visited dirs.
+%cd?? : See help AND source for magic %cd
+%timeit x=10 : time the 'x=10' statement with high precision.
+%%timeit x=2**100
+x**100 : time 'x**100' with a setup of 'x=2**100'; setup code is not
+ counted. This is an example of a cell magic.
+
+System commands:
+
+!cp a.txt b/ : System command escape, calls os.system()
+cp a.txt b/ : after %rehashx, most system commands work without !
+cp ${f}.txt $bar : Variable expansion in magics and system commands
+files = !ls /usr  : Capture system command output
+files.s, files.l, files.n: "a b c", ['a','b','c'], 'a\nb\nc'
+
+History:
+
+_i, _ii, _iii : Previous, next previous, next next previous input
+_i4, _ih[2:5] : Input history line 4, lines 2-4
+exec _i81 : Execute input history line #81 again
+%rep 81 : Edit input history line #81
+_, __, ___ : previous, next previous, next next previous output
+_dh : Directory history
+_oh : Output history
+%hist : Command history of current session.
+%hist -g foo : Search command history of (almost) all sessions for 'foo'.
+%hist -g : Command history of (almost) all sessions.
+%hist 1/2-8 : Command history containing lines 2-8 of session 1.
+%hist 1/ ~2/ : Command history of session 1 and 2 sessions before current.
+%hist ~8/1-~6/5 : Command history from line 1 of 8 sessions ago to
+ line 5 of 6 sessions ago.
+%edit 0/ : Open editor to execute code with history of current session.
+
+Autocall:
+
+f 1,2 : f(1,2) # Off by default, enable with %autocall magic.
+/f 1,2 : f(1,2) (forced autoparen)
+,f 1 2 : f("1","2")
+;f 1 2 : f("1 2")
+
+Remember: TAB completion works in many contexts, not just file names
+or python names.
+
+The following magic functions are currently available:
+
+"""
+
+quick_guide = """\
+? -> Introduction and overview of IPython's features.
+%quickref -> Quick reference.
+help -> Python's own help system.
+object? -> Details about 'object', use 'object??' for extra details.
+"""
+
+default_banner_parts = [
+ 'Python %s\n' % (sys.version.split('\n')[0],),
+ 'Type "copyright", "credits" or "license" for more information.\n\n',
+ 'IPython {version} -- An enhanced Interactive Python.\n'.format(
+ version=release.version,
+ ),
+ quick_guide
+]
+
+default_banner = ''.join(default_banner_parts)
+
# deprecated GUI banner
-
+
default_gui_banner = '\n'.join([
'DEPRECATED: IPython.core.usage.default_gui_banner is deprecated and will be removed',
default_banner,
diff --git a/contrib/python/ipython/py2/IPython/display.py b/contrib/python/ipython/py2/IPython/display.py
index 872b93e92b..7d248ba023 100644
--- a/contrib/python/ipython/py2/IPython/display.py
+++ b/contrib/python/ipython/py2/IPython/display.py
@@ -1,16 +1,16 @@
-"""Public API for display tools in IPython.
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2012 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-from IPython.core.display import *
-from IPython.lib.display import *
+"""Public API for display tools in IPython.
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2012 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+from IPython.core.display import *
+from IPython.lib.display import *
diff --git a/contrib/python/ipython/py2/IPython/extensions/__init__.py b/contrib/python/ipython/py2/IPython/extensions/__init__.py
index acc4dc7601..db7f79fca6 100644
--- a/contrib/python/ipython/py2/IPython/extensions/__init__.py
+++ b/contrib/python/ipython/py2/IPython/extensions/__init__.py
@@ -1,2 +1,2 @@
-# -*- coding: utf-8 -*-
-"""This directory is meant for IPython extensions."""
+# -*- coding: utf-8 -*-
+"""This directory is meant for IPython extensions."""
diff --git a/contrib/python/ipython/py2/IPython/extensions/autoreload.py b/contrib/python/ipython/py2/IPython/extensions/autoreload.py
index 9406e697d2..d3e420574d 100644
--- a/contrib/python/ipython/py2/IPython/extensions/autoreload.py
+++ b/contrib/python/ipython/py2/IPython/extensions/autoreload.py
@@ -1,536 +1,536 @@
-"""IPython extension to reload modules before executing user code.
-
-``autoreload`` reloads modules automatically before entering the execution of
-code typed at the IPython prompt.
-
-This makes, for example, the following workflow possible:
-
-.. sourcecode:: ipython
-
- In [1]: %load_ext autoreload
-
- In [2]: %autoreload 2
-
- In [3]: from foo import some_function
-
- In [4]: some_function()
- Out[4]: 42
-
- In [5]: # open foo.py in an editor and change some_function to return 43
-
- In [6]: some_function()
- Out[6]: 43
-
-The module was reloaded without reloading it explicitly, and the object
-imported with ``from foo import ...`` was also updated.
-
-Usage
-=====
-
-The following magic commands are provided:
-
-``%autoreload``
-
- Reload all modules (except those excluded by ``%aimport``)
- automatically now.
-
-``%autoreload 0``
-
- Disable automatic reloading.
-
-``%autoreload 1``
-
- Reload all modules imported with ``%aimport`` every time before
- executing the Python code typed.
-
-``%autoreload 2``
-
- Reload all modules (except those excluded by ``%aimport``) every
- time before executing the Python code typed.
-
-``%aimport``
-
- List modules which are to be automatically imported or not to be imported.
-
-``%aimport foo``
-
- Import module 'foo' and mark it to be autoreloaded for ``%autoreload 1``
-
-``%aimport -foo``
-
- Mark module 'foo' to not be autoreloaded.
-
-Caveats
-=======
-
-Reloading Python modules in a reliable way is in general difficult,
-and unexpected things may occur. ``%autoreload`` tries to work around
-common pitfalls by replacing function code objects and parts of
-classes previously in the module with new versions. This makes the
-following things work:
-
-- Functions and classes imported via 'from xxx import foo' are upgraded
- to new versions when 'xxx' is reloaded.
-
-- Methods and properties of classes are upgraded on reload, so that
- calling 'c.foo()' on an object 'c' created before the reload causes
- the new code for 'foo' to be executed.
-
-Some of the known remaining caveats are:
-
-- Replacing code objects does not always succeed: changing a @property
- in a class to an ordinary method or a method to a member variable
- can cause problems (but in old objects only).
-
-- Functions that are removed (eg. via monkey-patching) from a module
- before it is reloaded are not upgraded.
-
-- C extension modules cannot be reloaded, and so cannot be autoreloaded.
-"""
-from __future__ import print_function
-
-skip_doctest = True
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2000 Thomas Heller
-# Copyright (C) 2008 Pauli Virtanen <pav@iki.fi>
-# Copyright (C) 2012 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-#
-# This IPython module is written by Pauli Virtanen, based on the autoreload
-# code by Thomas Heller.
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import os
-import sys
-import traceback
-import types
-import weakref
-
-try:
- # Reload is not defined by default in Python3.
- reload
-except NameError:
- from imp import reload
-
-from IPython.utils import openpy
-from IPython.utils.py3compat import PY3
-
-#------------------------------------------------------------------------------
-# Autoreload functionality
-#------------------------------------------------------------------------------
-
-class ModuleReloader(object):
- enabled = False
- """Whether this reloader is enabled"""
-
- check_all = True
- """Autoreload all modules, not just those listed in 'modules'"""
-
- def __init__(self):
- # Modules that failed to reload: {module: mtime-on-failed-reload, ...}
- self.failed = {}
- # Modules specially marked as autoreloadable.
- self.modules = {}
- # Modules specially marked as not autoreloadable.
- self.skip_modules = {}
- # (module-name, name) -> weakref, for replacing old code objects
- self.old_objects = {}
- # Module modification timestamps
- self.modules_mtimes = {}
-
- # Cache module modification times
- self.check(check_all=True, do_reload=False)
-
- def mark_module_skipped(self, module_name):
- """Skip reloading the named module in the future"""
- try:
- del self.modules[module_name]
- except KeyError:
- pass
- self.skip_modules[module_name] = True
-
- def mark_module_reloadable(self, module_name):
- """Reload the named module in the future (if it is imported)"""
- try:
- del self.skip_modules[module_name]
- except KeyError:
- pass
- self.modules[module_name] = True
-
- def aimport_module(self, module_name):
- """Import a module, and mark it reloadable
-
- Returns
- -------
- top_module : module
- The imported module if it is top-level, else its top-level package.
- top_name : str
- Name of top_module.
-
- """
- self.mark_module_reloadable(module_name)
-
- __import__(module_name)
- top_name = module_name.split('.')[0]
- top_module = sys.modules[top_name]
- return top_module, top_name
-
- def filename_and_mtime(self, module):
- if not hasattr(module, '__file__') or module.__file__ is None:
- return None, None
-
+"""IPython extension to reload modules before executing user code.
+
+``autoreload`` reloads modules automatically before entering the execution of
+code typed at the IPython prompt.
+
+This makes, for example, the following workflow possible:
+
+.. sourcecode:: ipython
+
+ In [1]: %load_ext autoreload
+
+ In [2]: %autoreload 2
+
+ In [3]: from foo import some_function
+
+ In [4]: some_function()
+ Out[4]: 42
+
+ In [5]: # open foo.py in an editor and change some_function to return 43
+
+ In [6]: some_function()
+ Out[6]: 43
+
+The module was reloaded without reloading it explicitly, and the object
+imported with ``from foo import ...`` was also updated.
+
+Usage
+=====
+
+The following magic commands are provided:
+
+``%autoreload``
+
+ Reload all modules (except those excluded by ``%aimport``)
+ automatically now.
+
+``%autoreload 0``
+
+ Disable automatic reloading.
+
+``%autoreload 1``
+
+ Reload all modules imported with ``%aimport`` every time before
+ executing the Python code typed.
+
+``%autoreload 2``
+
+ Reload all modules (except those excluded by ``%aimport``) every
+ time before executing the Python code typed.
+
+``%aimport``
+
+ List modules which are to be automatically imported or not to be imported.
+
+``%aimport foo``
+
+ Import module 'foo' and mark it to be autoreloaded for ``%autoreload 1``
+
+``%aimport -foo``
+
+ Mark module 'foo' to not be autoreloaded.
+
+Caveats
+=======
+
+Reloading Python modules in a reliable way is in general difficult,
+and unexpected things may occur. ``%autoreload`` tries to work around
+common pitfalls by replacing function code objects and parts of
+classes previously in the module with new versions. This makes the
+following things work:
+
+- Functions and classes imported via 'from xxx import foo' are upgraded
+ to new versions when 'xxx' is reloaded.
+
+- Methods and properties of classes are upgraded on reload, so that
+ calling 'c.foo()' on an object 'c' created before the reload causes
+ the new code for 'foo' to be executed.
+
+Some of the known remaining caveats are:
+
+- Replacing code objects does not always succeed: changing a @property
+ in a class to an ordinary method or a method to a member variable
+ can cause problems (but in old objects only).
+
+- Functions that are removed (eg. via monkey-patching) from a module
+ before it is reloaded are not upgraded.
+
+- C extension modules cannot be reloaded, and so cannot be autoreloaded.
+"""
+from __future__ import print_function
+
+skip_doctest = True
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2000 Thomas Heller
+# Copyright (C) 2008 Pauli Virtanen <pav@iki.fi>
+# Copyright (C) 2012 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+#
+# This IPython module is written by Pauli Virtanen, based on the autoreload
+# code by Thomas Heller.
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import os
+import sys
+import traceback
+import types
+import weakref
+
+try:
+ # Reload is not defined by default in Python3.
+ reload
+except NameError:
+ from imp import reload
+
+from IPython.utils import openpy
+from IPython.utils.py3compat import PY3
+
+#------------------------------------------------------------------------------
+# Autoreload functionality
+#------------------------------------------------------------------------------
+
+class ModuleReloader(object):
+ enabled = False
+ """Whether this reloader is enabled"""
+
+ check_all = True
+ """Autoreload all modules, not just those listed in 'modules'"""
+
+ def __init__(self):
+ # Modules that failed to reload: {module: mtime-on-failed-reload, ...}
+ self.failed = {}
+ # Modules specially marked as autoreloadable.
+ self.modules = {}
+ # Modules specially marked as not autoreloadable.
+ self.skip_modules = {}
+ # (module-name, name) -> weakref, for replacing old code objects
+ self.old_objects = {}
+ # Module modification timestamps
+ self.modules_mtimes = {}
+
+ # Cache module modification times
+ self.check(check_all=True, do_reload=False)
+
+ def mark_module_skipped(self, module_name):
+ """Skip reloading the named module in the future"""
+ try:
+ del self.modules[module_name]
+ except KeyError:
+ pass
+ self.skip_modules[module_name] = True
+
+ def mark_module_reloadable(self, module_name):
+ """Reload the named module in the future (if it is imported)"""
+ try:
+ del self.skip_modules[module_name]
+ except KeyError:
+ pass
+ self.modules[module_name] = True
+
+ def aimport_module(self, module_name):
+ """Import a module, and mark it reloadable
+
+ Returns
+ -------
+ top_module : module
+ The imported module if it is top-level, else its top-level package.
+ top_name : str
+ Name of top_module.
+
+ """
+ self.mark_module_reloadable(module_name)
+
+ __import__(module_name)
+ top_name = module_name.split('.')[0]
+ top_module = sys.modules[top_name]
+ return top_module, top_name
+
+ def filename_and_mtime(self, module):
+ if not hasattr(module, '__file__') or module.__file__ is None:
+ return None, None
+
if getattr(module, '__name__', None) in [None, '__mp_main__', '__main__']:
# we cannot reload(__main__) or reload(__mp_main__)
- return None, None
-
- filename = module.__file__
- path, ext = os.path.splitext(filename)
-
- if ext.lower() == '.py':
- py_filename = filename
- else:
- try:
- py_filename = openpy.source_from_cache(filename)
- except ValueError:
- return None, None
-
- try:
- pymtime = os.stat(py_filename).st_mtime
- except OSError:
- return None, None
-
- return py_filename, pymtime
-
- def check(self, check_all=False, do_reload=True):
- """Check whether some modules need to be reloaded."""
-
- if not self.enabled and not check_all:
- return
-
- if check_all or self.check_all:
- modules = list(sys.modules.keys())
- else:
- modules = list(self.modules.keys())
-
- for modname in modules:
- m = sys.modules.get(modname, None)
-
- if modname in self.skip_modules:
- continue
-
- py_filename, pymtime = self.filename_and_mtime(m)
- if py_filename is None:
- continue
-
- try:
- if pymtime <= self.modules_mtimes[modname]:
- continue
- except KeyError:
- self.modules_mtimes[modname] = pymtime
- continue
- else:
- if self.failed.get(py_filename, None) == pymtime:
- continue
-
- self.modules_mtimes[modname] = pymtime
-
- # If we've reached this point, we should try to reload the module
- if do_reload:
- try:
- superreload(m, reload, self.old_objects)
- if py_filename in self.failed:
- del self.failed[py_filename]
- except:
- print("[autoreload of %s failed: %s]" % (
- modname, traceback.format_exc(1)), file=sys.stderr)
- self.failed[py_filename] = pymtime
-
-#------------------------------------------------------------------------------
-# superreload
-#------------------------------------------------------------------------------
-
-if PY3:
- func_attrs = ['__code__', '__defaults__', '__doc__',
- '__closure__', '__globals__', '__dict__']
-else:
- func_attrs = ['func_code', 'func_defaults', 'func_doc',
- 'func_closure', 'func_globals', 'func_dict']
-
-
-def update_function(old, new):
- """Upgrade the code object of a function"""
- for name in func_attrs:
- try:
- setattr(old, name, getattr(new, name))
- except (AttributeError, TypeError):
- pass
-
-
-def update_class(old, new):
- """Replace stuff in the __dict__ of a class, and upgrade
- method code objects"""
- for key in list(old.__dict__.keys()):
- old_obj = getattr(old, key)
-
- try:
- new_obj = getattr(new, key)
- except AttributeError:
- # obsolete attribute: remove it
- try:
- delattr(old, key)
- except (AttributeError, TypeError):
- pass
- continue
-
- if update_generic(old_obj, new_obj): continue
-
- try:
- setattr(old, key, getattr(new, key))
- except (AttributeError, TypeError):
- pass # skip non-writable attributes
-
-
-def update_property(old, new):
- """Replace get/set/del functions of a property"""
- update_generic(old.fdel, new.fdel)
- update_generic(old.fget, new.fget)
- update_generic(old.fset, new.fset)
-
-
-def isinstance2(a, b, typ):
- return isinstance(a, typ) and isinstance(b, typ)
-
-
-UPDATE_RULES = [
- (lambda a, b: isinstance2(a, b, type),
- update_class),
- (lambda a, b: isinstance2(a, b, types.FunctionType),
- update_function),
- (lambda a, b: isinstance2(a, b, property),
- update_property),
-]
-
-
-if PY3:
- UPDATE_RULES.extend([(lambda a, b: isinstance2(a, b, types.MethodType),
- lambda a, b: update_function(a.__func__, b.__func__)),
- ])
-else:
- UPDATE_RULES.extend([(lambda a, b: isinstance2(a, b, types.ClassType),
- update_class),
- (lambda a, b: isinstance2(a, b, types.MethodType),
- lambda a, b: update_function(a.__func__, b.__func__)),
- ])
-
-
-def update_generic(a, b):
- for type_check, update in UPDATE_RULES:
- if type_check(a, b):
- update(a, b)
- return True
- return False
-
-
-class StrongRef(object):
- def __init__(self, obj):
- self.obj = obj
- def __call__(self):
- return self.obj
-
-
-def superreload(module, reload=reload, old_objects={}):
- """Enhanced version of the builtin reload function.
-
- superreload remembers objects previously in the module, and
-
- - upgrades the class dictionary of every old class in the module
- - upgrades the code object of every old function and method
- - clears the module's namespace before reloading
-
- """
-
- # collect old objects in the module
- for name, obj in list(module.__dict__.items()):
- if not hasattr(obj, '__module__') or obj.__module__ != module.__name__:
- continue
- key = (module.__name__, name)
- try:
- old_objects.setdefault(key, []).append(weakref.ref(obj))
- except TypeError:
- # weakref doesn't work for all types;
- # create strong references for 'important' cases
- if not PY3 and isinstance(obj, types.ClassType):
- old_objects.setdefault(key, []).append(StrongRef(obj))
-
- # reload module
- try:
- # clear namespace first from old cruft
- old_dict = module.__dict__.copy()
- old_name = module.__name__
- module.__dict__.clear()
- module.__dict__['__name__'] = old_name
- module.__dict__['__loader__'] = old_dict['__loader__']
- except (TypeError, AttributeError, KeyError):
- pass
-
- try:
- module = reload(module)
- except:
- # restore module dictionary on failed reload
- module.__dict__.update(old_dict)
- raise
-
- # iterate over all objects and update functions & classes
- for name, new_obj in list(module.__dict__.items()):
- key = (module.__name__, name)
- if key not in old_objects: continue
-
- new_refs = []
- for old_ref in old_objects[key]:
- old_obj = old_ref()
- if old_obj is None: continue
- new_refs.append(old_ref)
- update_generic(old_obj, new_obj)
-
- if new_refs:
- old_objects[key] = new_refs
- else:
- del old_objects[key]
-
- return module
-
-#------------------------------------------------------------------------------
-# IPython connectivity
-#------------------------------------------------------------------------------
-
-from IPython.core.magic import Magics, magics_class, line_magic
-
-@magics_class
-class AutoreloadMagics(Magics):
- def __init__(self, *a, **kw):
- super(AutoreloadMagics, self).__init__(*a, **kw)
- self._reloader = ModuleReloader()
- self._reloader.check_all = False
- self.loaded_modules = set(sys.modules)
-
- @line_magic
- def autoreload(self, parameter_s=''):
- r"""%autoreload => Reload modules automatically
-
- %autoreload
- Reload all modules (except those excluded by %aimport) automatically
- now.
-
- %autoreload 0
- Disable automatic reloading.
-
- %autoreload 1
- Reload all modules imported with %aimport every time before executing
- the Python code typed.
-
- %autoreload 2
- Reload all modules (except those excluded by %aimport) every time
- before executing the Python code typed.
-
- Reloading Python modules in a reliable way is in general
- difficult, and unexpected things may occur. %autoreload tries to
- work around common pitfalls by replacing function code objects and
- parts of classes previously in the module with new versions. This
- makes the following things work:
-
- - Functions and classes imported via 'from xxx import foo' are upgraded
- to new versions when 'xxx' is reloaded.
-
- - Methods and properties of classes are upgraded on reload, so that
- calling 'c.foo()' on an object 'c' created before the reload causes
- the new code for 'foo' to be executed.
-
- Some of the known remaining caveats are:
-
- - Replacing code objects does not always succeed: changing a @property
- in a class to an ordinary method or a method to a member variable
- can cause problems (but in old objects only).
-
- - Functions that are removed (eg. via monkey-patching) from a module
- before it is reloaded are not upgraded.
-
- - C extension modules cannot be reloaded, and so cannot be
- autoreloaded.
-
- """
- if parameter_s == '':
- self._reloader.check(True)
- elif parameter_s == '0':
- self._reloader.enabled = False
- elif parameter_s == '1':
- self._reloader.check_all = False
- self._reloader.enabled = True
- elif parameter_s == '2':
- self._reloader.check_all = True
- self._reloader.enabled = True
-
- @line_magic
- def aimport(self, parameter_s='', stream=None):
- """%aimport => Import modules for automatic reloading.
-
- %aimport
- List modules to automatically import and not to import.
-
- %aimport foo
- Import module 'foo' and mark it to be autoreloaded for %autoreload 1
-
- %aimport -foo
- Mark module 'foo' to not be autoreloaded for %autoreload 1
- """
- modname = parameter_s
- if not modname:
- to_reload = sorted(self._reloader.modules.keys())
- to_skip = sorted(self._reloader.skip_modules.keys())
- if stream is None:
- stream = sys.stdout
- if self._reloader.check_all:
- stream.write("Modules to reload:\nall-except-skipped\n")
- else:
- stream.write("Modules to reload:\n%s\n" % ' '.join(to_reload))
- stream.write("\nModules to skip:\n%s\n" % ' '.join(to_skip))
- elif modname.startswith('-'):
- modname = modname[1:]
- self._reloader.mark_module_skipped(modname)
- else:
- top_module, top_name = self._reloader.aimport_module(modname)
-
- # Inject module to user namespace
- self.shell.push({top_name: top_module})
-
- def pre_run_cell(self):
- if self._reloader.enabled:
- try:
- self._reloader.check()
- except:
- pass
-
- def post_execute_hook(self):
- """Cache the modification times of any modules imported in this execution
- """
- newly_loaded_modules = set(sys.modules) - self.loaded_modules
- for modname in newly_loaded_modules:
- _, pymtime = self._reloader.filename_and_mtime(sys.modules[modname])
- if pymtime is not None:
- self._reloader.modules_mtimes[modname] = pymtime
-
- self.loaded_modules.update(newly_loaded_modules)
-
-
-def load_ipython_extension(ip):
- """Load the extension in IPython."""
- auto_reload = AutoreloadMagics(ip)
- ip.register_magics(auto_reload)
- ip.events.register('pre_run_cell', auto_reload.pre_run_cell)
- ip.events.register('post_execute', auto_reload.post_execute_hook)
+ return None, None
+
+ filename = module.__file__
+ path, ext = os.path.splitext(filename)
+
+ if ext.lower() == '.py':
+ py_filename = filename
+ else:
+ try:
+ py_filename = openpy.source_from_cache(filename)
+ except ValueError:
+ return None, None
+
+ try:
+ pymtime = os.stat(py_filename).st_mtime
+ except OSError:
+ return None, None
+
+ return py_filename, pymtime
+
+ def check(self, check_all=False, do_reload=True):
+ """Check whether some modules need to be reloaded."""
+
+ if not self.enabled and not check_all:
+ return
+
+ if check_all or self.check_all:
+ modules = list(sys.modules.keys())
+ else:
+ modules = list(self.modules.keys())
+
+ for modname in modules:
+ m = sys.modules.get(modname, None)
+
+ if modname in self.skip_modules:
+ continue
+
+ py_filename, pymtime = self.filename_and_mtime(m)
+ if py_filename is None:
+ continue
+
+ try:
+ if pymtime <= self.modules_mtimes[modname]:
+ continue
+ except KeyError:
+ self.modules_mtimes[modname] = pymtime
+ continue
+ else:
+ if self.failed.get(py_filename, None) == pymtime:
+ continue
+
+ self.modules_mtimes[modname] = pymtime
+
+ # If we've reached this point, we should try to reload the module
+ if do_reload:
+ try:
+ superreload(m, reload, self.old_objects)
+ if py_filename in self.failed:
+ del self.failed[py_filename]
+ except:
+ print("[autoreload of %s failed: %s]" % (
+ modname, traceback.format_exc(1)), file=sys.stderr)
+ self.failed[py_filename] = pymtime
+
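The mtime bookkeeping in ``check()`` above boils down to "reload only when the
source file on disk is newer than the timestamp cached the last time around".
Stripped of the module machinery, the comparison looks roughly like this
(illustrative helper, not part of the extension)::

    import os

    modules_mtimes = {}          # cache: module name -> last seen mtime

    def needs_reload(modname, py_filename):
        """Return True when the source file changed since the cached check."""
        mtime = os.stat(py_filename).st_mtime
        last = modules_mtimes.get(modname)
        modules_mtimes[modname] = mtime
        # first sighting only seeds the cache, mirroring the KeyError branch above
        return last is not None and mtime > last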
+#------------------------------------------------------------------------------
+# superreload
+#------------------------------------------------------------------------------
+
+if PY3:
+ func_attrs = ['__code__', '__defaults__', '__doc__',
+ '__closure__', '__globals__', '__dict__']
+else:
+ func_attrs = ['func_code', 'func_defaults', 'func_doc',
+ 'func_closure', 'func_globals', 'func_dict']
+
+
+def update_function(old, new):
+ """Upgrade the code object of a function"""
+ for name in func_attrs:
+ try:
+ setattr(old, name, getattr(new, name))
+ except (AttributeError, TypeError):
+ pass
+
+
+def update_class(old, new):
+ """Replace stuff in the __dict__ of a class, and upgrade
+ method code objects"""
+ for key in list(old.__dict__.keys()):
+ old_obj = getattr(old, key)
+
+ try:
+ new_obj = getattr(new, key)
+ except AttributeError:
+ # obsolete attribute: remove it
+ try:
+ delattr(old, key)
+ except (AttributeError, TypeError):
+ pass
+ continue
+
+ if update_generic(old_obj, new_obj): continue
+
+ try:
+ setattr(old, key, getattr(new, key))
+ except (AttributeError, TypeError):
+ pass # skip non-writable attributes
+
+
+def update_property(old, new):
+ """Replace get/set/del functions of a property"""
+ update_generic(old.fdel, new.fdel)
+ update_generic(old.fget, new.fget)
+ update_generic(old.fset, new.fset)
+
+
+def isinstance2(a, b, typ):
+ return isinstance(a, typ) and isinstance(b, typ)
+
+
+UPDATE_RULES = [
+ (lambda a, b: isinstance2(a, b, type),
+ update_class),
+ (lambda a, b: isinstance2(a, b, types.FunctionType),
+ update_function),
+ (lambda a, b: isinstance2(a, b, property),
+ update_property),
+]
+
+
+if PY3:
+ UPDATE_RULES.extend([(lambda a, b: isinstance2(a, b, types.MethodType),
+ lambda a, b: update_function(a.__func__, b.__func__)),
+ ])
+else:
+ UPDATE_RULES.extend([(lambda a, b: isinstance2(a, b, types.ClassType),
+ update_class),
+ (lambda a, b: isinstance2(a, b, types.MethodType),
+ lambda a, b: update_function(a.__func__, b.__func__)),
+ ])
+
+
+def update_generic(a, b):
+ for type_check, update in UPDATE_RULES:
+ if type_check(a, b):
+ update(a, b)
+ return True
+ return False
+
+
+class StrongRef(object):
+ def __init__(self, obj):
+ self.obj = obj
+ def __call__(self):
+ return self.obj
+
+
+def superreload(module, reload=reload, old_objects={}):
+ """Enhanced version of the builtin reload function.
+
+ superreload remembers objects previously in the module, and
+
+ - upgrades the class dictionary of every old class in the module
+ - upgrades the code object of every old function and method
+ - clears the module's namespace before reloading
+
+ """
+
+ # collect old objects in the module
+ for name, obj in list(module.__dict__.items()):
+ if not hasattr(obj, '__module__') or obj.__module__ != module.__name__:
+ continue
+ key = (module.__name__, name)
+ try:
+ old_objects.setdefault(key, []).append(weakref.ref(obj))
+ except TypeError:
+ # weakref doesn't work for all types;
+ # create strong references for 'important' cases
+ if not PY3 and isinstance(obj, types.ClassType):
+ old_objects.setdefault(key, []).append(StrongRef(obj))
+
+ # reload module
+ try:
+ # clear namespace first from old cruft
+ old_dict = module.__dict__.copy()
+ old_name = module.__name__
+ module.__dict__.clear()
+ module.__dict__['__name__'] = old_name
+ module.__dict__['__loader__'] = old_dict['__loader__']
+ except (TypeError, AttributeError, KeyError):
+ pass
+
+ try:
+ module = reload(module)
+ except:
+ # restore module dictionary on failed reload
+ module.__dict__.update(old_dict)
+ raise
+
+ # iterate over all objects and update functions & classes
+ for name, new_obj in list(module.__dict__.items()):
+ key = (module.__name__, name)
+ if key not in old_objects: continue
+
+ new_refs = []
+ for old_ref in old_objects[key]:
+ old_obj = old_ref()
+ if old_obj is None: continue
+ new_refs.append(old_ref)
+ update_generic(old_obj, new_obj)
+
+ if new_refs:
+ old_objects[key] = new_refs
+ else:
+ del old_objects[key]
+
+ return module
+
+#------------------------------------------------------------------------------
+# IPython connectivity
+#------------------------------------------------------------------------------
+
+from IPython.core.magic import Magics, magics_class, line_magic
+
+@magics_class
+class AutoreloadMagics(Magics):
+ def __init__(self, *a, **kw):
+ super(AutoreloadMagics, self).__init__(*a, **kw)
+ self._reloader = ModuleReloader()
+ self._reloader.check_all = False
+ self.loaded_modules = set(sys.modules)
+
+ @line_magic
+ def autoreload(self, parameter_s=''):
+ r"""%autoreload => Reload modules automatically
+
+ %autoreload
+ Reload all modules (except those excluded by %aimport) automatically
+ now.
+
+ %autoreload 0
+ Disable automatic reloading.
+
+ %autoreload 1
+ Reload all modules imported with %aimport every time before executing
+ the Python code typed.
+
+ %autoreload 2
+ Reload all modules (except those excluded by %aimport) every time
+ before executing the Python code typed.
+
+ Reloading Python modules in a reliable way is in general
+ difficult, and unexpected things may occur. %autoreload tries to
+ work around common pitfalls by replacing function code objects and
+ parts of classes previously in the module with new versions. This
+ makes the following things work:
+
+ - Functions and classes imported via 'from xxx import foo' are upgraded
+ to new versions when 'xxx' is reloaded.
+
+ - Methods and properties of classes are upgraded on reload, so that
+ calling 'c.foo()' on an object 'c' created before the reload causes
+ the new code for 'foo' to be executed.
+
+ Some of the known remaining caveats are:
+
+ - Replacing code objects does not always succeed: changing a @property
+ in a class to an ordinary method or a method to a member variable
+ can cause problems (but in old objects only).
+
+ - Functions that are removed (eg. via monkey-patching) from a module
+ before it is reloaded are not upgraded.
+
+ - C extension modules cannot be reloaded, and so cannot be
+ autoreloaded.
+
+ """
+ if parameter_s == '':
+ self._reloader.check(True)
+ elif parameter_s == '0':
+ self._reloader.enabled = False
+ elif parameter_s == '1':
+ self._reloader.check_all = False
+ self._reloader.enabled = True
+ elif parameter_s == '2':
+ self._reloader.check_all = True
+ self._reloader.enabled = True
+
+ @line_magic
+ def aimport(self, parameter_s='', stream=None):
+ """%aimport => Import modules for automatic reloading.
+
+ %aimport
+ List modules to automatically import and not to import.
+
+ %aimport foo
+ Import module 'foo' and mark it to be autoreloaded for %autoreload 1
+
+ %aimport -foo
+ Mark module 'foo' to not be autoreloaded for %autoreload 1
+ """
+ modname = parameter_s
+ if not modname:
+ to_reload = sorted(self._reloader.modules.keys())
+ to_skip = sorted(self._reloader.skip_modules.keys())
+ if stream is None:
+ stream = sys.stdout
+ if self._reloader.check_all:
+ stream.write("Modules to reload:\nall-except-skipped\n")
+ else:
+ stream.write("Modules to reload:\n%s\n" % ' '.join(to_reload))
+ stream.write("\nModules to skip:\n%s\n" % ' '.join(to_skip))
+ elif modname.startswith('-'):
+ modname = modname[1:]
+ self._reloader.mark_module_skipped(modname)
+ else:
+ top_module, top_name = self._reloader.aimport_module(modname)
+
+ # Inject module to user namespace
+ self.shell.push({top_name: top_module})
+
+ def pre_run_cell(self):
+ if self._reloader.enabled:
+ try:
+ self._reloader.check()
+ except:
+ pass
+
+ def post_execute_hook(self):
+ """Cache the modification times of any modules imported in this execution
+ """
+ newly_loaded_modules = set(sys.modules) - self.loaded_modules
+ for modname in newly_loaded_modules:
+ _, pymtime = self._reloader.filename_and_mtime(sys.modules[modname])
+ if pymtime is not None:
+ self._reloader.modules_mtimes[modname] = pymtime
+
+ self.loaded_modules.update(newly_loaded_modules)
+
+
+def load_ipython_extension(ip):
+ """Load the extension in IPython."""
+ auto_reload = AutoreloadMagics(ip)
+ ip.register_magics(auto_reload)
+ ip.events.register('pre_run_cell', auto_reload.pre_run_cell)
+ ip.events.register('post_execute', auto_reload.post_execute_hook)
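The core trick behind ``update_function`` and ``superreload`` above is that
rebinding a function's ``__code__`` (``func_code`` on old Python 2) changes
every existing reference to it, including bound methods of live objects. A
minimal standalone sketch of that idea (toy function names, not part of
IPython)::

    def old_version():
        return 42          # pretend this is the code before the file was edited

    def new_version():
        return 43          # and this is the freshly reloaded definition

    alias = old_version    # simulates "from foo import some_function"

    # What update_function() effectively does during a reload:
    old_version.__code__ = new_version.__code__

    print(alias())         # 43 -- the old reference now runs the new code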
diff --git a/contrib/python/ipython/py2/IPython/extensions/cythonmagic.py b/contrib/python/ipython/py2/IPython/extensions/cythonmagic.py
index 65c3e39548..3c88e7c2a1 100644
--- a/contrib/python/ipython/py2/IPython/extensions/cythonmagic.py
+++ b/contrib/python/ipython/py2/IPython/extensions/cythonmagic.py
@@ -1,21 +1,21 @@
-# -*- coding: utf-8 -*-
-"""
-**DEPRECATED**
-
-The cython magic has been integrated into Cython itself,
-which is now released in version 0.21.
-
-cf. the `Cython` repo under the GitHub `Cython` organisation, in the
-file `Cython/Build/IpythonMagic.py`
-"""
-#-----------------------------------------------------------------------------
-# Copyright (C) 2010-2011, IPython Development Team.
-#-----------------------------------------------------------------------------
-
-import warnings
-
-## still load the magic in IPython 3.x, remove completely in future versions.
-def load_ipython_extension(ip):
- """Load the extension in IPython."""
-
- warnings.warn("""The Cython magic has been moved to the Cython package""")
+# -*- coding: utf-8 -*-
+"""
+**DEPRECATED**
+
+The cython magic has been integrated into Cython itself,
+which is now released in version 0.21.
+
+cf github `Cython` organisation, `Cython` repo, under the
+file `Cython/Build/IpythonMagic.py`
+"""
+#-----------------------------------------------------------------------------
+# Copyright (C) 2010-2011, IPython Development Team.
+#-----------------------------------------------------------------------------
+
+import warnings
+
+## still load the magic in IPython 3.x, remove completely in future versions.
+def load_ipython_extension(ip):
+ """Load the extension in IPython."""
+
+ warnings.warn("""The Cython magic has been moved to the Cython package""")
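For reference, with a Cython release that ships the integrated magic (0.21 or
later, as noted above), the replacement is loaded straight from Cython; an
illustrative session, assuming Cython is installed::

    In [1]: %load_ext Cython      # provided by Cython/Build/IpythonMagic.py

    In [2]: %%cython
       ...: def double(int n):
       ...:     return n * 2
       ...:

    In [3]: double(21)
    Out[3]: 42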
diff --git a/contrib/python/ipython/py2/IPython/extensions/rmagic.py b/contrib/python/ipython/py2/IPython/extensions/rmagic.py
index ecc25bc4f8..ec5763972e 100644
--- a/contrib/python/ipython/py2/IPython/extensions/rmagic.py
+++ b/contrib/python/ipython/py2/IPython/extensions/rmagic.py
@@ -1,12 +1,12 @@
-# -*- coding: utf-8 -*-
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2012 The IPython Development Team
-#-----------------------------------------------------------------------------
-
-import warnings
-
-def load_ipython_extension(ip):
- """Load the extension in IPython."""
- warnings.warn("The rmagic extension in IPython has moved to "
- "`rpy2.ipython`, please see `rpy2` documentation.")
+# -*- coding: utf-8 -*-
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2012 The IPython Development Team
+#-----------------------------------------------------------------------------
+
+import warnings
+
+def load_ipython_extension(ip):
+ """Load the extension in IPython."""
+ warnings.warn("The rmagic extension in IPython has moved to "
+ "`rpy2.ipython`, please see `rpy2` documentation.")
diff --git a/contrib/python/ipython/py2/IPython/extensions/storemagic.py b/contrib/python/ipython/py2/IPython/extensions/storemagic.py
index 34371a30f2..2fd1abf993 100644
--- a/contrib/python/ipython/py2/IPython/extensions/storemagic.py
+++ b/contrib/python/ipython/py2/IPython/extensions/storemagic.py
@@ -1,228 +1,228 @@
-# -*- coding: utf-8 -*-
-"""
-%store magic for lightweight persistence.
-
-Stores variables, aliases and macros in IPython's database.
-
-To automatically restore stored variables at startup, add this to your
-:file:`ipython_config.py` file::
-
- c.StoreMagics.autorestore = True
-"""
-from __future__ import print_function
-
+# -*- coding: utf-8 -*-
+"""
+%store magic for lightweight persistence.
+
+Stores variables, aliases and macros in IPython's database.
+
+To automatically restore stored variables at startup, add this to your
+:file:`ipython_config.py` file::
+
+ c.StoreMagics.autorestore = True
+"""
+from __future__ import print_function
+
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
-
-import inspect, os, sys, textwrap
-
-from IPython.core.error import UsageError
-from IPython.core.magic import Magics, magics_class, line_magic
-from traitlets import Bool
-from IPython.utils.py3compat import string_types
-
-
-def restore_aliases(ip):
- staliases = ip.db.get('stored_aliases', {})
- for k,v in staliases.items():
- #print "restore alias",k,v # dbg
- #self.alias_table[k] = v
- ip.alias_manager.define_alias(k,v)
-
-
-def refresh_variables(ip):
- db = ip.db
- for key in db.keys('autorestore/*'):
- # strip autorestore
- justkey = os.path.basename(key)
- try:
- obj = db[key]
- except KeyError:
- print("Unable to restore variable '%s', ignoring (use %%store -d to forget!)" % justkey)
- print("The error was:", sys.exc_info()[0])
- else:
- #print "restored",justkey,"=",obj #dbg
- ip.user_ns[justkey] = obj
-
-
-def restore_dhist(ip):
- ip.user_ns['_dh'] = ip.db.get('dhist',[])
-
-
-def restore_data(ip):
- refresh_variables(ip)
- restore_aliases(ip)
- restore_dhist(ip)
-
-
-@magics_class
-class StoreMagics(Magics):
- """Lightweight persistence for python variables.
-
- Provides the %store magic."""
-
+
+import inspect, os, sys, textwrap
+
+from IPython.core.error import UsageError
+from IPython.core.magic import Magics, magics_class, line_magic
+from traitlets import Bool
+from IPython.utils.py3compat import string_types
+
+
+def restore_aliases(ip):
+ staliases = ip.db.get('stored_aliases', {})
+ for k,v in staliases.items():
+ #print "restore alias",k,v # dbg
+ #self.alias_table[k] = v
+ ip.alias_manager.define_alias(k,v)
+
+
+def refresh_variables(ip):
+ db = ip.db
+ for key in db.keys('autorestore/*'):
+ # strip autorestore
+ justkey = os.path.basename(key)
+ try:
+ obj = db[key]
+ except KeyError:
+ print("Unable to restore variable '%s', ignoring (use %%store -d to forget!)" % justkey)
+ print("The error was:", sys.exc_info()[0])
+ else:
+ #print "restored",justkey,"=",obj #dbg
+ ip.user_ns[justkey] = obj
+
+
+def restore_dhist(ip):
+ ip.user_ns['_dh'] = ip.db.get('dhist',[])
+
+
+def restore_data(ip):
+ refresh_variables(ip)
+ restore_aliases(ip)
+ restore_dhist(ip)
+
+
+@magics_class
+class StoreMagics(Magics):
+ """Lightweight persistence for python variables.
+
+ Provides the %store magic."""
+
autorestore = Bool(False, help=
- """If True, any %store-d variables will be automatically restored
- when IPython starts.
- """
+ """If True, any %store-d variables will be automatically restored
+ when IPython starts.
+ """
).tag(config=True)
-
- def __init__(self, shell):
- super(StoreMagics, self).__init__(shell=shell)
- self.shell.configurables.append(self)
- if self.autorestore:
- restore_data(self.shell)
-
- @line_magic
- def store(self, parameter_s=''):
- """Lightweight persistence for python variables.
-
- Example::
-
- In [1]: l = ['hello',10,'world']
- In [2]: %store l
- In [3]: exit
-
- (IPython session is closed and started again...)
-
- ville@badger:~$ ipython
- In [1]: l
- NameError: name 'l' is not defined
- In [2]: %store -r
- In [3]: l
- Out[3]: ['hello', 10, 'world']
-
- Usage:
-
- * ``%store`` - Show list of all variables and their current
- values
- * ``%store spam`` - Store the *current* value of the variable spam
- to disk
- * ``%store -d spam`` - Remove the variable and its value from storage
- * ``%store -z`` - Remove all variables from storage
- * ``%store -r`` - Refresh all variables from store (overwrite
- current vals)
- * ``%store -r spam bar`` - Refresh specified variables from store
- (delete current val)
- * ``%store foo >a.txt`` - Store value of foo to new file a.txt
- * ``%store foo >>a.txt`` - Append value of foo to file a.txt
-
- It should be noted that if you change the value of a variable, you
- need to %store it again if you want to persist the new value.
-
- Note also that the variables will need to be pickleable; most basic
- python types can be safely %store'd.
-
- Aliases can also be %store'd across sessions.
- """
-
- opts,argsl = self.parse_options(parameter_s,'drz',mode='string')
- args = argsl.split(None,1)
- ip = self.shell
- db = ip.db
- # delete
- if 'd' in opts:
- try:
- todel = args[0]
- except IndexError:
- raise UsageError('You must provide the variable to forget')
- else:
- try:
- del db['autorestore/' + todel]
- except:
- raise UsageError("Can't delete variable '%s'" % todel)
- # reset
- elif 'z' in opts:
- for k in db.keys('autorestore/*'):
- del db[k]
-
- elif 'r' in opts:
- if args:
- for arg in args:
- try:
- obj = db['autorestore/' + arg]
- except KeyError:
- print("no stored variable %s" % arg)
- else:
- ip.user_ns[arg] = obj
- else:
- restore_data(ip)
-
- # run without arguments -> list variables & values
- elif not args:
- vars = db.keys('autorestore/*')
- vars.sort()
- if vars:
- size = max(map(len, vars))
- else:
- size = 0
-
- print('Stored variables and their in-db values:')
- fmt = '%-'+str(size)+'s -> %s'
- get = db.get
- for var in vars:
- justkey = os.path.basename(var)
- # print the first 50 characters of each value's repr
- print(fmt % (justkey, repr(get(var, '<unavailable>'))[:50]))
-
- # default action - store the variable
- else:
- # %store foo >file.txt or >>file.txt
- if len(args) > 1 and args[1].startswith('>'):
- fnam = os.path.expanduser(args[1].lstrip('>').lstrip())
- if args[1].startswith('>>'):
- fil = open(fnam, 'a')
- else:
- fil = open(fnam, 'w')
- obj = ip.ev(args[0])
- print("Writing '%s' (%s) to file '%s'." % (args[0],
- obj.__class__.__name__, fnam))
-
-
- if not isinstance (obj, string_types):
- from pprint import pprint
- pprint(obj, fil)
- else:
- fil.write(obj)
- if not obj.endswith('\n'):
- fil.write('\n')
-
- fil.close()
- return
-
- # %store foo
- try:
- obj = ip.user_ns[args[0]]
- except KeyError:
- # it might be an alias
- name = args[0]
- try:
- cmd = ip.alias_manager.retrieve_alias(name)
- except ValueError:
- raise UsageError("Unknown variable '%s'" % name)
-
- staliases = db.get('stored_aliases',{})
- staliases[name] = cmd
- db['stored_aliases'] = staliases
- print("Alias stored: %s (%s)" % (name, cmd))
- return
-
- else:
- modname = getattr(inspect.getmodule(obj), '__name__', '')
- if modname == '__main__':
- print(textwrap.dedent("""\
- Warning: %s is %s
- Proper storage of interactively declared classes (or instances
- of those classes) is not possible! Only instances
- of classes in real modules on file system can be %%store'd.
- """ % (args[0], obj) ))
- return
- #pickled = pickle.dumps(obj)
- db[ 'autorestore/' + args[0] ] = obj
- print("Stored '%s' (%s)" % (args[0], obj.__class__.__name__))
-
-
-def load_ipython_extension(ip):
- """Load the extension in IPython."""
- ip.register_magics(StoreMagics)
-
+
+ def __init__(self, shell):
+ super(StoreMagics, self).__init__(shell=shell)
+ self.shell.configurables.append(self)
+ if self.autorestore:
+ restore_data(self.shell)
+
+ @line_magic
+ def store(self, parameter_s=''):
+ """Lightweight persistence for python variables.
+
+ Example::
+
+ In [1]: l = ['hello',10,'world']
+ In [2]: %store l
+ In [3]: exit
+
+ (IPython session is closed and started again...)
+
+ ville@badger:~$ ipython
+ In [1]: l
+ NameError: name 'l' is not defined
+ In [2]: %store -r
+ In [3]: l
+ Out[3]: ['hello', 10, 'world']
+
+ Usage:
+
+ * ``%store`` - Show list of all variables and their current
+ values
+ * ``%store spam`` - Store the *current* value of the variable spam
+ to disk
+ * ``%store -d spam`` - Remove the variable and its value from storage
+ * ``%store -z`` - Remove all variables from storage
+ * ``%store -r`` - Refresh all variables from store (overwrite
+ current vals)
+ * ``%store -r spam bar`` - Refresh specified variables from store
+ (delete current val)
+ * ``%store foo >a.txt`` - Store value of foo to new file a.txt
+ * ``%store foo >>a.txt`` - Append value of foo to file a.txt
+
+ It should be noted that if you change the value of a variable, you
+ need to %store it again if you want to persist the new value.
+
+ Note also that the variables will need to be pickleable; most basic
+ python types can be safely %store'd.
+
+ Aliases can also be %store'd across sessions.
+ """
+
+ opts,argsl = self.parse_options(parameter_s,'drz',mode='string')
+ args = argsl.split(None,1)
+ ip = self.shell
+ db = ip.db
+ # delete
+ if 'd' in opts:
+ try:
+ todel = args[0]
+ except IndexError:
+ raise UsageError('You must provide the variable to forget')
+ else:
+ try:
+ del db['autorestore/' + todel]
+ except:
+ raise UsageError("Can't delete variable '%s'" % todel)
+ # reset
+ elif 'z' in opts:
+ for k in db.keys('autorestore/*'):
+ del db[k]
+
+ elif 'r' in opts:
+ if args:
+ for arg in args:
+ try:
+ obj = db['autorestore/' + arg]
+ except KeyError:
+ print("no stored variable %s" % arg)
+ else:
+ ip.user_ns[arg] = obj
+ else:
+ restore_data(ip)
+
+ # run without arguments -> list variables & values
+ elif not args:
+ vars = db.keys('autorestore/*')
+ vars.sort()
+ if vars:
+ size = max(map(len, vars))
+ else:
+ size = 0
+
+ print('Stored variables and their in-db values:')
+ fmt = '%-'+str(size)+'s -> %s'
+ get = db.get
+ for var in vars:
+ justkey = os.path.basename(var)
+ # print the first 50 characters of each value's repr
+ print(fmt % (justkey, repr(get(var, '<unavailable>'))[:50]))
+
+ # default action - store the variable
+ else:
+ # %store foo >file.txt or >>file.txt
+ if len(args) > 1 and args[1].startswith('>'):
+ fnam = os.path.expanduser(args[1].lstrip('>').lstrip())
+ if args[1].startswith('>>'):
+ fil = open(fnam, 'a')
+ else:
+ fil = open(fnam, 'w')
+ obj = ip.ev(args[0])
+ print("Writing '%s' (%s) to file '%s'." % (args[0],
+ obj.__class__.__name__, fnam))
+
+
+ if not isinstance (obj, string_types):
+ from pprint import pprint
+ pprint(obj, fil)
+ else:
+ fil.write(obj)
+ if not obj.endswith('\n'):
+ fil.write('\n')
+
+ fil.close()
+ return
+
+ # %store foo
+ try:
+ obj = ip.user_ns[args[0]]
+ except KeyError:
+ # it might be an alias
+ name = args[0]
+ try:
+ cmd = ip.alias_manager.retrieve_alias(name)
+ except ValueError:
+ raise UsageError("Unknown variable '%s'" % name)
+
+ staliases = db.get('stored_aliases',{})
+ staliases[name] = cmd
+ db['stored_aliases'] = staliases
+ print("Alias stored: %s (%s)" % (name, cmd))
+ return
+
+ else:
+ modname = getattr(inspect.getmodule(obj), '__name__', '')
+ if modname == '__main__':
+ print(textwrap.dedent("""\
+ Warning: %s is %s
+ Proper storage of interactively declared classes (or instances
+ of those classes) is not possible! Only instances
+ of classes in real modules on file system can be %%store'd.
+ """ % (args[0], obj) ))
+ return
+ #pickled = pickle.dumps(obj)
+ db[ 'autorestore/' + args[0] ] = obj
+ print("Stored '%s' (%s)" % (args[0], obj.__class__.__name__))
+
+
+def load_ipython_extension(ip):
+ """Load the extension in IPython."""
+ ip.register_magics(StoreMagics)
+
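As ``refresh_variables`` and the ``%store`` implementation above show, stored
values live in the shell's database under ``autorestore/<name>`` keys. A toy
sketch with a plain dict standing in for ``ip.db`` (an illustration of the key
convention only, not the real PickleShare-backed database)::

    import os

    db = {'autorestore/l': ['hello', 10, 'world'],   # what "%store l" would save
          'autorestore/answer': 42}

    user_ns = {}
    for key in db:                                   # the real code filters with db.keys('autorestore/*')
        if key.startswith('autorestore/'):
            user_ns[os.path.basename(key)] = db[key] # strip the 'autorestore/' prefix

    print(user_ns['l'])        # ['hello', 10, 'world']  -- what %store -r restores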
diff --git a/contrib/python/ipython/py2/IPython/extensions/sympyprinting.py b/contrib/python/ipython/py2/IPython/extensions/sympyprinting.py
index 1428605ed2..7f9fb2ef98 100644
--- a/contrib/python/ipython/py2/IPython/extensions/sympyprinting.py
+++ b/contrib/python/ipython/py2/IPython/extensions/sympyprinting.py
@@ -1,32 +1,32 @@
-"""
-**DEPRECATED**
-
-A print function that pretty prints sympy Basic objects.
-
-:moduleauthor: Brian Granger
-
-Usage
-=====
-
-Once the extension is loaded, Sympy Basic objects are automatically
-pretty-printed.
-
-As of SymPy 0.7.2, maintenance of this extension has moved to SymPy under
-sympy.interactive.ipythonprinting; any modifications to account for changes to
-SymPy should be submitted to SymPy rather than changed here. This module is
-maintained here for backwards compatibility with old SymPy versions.
-
-"""
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008 The IPython Development Team
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import warnings
-
-def load_ipython_extension(ip):
- warnings.warn("The sympyprinting extension has moved to `sympy`, "
- "use `from sympy import init_printing; init_printing()`")
+"""
+**DEPRECATED**
+
+A print function that pretty prints sympy Basic objects.
+
+:moduleauthor: Brian Granger
+
+Usage
+=====
+
+Once the extension is loaded, Sympy Basic objects are automatically
+pretty-printed.
+
+As of SymPy 0.7.2, maintenance of this extension has moved to SymPy under
+sympy.interactive.ipythonprinting; any modifications to account for changes to
+SymPy should be submitted to SymPy rather than changed here. This module is
+maintained here for backwards compatibility with old SymPy versions.
+
+"""
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008 The IPython Development Team
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import warnings
+
+def load_ipython_extension(ip):
+ warnings.warn("The sympyprinting extension has moved to `sympy`, "
+ "use `from sympy import init_printing; init_printing()`")
diff --git a/contrib/python/ipython/py2/IPython/external/__init__.py b/contrib/python/ipython/py2/IPython/external/__init__.py
index 5e948bfe1d..3104c19462 100644
--- a/contrib/python/ipython/py2/IPython/external/__init__.py
+++ b/contrib/python/ipython/py2/IPython/external/__init__.py
@@ -1,5 +1,5 @@
-"""
-This package contains all third-party modules bundled with IPython.
-"""
-
-__all__ = ["simplegeneric"]
+"""
+This package contains all third-party modules bundled with IPython.
+"""
+
+__all__ = ["simplegeneric"]
diff --git a/contrib/python/ipython/py2/IPython/external/decorators/__init__.py b/contrib/python/ipython/py2/IPython/external/decorators/__init__.py
index 7aae6b74cd..dd8f52b711 100644
--- a/contrib/python/ipython/py2/IPython/external/decorators/__init__.py
+++ b/contrib/python/ipython/py2/IPython/external/decorators/__init__.py
@@ -1,9 +1,9 @@
-try:
- from numpy.testing.decorators import *
- from numpy.testing.noseclasses import KnownFailure
-except ImportError:
- from ._decorators import *
- try:
- from ._numpy_testing_noseclasses import KnownFailure
- except ImportError:
- pass
+try:
+ from numpy.testing.decorators import *
+ from numpy.testing.noseclasses import KnownFailure
+except ImportError:
+ from ._decorators import *
+ try:
+ from ._numpy_testing_noseclasses import KnownFailure
+ except ImportError:
+ pass
diff --git a/contrib/python/ipython/py2/IPython/external/decorators/_decorators.py b/contrib/python/ipython/py2/IPython/external/decorators/_decorators.py
index b0df539b0b..19de5e5cde 100644
--- a/contrib/python/ipython/py2/IPython/external/decorators/_decorators.py
+++ b/contrib/python/ipython/py2/IPython/external/decorators/_decorators.py
@@ -1,281 +1,281 @@
-"""
-Decorators for labeling and modifying behavior of test objects.
-
-Decorators that merely return a modified version of the original
-function object are straightforward. Decorators that return a new
-function object need to use
-::
-
- nose.tools.make_decorator(original_function)(decorator)
-
-in returning the decorator, in order to preserve meta-data such as
-function name, setup and teardown functions and so on - see
-``nose.tools`` for more information.
-
-"""
-import warnings
-
-# IPython changes: make this work if numpy not available
-# Original code:
-#from numpy.testing.utils import \
-# WarningManager, WarningMessage
-# Our version:
-from ._numpy_testing_utils import WarningManager
-try:
- from ._numpy_testing_noseclasses import KnownFailureTest
-except:
- pass
-
-# End IPython changes
-
-def slow(t):
- """
- Label a test as 'slow'.
-
- The exact definition of a slow test is obviously both subjective and
- hardware-dependent, but in general any individual test that requires more
- than a second or two should be labeled as slow (the whole suite consists of
- thousands of tests, so even a second is significant).
-
- Parameters
- ----------
- t : callable
- The test to label as slow.
-
- Returns
- -------
- t : callable
- The decorated test `t`.
-
- Examples
- --------
- The `numpy.testing` module includes ``import decorators as dec``.
- A test can be decorated as slow like this::
-
- from numpy.testing import *
-
- @dec.slow
- def test_big(self):
- print 'Big, slow test'
-
- """
-
- t.slow = True
- return t
-
-def setastest(tf=True):
- """
- Signals to nose that this function is or is not a test.
-
- Parameters
- ----------
- tf : bool
- If True, specifies that the decorated callable is a test.
- If False, specifies that the decorated callable is not a test.
- Default is True.
-
- Notes
- -----
- This decorator can't use the nose namespace, because it can be
- called from a non-test module. See also ``istest`` and ``nottest`` in
- ``nose.tools``.
-
- Examples
- --------
- `setastest` can be used in the following way::
-
- from numpy.testing.decorators import setastest
-
- @setastest(False)
- def func_with_test_in_name(arg1, arg2):
- pass
-
- """
- def set_test(t):
- t.__test__ = tf
- return t
- return set_test
-
-def skipif(skip_condition, msg=None):
- """
- Make function raise SkipTest exception if a given condition is true.
-
- If the condition is a callable, it is used at runtime to dynamically
- make the decision. This is useful for tests that may require costly
- imports, to delay the cost until the test suite is actually executed.
-
- Parameters
- ----------
- skip_condition : bool or callable
- Flag to determine whether to skip the decorated test.
- msg : str, optional
- Message to give on raising a SkipTest exception. Default is None.
-
- Returns
- -------
- decorator : function
- Decorator which, when applied to a function, causes SkipTest
- to be raised when `skip_condition` is True, and the function
- to be called normally otherwise.
-
- Notes
- -----
- The decorator itself is decorated with the ``nose.tools.make_decorator``
- function in order to transmit function name, and various other metadata.
-
- """
-
- def skip_decorator(f):
- # Local import to avoid a hard nose dependency and only incur the
- # import time overhead at actual test-time.
- import nose
-
- # Allow for both boolean or callable skip conditions.
- if callable(skip_condition):
- skip_val = lambda : skip_condition()
- else:
- skip_val = lambda : skip_condition
-
- def get_msg(func,msg=None):
- """Skip message with information about function being skipped."""
- if msg is None:
- out = 'Test skipped due to test condition'
- else:
- out = '\n'+msg
-
- return "Skipping test: %s%s" % (func.__name__,out)
-
- # We need to define *two* skippers because Python doesn't allow both
- # return with value and yield inside the same function.
- def skipper_func(*args, **kwargs):
- """Skipper for normal test functions."""
- if skip_val():
- raise nose.SkipTest(get_msg(f,msg))
- else:
- return f(*args, **kwargs)
-
- def skipper_gen(*args, **kwargs):
- """Skipper for test generators."""
- if skip_val():
- raise nose.SkipTest(get_msg(f,msg))
- else:
- for x in f(*args, **kwargs):
- yield x
-
- # Choose the right skipper to use when building the actual decorator.
- if nose.util.isgenerator(f):
- skipper = skipper_gen
- else:
- skipper = skipper_func
-
- return nose.tools.make_decorator(f)(skipper)
-
- return skip_decorator
-
-def knownfailureif(fail_condition, msg=None):
- """
- Make function raise KnownFailureTest exception if given condition is true.
-
- If the condition is a callable, it is used at runtime to dynamically
- make the decision. This is useful for tests that may require costly
- imports, to delay the cost until the test suite is actually executed.
-
- Parameters
- ----------
- fail_condition : bool or callable
- Flag to determine whether to mark the decorated test as a known
- failure (if True) or not (if False).
- msg : str, optional
- Message to give on raising a KnownFailureTest exception.
- Default is None.
-
- Returns
- -------
- decorator : function
-        Decorator, which, when applied to a function, causes KnownFailureTest
-        to be raised when `fail_condition` is True, and the function
-        to be called normally otherwise.
-
- Notes
- -----
- The decorator itself is decorated with the ``nose.tools.make_decorator``
- function in order to transmit function name, and various other metadata.
-
- """
- if msg is None:
- msg = 'Test skipped due to known failure'
-
- # Allow for both boolean or callable known failure conditions.
- if callable(fail_condition):
- fail_val = lambda : fail_condition()
- else:
- fail_val = lambda : fail_condition
-
- def knownfail_decorator(f):
- # Local import to avoid a hard nose dependency and only incur the
- # import time overhead at actual test-time.
- import nose
- def knownfailer(*args, **kwargs):
- if fail_val():
- raise KnownFailureTest(msg)
- else:
- return f(*args, **kwargs)
- return nose.tools.make_decorator(f)(knownfailer)
-
- return knownfail_decorator
-
-def deprecated(conditional=True):
- """
- Filter deprecation warnings while running the test suite.
-
- This decorator can be used to filter DeprecationWarning's, to avoid
- printing them during the test suite run, while checking that the test
- actually raises a DeprecationWarning.
-
- Parameters
- ----------
- conditional : bool or callable, optional
- Flag to determine whether to mark test as deprecated or not. If the
- condition is a callable, it is used at runtime to dynamically make the
- decision. Default is True.
-
- Returns
- -------
- decorator : function
- The `deprecated` decorator itself.
-
- Notes
- -----
- .. versionadded:: 1.4.0
-
- """
- def deprecate_decorator(f):
- # Local import to avoid a hard nose dependency and only incur the
- # import time overhead at actual test-time.
- import nose
-
- def _deprecated_imp(*args, **kwargs):
- # Poor man's replacement for the with statement
- ctx = WarningManager(record=True)
- l = ctx.__enter__()
- warnings.simplefilter('always')
- try:
- f(*args, **kwargs)
- if not len(l) > 0:
- raise AssertionError("No warning raised when calling %s"
- % f.__name__)
- if not l[0].category is DeprecationWarning:
- raise AssertionError("First warning for %s is not a " \
- "DeprecationWarning( is %s)" % (f.__name__, l[0]))
- finally:
- ctx.__exit__()
-
- if callable(conditional):
- cond = conditional()
- else:
- cond = conditional
- if cond:
- return nose.tools.make_decorator(f)(_deprecated_imp)
- else:
- return f
- return deprecate_decorator
+"""
+Decorators for labeling and modifying behavior of test objects.
+
+Decorators that merely return a modified version of the original
+function object are straightforward. Decorators that return a new
+function object need to use
+::
+
+ nose.tools.make_decorator(original_function)(decorator)
+
+in returning the decorator, in order to preserve meta-data such as
+function name, setup and teardown functions and so on - see
+``nose.tools`` for more information.
+
+"""
+import warnings
+
+# IPython changes: make this work if numpy not available
+# Original code:
+#from numpy.testing.utils import \
+# WarningManager, WarningMessage
+# Our version:
+from ._numpy_testing_utils import WarningManager
+try:
+ from ._numpy_testing_noseclasses import KnownFailureTest
+except:
+ pass
+
+# End IPython changes
+
+def slow(t):
+ """
+ Label a test as 'slow'.
+
+ The exact definition of a slow test is obviously both subjective and
+ hardware-dependent, but in general any individual test that requires more
+ than a second or two should be labeled as slow (the whole suite consists of
+ thousands of tests, so even a second is significant).
+
+ Parameters
+ ----------
+ t : callable
+ The test to label as slow.
+
+ Returns
+ -------
+ t : callable
+ The decorated test `t`.
+
+ Examples
+ --------
+ The `numpy.testing` module includes ``import decorators as dec``.
+ A test can be decorated as slow like this::
+
+ from numpy.testing import *
+
+ @dec.slow
+ def test_big(self):
+ print 'Big, slow test'
+
+ """
+
+ t.slow = True
+ return t
+
+def setastest(tf=True):
+ """
+ Signals to nose that this function is or is not a test.
+
+ Parameters
+ ----------
+ tf : bool
+ If True, specifies that the decorated callable is a test.
+ If False, specifies that the decorated callable is not a test.
+ Default is True.
+
+ Notes
+ -----
+ This decorator can't use the nose namespace, because it can be
+ called from a non-test module. See also ``istest`` and ``nottest`` in
+ ``nose.tools``.
+
+ Examples
+ --------
+ `setastest` can be used in the following way::
+
+ from numpy.testing.decorators import setastest
+
+ @setastest(False)
+ def func_with_test_in_name(arg1, arg2):
+ pass
+
+ """
+ def set_test(t):
+ t.__test__ = tf
+ return t
+ return set_test
+
+def skipif(skip_condition, msg=None):
+ """
+ Make function raise SkipTest exception if a given condition is true.
+
+ If the condition is a callable, it is used at runtime to dynamically
+ make the decision. This is useful for tests that may require costly
+ imports, to delay the cost until the test suite is actually executed.
+
+ Parameters
+ ----------
+ skip_condition : bool or callable
+ Flag to determine whether to skip the decorated test.
+ msg : str, optional
+ Message to give on raising a SkipTest exception. Default is None.
+
+ Returns
+ -------
+ decorator : function
+ Decorator which, when applied to a function, causes SkipTest
+ to be raised when `skip_condition` is True, and the function
+ to be called normally otherwise.
+
+ Notes
+ -----
+ The decorator itself is decorated with the ``nose.tools.make_decorator``
+ function in order to transmit function name, and various other metadata.
+
+ """
+
+ def skip_decorator(f):
+ # Local import to avoid a hard nose dependency and only incur the
+ # import time overhead at actual test-time.
+ import nose
+
+ # Allow for both boolean or callable skip conditions.
+ if callable(skip_condition):
+ skip_val = lambda : skip_condition()
+ else:
+ skip_val = lambda : skip_condition
+
+ def get_msg(func,msg=None):
+ """Skip message with information about function being skipped."""
+ if msg is None:
+ out = 'Test skipped due to test condition'
+ else:
+ out = '\n'+msg
+
+ return "Skipping test: %s%s" % (func.__name__,out)
+
+ # We need to define *two* skippers because Python doesn't allow both
+ # return with value and yield inside the same function.
+ def skipper_func(*args, **kwargs):
+ """Skipper for normal test functions."""
+ if skip_val():
+ raise nose.SkipTest(get_msg(f,msg))
+ else:
+ return f(*args, **kwargs)
+
+ def skipper_gen(*args, **kwargs):
+ """Skipper for test generators."""
+ if skip_val():
+ raise nose.SkipTest(get_msg(f,msg))
+ else:
+ for x in f(*args, **kwargs):
+ yield x
+
+ # Choose the right skipper to use when building the actual decorator.
+ if nose.util.isgenerator(f):
+ skipper = skipper_gen
+ else:
+ skipper = skipper_func
+
+ return nose.tools.make_decorator(f)(skipper)
+
+ return skip_decorator
+
+def knownfailureif(fail_condition, msg=None):
+ """
+ Make function raise KnownFailureTest exception if given condition is true.
+
+ If the condition is a callable, it is used at runtime to dynamically
+ make the decision. This is useful for tests that may require costly
+ imports, to delay the cost until the test suite is actually executed.
+
+ Parameters
+ ----------
+ fail_condition : bool or callable
+ Flag to determine whether to mark the decorated test as a known
+ failure (if True) or not (if False).
+ msg : str, optional
+ Message to give on raising a KnownFailureTest exception.
+ Default is None.
+
+ Returns
+ -------
+ decorator : function
+        Decorator, which, when applied to a function, causes KnownFailureTest
+        to be raised when `fail_condition` is True, and the function
+        to be called normally otherwise.
+
+ Notes
+ -----
+ The decorator itself is decorated with the ``nose.tools.make_decorator``
+ function in order to transmit function name, and various other metadata.
+
+ """
+ if msg is None:
+ msg = 'Test skipped due to known failure'
+
+ # Allow for both boolean or callable known failure conditions.
+ if callable(fail_condition):
+ fail_val = lambda : fail_condition()
+ else:
+ fail_val = lambda : fail_condition
+
+ def knownfail_decorator(f):
+ # Local import to avoid a hard nose dependency and only incur the
+ # import time overhead at actual test-time.
+ import nose
+ def knownfailer(*args, **kwargs):
+ if fail_val():
+ raise KnownFailureTest(msg)
+ else:
+ return f(*args, **kwargs)
+ return nose.tools.make_decorator(f)(knownfailer)
+
+ return knownfail_decorator
+
+def deprecated(conditional=True):
+ """
+ Filter deprecation warnings while running the test suite.
+
+ This decorator can be used to filter DeprecationWarning's, to avoid
+ printing them during the test suite run, while checking that the test
+ actually raises a DeprecationWarning.
+
+ Parameters
+ ----------
+ conditional : bool or callable, optional
+ Flag to determine whether to mark test as deprecated or not. If the
+ condition is a callable, it is used at runtime to dynamically make the
+ decision. Default is True.
+
+ Returns
+ -------
+ decorator : function
+ The `deprecated` decorator itself.
+
+ Notes
+ -----
+ .. versionadded:: 1.4.0
+
+ """
+ def deprecate_decorator(f):
+ # Local import to avoid a hard nose dependency and only incur the
+ # import time overhead at actual test-time.
+ import nose
+
+ def _deprecated_imp(*args, **kwargs):
+ # Poor man's replacement for the with statement
+ ctx = WarningManager(record=True)
+ l = ctx.__enter__()
+ warnings.simplefilter('always')
+ try:
+ f(*args, **kwargs)
+ if not len(l) > 0:
+ raise AssertionError("No warning raised when calling %s"
+ % f.__name__)
+ if not l[0].category is DeprecationWarning:
+ raise AssertionError("First warning for %s is not a " \
+ "DeprecationWarning( is %s)" % (f.__name__, l[0]))
+ finally:
+ ctx.__exit__()
+
+ if callable(conditional):
+ cond = conditional()
+ else:
+ cond = conditional
+ if cond:
+ return nose.tools.make_decorator(f)(_deprecated_imp)
+ else:
+ return f
+ return deprecate_decorator
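The decorators restored above are nose-style test markers. As a minimal usage sketch (assuming nose is installed and the module above is importable as ``IPython.external.decorators._decorators``; the test names are made up), ``skipif``, ``knownfailureif`` and ``slow`` combine like this::

    import sys

    from IPython.external.decorators._decorators import (knownfailureif,
                                                          skipif, slow)

    # Static condition: evaluated once, when the decorator is applied.
    @skipif(sys.platform == 'win32', 'POSIX-only behaviour')
    def test_posix_join():
        assert '/'.join(['a', 'b']) == 'a/b'

    # Callable condition: evaluated lazily, when the suite actually runs.
    @knownfailureif(lambda: sys.version_info[0] == 2,
                    'known to fail on Python 2')
    @slow
    def test_not_yet_ported():
        raise AssertionError('not implemented yet')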
diff --git a/contrib/python/ipython/py2/IPython/external/decorators/_numpy_testing_noseclasses.py b/contrib/python/ipython/py2/IPython/external/decorators/_numpy_testing_noseclasses.py
index 2b3d2841a7..ca6ccd87bb 100644
--- a/contrib/python/ipython/py2/IPython/external/decorators/_numpy_testing_noseclasses.py
+++ b/contrib/python/ipython/py2/IPython/external/decorators/_numpy_testing_noseclasses.py
@@ -1,41 +1,41 @@
-# IPython: modified copy of numpy.testing.noseclasses, so
-# IPython.external._decorators works without numpy being installed.
-
-# These classes implement a "known failure" error class.
-
-import os
-
-from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin
-
-class KnownFailureTest(Exception):
- '''Raise this exception to mark a test as a known failing test.'''
- pass
-
-
-class KnownFailure(ErrorClassPlugin):
- '''Plugin that installs a KNOWNFAIL error class for the
- KnownFailureClass exception. When KnownFailureTest is raised,
- the exception will be logged in the knownfail attribute of the
- result, 'K' or 'KNOWNFAIL' (verbose) will be output, and the
- exception will not be counted as an error or failure.'''
- enabled = True
- knownfail = ErrorClass(KnownFailureTest,
- label='KNOWNFAIL',
- isfailure=False)
-
- def options(self, parser, env=os.environ):
- env_opt = 'NOSE_WITHOUT_KNOWNFAIL'
- parser.add_option('--no-knownfail', action='store_true',
- dest='noKnownFail', default=env.get(env_opt, False),
- help='Disable special handling of KnownFailureTest '
- 'exceptions')
-
- def configure(self, options, conf):
- if not self.can_configure:
- return
- self.conf = conf
- disable = getattr(options, 'noKnownFail', False)
- if disable:
- self.enabled = False
-
-
+# IPython: modified copy of numpy.testing.noseclasses, so
+# IPython.external._decorators works without numpy being installed.
+
+# These classes implement a "known failure" error class.
+
+import os
+
+from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin
+
+class KnownFailureTest(Exception):
+ '''Raise this exception to mark a test as a known failing test.'''
+ pass
+
+
+class KnownFailure(ErrorClassPlugin):
+ '''Plugin that installs a KNOWNFAIL error class for the
+ KnownFailureClass exception. When KnownFailureTest is raised,
+ the exception will be logged in the knownfail attribute of the
+ result, 'K' or 'KNOWNFAIL' (verbose) will be output, and the
+ exception will not be counted as an error or failure.'''
+ enabled = True
+ knownfail = ErrorClass(KnownFailureTest,
+ label='KNOWNFAIL',
+ isfailure=False)
+
+ def options(self, parser, env=os.environ):
+ env_opt = 'NOSE_WITHOUT_KNOWNFAIL'
+ parser.add_option('--no-knownfail', action='store_true',
+ dest='noKnownFail', default=env.get(env_opt, False),
+ help='Disable special handling of KnownFailureTest '
+ 'exceptions')
+
+ def configure(self, options, conf):
+ if not self.can_configure:
+ return
+ self.conf = conf
+ disable = getattr(options, 'noKnownFail', False)
+ if disable:
+ self.enabled = False
+
+
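The ``KnownFailure`` plugin defined above is not active by default; it has to be handed to nose explicitly. A rough sketch (the test module name ``mytests`` is hypothetical)::

    import nose

    from IPython.external.decorators._numpy_testing_noseclasses import KnownFailure

    if __name__ == '__main__':
        # With the plugin enabled, tests raising KnownFailureTest are reported
        # as 'K'/'KNOWNFAIL' instead of being counted as errors or failures.
        nose.run(argv=['nosetests', 'mytests'], addplugins=[KnownFailure()])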
diff --git a/contrib/python/ipython/py2/IPython/external/decorators/_numpy_testing_utils.py b/contrib/python/ipython/py2/IPython/external/decorators/_numpy_testing_utils.py
index d12cba38ac..ad7bd0f981 100644
--- a/contrib/python/ipython/py2/IPython/external/decorators/_numpy_testing_utils.py
+++ b/contrib/python/ipython/py2/IPython/external/decorators/_numpy_testing_utils.py
@@ -1,112 +1,112 @@
-# IPython: modified copy of numpy.testing.utils, so
-# IPython.external._decorators works without numpy being installed.
-"""
-Utility function to facilitate testing.
-"""
-
-import sys
-import warnings
-
-# The following two classes are copied from python 2.6 warnings module (context
-# manager)
-class WarningMessage(object):
-
- """
- Holds the result of a single showwarning() call.
-
- Notes
- -----
- `WarningMessage` is copied from the Python 2.6 warnings module,
- so it can be used in NumPy with older Python versions.
-
- """
-
- _WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
- "line")
-
- def __init__(self, message, category, filename, lineno, file=None,
- line=None):
- local_values = locals()
- for attr in self._WARNING_DETAILS:
- setattr(self, attr, local_values[attr])
- if category:
- self._category_name = category.__name__
- else:
- self._category_name = None
-
- def __str__(self):
- return ("{message : %r, category : %r, filename : %r, lineno : %s, "
- "line : %r}" % (self.message, self._category_name,
- self.filename, self.lineno, self.line))
-
-class WarningManager:
- """
- A context manager that copies and restores the warnings filter upon
- exiting the context.
-
- The 'record' argument specifies whether warnings should be captured by a
- custom implementation of ``warnings.showwarning()`` and be appended to a
- list returned by the context manager. Otherwise None is returned by the
- context manager. The objects appended to the list are arguments whose
- attributes mirror the arguments to ``showwarning()``.
-
- The 'module' argument is to specify an alternative module to the module
- named 'warnings' and imported under that name. This argument is only useful
- when testing the warnings module itself.
-
- Notes
- -----
- `WarningManager` is a copy of the ``catch_warnings`` context manager
- from the Python 2.6 warnings module, with slight modifications.
- It is copied so it can be used in NumPy with older Python versions.
-
- """
- def __init__(self, record=False, module=None):
- self._record = record
- if module is None:
- self._module = sys.modules['warnings']
- else:
- self._module = module
- self._entered = False
-
- def __enter__(self):
- if self._entered:
- raise RuntimeError("Cannot enter %r twice" % self)
- self._entered = True
- self._filters = self._module.filters
- self._module.filters = self._filters[:]
- self._showwarning = self._module.showwarning
- if self._record:
- log = []
- def showwarning(*args, **kwargs):
- log.append(WarningMessage(*args, **kwargs))
- self._module.showwarning = showwarning
- return log
- else:
- return None
-
+# IPython: modified copy of numpy.testing.utils, so
+# IPython.external._decorators works without numpy being installed.
+"""
+Utility function to facilitate testing.
+"""
+
+import sys
+import warnings
+
+# The following two classes are copied from python 2.6 warnings module (context
+# manager)
+class WarningMessage(object):
+
+ """
+ Holds the result of a single showwarning() call.
+
+ Notes
+ -----
+ `WarningMessage` is copied from the Python 2.6 warnings module,
+ so it can be used in NumPy with older Python versions.
+
+ """
+
+ _WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
+ "line")
+
+ def __init__(self, message, category, filename, lineno, file=None,
+ line=None):
+ local_values = locals()
+ for attr in self._WARNING_DETAILS:
+ setattr(self, attr, local_values[attr])
+ if category:
+ self._category_name = category.__name__
+ else:
+ self._category_name = None
+
+ def __str__(self):
+ return ("{message : %r, category : %r, filename : %r, lineno : %s, "
+ "line : %r}" % (self.message, self._category_name,
+ self.filename, self.lineno, self.line))
+
+class WarningManager:
+ """
+ A context manager that copies and restores the warnings filter upon
+ exiting the context.
+
+ The 'record' argument specifies whether warnings should be captured by a
+ custom implementation of ``warnings.showwarning()`` and be appended to a
+ list returned by the context manager. Otherwise None is returned by the
+ context manager. The objects appended to the list are arguments whose
+ attributes mirror the arguments to ``showwarning()``.
+
+ The 'module' argument is to specify an alternative module to the module
+ named 'warnings' and imported under that name. This argument is only useful
+ when testing the warnings module itself.
+
+ Notes
+ -----
+ `WarningManager` is a copy of the ``catch_warnings`` context manager
+ from the Python 2.6 warnings module, with slight modifications.
+ It is copied so it can be used in NumPy with older Python versions.
+
+ """
+ def __init__(self, record=False, module=None):
+ self._record = record
+ if module is None:
+ self._module = sys.modules['warnings']
+ else:
+ self._module = module
+ self._entered = False
+
+ def __enter__(self):
+ if self._entered:
+ raise RuntimeError("Cannot enter %r twice" % self)
+ self._entered = True
+ self._filters = self._module.filters
+ self._module.filters = self._filters[:]
+ self._showwarning = self._module.showwarning
+ if self._record:
+ log = []
+ def showwarning(*args, **kwargs):
+ log.append(WarningMessage(*args, **kwargs))
+ self._module.showwarning = showwarning
+ return log
+ else:
+ return None
+
def __exit__(self, type_, value, traceback):
- if not self._entered:
- raise RuntimeError("Cannot exit %r without entering first" % self)
- self._module.filters = self._filters
- self._module.showwarning = self._showwarning
-
-def assert_warns(warning_class, func, *args, **kw):
- """Fail unless a warning of class warning_class is thrown by callable when
- invoked with arguments args and keyword arguments kwargs.
-
- If a different type of warning is thrown, it will not be caught, and the
- test case will be deemed to have suffered an error.
- """
-
- # XXX: once we may depend on python >= 2.6, this can be replaced by the
- # warnings module context manager.
+ if not self._entered:
+ raise RuntimeError("Cannot exit %r without entering first" % self)
+ self._module.filters = self._filters
+ self._module.showwarning = self._showwarning
+
+def assert_warns(warning_class, func, *args, **kw):
+ """Fail unless a warning of class warning_class is thrown by callable when
+ invoked with arguments args and keyword arguments kwargs.
+
+ If a different type of warning is thrown, it will not be caught, and the
+ test case will be deemed to have suffered an error.
+ """
+
+ # XXX: once we may depend on python >= 2.6, this can be replaced by the
+ # warnings module context manager.
with WarningManager(record=True) as l:
warnings.simplefilter('always')
- func(*args, **kw)
- if not len(l) > 0:
- raise AssertionError("No warning raised when calling %s"
- % func.__name__)
- if not l[0].category is warning_class:
- raise AssertionError("First warning for %s is not a " \
- "%s( is %s)" % (func.__name__, warning_class, l[0]))
+ func(*args, **kw)
+ if not len(l) > 0:
+ raise AssertionError("No warning raised when calling %s"
+ % func.__name__)
+ if not l[0].category is warning_class:
+ raise AssertionError("First warning for %s is not a " \
+ "%s( is %s)" % (func.__name__, warning_class, l[0]))
diff --git a/contrib/python/ipython/py2/IPython/external/mathjax.py b/contrib/python/ipython/py2/IPython/external/mathjax.py
index c614e46579..1b9b80905b 100644
--- a/contrib/python/ipython/py2/IPython/external/mathjax.py
+++ b/contrib/python/ipython/py2/IPython/external/mathjax.py
@@ -1,13 +1,13 @@
-#!/usr/bin/python
-"""
-`IPython.external.mathjax` is deprecated with IPython 4.0+
-
-mathjax is now installed by default with the notebook package
-
-"""
-
-import sys
-
-if __name__ == '__main__' :
- sys.exit("IPython.external.mathjax is deprecated, Mathjax is now installed by default with the notebook package")
-
+#!/usr/bin/python
+"""
+`IPython.external.mathjax` is deprecated with IPython 4.0+
+
+mathjax is now installed by default with the notebook package
+
+"""
+
+import sys
+
+if __name__ == '__main__' :
+ sys.exit("IPython.external.mathjax is deprecated, Mathjax is now installed by default with the notebook package")
+
diff --git a/contrib/python/ipython/py2/IPython/external/qt_for_kernel.py b/contrib/python/ipython/py2/IPython/external/qt_for_kernel.py
index fe1dc15dae..1a94e7e0a2 100644
--- a/contrib/python/ipython/py2/IPython/external/qt_for_kernel.py
+++ b/contrib/python/ipython/py2/IPython/external/qt_for_kernel.py
@@ -1,95 +1,95 @@
-""" Import Qt in a manner suitable for an IPython kernel.
-
-This is the import used for the `gui=qt` or `matplotlib=qt` initialization.
-
-Import Priority:
-
-if Qt has been imported anywhere else:
- use that
-
-if matplotlib has been imported and doesn't support v2 (<= 1.0.1):
- use PyQt4 @v1
-
-Next, ask QT_API env variable
-
-if QT_API not set:
- ask matplotlib what it's using. If Qt4Agg or Qt5Agg, then use the
- version matplotlib is configured with
-
- else: (matplotlib said nothing)
- # this is the default path - nobody told us anything
- try in this order:
- PyQt default version, PySide, PyQt5
-else:
- use what QT_API says
-
-"""
-# NOTE: This is no longer an external, third-party module, and should be
-# considered part of IPython. For compatibility however, it is being kept in
-# IPython/external.
-
-import os
-import sys
-
-from IPython.utils.version import check_version
-from IPython.external.qt_loaders import (load_qt, loaded_api, QT_API_PYSIDE,
+""" Import Qt in a manner suitable for an IPython kernel.
+
+This is the import used for the `gui=qt` or `matplotlib=qt` initialization.
+
+Import Priority:
+
+if Qt has been imported anywhere else:
+ use that
+
+if matplotlib has been imported and doesn't support v2 (<= 1.0.1):
+ use PyQt4 @v1
+
+Next, ask QT_API env variable
+
+if QT_API not set:
+ ask matplotlib what it's using. If Qt4Agg or Qt5Agg, then use the
+ version matplotlib is configured with
+
+ else: (matplotlib said nothing)
+ # this is the default path - nobody told us anything
+ try in this order:
+ PyQt default version, PySide, PyQt5
+else:
+ use what QT_API says
+
+"""
+# NOTE: This is no longer an external, third-party module, and should be
+# considered part of IPython. For compatibility however, it is being kept in
+# IPython/external.
+
+import os
+import sys
+
+from IPython.utils.version import check_version
+from IPython.external.qt_loaders import (load_qt, loaded_api, QT_API_PYSIDE,
QT_API_PYSIDE2, QT_API_PYQT, QT_API_PYQT5,
- QT_API_PYQTv1, QT_API_PYQT_DEFAULT)
-
+ QT_API_PYQTv1, QT_API_PYQT_DEFAULT)
+
_qt_apis = (QT_API_PYSIDE, QT_API_PYSIDE2, QT_API_PYQT, QT_API_PYQT5, QT_API_PYQTv1,
- QT_API_PYQT_DEFAULT)
-
-#Constraints placed on an imported matplotlib
-def matplotlib_options(mpl):
- if mpl is None:
- return
- backend = mpl.rcParams.get('backend', None)
- if backend == 'Qt4Agg':
- mpqt = mpl.rcParams.get('backend.qt4', None)
- if mpqt is None:
- return None
- if mpqt.lower() == 'pyside':
- return [QT_API_PYSIDE]
- elif mpqt.lower() == 'pyqt4':
- return [QT_API_PYQT_DEFAULT]
- elif mpqt.lower() == 'pyqt4v2':
- return [QT_API_PYQT]
- raise ImportError("unhandled value for backend.qt4 from matplotlib: %r" %
- mpqt)
- elif backend == 'Qt5Agg':
- mpqt = mpl.rcParams.get('backend.qt5', None)
- if mpqt is None:
- return None
- if mpqt.lower() == 'pyqt5':
- return [QT_API_PYQT5]
- raise ImportError("unhandled value for backend.qt5 from matplotlib: %r" %
- mpqt)
-
-def get_options():
- """Return a list of acceptable QT APIs, in decreasing order of
- preference
- """
- #already imported Qt somewhere. Use that
- loaded = loaded_api()
- if loaded is not None:
- return [loaded]
-
- mpl = sys.modules.get('matplotlib', None)
-
- if mpl is not None and not check_version(mpl.__version__, '1.0.2'):
- #1.0.1 only supports PyQt4 v1
- return [QT_API_PYQT_DEFAULT]
-
- qt_api = os.environ.get('QT_API', None)
- if qt_api is None:
- #no ETS variable. Ask mpl, then use default fallback path
+ QT_API_PYQT_DEFAULT)
+
+#Constraints placed on an imported matplotlib
+def matplotlib_options(mpl):
+ if mpl is None:
+ return
+ backend = mpl.rcParams.get('backend', None)
+ if backend == 'Qt4Agg':
+ mpqt = mpl.rcParams.get('backend.qt4', None)
+ if mpqt is None:
+ return None
+ if mpqt.lower() == 'pyside':
+ return [QT_API_PYSIDE]
+ elif mpqt.lower() == 'pyqt4':
+ return [QT_API_PYQT_DEFAULT]
+ elif mpqt.lower() == 'pyqt4v2':
+ return [QT_API_PYQT]
+ raise ImportError("unhandled value for backend.qt4 from matplotlib: %r" %
+ mpqt)
+ elif backend == 'Qt5Agg':
+ mpqt = mpl.rcParams.get('backend.qt5', None)
+ if mpqt is None:
+ return None
+ if mpqt.lower() == 'pyqt5':
+ return [QT_API_PYQT5]
+ raise ImportError("unhandled value for backend.qt5 from matplotlib: %r" %
+ mpqt)
+
+def get_options():
+ """Return a list of acceptable QT APIs, in decreasing order of
+ preference
+ """
+ #already imported Qt somewhere. Use that
+ loaded = loaded_api()
+ if loaded is not None:
+ return [loaded]
+
+ mpl = sys.modules.get('matplotlib', None)
+
+ if mpl is not None and not check_version(mpl.__version__, '1.0.2'):
+ #1.0.1 only supports PyQt4 v1
+ return [QT_API_PYQT_DEFAULT]
+
+ qt_api = os.environ.get('QT_API', None)
+ if qt_api is None:
+ #no ETS variable. Ask mpl, then use default fallback path
return matplotlib_options(mpl) or [QT_API_PYQT_DEFAULT, QT_API_PYSIDE,
QT_API_PYQT5, QT_API_PYSIDE2]
- elif qt_api not in _qt_apis:
- raise RuntimeError("Invalid Qt API %r, valid values are: %r" %
- (qt_api, ', '.join(_qt_apis)))
- else:
- return [qt_api]
-
-api_opts = get_options()
-QtCore, QtGui, QtSvg, QT_API = load_qt(api_opts)
+ elif qt_api not in _qt_apis:
+ raise RuntimeError("Invalid Qt API %r, valid values are: %r" %
+ (qt_api, ', '.join(_qt_apis)))
+ else:
+ return [qt_api]
+
+api_opts = get_options()
+QtCore, QtGui, QtSvg, QT_API = load_qt(api_opts)
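``get_options`` above honours the ``QT_API`` environment variable before falling back to matplotlib's configuration and the built-in default order. A sketch of steering it (the variable must be set before Qt is imported anywhere in the process)::

    import os

    # Selecting the binding has to happen before qt_for_kernel is imported.
    os.environ['QT_API'] = 'pyqt5'

    from IPython.external.qt_for_kernel import QtCore, QtGui, QT_API

    # Expected to report 'pyqt5' when PyQt5 is installed; otherwise the
    # import raises, because QT_API restricts loading to a single binding.
    print(QT_API)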
diff --git a/contrib/python/ipython/py2/IPython/external/qt_loaders.py b/contrib/python/ipython/py2/IPython/external/qt_loaders.py
index e9912106e9..3b894fb2ab 100644
--- a/contrib/python/ipython/py2/IPython/external/qt_loaders.py
+++ b/contrib/python/ipython/py2/IPython/external/qt_loaders.py
@@ -1,27 +1,27 @@
-"""
-This module contains factory functions that attempt
-to return Qt submodules from the various python Qt bindings.
-
-It also protects against double-importing Qt with different
-bindings, which is unstable and likely to crash
-
-This is used primarily by qt and qt_for_kernel, and shouldn't
-be accessed directly from the outside
-"""
-import sys
-import types
-from functools import partial
-
-from IPython.utils.version import check_version
-
-# Available APIs.
-QT_API_PYQT = 'pyqt' # Force version 2
-QT_API_PYQT5 = 'pyqt5'
-QT_API_PYQTv1 = 'pyqtv1' # Force version 2
-QT_API_PYQT_DEFAULT = 'pyqtdefault' # use system default for version 1 vs. 2
-QT_API_PYSIDE = 'pyside'
+"""
+This module contains factory functions that attempt
+to return Qt submodules from the various python Qt bindings.
+
+It also protects against double-importing Qt with different
+bindings, which is unstable and likely to crash
+
+This is used primarily by qt and qt_for_kernel, and shouldn't
+be accessed directly from the outside
+"""
+import sys
+import types
+from functools import partial
+
+from IPython.utils.version import check_version
+
+# Available APIs.
+QT_API_PYQT = 'pyqt' # Force version 2
+QT_API_PYQT5 = 'pyqt5'
+QT_API_PYQTv1 = 'pyqtv1' # Force version 2
+QT_API_PYQT_DEFAULT = 'pyqtdefault' # use system default for version 1 vs. 2
+QT_API_PYSIDE = 'pyside'
QT_API_PYSIDE2 = 'pyside2'
-
+
api_to_module = {QT_API_PYSIDE2: 'PySide2',
QT_API_PYSIDE: 'PySide',
QT_API_PYQT: 'PyQt4',
@@ -29,124 +29,124 @@ api_to_module = {QT_API_PYSIDE2: 'PySide2',
QT_API_PYQT5: 'PyQt5',
QT_API_PYQT_DEFAULT: 'PyQt4',
}
-
-
-class ImportDenier(object):
- """Import Hook that will guard against bad Qt imports
- once IPython commits to a specific binding
- """
-
- def __init__(self):
- self.__forbidden = set()
-
- def forbid(self, module_name):
- sys.modules.pop(module_name, None)
- self.__forbidden.add(module_name)
-
- def find_module(self, fullname, path=None):
- if path:
- return
- if fullname in self.__forbidden:
- return self
-
- def load_module(self, fullname):
- raise ImportError("""
- Importing %s disabled by IPython, which has
- already imported an Incompatible QT Binding: %s
- """ % (fullname, loaded_api()))
-
-ID = ImportDenier()
+
+
+class ImportDenier(object):
+ """Import Hook that will guard against bad Qt imports
+ once IPython commits to a specific binding
+ """
+
+ def __init__(self):
+ self.__forbidden = set()
+
+ def forbid(self, module_name):
+ sys.modules.pop(module_name, None)
+ self.__forbidden.add(module_name)
+
+ def find_module(self, fullname, path=None):
+ if path:
+ return
+ if fullname in self.__forbidden:
+ return self
+
+ def load_module(self, fullname):
+ raise ImportError("""
+ Importing %s disabled by IPython, which has
+ already imported an Incompatible QT Binding: %s
+ """ % (fullname, loaded_api()))
+
+ID = ImportDenier()
sys.meta_path.insert(0, ID)
-
-
-def commit_api(api):
- """Commit to a particular API, and trigger ImportErrors on subsequent
- dangerous imports"""
-
+
+
+def commit_api(api):
+ """Commit to a particular API, and trigger ImportErrors on subsequent
+ dangerous imports"""
+
if api == QT_API_PYSIDE2:
ID.forbid('PySide')
ID.forbid('PyQt4')
ID.forbid('PyQt5')
- if api == QT_API_PYSIDE:
+ if api == QT_API_PYSIDE:
ID.forbid('PySide2')
- ID.forbid('PyQt4')
- ID.forbid('PyQt5')
- elif api == QT_API_PYQT5:
+ ID.forbid('PyQt4')
+ ID.forbid('PyQt5')
+ elif api == QT_API_PYQT5:
ID.forbid('PySide2')
- ID.forbid('PySide')
- ID.forbid('PyQt4')
- else: # There are three other possibilities, all representing PyQt4
- ID.forbid('PyQt5')
+ ID.forbid('PySide')
+ ID.forbid('PyQt4')
+ else: # There are three other possibilities, all representing PyQt4
+ ID.forbid('PyQt5')
ID.forbid('PySide2')
- ID.forbid('PySide')
-
-
-def loaded_api():
- """Return which API is loaded, if any
-
- If this returns anything besides None,
- importing any other Qt binding is unsafe.
-
- Returns
- -------
+ ID.forbid('PySide')
+
+
+def loaded_api():
+ """Return which API is loaded, if any
+
+ If this returns anything besides None,
+ importing any other Qt binding is unsafe.
+
+ Returns
+ -------
None, 'pyside2', 'pyside', 'pyqt', 'pyqt5', or 'pyqtv1'
- """
- if 'PyQt4.QtCore' in sys.modules:
- if qtapi_version() == 2:
- return QT_API_PYQT
- else:
- return QT_API_PYQTv1
- elif 'PySide.QtCore' in sys.modules:
- return QT_API_PYSIDE
+ """
+ if 'PyQt4.QtCore' in sys.modules:
+ if qtapi_version() == 2:
+ return QT_API_PYQT
+ else:
+ return QT_API_PYQTv1
+ elif 'PySide.QtCore' in sys.modules:
+ return QT_API_PYSIDE
elif 'PySide2.QtCore' in sys.modules:
return QT_API_PYSIDE2
- elif 'PyQt5.QtCore' in sys.modules:
- return QT_API_PYQT5
- return None
-
-
-def has_binding(api):
+ elif 'PyQt5.QtCore' in sys.modules:
+ return QT_API_PYQT5
+ return None
+
+
+def has_binding(api):
"""Safely check for PyQt4/5, PySide or PySide2, without importing submodules
-
+
Supports Python <= 3.3
- Parameters
- ----------
+ Parameters
+ ----------
api : str [ 'pyqtv1' | 'pyqt' | 'pyqt5' | 'pyside' | 'pyside2' | 'pyqtdefault']
- Which module to check for
-
- Returns
- -------
- True if the relevant module appears to be importable
- """
- # we can't import an incomplete pyside and pyqt4
- # this will cause a crash in sip (#1431)
- # check for complete presence before importing
+ Which module to check for
+
+ Returns
+ -------
+ True if the relevant module appears to be importable
+ """
+ # we can't import an incomplete pyside and pyqt4
+ # this will cause a crash in sip (#1431)
+ # check for complete presence before importing
module_name = api_to_module[api]
-
- import imp
- try:
- #importing top level PyQt4/PySide module is ok...
- mod = __import__(module_name)
- #...importing submodules is not
- imp.find_module('QtCore', mod.__path__)
- imp.find_module('QtGui', mod.__path__)
- imp.find_module('QtSvg', mod.__path__)
+
+ import imp
+ try:
+ #importing top level PyQt4/PySide module is ok...
+ mod = __import__(module_name)
+ #...importing submodules is not
+ imp.find_module('QtCore', mod.__path__)
+ imp.find_module('QtGui', mod.__path__)
+ imp.find_module('QtSvg', mod.__path__)
if api in (QT_API_PYQT5, QT_API_PYSIDE2):
- # QT5 requires QtWidgets too
- imp.find_module('QtWidgets', mod.__path__)
-
- #we can also safely check PySide version
- if api == QT_API_PYSIDE:
- return check_version(mod.__version__, '1.0.3')
- else:
- return True
- except ImportError:
- return False
-
+ # QT5 requires QtWidgets too
+ imp.find_module('QtWidgets', mod.__path__)
+
+ #we can also safely check PySide version
+ if api == QT_API_PYSIDE:
+ return check_version(mod.__version__, '1.0.3')
+ else:
+ return True
+ except ImportError:
+ return False
+
def has_binding_new(api):
"""Safely check for PyQt4/5, PySide or PySide2, without importing submodules
-
+
Supports Python >= 3.4
Parameters
@@ -187,108 +187,108 @@ def has_binding_new(api):
if sys.version_info >= (3, 4):
has_binding = has_binding_new
-def qtapi_version():
- """Return which QString API has been set, if any
-
- Returns
- -------
- The QString API version (1 or 2), or None if not set
- """
- try:
- import sip
- except ImportError:
- return
- try:
- return sip.getapi('QString')
- except ValueError:
- return
-
-
-def can_import(api):
- """Safely query whether an API is importable, without importing it"""
- if not has_binding(api):
- return False
-
- current = loaded_api()
- if api == QT_API_PYQT_DEFAULT:
- return current in [QT_API_PYQT, QT_API_PYQTv1, None]
- else:
- return current in [api, None]
-
-
-def import_pyqt4(version=2):
- """
- Import PyQt4
-
- Parameters
- ----------
- version : 1, 2, or None
- Which QString/QVariant API to use. Set to None to use the system
- default
-
-    ImportErrors raised within this function are non-recoverable
- """
- # The new-style string API (version=2) automatically
- # converts QStrings to Unicode Python strings. Also, automatically unpacks
- # QVariants to their underlying objects.
- import sip
-
- if version is not None:
- sip.setapi('QString', version)
- sip.setapi('QVariant', version)
-
- from PyQt4 import QtGui, QtCore, QtSvg
-
- if not check_version(QtCore.PYQT_VERSION_STR, '4.7'):
- raise ImportError("IPython requires PyQt4 >= 4.7, found %s" %
- QtCore.PYQT_VERSION_STR)
-
- # Alias PyQt-specific functions for PySide compatibility.
- QtCore.Signal = QtCore.pyqtSignal
- QtCore.Slot = QtCore.pyqtSlot
-
- # query for the API version (in case version == None)
- version = sip.getapi('QString')
- api = QT_API_PYQTv1 if version == 1 else QT_API_PYQT
- return QtCore, QtGui, QtSvg, api
-
-
-def import_pyqt5():
- """
- Import PyQt5
-
-    ImportErrors raised within this function are non-recoverable
- """
- import sip
-
- from PyQt5 import QtCore, QtSvg, QtWidgets, QtGui
-
- # Alias PyQt-specific functions for PySide compatibility.
- QtCore.Signal = QtCore.pyqtSignal
- QtCore.Slot = QtCore.pyqtSlot
-
- # Join QtGui and QtWidgets for Qt4 compatibility.
- QtGuiCompat = types.ModuleType('QtGuiCompat')
- QtGuiCompat.__dict__.update(QtGui.__dict__)
- QtGuiCompat.__dict__.update(QtWidgets.__dict__)
-
- api = QT_API_PYQT5
- return QtCore, QtGuiCompat, QtSvg, api
-
-
-def import_pyside():
- """
- Import PySide
-
- ImportErrors raised within this function are non-recoverable
- """
- from PySide import QtGui, QtCore, QtSvg
- return QtCore, QtGui, QtSvg, QT_API_PYSIDE
-
+def qtapi_version():
+ """Return which QString API has been set, if any
+
+ Returns
+ -------
+ The QString API version (1 or 2), or None if not set
+ """
+ try:
+ import sip
+ except ImportError:
+ return
+ try:
+ return sip.getapi('QString')
+ except ValueError:
+ return
+
+
+def can_import(api):
+ """Safely query whether an API is importable, without importing it"""
+ if not has_binding(api):
+ return False
+
+ current = loaded_api()
+ if api == QT_API_PYQT_DEFAULT:
+ return current in [QT_API_PYQT, QT_API_PYQTv1, None]
+ else:
+ return current in [api, None]
+
+
+def import_pyqt4(version=2):
+ """
+ Import PyQt4
+
+ Parameters
+ ----------
+ version : 1, 2, or None
+ Which QString/QVariant API to use. Set to None to use the system
+ default
+
+    ImportErrors raised within this function are non-recoverable
+ """
+ # The new-style string API (version=2) automatically
+ # converts QStrings to Unicode Python strings. Also, automatically unpacks
+ # QVariants to their underlying objects.
+ import sip
+
+ if version is not None:
+ sip.setapi('QString', version)
+ sip.setapi('QVariant', version)
+
+ from PyQt4 import QtGui, QtCore, QtSvg
+
+ if not check_version(QtCore.PYQT_VERSION_STR, '4.7'):
+ raise ImportError("IPython requires PyQt4 >= 4.7, found %s" %
+ QtCore.PYQT_VERSION_STR)
+
+ # Alias PyQt-specific functions for PySide compatibility.
+ QtCore.Signal = QtCore.pyqtSignal
+ QtCore.Slot = QtCore.pyqtSlot
+
+ # query for the API version (in case version == None)
+ version = sip.getapi('QString')
+ api = QT_API_PYQTv1 if version == 1 else QT_API_PYQT
+ return QtCore, QtGui, QtSvg, api
+
+
+def import_pyqt5():
+ """
+ Import PyQt5
+
+    ImportErrors raised within this function are non-recoverable
+ """
+ import sip
+
+ from PyQt5 import QtCore, QtSvg, QtWidgets, QtGui
+
+ # Alias PyQt-specific functions for PySide compatibility.
+ QtCore.Signal = QtCore.pyqtSignal
+ QtCore.Slot = QtCore.pyqtSlot
+
+ # Join QtGui and QtWidgets for Qt4 compatibility.
+ QtGuiCompat = types.ModuleType('QtGuiCompat')
+ QtGuiCompat.__dict__.update(QtGui.__dict__)
+ QtGuiCompat.__dict__.update(QtWidgets.__dict__)
+
+ api = QT_API_PYQT5
+ return QtCore, QtGuiCompat, QtSvg, api
+
+
+def import_pyside():
+ """
+ Import PySide
+
+ ImportErrors raised within this function are non-recoverable
+ """
+ from PySide import QtGui, QtCore, QtSvg
+ return QtCore, QtGui, QtSvg, QT_API_PYSIDE
+
def import_pyside2():
"""
Import PySide2
-
+
ImportErrors raised within this function are non-recoverable
"""
from PySide2 import QtGui, QtCore, QtSvg, QtWidgets, QtPrintSupport
@@ -302,71 +302,71 @@ def import_pyside2():
return QtCore, QtGuiCompat, QtSvg, QT_API_PYSIDE2
-def load_qt(api_options):
- """
- Attempt to import Qt, given a preference list
- of permissible bindings
-
- It is safe to call this function multiple times.
-
- Parameters
- ----------
- api_options: List of strings
+def load_qt(api_options):
+ """
+ Attempt to import Qt, given a preference list
+ of permissible bindings
+
+ It is safe to call this function multiple times.
+
+ Parameters
+ ----------
+ api_options: List of strings
The order of APIs to try. Valid items are 'pyside', 'pyside2',
- 'pyqt', 'pyqt5', 'pyqtv1' and 'pyqtdefault'
-
- Returns
- -------
-
- A tuple of QtCore, QtGui, QtSvg, QT_API
- The first three are the Qt modules. The last is the
- string indicating which module was loaded.
-
- Raises
- ------
- ImportError, if it isn't possible to import any requested
-    bindings (either because they aren't installed, or because
- an incompatible library has already been installed)
- """
+ 'pyqt', 'pyqt5', 'pyqtv1' and 'pyqtdefault'
+
+ Returns
+ -------
+
+ A tuple of QtCore, QtGui, QtSvg, QT_API
+ The first three are the Qt modules. The last is the
+ string indicating which module was loaded.
+
+ Raises
+ ------
+ ImportError, if it isn't possible to import any requested
+    bindings (either because they aren't installed, or because
+ an incompatible library has already been installed)
+ """
loaders = {
QT_API_PYSIDE2: import_pyside2,
QT_API_PYSIDE: import_pyside,
- QT_API_PYQT: import_pyqt4,
- QT_API_PYQT5: import_pyqt5,
- QT_API_PYQTv1: partial(import_pyqt4, version=1),
- QT_API_PYQT_DEFAULT: partial(import_pyqt4, version=None)
+ QT_API_PYQT: import_pyqt4,
+ QT_API_PYQT5: import_pyqt5,
+ QT_API_PYQTv1: partial(import_pyqt4, version=1),
+ QT_API_PYQT_DEFAULT: partial(import_pyqt4, version=None)
}
-
- for api in api_options:
-
- if api not in loaders:
- raise RuntimeError(
- "Invalid Qt API %r, valid values are: %s" %
- (api, ", ".join(["%r" % k for k in loaders.keys()])))
-
- if not can_import(api):
- continue
-
- #cannot safely recover from an ImportError during this
- result = loaders[api]()
- api = result[-1] # changed if api = QT_API_PYQT_DEFAULT
- commit_api(api)
- return result
- else:
- raise ImportError("""
- Could not load requested Qt binding. Please ensure that
+
+ for api in api_options:
+
+ if api not in loaders:
+ raise RuntimeError(
+ "Invalid Qt API %r, valid values are: %s" %
+ (api, ", ".join(["%r" % k for k in loaders.keys()])))
+
+ if not can_import(api):
+ continue
+
+ #cannot safely recover from an ImportError during this
+ result = loaders[api]()
+ api = result[-1] # changed if api = QT_API_PYQT_DEFAULT
+ commit_api(api)
+ return result
+ else:
+ raise ImportError("""
+ Could not load requested Qt binding. Please ensure that
PyQt4 >= 4.7, PyQt5, PySide >= 1.0.3 or PySide2 is available,
- and only one is imported per session.
-
- Currently-imported Qt library: %r
- PyQt4 available (requires QtCore, QtGui, QtSvg): %s
- PyQt5 available (requires QtCore, QtGui, QtSvg, QtWidgets): %s
- PySide >= 1.0.3 installed: %s
+ and only one is imported per session.
+
+ Currently-imported Qt library: %r
+ PyQt4 available (requires QtCore, QtGui, QtSvg): %s
+ PyQt5 available (requires QtCore, QtGui, QtSvg, QtWidgets): %s
+ PySide >= 1.0.3 installed: %s
PySide2 installed: %s
- Tried to load: %r
- """ % (loaded_api(),
- has_binding(QT_API_PYQT),
- has_binding(QT_API_PYQT5),
- has_binding(QT_API_PYSIDE),
+ Tried to load: %r
+ """ % (loaded_api(),
+ has_binding(QT_API_PYQT),
+ has_binding(QT_API_PYQT5),
+ has_binding(QT_API_PYSIDE),
has_binding(QT_API_PYSIDE2),
- api_options))
+ api_options))
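``load_qt`` walks the preference list, commits to the first importable binding via ``commit_api``, and blocks the competing bindings afterwards. A minimal sketch mirroring what ``qt_for_kernel`` does::

    from IPython.external.qt_loaders import (load_qt, QT_API_PYQT5,
                                             QT_API_PYSIDE2)

    # Raises ImportError if neither PyQt5 nor PySide2 can be imported.
    QtCore, QtGui, QtSvg, api = load_qt([QT_API_PYQT5, QT_API_PYSIDE2])
    print('loaded Qt binding: %s' % api)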
diff --git a/contrib/python/ipython/py2/IPython/frontend.py b/contrib/python/ipython/py2/IPython/frontend.py
index d60957b19b..9cc3eaff2f 100644
--- a/contrib/python/ipython/py2/IPython/frontend.py
+++ b/contrib/python/ipython/py2/IPython/frontend.py
@@ -1,29 +1,29 @@
-"""
-Shim to maintain backwards compatibility with old frontend imports.
-
-We have moved all contents of the old `frontend` subpackage into top-level
-subpackages (`html`, `qt` and `terminal`), and flattened the notebook into
-just `IPython.html`, formerly `IPython.frontend.html.notebook`.
-
-This will let code that was making `from IPython.frontend...` calls continue
-working, though a warning will be printed.
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import sys
-from warnings import warn
-
-from IPython.utils.shimmodule import ShimModule, ShimWarning
-
+"""
+Shim to maintain backwards compatibility with old frontend imports.
+
+We have moved all contents of the old `frontend` subpackage into top-level
+subpackages (`html`, `qt` and `terminal`), and flattened the notebook into
+just `IPython.html`, formerly `IPython.frontend.html.notebook`.
+
+This will let code that was making `from IPython.frontend...` calls continue
+working, though a warning will be printed.
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import sys
+from warnings import warn
+
+from IPython.utils.shimmodule import ShimModule, ShimWarning
+
warn("The top-level `frontend` package has been deprecated since IPython 1.0. "
- "All its subpackages have been moved to the top `IPython` level.", ShimWarning)
-
-# Unconditionally insert the shim into sys.modules so that further import calls
-# trigger the custom attribute access above
-
-sys.modules['IPython.frontend.html.notebook'] = ShimModule(
- src='IPython.frontend.html.notebook', mirror='IPython.html')
-sys.modules['IPython.frontend'] = ShimModule(
- src='IPython.frontend', mirror='IPython')
+ "All its subpackages have been moved to the top `IPython` level.", ShimWarning)
+
+# Unconditionally insert the shim into sys.modules so that further import calls
+# trigger the custom attribute access above
+
+sys.modules['IPython.frontend.html.notebook'] = ShimModule(
+ src='IPython.frontend.html.notebook', mirror='IPython.html')
+sys.modules['IPython.frontend'] = ShimModule(
+ src='IPython.frontend', mirror='IPython')
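The shim above keeps ``IPython.frontend`` importable by planting ``ShimModule`` objects in ``sys.modules``. A quick way to observe the redirection (assuming a current IPython/notebook stack is installed)::

    import sys
    import warnings

    warnings.simplefilter('default')   # make the ShimWarning visible

    import IPython.frontend            # emits the deprecation ShimWarning

    # The module registered under the old name is the shim itself, which
    # forwards attribute access to the top-level IPython package.
    print(type(sys.modules['IPython.frontend']).__name__)   # ShimModule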
diff --git a/contrib/python/ipython/py2/IPython/html.py b/contrib/python/ipython/py2/IPython/html.py
index 3e5595d8e6..050be5c599 100644
--- a/contrib/python/ipython/py2/IPython/html.py
+++ b/contrib/python/ipython/py2/IPython/html.py
@@ -1,28 +1,28 @@
-"""
-Shim to maintain backwards compatibility with old IPython.html imports.
-"""
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import sys
-from warnings import warn
-
-from IPython.utils.shimmodule import ShimModule, ShimWarning
-
+"""
+Shim to maintain backwards compatibility with old IPython.html imports.
+"""
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import sys
+from warnings import warn
+
+from IPython.utils.shimmodule import ShimModule, ShimWarning
+
warn("The `IPython.html` package has been deprecated since IPython 4.0. "
- "You should import from `notebook` instead. "
- "`IPython.html.widgets` has moved to `ipywidgets`.", ShimWarning)
-
-_widgets = sys.modules['IPython.html.widgets'] = ShimModule(
- src='IPython.html.widgets', mirror='ipywidgets')
-
-_html = ShimModule(
- src='IPython.html', mirror='notebook')
-
-# hook up widgets
-_html.widgets = _widgets
-sys.modules['IPython.html'] = _html
-
-if __name__ == '__main__':
- from notebook import notebookapp as app
- app.launch_new_instance()
+ "You should import from `notebook` instead. "
+ "`IPython.html.widgets` has moved to `ipywidgets`.", ShimWarning)
+
+_widgets = sys.modules['IPython.html.widgets'] = ShimModule(
+ src='IPython.html.widgets', mirror='ipywidgets')
+
+_html = ShimModule(
+ src='IPython.html', mirror='notebook')
+
+# hook up widgets
+_html.widgets = _widgets
+sys.modules['IPython.html'] = _html
+
+if __name__ == '__main__':
+ from notebook import notebookapp as app
+ app.launch_new_instance()
diff --git a/contrib/python/ipython/py2/IPython/kernel/__init__.py b/contrib/python/ipython/py2/IPython/kernel/__init__.py
index 036548fffa..70a05ed4aa 100644
--- a/contrib/python/ipython/py2/IPython/kernel/__init__.py
+++ b/contrib/python/ipython/py2/IPython/kernel/__init__.py
@@ -1,35 +1,35 @@
-"""
-Shim to maintain backwards compatibility with old IPython.kernel imports.
-"""
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import sys
-from warnings import warn
-
-from IPython.utils.shimmodule import ShimModule, ShimWarning
-
+"""
+Shim to maintain backwards compatibility with old IPython.kernel imports.
+"""
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import sys
+from warnings import warn
+
+from IPython.utils.shimmodule import ShimModule, ShimWarning
+
warn("The `IPython.kernel` package has been deprecated since IPython 4.0."
- "You should import from ipykernel or jupyter_client instead.", ShimWarning)
-
-
-# zmq subdir is gone
-sys.modules['IPython.kernel.zmq.session'] = ShimModule(
- src='IPython.kernel.zmq.session', mirror='jupyter_client.session')
-sys.modules['IPython.kernel.zmq'] = ShimModule(
- src='IPython.kernel.zmq', mirror='ipykernel')
-
-for pkg in ('comm', 'inprocess'):
- src = 'IPython.kernel.%s' % pkg
- sys.modules[src] = ShimModule(src=src, mirror='ipykernel.%s' % pkg)
-
-for pkg in ('ioloop', 'blocking'):
- src = 'IPython.kernel.%s' % pkg
- sys.modules[src] = ShimModule(src=src, mirror='jupyter_client.%s' % pkg)
-
-# required for `from IPython.kernel import PKG`
-from ipykernel import comm, inprocess
-from jupyter_client import ioloop, blocking
-# public API
-from ipykernel.connect import *
-from jupyter_client import *
+ "You should import from ipykernel or jupyter_client instead.", ShimWarning)
+
+
+# zmq subdir is gone
+sys.modules['IPython.kernel.zmq.session'] = ShimModule(
+ src='IPython.kernel.zmq.session', mirror='jupyter_client.session')
+sys.modules['IPython.kernel.zmq'] = ShimModule(
+ src='IPython.kernel.zmq', mirror='ipykernel')
+
+for pkg in ('comm', 'inprocess'):
+ src = 'IPython.kernel.%s' % pkg
+ sys.modules[src] = ShimModule(src=src, mirror='ipykernel.%s' % pkg)
+
+for pkg in ('ioloop', 'blocking'):
+ src = 'IPython.kernel.%s' % pkg
+ sys.modules[src] = ShimModule(src=src, mirror='jupyter_client.%s' % pkg)
+
+# required for `from IPython.kernel import PKG`
+from ipykernel import comm, inprocess
+from jupyter_client import ioloop, blocking
+# public API
+from ipykernel.connect import *
+from jupyter_client import *
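The kernel shim above similarly redirects the deprecated ``IPython.kernel`` names to ``jupyter_client`` and ``ipykernel``. For instance (assuming both packages are installed)::

    import warnings

    warnings.simplefilter('default')        # show the ShimWarning once

    # Re-exported from jupyter_client by the star imports in the shim above.
    from IPython.kernel import KernelManager

    km = KernelManager()
    print(km.__class__.__module__)          # expected: 'jupyter_client.manager'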
diff --git a/contrib/python/ipython/py2/IPython/kernel/__main__.py b/contrib/python/ipython/py2/IPython/kernel/__main__.py
index adafe73d02..d1f0cf5334 100644
--- a/contrib/python/ipython/py2/IPython/kernel/__main__.py
+++ b/contrib/python/ipython/py2/IPython/kernel/__main__.py
@@ -1,3 +1,3 @@
-if __name__ == '__main__':
- from ipykernel import kernelapp as app
- app.launch_new_instance()
+if __name__ == '__main__':
+ from ipykernel import kernelapp as app
+ app.launch_new_instance()
diff --git a/contrib/python/ipython/py2/IPython/kernel/adapter.py b/contrib/python/ipython/py2/IPython/kernel/adapter.py
index 8a52dbbc2b..3b8c046b2d 100644
--- a/contrib/python/ipython/py2/IPython/kernel/adapter.py
+++ b/contrib/python/ipython/py2/IPython/kernel/adapter.py
@@ -1 +1 @@
-from jupyter_client.adapter import *
+from jupyter_client.adapter import *
diff --git a/contrib/python/ipython/py2/IPython/kernel/channels.py b/contrib/python/ipython/py2/IPython/kernel/channels.py
index f204db9ca3..8c7fe2a063 100644
--- a/contrib/python/ipython/py2/IPython/kernel/channels.py
+++ b/contrib/python/ipython/py2/IPython/kernel/channels.py
@@ -1 +1 @@
-from jupyter_client.channels import *
+from jupyter_client.channels import *
diff --git a/contrib/python/ipython/py2/IPython/kernel/channelsabc.py b/contrib/python/ipython/py2/IPython/kernel/channelsabc.py
index c0a44511b1..88944012d4 100644
--- a/contrib/python/ipython/py2/IPython/kernel/channelsabc.py
+++ b/contrib/python/ipython/py2/IPython/kernel/channelsabc.py
@@ -1 +1 @@
-from jupyter_client.channelsabc import *
+from jupyter_client.channelsabc import *
diff --git a/contrib/python/ipython/py2/IPython/kernel/client.py b/contrib/python/ipython/py2/IPython/kernel/client.py
index d9768e5bc4..a98690b74c 100644
--- a/contrib/python/ipython/py2/IPython/kernel/client.py
+++ b/contrib/python/ipython/py2/IPython/kernel/client.py
@@ -1 +1 @@
-from jupyter_client.client import *
+from jupyter_client.client import *
diff --git a/contrib/python/ipython/py2/IPython/kernel/clientabc.py b/contrib/python/ipython/py2/IPython/kernel/clientabc.py
index e82cb19e2a..e0cf06c942 100644
--- a/contrib/python/ipython/py2/IPython/kernel/clientabc.py
+++ b/contrib/python/ipython/py2/IPython/kernel/clientabc.py
@@ -1 +1 @@
-from jupyter_client.clientabc import *
+from jupyter_client.clientabc import *
diff --git a/contrib/python/ipython/py2/IPython/kernel/connect.py b/contrib/python/ipython/py2/IPython/kernel/connect.py
index be992cc709..5b6d40a5d3 100644
--- a/contrib/python/ipython/py2/IPython/kernel/connect.py
+++ b/contrib/python/ipython/py2/IPython/kernel/connect.py
@@ -1,2 +1,2 @@
-from ipykernel.connect import *
-from jupyter_client.connect import *
+from ipykernel.connect import *
+from jupyter_client.connect import *
diff --git a/contrib/python/ipython/py2/IPython/kernel/kernelspec.py b/contrib/python/ipython/py2/IPython/kernel/kernelspec.py
index 8a64337920..123419b2f5 100644
--- a/contrib/python/ipython/py2/IPython/kernel/kernelspec.py
+++ b/contrib/python/ipython/py2/IPython/kernel/kernelspec.py
@@ -1 +1 @@
-from jupyter_client.kernelspec import *
+from jupyter_client.kernelspec import *
diff --git a/contrib/python/ipython/py2/IPython/kernel/kernelspecapp.py b/contrib/python/ipython/py2/IPython/kernel/kernelspecapp.py
index 16f9f9eba9..28cd33abd3 100644
--- a/contrib/python/ipython/py2/IPython/kernel/kernelspecapp.py
+++ b/contrib/python/ipython/py2/IPython/kernel/kernelspecapp.py
@@ -1 +1 @@
-from jupyter_client.kernelspecapp import *
+from jupyter_client.kernelspecapp import *
diff --git a/contrib/python/ipython/py2/IPython/kernel/launcher.py b/contrib/python/ipython/py2/IPython/kernel/launcher.py
index 0500ab198b..1953bc4809 100644
--- a/contrib/python/ipython/py2/IPython/kernel/launcher.py
+++ b/contrib/python/ipython/py2/IPython/kernel/launcher.py
@@ -1 +1 @@
-from jupyter_client.launcher import *
+from jupyter_client.launcher import *
diff --git a/contrib/python/ipython/py2/IPython/kernel/manager.py b/contrib/python/ipython/py2/IPython/kernel/manager.py
index 9d9d84806f..c88097cff6 100644
--- a/contrib/python/ipython/py2/IPython/kernel/manager.py
+++ b/contrib/python/ipython/py2/IPython/kernel/manager.py
@@ -1 +1 @@
-from jupyter_client.manager import *
+from jupyter_client.manager import *
diff --git a/contrib/python/ipython/py2/IPython/kernel/managerabc.py b/contrib/python/ipython/py2/IPython/kernel/managerabc.py
index f748bdf2ce..6b40827ff8 100644
--- a/contrib/python/ipython/py2/IPython/kernel/managerabc.py
+++ b/contrib/python/ipython/py2/IPython/kernel/managerabc.py
@@ -1 +1 @@
-from jupyter_client.managerabc import *
+from jupyter_client.managerabc import *
diff --git a/contrib/python/ipython/py2/IPython/kernel/multikernelmanager.py b/contrib/python/ipython/py2/IPython/kernel/multikernelmanager.py
index 71fd8dbb34..ce576e27ea 100644
--- a/contrib/python/ipython/py2/IPython/kernel/multikernelmanager.py
+++ b/contrib/python/ipython/py2/IPython/kernel/multikernelmanager.py
@@ -1 +1 @@
-from jupyter_client.multikernelmanager import *
+from jupyter_client.multikernelmanager import *
diff --git a/contrib/python/ipython/py2/IPython/kernel/restarter.py b/contrib/python/ipython/py2/IPython/kernel/restarter.py
index 2b1de99c8e..dc24117c3a 100644
--- a/contrib/python/ipython/py2/IPython/kernel/restarter.py
+++ b/contrib/python/ipython/py2/IPython/kernel/restarter.py
@@ -1 +1 @@
-from jupyter_client.restarter import *
+from jupyter_client.restarter import *
diff --git a/contrib/python/ipython/py2/IPython/kernel/threaded.py b/contrib/python/ipython/py2/IPython/kernel/threaded.py
index 97997eeb70..4a1072f7fe 100644
--- a/contrib/python/ipython/py2/IPython/kernel/threaded.py
+++ b/contrib/python/ipython/py2/IPython/kernel/threaded.py
@@ -1 +1 @@
-from jupyter_client.threaded import *
+from jupyter_client.threaded import *
diff --git a/contrib/python/ipython/py2/IPython/lib/__init__.py b/contrib/python/ipython/py2/IPython/lib/__init__.py
index 21e34d84ca..8eb89012df 100644
--- a/contrib/python/ipython/py2/IPython/lib/__init__.py
+++ b/contrib/python/ipython/py2/IPython/lib/__init__.py
@@ -1,21 +1,21 @@
-# encoding: utf-8
-"""
-Extra capabilities for IPython
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-from IPython.lib.security import passwd
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
+# encoding: utf-8
+"""
+Extra capabilities for IPython
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+from IPython.lib.security import passwd
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
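lib/__init__.py only re-exports passwd from IPython.lib.security, which turns a plaintext password into a salted "algorithm:salt:hexdigest" string for notebook-style configuration. A short, illustrative sketch (the exact algorithm prefix depends on the installed version)::

    from IPython.lib import passwd

    # Hash a plaintext password; store the hash, never the plaintext itself.
    hashed = passwd('s3cret')
    print(hashed)   # e.g. 'sha1:<salt>:<digest>' -- format shown for illustration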
diff --git a/contrib/python/ipython/py2/IPython/lib/backgroundjobs.py b/contrib/python/ipython/py2/IPython/lib/backgroundjobs.py
index 1acfe7df1e..b724126bbb 100644
--- a/contrib/python/ipython/py2/IPython/lib/backgroundjobs.py
+++ b/contrib/python/ipython/py2/IPython/lib/backgroundjobs.py
@@ -1,491 +1,491 @@
-# -*- coding: utf-8 -*-
-"""Manage background (threaded) jobs conveniently from an interactive shell.
-
-This module provides a BackgroundJobManager class. This is the main class
-meant for public usage, it implements an object which can create and manage
-new background jobs.
-
-It also provides the actual job classes managed by these BackgroundJobManager
-objects, see their docstrings below.
-
-
-This system was inspired by discussions with B. Granger and the
-BackgroundCommand class described in the book Python Scripting for
-Computational Science, by H. P. Langtangen:
-
-http://folk.uio.no/hpl/scripting
-
-(although ultimately no code from this text was used, as IPython's system is a
-separate implementation).
-
-An example notebook is provided in our documentation illustrating interactive
-use of the system.
-"""
-from __future__ import print_function
-
-#*****************************************************************************
-# Copyright (C) 2005-2006 Fernando Perez <fperez@colorado.edu>
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#*****************************************************************************
-
-# Code begins
-import sys
-import threading
-
-from IPython import get_ipython
-from IPython.core.ultratb import AutoFormattedTB
+# -*- coding: utf-8 -*-
+"""Manage background (threaded) jobs conveniently from an interactive shell.
+
+This module provides a BackgroundJobManager class. This is the main class
+meant for public usage, it implements an object which can create and manage
+new background jobs.
+
+It also provides the actual job classes managed by these BackgroundJobManager
+objects, see their docstrings below.
+
+
+This system was inspired by discussions with B. Granger and the
+BackgroundCommand class described in the book Python Scripting for
+Computational Science, by H. P. Langtangen:
+
+http://folk.uio.no/hpl/scripting
+
+(although ultimately no code from this text was used, as IPython's system is a
+separate implementation).
+
+An example notebook is provided in our documentation illustrating interactive
+use of the system.
+"""
+from __future__ import print_function
+
+#*****************************************************************************
+# Copyright (C) 2005-2006 Fernando Perez <fperez@colorado.edu>
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#*****************************************************************************
+
+# Code begins
+import sys
+import threading
+
+from IPython import get_ipython
+from IPython.core.ultratb import AutoFormattedTB
from logging import error
-from IPython.utils.py3compat import string_types
-
-
-class BackgroundJobManager(object):
- """Class to manage a pool of backgrounded threaded jobs.
-
- Below, we assume that 'jobs' is a BackgroundJobManager instance.
-
- Usage summary (see the method docstrings for details):
-
- jobs.new(...) -> start a new job
-
- jobs() or jobs.status() -> print status summary of all jobs
-
- jobs[N] -> returns job number N.
-
- foo = jobs[N].result -> assign to variable foo the result of job N
-
- jobs[N].traceback() -> print the traceback of dead job N
-
- jobs.remove(N) -> remove (finished) job N
-
- jobs.flush() -> remove all finished jobs
-
- As a convenience feature, BackgroundJobManager instances provide the
- utility result and traceback methods which retrieve the corresponding
- information from the jobs list:
-
- jobs.result(N) <--> jobs[N].result
- jobs.traceback(N) <--> jobs[N].traceback()
-
- While this appears minor, it allows you to use tab completion
- interactively on the job manager instance.
- """
-
- def __init__(self):
- # Lists for job management, accessed via a property to ensure they're
-        # up to date.
- self._running = []
- self._completed = []
- self._dead = []
- # A dict of all jobs, so users can easily access any of them
- self.all = {}
- # For reporting
- self._comp_report = []
- self._dead_report = []
- # Store status codes locally for fast lookups
- self._s_created = BackgroundJobBase.stat_created_c
- self._s_running = BackgroundJobBase.stat_running_c
- self._s_completed = BackgroundJobBase.stat_completed_c
- self._s_dead = BackgroundJobBase.stat_dead_c
-
- @property
- def running(self):
- self._update_status()
- return self._running
-
- @property
- def dead(self):
- self._update_status()
- return self._dead
-
- @property
- def completed(self):
- self._update_status()
- return self._completed
-
- def new(self, func_or_exp, *args, **kwargs):
- """Add a new background job and start it in a separate thread.
-
- There are two types of jobs which can be created:
-
- 1. Jobs based on expressions which can be passed to an eval() call.
- The expression must be given as a string. For example:
-
- job_manager.new('myfunc(x,y,z=1)'[,glob[,loc]])
-
- The given expression is passed to eval(), along with the optional
- global/local dicts provided. If no dicts are given, they are
- extracted automatically from the caller's frame.
-
- A Python statement is NOT a valid eval() expression. Basically, you
- can only use as an eval() argument something which can go on the right
- of an '=' sign and be assigned to a variable.
-
-        For example, "print 'hello'" is not valid, but '2+3' is.
-
- 2. Jobs given a function object, optionally passing additional
- positional arguments:
-
- job_manager.new(myfunc, x, y)
-
- The function is called with the given arguments.
-
- If you need to pass keyword arguments to your function, you must
- supply them as a dict named kw:
-
- job_manager.new(myfunc, x, y, kw=dict(z=1))
-
-        The reason for this asymmetry is that the new() method needs to
- maintain access to its own keywords, and this prevents name collisions
- between arguments to new() and arguments to your own functions.
-
- In both cases, the result is stored in the job.result field of the
- background job object.
-
- You can set `daemon` attribute of the thread by giving the keyword
- argument `daemon`.
-
- Notes and caveats:
-
- 1. All threads running share the same standard output. Thus, if your
- background jobs generate output, it will come out on top of whatever
- you are currently writing. For this reason, background jobs are best
- used with silent functions which simply return their output.
-
- 2. Threads also all work within the same global namespace, and this
-        system does not lock interactive variables. So if you send a job to the
- background which operates on a mutable object for a long time, and
- start modifying that same mutable object interactively (or in another
- backgrounded job), all sorts of bizarre behaviour will occur.
-
- 3. If a background job is spending a lot of time inside a C extension
- module which does not release the Python Global Interpreter Lock
- (GIL), this will block the IPython prompt. This is simply because the
- Python interpreter can only switch between threads at Python
- bytecodes. While the execution is inside C code, the interpreter must
- simply wait unless the extension module releases the GIL.
-
- 4. There is no way, due to limitations in the Python threads library,
- to kill a thread once it has started."""
-
- if callable(func_or_exp):
- kw = kwargs.get('kw',{})
- job = BackgroundJobFunc(func_or_exp,*args,**kw)
- elif isinstance(func_or_exp, string_types):
- if not args:
- frame = sys._getframe(1)
- glob, loc = frame.f_globals, frame.f_locals
- elif len(args)==1:
- glob = loc = args[0]
- elif len(args)==2:
- glob,loc = args
- else:
- raise ValueError(
- 'Expression jobs take at most 2 args (globals,locals)')
- job = BackgroundJobExpr(func_or_exp, glob, loc)
- else:
- raise TypeError('invalid args for new job')
-
- if kwargs.get('daemon', False):
- job.daemon = True
- job.num = len(self.all)+1 if self.all else 0
- self.running.append(job)
- self.all[job.num] = job
- print('Starting job # %s in a separate thread.' % job.num)
- job.start()
- return job
-
- def __getitem__(self, job_key):
- num = job_key if isinstance(job_key, int) else job_key.num
- return self.all[num]
-
- def __call__(self):
- """An alias to self.status(),
-
- This allows you to simply call a job manager instance much like the
- Unix `jobs` shell command."""
-
- return self.status()
-
- def _update_status(self):
- """Update the status of the job lists.
-
- This method moves finished jobs to one of two lists:
- - self.completed: jobs which completed successfully
- - self.dead: jobs which finished but died.
-
- It also copies those jobs to corresponding _report lists. These lists
- are used to report jobs completed/dead since the last update, and are
- then cleared by the reporting function after each call."""
-
- # Status codes
- srun, scomp, sdead = self._s_running, self._s_completed, self._s_dead
- # State lists, use the actual lists b/c the public names are properties
- # that call this very function on access
- running, completed, dead = self._running, self._completed, self._dead
-
- # Now, update all state lists
- for num, job in enumerate(running):
- stat = job.stat_code
- if stat == srun:
- continue
- elif stat == scomp:
- completed.append(job)
- self._comp_report.append(job)
- running[num] = False
- elif stat == sdead:
- dead.append(job)
- self._dead_report.append(job)
- running[num] = False
- # Remove dead/completed jobs from running list
- running[:] = filter(None, running)
-
- def _group_report(self,group,name):
- """Report summary for a given job group.
-
- Return True if the group had any elements."""
-
- if group:
- print('%s jobs:' % name)
- for job in group:
- print('%s : %s' % (job.num,job))
- print()
- return True
-
- def _group_flush(self,group,name):
- """Flush a given job group
-
- Return True if the group had any elements."""
-
- njobs = len(group)
- if njobs:
- plural = {1:''}.setdefault(njobs,'s')
- print('Flushing %s %s job%s.' % (njobs,name,plural))
- group[:] = []
- return True
-
- def _status_new(self):
- """Print the status of newly finished jobs.
-
- Return True if any new jobs are reported.
-
- This call resets its own state every time, so it only reports jobs
- which have finished since the last time it was called."""
-
- self._update_status()
- new_comp = self._group_report(self._comp_report, 'Completed')
- new_dead = self._group_report(self._dead_report,
- 'Dead, call jobs.traceback() for details')
- self._comp_report[:] = []
- self._dead_report[:] = []
- return new_comp or new_dead
-
- def status(self,verbose=0):
- """Print a status of all jobs currently being managed."""
-
- self._update_status()
- self._group_report(self.running,'Running')
- self._group_report(self.completed,'Completed')
- self._group_report(self.dead,'Dead')
- # Also flush the report queues
- self._comp_report[:] = []
- self._dead_report[:] = []
-
- def remove(self,num):
- """Remove a finished (completed or dead) job."""
-
- try:
- job = self.all[num]
- except KeyError:
- error('Job #%s not found' % num)
- else:
- stat_code = job.stat_code
- if stat_code == self._s_running:
- error('Job #%s is still running, it can not be removed.' % num)
- return
- elif stat_code == self._s_completed:
- self.completed.remove(job)
- elif stat_code == self._s_dead:
- self.dead.remove(job)
-
- def flush(self):
- """Flush all finished jobs (completed and dead) from lists.
-
- Running jobs are never flushed.
-
- It first calls _status_new(), to update info. If any jobs have
- completed since the last _status_new() call, the flush operation
- aborts."""
-
- # Remove the finished jobs from the master dict
- alljobs = self.all
- for job in self.completed+self.dead:
- del(alljobs[job.num])
-
- # Now flush these lists completely
- fl_comp = self._group_flush(self.completed, 'Completed')
- fl_dead = self._group_flush(self.dead, 'Dead')
- if not (fl_comp or fl_dead):
- print('No jobs to flush.')
-
- def result(self,num):
- """result(N) -> return the result of job N."""
- try:
- return self.all[num].result
- except KeyError:
- error('Job #%s not found' % num)
-
- def _traceback(self, job):
- num = job if isinstance(job, int) else job.num
- try:
- self.all[num].traceback()
- except KeyError:
- error('Job #%s not found' % num)
-
- def traceback(self, job=None):
- if job is None:
- self._update_status()
- for deadjob in self.dead:
- print("Traceback for: %r" % deadjob)
- self._traceback(deadjob)
- print()
- else:
- self._traceback(job)
-
-
-class BackgroundJobBase(threading.Thread):
- """Base class to build BackgroundJob classes.
-
- The derived classes must implement:
-
- - Their own __init__, since the one here raises NotImplementedError. The
- derived constructor must call self._init() at the end, to provide common
- initialization.
-
- - A strform attribute used in calls to __str__.
-
- - A call() method, which will make the actual execution call and must
- return a value to be held in the 'result' field of the job object.
- """
-
- # Class constants for status, in string and as numerical codes (when
- # updating jobs lists, we don't want to do string comparisons). This will
- # be done at every user prompt, so it has to be as fast as possible
- stat_created = 'Created'; stat_created_c = 0
- stat_running = 'Running'; stat_running_c = 1
- stat_completed = 'Completed'; stat_completed_c = 2
- stat_dead = 'Dead (Exception), call jobs.traceback() for details'
- stat_dead_c = -1
-
- def __init__(self):
- """Must be implemented in subclasses.
-
- Subclasses must call :meth:`_init` for standard initialisation.
- """
- raise NotImplementedError("This class can not be instantiated directly.")
-
- def _init(self):
- """Common initialization for all BackgroundJob objects"""
-
- for attr in ['call','strform']:
- assert hasattr(self,attr), "Missing attribute <%s>" % attr
-
- # The num tag can be set by an external job manager
- self.num = None
-
- self.status = BackgroundJobBase.stat_created
- self.stat_code = BackgroundJobBase.stat_created_c
- self.finished = False
- self.result = '<BackgroundJob has not completed>'
-
- # reuse the ipython traceback handler if we can get to it, otherwise
- # make a new one
- try:
- make_tb = get_ipython().InteractiveTB.text
- except:
- make_tb = AutoFormattedTB(mode = 'Context',
- color_scheme='NoColor',
- tb_offset = 1).text
- # Note that the actual API for text() requires the three args to be
- # passed in, so we wrap it in a simple lambda.
- self._make_tb = lambda : make_tb(None, None, None)
-
- # Hold a formatted traceback if one is generated.
- self._tb = None
-
- threading.Thread.__init__(self)
-
- def __str__(self):
- return self.strform
-
- def __repr__(self):
- return '<BackgroundJob #%d: %s>' % (self.num, self.strform)
-
- def traceback(self):
- print(self._tb)
-
- def run(self):
- try:
- self.status = BackgroundJobBase.stat_running
- self.stat_code = BackgroundJobBase.stat_running_c
- self.result = self.call()
- except:
- self.status = BackgroundJobBase.stat_dead
- self.stat_code = BackgroundJobBase.stat_dead_c
- self.finished = None
- self.result = ('<BackgroundJob died, call jobs.traceback() for details>')
- self._tb = self._make_tb()
- else:
- self.status = BackgroundJobBase.stat_completed
- self.stat_code = BackgroundJobBase.stat_completed_c
- self.finished = True
-
-
-class BackgroundJobExpr(BackgroundJobBase):
- """Evaluate an expression as a background job (uses a separate thread)."""
-
- def __init__(self, expression, glob=None, loc=None):
- """Create a new job from a string which can be fed to eval().
-
- global/locals dicts can be provided, which will be passed to the eval
- call."""
-
- # fail immediately if the given expression can't be compiled
- self.code = compile(expression,'<BackgroundJob compilation>','eval')
-
- glob = {} if glob is None else glob
- loc = {} if loc is None else loc
- self.expression = self.strform = expression
- self.glob = glob
- self.loc = loc
- self._init()
-
- def call(self):
- return eval(self.code,self.glob,self.loc)
-
-
-class BackgroundJobFunc(BackgroundJobBase):
- """Run a function call as a background job (uses a separate thread)."""
-
- def __init__(self, func, *args, **kwargs):
- """Create a new job from a callable object.
-
- Any positional arguments and keyword args given to this constructor
- after the initial callable are passed directly to it."""
-
- if not callable(func):
- raise TypeError(
- 'first argument to BackgroundJobFunc must be callable')
-
- self.func = func
- self.args = args
- self.kwargs = kwargs
- # The string form will only include the function passed, because
- # generating string representations of the arguments is a potentially
- # _very_ expensive operation (e.g. with large arrays).
- self.strform = str(func)
- self._init()
-
- def call(self):
- return self.func(*self.args, **self.kwargs)
+from IPython.utils.py3compat import string_types
+
+
+class BackgroundJobManager(object):
+ """Class to manage a pool of backgrounded threaded jobs.
+
+ Below, we assume that 'jobs' is a BackgroundJobManager instance.
+
+ Usage summary (see the method docstrings for details):
+
+ jobs.new(...) -> start a new job
+
+ jobs() or jobs.status() -> print status summary of all jobs
+
+ jobs[N] -> returns job number N.
+
+ foo = jobs[N].result -> assign to variable foo the result of job N
+
+ jobs[N].traceback() -> print the traceback of dead job N
+
+ jobs.remove(N) -> remove (finished) job N
+
+ jobs.flush() -> remove all finished jobs
+
+ As a convenience feature, BackgroundJobManager instances provide the
+ utility result and traceback methods which retrieve the corresponding
+ information from the jobs list:
+
+ jobs.result(N) <--> jobs[N].result
+ jobs.traceback(N) <--> jobs[N].traceback()
+
+ While this appears minor, it allows you to use tab completion
+ interactively on the job manager instance.
+ """
+
+ def __init__(self):
+ # Lists for job management, accessed via a property to ensure they're
+        # up to date.
+ self._running = []
+ self._completed = []
+ self._dead = []
+ # A dict of all jobs, so users can easily access any of them
+ self.all = {}
+ # For reporting
+ self._comp_report = []
+ self._dead_report = []
+ # Store status codes locally for fast lookups
+ self._s_created = BackgroundJobBase.stat_created_c
+ self._s_running = BackgroundJobBase.stat_running_c
+ self._s_completed = BackgroundJobBase.stat_completed_c
+ self._s_dead = BackgroundJobBase.stat_dead_c
+
+ @property
+ def running(self):
+ self._update_status()
+ return self._running
+
+ @property
+ def dead(self):
+ self._update_status()
+ return self._dead
+
+ @property
+ def completed(self):
+ self._update_status()
+ return self._completed
+
+ def new(self, func_or_exp, *args, **kwargs):
+ """Add a new background job and start it in a separate thread.
+
+ There are two types of jobs which can be created:
+
+ 1. Jobs based on expressions which can be passed to an eval() call.
+ The expression must be given as a string. For example:
+
+ job_manager.new('myfunc(x,y,z=1)'[,glob[,loc]])
+
+ The given expression is passed to eval(), along with the optional
+ global/local dicts provided. If no dicts are given, they are
+ extracted automatically from the caller's frame.
+
+ A Python statement is NOT a valid eval() expression. Basically, you
+ can only use as an eval() argument something which can go on the right
+ of an '=' sign and be assigned to a variable.
+
+        For example, "print 'hello'" is not valid, but '2+3' is.
+
+ 2. Jobs given a function object, optionally passing additional
+ positional arguments:
+
+ job_manager.new(myfunc, x, y)
+
+ The function is called with the given arguments.
+
+ If you need to pass keyword arguments to your function, you must
+ supply them as a dict named kw:
+
+ job_manager.new(myfunc, x, y, kw=dict(z=1))
+
+        The reason for this asymmetry is that the new() method needs to
+ maintain access to its own keywords, and this prevents name collisions
+ between arguments to new() and arguments to your own functions.
+
+ In both cases, the result is stored in the job.result field of the
+ background job object.
+
+ You can set `daemon` attribute of the thread by giving the keyword
+ argument `daemon`.
+
+ Notes and caveats:
+
+ 1. All threads running share the same standard output. Thus, if your
+ background jobs generate output, it will come out on top of whatever
+ you are currently writing. For this reason, background jobs are best
+ used with silent functions which simply return their output.
+
+ 2. Threads also all work within the same global namespace, and this
+        system does not lock interactive variables. So if you send a job to the
+ background which operates on a mutable object for a long time, and
+ start modifying that same mutable object interactively (or in another
+ backgrounded job), all sorts of bizarre behaviour will occur.
+
+ 3. If a background job is spending a lot of time inside a C extension
+ module which does not release the Python Global Interpreter Lock
+ (GIL), this will block the IPython prompt. This is simply because the
+ Python interpreter can only switch between threads at Python
+ bytecodes. While the execution is inside C code, the interpreter must
+ simply wait unless the extension module releases the GIL.
+
+ 4. There is no way, due to limitations in the Python threads library,
+ to kill a thread once it has started."""
+
+ if callable(func_or_exp):
+ kw = kwargs.get('kw',{})
+ job = BackgroundJobFunc(func_or_exp,*args,**kw)
+ elif isinstance(func_or_exp, string_types):
+ if not args:
+ frame = sys._getframe(1)
+ glob, loc = frame.f_globals, frame.f_locals
+ elif len(args)==1:
+ glob = loc = args[0]
+ elif len(args)==2:
+ glob,loc = args
+ else:
+ raise ValueError(
+ 'Expression jobs take at most 2 args (globals,locals)')
+ job = BackgroundJobExpr(func_or_exp, glob, loc)
+ else:
+ raise TypeError('invalid args for new job')
+
+ if kwargs.get('daemon', False):
+ job.daemon = True
+ job.num = len(self.all)+1 if self.all else 0
+ self.running.append(job)
+ self.all[job.num] = job
+ print('Starting job # %s in a separate thread.' % job.num)
+ job.start()
+ return job
+
+ def __getitem__(self, job_key):
+ num = job_key if isinstance(job_key, int) else job_key.num
+ return self.all[num]
+
+ def __call__(self):
+ """An alias to self.status(),
+
+ This allows you to simply call a job manager instance much like the
+ Unix `jobs` shell command."""
+
+ return self.status()
+
+ def _update_status(self):
+ """Update the status of the job lists.
+
+ This method moves finished jobs to one of two lists:
+ - self.completed: jobs which completed successfully
+ - self.dead: jobs which finished but died.
+
+ It also copies those jobs to corresponding _report lists. These lists
+ are used to report jobs completed/dead since the last update, and are
+ then cleared by the reporting function after each call."""
+
+ # Status codes
+ srun, scomp, sdead = self._s_running, self._s_completed, self._s_dead
+ # State lists, use the actual lists b/c the public names are properties
+ # that call this very function on access
+ running, completed, dead = self._running, self._completed, self._dead
+
+ # Now, update all state lists
+ for num, job in enumerate(running):
+ stat = job.stat_code
+ if stat == srun:
+ continue
+ elif stat == scomp:
+ completed.append(job)
+ self._comp_report.append(job)
+ running[num] = False
+ elif stat == sdead:
+ dead.append(job)
+ self._dead_report.append(job)
+ running[num] = False
+ # Remove dead/completed jobs from running list
+ running[:] = filter(None, running)
+
+ def _group_report(self,group,name):
+ """Report summary for a given job group.
+
+ Return True if the group had any elements."""
+
+ if group:
+ print('%s jobs:' % name)
+ for job in group:
+ print('%s : %s' % (job.num,job))
+ print()
+ return True
+
+ def _group_flush(self,group,name):
+ """Flush a given job group
+
+ Return True if the group had any elements."""
+
+ njobs = len(group)
+ if njobs:
+ plural = {1:''}.setdefault(njobs,'s')
+ print('Flushing %s %s job%s.' % (njobs,name,plural))
+ group[:] = []
+ return True
+
+ def _status_new(self):
+ """Print the status of newly finished jobs.
+
+ Return True if any new jobs are reported.
+
+ This call resets its own state every time, so it only reports jobs
+ which have finished since the last time it was called."""
+
+ self._update_status()
+ new_comp = self._group_report(self._comp_report, 'Completed')
+ new_dead = self._group_report(self._dead_report,
+ 'Dead, call jobs.traceback() for details')
+ self._comp_report[:] = []
+ self._dead_report[:] = []
+ return new_comp or new_dead
+
+ def status(self,verbose=0):
+ """Print a status of all jobs currently being managed."""
+
+ self._update_status()
+ self._group_report(self.running,'Running')
+ self._group_report(self.completed,'Completed')
+ self._group_report(self.dead,'Dead')
+ # Also flush the report queues
+ self._comp_report[:] = []
+ self._dead_report[:] = []
+
+ def remove(self,num):
+ """Remove a finished (completed or dead) job."""
+
+ try:
+ job = self.all[num]
+ except KeyError:
+ error('Job #%s not found' % num)
+ else:
+ stat_code = job.stat_code
+ if stat_code == self._s_running:
+ error('Job #%s is still running, it can not be removed.' % num)
+ return
+ elif stat_code == self._s_completed:
+ self.completed.remove(job)
+ elif stat_code == self._s_dead:
+ self.dead.remove(job)
+
+ def flush(self):
+ """Flush all finished jobs (completed and dead) from lists.
+
+ Running jobs are never flushed.
+
+ It first calls _status_new(), to update info. If any jobs have
+ completed since the last _status_new() call, the flush operation
+ aborts."""
+
+ # Remove the finished jobs from the master dict
+ alljobs = self.all
+ for job in self.completed+self.dead:
+ del(alljobs[job.num])
+
+ # Now flush these lists completely
+ fl_comp = self._group_flush(self.completed, 'Completed')
+ fl_dead = self._group_flush(self.dead, 'Dead')
+ if not (fl_comp or fl_dead):
+ print('No jobs to flush.')
+
+ def result(self,num):
+ """result(N) -> return the result of job N."""
+ try:
+ return self.all[num].result
+ except KeyError:
+ error('Job #%s not found' % num)
+
+ def _traceback(self, job):
+ num = job if isinstance(job, int) else job.num
+ try:
+ self.all[num].traceback()
+ except KeyError:
+ error('Job #%s not found' % num)
+
+ def traceback(self, job=None):
+ if job is None:
+ self._update_status()
+ for deadjob in self.dead:
+ print("Traceback for: %r" % deadjob)
+ self._traceback(deadjob)
+ print()
+ else:
+ self._traceback(job)
+
+
+class BackgroundJobBase(threading.Thread):
+ """Base class to build BackgroundJob classes.
+
+ The derived classes must implement:
+
+ - Their own __init__, since the one here raises NotImplementedError. The
+ derived constructor must call self._init() at the end, to provide common
+ initialization.
+
+ - A strform attribute used in calls to __str__.
+
+ - A call() method, which will make the actual execution call and must
+ return a value to be held in the 'result' field of the job object.
+ """
+
+ # Class constants for status, in string and as numerical codes (when
+ # updating jobs lists, we don't want to do string comparisons). This will
+ # be done at every user prompt, so it has to be as fast as possible
+ stat_created = 'Created'; stat_created_c = 0
+ stat_running = 'Running'; stat_running_c = 1
+ stat_completed = 'Completed'; stat_completed_c = 2
+ stat_dead = 'Dead (Exception), call jobs.traceback() for details'
+ stat_dead_c = -1
+
+ def __init__(self):
+ """Must be implemented in subclasses.
+
+ Subclasses must call :meth:`_init` for standard initialisation.
+ """
+ raise NotImplementedError("This class can not be instantiated directly.")
+
+ def _init(self):
+ """Common initialization for all BackgroundJob objects"""
+
+ for attr in ['call','strform']:
+ assert hasattr(self,attr), "Missing attribute <%s>" % attr
+
+ # The num tag can be set by an external job manager
+ self.num = None
+
+ self.status = BackgroundJobBase.stat_created
+ self.stat_code = BackgroundJobBase.stat_created_c
+ self.finished = False
+ self.result = '<BackgroundJob has not completed>'
+
+ # reuse the ipython traceback handler if we can get to it, otherwise
+ # make a new one
+ try:
+ make_tb = get_ipython().InteractiveTB.text
+ except:
+ make_tb = AutoFormattedTB(mode = 'Context',
+ color_scheme='NoColor',
+ tb_offset = 1).text
+ # Note that the actual API for text() requires the three args to be
+ # passed in, so we wrap it in a simple lambda.
+ self._make_tb = lambda : make_tb(None, None, None)
+
+ # Hold a formatted traceback if one is generated.
+ self._tb = None
+
+ threading.Thread.__init__(self)
+
+ def __str__(self):
+ return self.strform
+
+ def __repr__(self):
+ return '<BackgroundJob #%d: %s>' % (self.num, self.strform)
+
+ def traceback(self):
+ print(self._tb)
+
+ def run(self):
+ try:
+ self.status = BackgroundJobBase.stat_running
+ self.stat_code = BackgroundJobBase.stat_running_c
+ self.result = self.call()
+ except:
+ self.status = BackgroundJobBase.stat_dead
+ self.stat_code = BackgroundJobBase.stat_dead_c
+ self.finished = None
+ self.result = ('<BackgroundJob died, call jobs.traceback() for details>')
+ self._tb = self._make_tb()
+ else:
+ self.status = BackgroundJobBase.stat_completed
+ self.stat_code = BackgroundJobBase.stat_completed_c
+ self.finished = True
+
+
+class BackgroundJobExpr(BackgroundJobBase):
+ """Evaluate an expression as a background job (uses a separate thread)."""
+
+ def __init__(self, expression, glob=None, loc=None):
+ """Create a new job from a string which can be fed to eval().
+
+ global/locals dicts can be provided, which will be passed to the eval
+ call."""
+
+ # fail immediately if the given expression can't be compiled
+ self.code = compile(expression,'<BackgroundJob compilation>','eval')
+
+ glob = {} if glob is None else glob
+ loc = {} if loc is None else loc
+ self.expression = self.strform = expression
+ self.glob = glob
+ self.loc = loc
+ self._init()
+
+ def call(self):
+ return eval(self.code,self.glob,self.loc)
+
+
+class BackgroundJobFunc(BackgroundJobBase):
+ """Run a function call as a background job (uses a separate thread)."""
+
+ def __init__(self, func, *args, **kwargs):
+ """Create a new job from a callable object.
+
+ Any positional arguments and keyword args given to this constructor
+ after the initial callable are passed directly to it."""
+
+ if not callable(func):
+ raise TypeError(
+ 'first argument to BackgroundJobFunc must be callable')
+
+ self.func = func
+ self.args = args
+ self.kwargs = kwargs
+ # The string form will only include the function passed, because
+ # generating string representations of the arguments is a potentially
+ # _very_ expensive operation (e.g. with large arrays).
+ self.strform = str(func)
+ self._init()
+
+ def call(self):
+ return self.func(*self.args, **self.kwargs)
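To make the two job flavours described in BackgroundJobManager.new() concrete, here is a small sketch; slow_add is a stand-in function invented for the example, and the session is assumed to run where IPython.lib.backgroundjobs is importable::

    import time
    from IPython.lib.backgroundjobs import BackgroundJobManager

    jobs = BackgroundJobManager()

    # 1. Expression job: the string is handed to eval() in the caller's frame.
    jobs.new('sum(range(10))')

    # 2. Function job: positional args follow the callable, keywords go in kw=.
    def slow_add(a, b, delay=1):
        time.sleep(delay)
        return a + b

    j = jobs.new(slow_add, 2, 3, kw=dict(delay=2))

    jobs.status()      # prints Running/Completed/Dead summaries
    j.join()           # jobs are threading.Thread subclasses, so join() works
    print(j.result)    # -> 5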
diff --git a/contrib/python/ipython/py2/IPython/lib/clipboard.py b/contrib/python/ipython/py2/IPython/lib/clipboard.py
index 713313da33..ac9b685c7d 100644
--- a/contrib/python/ipython/py2/IPython/lib/clipboard.py
+++ b/contrib/python/ipython/py2/IPython/lib/clipboard.py
@@ -1,72 +1,72 @@
-""" Utilities for accessing the platform's clipboard.
-"""
-
-import subprocess
-
-from IPython.core.error import TryNext
-import IPython.utils.py3compat as py3compat
-
-class ClipboardEmpty(ValueError):
- pass
-
-def win32_clipboard_get():
- """ Get the current clipboard's text on Windows.
-
- Requires Mark Hammond's pywin32 extensions.
- """
- try:
- import win32clipboard
- except ImportError:
- raise TryNext("Getting text from the clipboard requires the pywin32 "
- "extensions: http://sourceforge.net/projects/pywin32/")
- win32clipboard.OpenClipboard()
- try:
- text = win32clipboard.GetClipboardData(win32clipboard.CF_UNICODETEXT)
- except (TypeError, win32clipboard.error):
- try:
- text = win32clipboard.GetClipboardData(win32clipboard.CF_TEXT)
- text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING)
- except (TypeError, win32clipboard.error):
- raise ClipboardEmpty
- finally:
- win32clipboard.CloseClipboard()
- return text
-
-def osx_clipboard_get():
- """ Get the clipboard's text on OS X.
- """
- p = subprocess.Popen(['pbpaste', '-Prefer', 'ascii'],
- stdout=subprocess.PIPE)
- text, stderr = p.communicate()
- # Text comes in with old Mac \r line endings. Change them to \n.
- text = text.replace(b'\r', b'\n')
- text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING)
- return text
-
-def tkinter_clipboard_get():
- """ Get the clipboard's text using Tkinter.
-
- This is the default on systems that are not Windows or OS X. It may
- interfere with other UI toolkits and should be replaced with an
- implementation that uses that toolkit.
- """
- try:
- from tkinter import Tk, TclError # Py 3
- except ImportError:
- try:
- from Tkinter import Tk, TclError # Py 2
- except ImportError:
- raise TryNext("Getting text from the clipboard on this platform "
- "requires Tkinter.")
- root = Tk()
- root.withdraw()
- try:
- text = root.clipboard_get()
- except TclError:
- raise ClipboardEmpty
- finally:
- root.destroy()
- text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING)
- return text
-
-
+""" Utilities for accessing the platform's clipboard.
+"""
+
+import subprocess
+
+from IPython.core.error import TryNext
+import IPython.utils.py3compat as py3compat
+
+class ClipboardEmpty(ValueError):
+ pass
+
+def win32_clipboard_get():
+ """ Get the current clipboard's text on Windows.
+
+ Requires Mark Hammond's pywin32 extensions.
+ """
+ try:
+ import win32clipboard
+ except ImportError:
+ raise TryNext("Getting text from the clipboard requires the pywin32 "
+ "extensions: http://sourceforge.net/projects/pywin32/")
+ win32clipboard.OpenClipboard()
+ try:
+ text = win32clipboard.GetClipboardData(win32clipboard.CF_UNICODETEXT)
+ except (TypeError, win32clipboard.error):
+ try:
+ text = win32clipboard.GetClipboardData(win32clipboard.CF_TEXT)
+ text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING)
+ except (TypeError, win32clipboard.error):
+ raise ClipboardEmpty
+ finally:
+ win32clipboard.CloseClipboard()
+ return text
+
+def osx_clipboard_get():
+ """ Get the clipboard's text on OS X.
+ """
+ p = subprocess.Popen(['pbpaste', '-Prefer', 'ascii'],
+ stdout=subprocess.PIPE)
+ text, stderr = p.communicate()
+ # Text comes in with old Mac \r line endings. Change them to \n.
+ text = text.replace(b'\r', b'\n')
+ text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING)
+ return text
+
+def tkinter_clipboard_get():
+ """ Get the clipboard's text using Tkinter.
+
+ This is the default on systems that are not Windows or OS X. It may
+ interfere with other UI toolkits and should be replaced with an
+ implementation that uses that toolkit.
+ """
+ try:
+ from tkinter import Tk, TclError # Py 3
+ except ImportError:
+ try:
+ from Tkinter import Tk, TclError # Py 2
+ except ImportError:
+ raise TryNext("Getting text from the clipboard on this platform "
+ "requires Tkinter.")
+ root = Tk()
+ root.withdraw()
+ try:
+ text = root.clipboard_get()
+ except TclError:
+ raise ClipboardEmpty
+ finally:
+ root.destroy()
+ text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING)
+ return text
+
+
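IPython selects one of these three getters through its hook machinery; the choice can be approximated by hand as in the sketch below (the real dispatch lives in IPython's terminal hooks, so this is illustrative only)::

    import sys
    from IPython.lib.clipboard import (win32_clipboard_get, osx_clipboard_get,
                                       tkinter_clipboard_get, ClipboardEmpty)

    def clipboard_get():
        # Mirror the platform split implied by the module's three backends.
        if sys.platform == 'win32':
            getter = win32_clipboard_get
        elif sys.platform == 'darwin':
            getter = osx_clipboard_get
        else:
            getter = tkinter_clipboard_get
        try:
            return getter()
        except ClipboardEmpty:
            return u''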
diff --git a/contrib/python/ipython/py2/IPython/lib/deepreload.py b/contrib/python/ipython/py2/IPython/lib/deepreload.py
index 72f2b5752f..76b493c0bb 100644
--- a/contrib/python/ipython/py2/IPython/lib/deepreload.py
+++ b/contrib/python/ipython/py2/IPython/lib/deepreload.py
@@ -1,362 +1,362 @@
-# -*- coding: utf-8 -*-
-"""
-Provides a reload() function that acts recursively.
-
-Python's normal :func:`python:reload` function only reloads the module that it's
-passed. The :func:`reload` function in this module also reloads everything
-imported from that module, which is useful when you're changing files deep
-inside a package.
-
-To use this as your default reload function, type this for Python 2::
-
- import __builtin__
- from IPython.lib import deepreload
- __builtin__.reload = deepreload.reload
-
-Or this for Python 3::
-
- import builtins
- from IPython.lib import deepreload
- builtins.reload = deepreload.reload
-
-A reference to the original :func:`python:reload` is stored in this module as
-:data:`original_reload`, so you can restore it later.
-
-This code is almost entirely based on knee.py, which is a Python
-re-implementation of hierarchical module import.
-"""
-from __future__ import print_function
-#*****************************************************************************
-# Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu>
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#*****************************************************************************
-
-from contextlib import contextmanager
-import imp
-import sys
-
-from types import ModuleType
-from warnings import warn
-
-from IPython.utils.py3compat import builtin_mod, builtin_mod_name
-
-original_import = builtin_mod.__import__
-
-@contextmanager
-def replace_import_hook(new_import):
- saved_import = builtin_mod.__import__
- builtin_mod.__import__ = new_import
- try:
- yield
- finally:
- builtin_mod.__import__ = saved_import
-
-def get_parent(globals, level):
- """
- parent, name = get_parent(globals, level)
-
- Return the package that an import is being performed in. If globals comes
- from the module foo.bar.bat (not itself a package), this returns the
- sys.modules entry for foo.bar. If globals is from a package's __init__.py,
- the package's entry in sys.modules is returned.
-
- If globals doesn't come from a package or a module in a package, or a
- corresponding entry is not found in sys.modules, None is returned.
- """
- orig_level = level
-
- if not level or not isinstance(globals, dict):
- return None, ''
-
- pkgname = globals.get('__package__', None)
-
- if pkgname is not None:
- # __package__ is set, so use it
- if not hasattr(pkgname, 'rindex'):
- raise ValueError('__package__ set to non-string')
- if len(pkgname) == 0:
- if level > 0:
- raise ValueError('Attempted relative import in non-package')
- return None, ''
- name = pkgname
- else:
- # __package__ not set, so figure it out and set it
- if '__name__' not in globals:
- return None, ''
- modname = globals['__name__']
-
- if '__path__' in globals:
- # __path__ is set, so modname is already the package name
- globals['__package__'] = name = modname
- else:
- # Normal module, so work out the package name if any
- lastdot = modname.rfind('.')
- if lastdot < 0 < level:
- raise ValueError("Attempted relative import in non-package")
- if lastdot < 0:
- globals['__package__'] = None
- return None, ''
- globals['__package__'] = name = modname[:lastdot]
-
- dot = len(name)
- for x in range(level, 1, -1):
- try:
- dot = name.rindex('.', 0, dot)
- except ValueError:
- raise ValueError("attempted relative import beyond top-level "
- "package")
- name = name[:dot]
-
- try:
- parent = sys.modules[name]
- except:
- if orig_level < 1:
- warn("Parent module '%.200s' not found while handling absolute "
- "import" % name)
- parent = None
- else:
- raise SystemError("Parent module '%.200s' not loaded, cannot "
- "perform relative import" % name)
-
- # We expect, but can't guarantee, if parent != None, that:
- # - parent.__name__ == name
- # - parent.__dict__ is globals
- # If this is violated... Who cares?
- return parent, name
-
-def load_next(mod, altmod, name, buf):
- """
- mod, name, buf = load_next(mod, altmod, name, buf)
-
- altmod is either None or same as mod
- """
-
- if len(name) == 0:
- # completely empty module name should only happen in
- # 'from . import' (or '__import__("")')
- return mod, None, buf
-
- dot = name.find('.')
- if dot == 0:
- raise ValueError('Empty module name')
-
- if dot < 0:
- subname = name
- next = None
- else:
- subname = name[:dot]
- next = name[dot+1:]
-
- if buf != '':
- buf += '.'
- buf += subname
-
- result = import_submodule(mod, subname, buf)
- if result is None and mod != altmod:
- result = import_submodule(altmod, subname, subname)
- if result is not None:
- buf = subname
-
- if result is None:
- raise ImportError("No module named %.200s" % name)
-
- return result, next, buf
-
-# Need to keep track of what we've already reloaded to prevent cyclic evil
-found_now = {}
-
-def import_submodule(mod, subname, fullname):
- """m = import_submodule(mod, subname, fullname)"""
- # Require:
- # if mod == None: subname == fullname
- # else: mod.__name__ + "." + subname == fullname
-
- global found_now
- if fullname in found_now and fullname in sys.modules:
- m = sys.modules[fullname]
- else:
- print('Reloading', fullname)
- found_now[fullname] = 1
- oldm = sys.modules.get(fullname, None)
-
- if mod is None:
- path = None
- elif hasattr(mod, '__path__'):
- path = mod.__path__
- else:
- return None
-
- try:
- # This appears to be necessary on Python 3, because imp.find_module()
- # tries to import standard libraries (like io) itself, and we don't
- # want them to be processed by our deep_import_hook.
- with replace_import_hook(original_import):
- fp, filename, stuff = imp.find_module(subname, path)
- except ImportError:
- return None
-
- try:
- m = imp.load_module(fullname, fp, filename, stuff)
- except:
- # load_module probably removed name from modules because of
- # the error. Put back the original module object.
- if oldm:
- sys.modules[fullname] = oldm
- raise
- finally:
- if fp: fp.close()
-
- add_submodule(mod, m, fullname, subname)
-
- return m
-
-def add_submodule(mod, submod, fullname, subname):
- """mod.{subname} = submod"""
- if mod is None:
- return #Nothing to do here.
-
- if submod is None:
- submod = sys.modules[fullname]
-
- setattr(mod, subname, submod)
-
- return
-
-def ensure_fromlist(mod, fromlist, buf, recursive):
- """Handle 'from module import a, b, c' imports."""
- if not hasattr(mod, '__path__'):
- return
- for item in fromlist:
- if not hasattr(item, 'rindex'):
- raise TypeError("Item in ``from list'' not a string")
- if item == '*':
- if recursive:
- continue # avoid endless recursion
- try:
- all = mod.__all__
- except AttributeError:
- pass
- else:
- ret = ensure_fromlist(mod, all, buf, 1)
- if not ret:
- return 0
- elif not hasattr(mod, item):
- import_submodule(mod, item, buf + '.' + item)
-
-def deep_import_hook(name, globals=None, locals=None, fromlist=None, level=-1):
- """Replacement for __import__()"""
- parent, buf = get_parent(globals, level)
-
- head, name, buf = load_next(parent, None if level < 0 else parent, name, buf)
-
- tail = head
- while name:
- tail, name, buf = load_next(tail, tail, name, buf)
-
- # If tail is None, both get_parent and load_next found
- # an empty module name: someone called __import__("") or
- # doctored faulty bytecode
- if tail is None:
- raise ValueError('Empty module name')
-
- if not fromlist:
- return head
-
- ensure_fromlist(tail, fromlist, buf, 0)
- return tail
-
-modules_reloading = {}
-
-def deep_reload_hook(m):
- """Replacement for reload()."""
- if not isinstance(m, ModuleType):
- raise TypeError("reload() argument must be module")
-
- name = m.__name__
-
- if name not in sys.modules:
- raise ImportError("reload(): module %.200s not in sys.modules" % name)
-
- global modules_reloading
- try:
- return modules_reloading[name]
- except:
- modules_reloading[name] = m
-
- dot = name.rfind('.')
- if dot < 0:
- subname = name
- path = None
- else:
- try:
- parent = sys.modules[name[:dot]]
- except KeyError:
- modules_reloading.clear()
- raise ImportError("reload(): parent %.200s not in sys.modules" % name[:dot])
- subname = name[dot+1:]
- path = getattr(parent, "__path__", None)
-
- try:
- # This appears to be necessary on Python 3, because imp.find_module()
- # tries to import standard libraries (like io) itself, and we don't
- # want them to be processed by our deep_import_hook.
- with replace_import_hook(original_import):
- fp, filename, stuff = imp.find_module(subname, path)
- finally:
- modules_reloading.clear()
-
- try:
- newm = imp.load_module(name, fp, filename, stuff)
- except:
- # load_module probably removed name from modules because of
- # the error. Put back the original module object.
- sys.modules[name] = m
- raise
- finally:
- if fp: fp.close()
-
- modules_reloading.clear()
- return newm
-
-# Save the original hooks
-try:
- original_reload = builtin_mod.reload
-except AttributeError:
- original_reload = imp.reload # Python 3
-
-# Replacement for reload()
+# -*- coding: utf-8 -*-
+"""
+Provides a reload() function that acts recursively.
+
+Python's normal :func:`python:reload` function only reloads the module that it's
+passed. The :func:`reload` function in this module also reloads everything
+imported from that module, which is useful when you're changing files deep
+inside a package.
+
+To use this as your default reload function, type this for Python 2::
+
+ import __builtin__
+ from IPython.lib import deepreload
+ __builtin__.reload = deepreload.reload
+
+Or this for Python 3::
+
+ import builtins
+ from IPython.lib import deepreload
+ builtins.reload = deepreload.reload
+
+A reference to the original :func:`python:reload` is stored in this module as
+:data:`original_reload`, so you can restore it later.
+
+This code is almost entirely based on knee.py, which is a Python
+re-implementation of hierarchical module import.
+"""
+from __future__ import print_function
+#*****************************************************************************
+# Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu>
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#*****************************************************************************
+
+from contextlib import contextmanager
+import imp
+import sys
+
+from types import ModuleType
+from warnings import warn
+
+from IPython.utils.py3compat import builtin_mod, builtin_mod_name
+
+original_import = builtin_mod.__import__
+
+@contextmanager
+def replace_import_hook(new_import):
+ saved_import = builtin_mod.__import__
+ builtin_mod.__import__ = new_import
+ try:
+ yield
+ finally:
+ builtin_mod.__import__ = saved_import
+
+def get_parent(globals, level):
+ """
+ parent, name = get_parent(globals, level)
+
+ Return the package that an import is being performed in. If globals comes
+ from the module foo.bar.bat (not itself a package), this returns the
+ sys.modules entry for foo.bar. If globals is from a package's __init__.py,
+ the package's entry in sys.modules is returned.
+
+ If globals doesn't come from a package or a module in a package, or a
+ corresponding entry is not found in sys.modules, None is returned.
+ """
+ orig_level = level
+
+ if not level or not isinstance(globals, dict):
+ return None, ''
+
+ pkgname = globals.get('__package__', None)
+
+ if pkgname is not None:
+ # __package__ is set, so use it
+ if not hasattr(pkgname, 'rindex'):
+ raise ValueError('__package__ set to non-string')
+ if len(pkgname) == 0:
+ if level > 0:
+ raise ValueError('Attempted relative import in non-package')
+ return None, ''
+ name = pkgname
+ else:
+ # __package__ not set, so figure it out and set it
+ if '__name__' not in globals:
+ return None, ''
+ modname = globals['__name__']
+
+ if '__path__' in globals:
+ # __path__ is set, so modname is already the package name
+ globals['__package__'] = name = modname
+ else:
+ # Normal module, so work out the package name if any
+ lastdot = modname.rfind('.')
+ if lastdot < 0 < level:
+ raise ValueError("Attempted relative import in non-package")
+ if lastdot < 0:
+ globals['__package__'] = None
+ return None, ''
+ globals['__package__'] = name = modname[:lastdot]
+
+ dot = len(name)
+ for x in range(level, 1, -1):
+ try:
+ dot = name.rindex('.', 0, dot)
+ except ValueError:
+ raise ValueError("attempted relative import beyond top-level "
+ "package")
+ name = name[:dot]
+
+ try:
+ parent = sys.modules[name]
+ except:
+ if orig_level < 1:
+ warn("Parent module '%.200s' not found while handling absolute "
+ "import" % name)
+ parent = None
+ else:
+ raise SystemError("Parent module '%.200s' not loaded, cannot "
+ "perform relative import" % name)
+
+ # We expect, but can't guarantee, if parent != None, that:
+ # - parent.__name__ == name
+ # - parent.__dict__ is globals
+ # If this is violated... Who cares?
+ return parent, name
+
+def load_next(mod, altmod, name, buf):
+ """
+ mod, name, buf = load_next(mod, altmod, name, buf)
+
+ altmod is either None or same as mod
+ """
+
+ if len(name) == 0:
+ # completely empty module name should only happen in
+ # 'from . import' (or '__import__("")')
+ return mod, None, buf
+
+ dot = name.find('.')
+ if dot == 0:
+ raise ValueError('Empty module name')
+
+ if dot < 0:
+ subname = name
+ next = None
+ else:
+ subname = name[:dot]
+ next = name[dot+1:]
+
+ if buf != '':
+ buf += '.'
+ buf += subname
+
+ result = import_submodule(mod, subname, buf)
+ if result is None and mod != altmod:
+ result = import_submodule(altmod, subname, subname)
+ if result is not None:
+ buf = subname
+
+ if result is None:
+ raise ImportError("No module named %.200s" % name)
+
+ return result, next, buf
+
+# Need to keep track of what we've already reloaded to prevent cyclic evil
+found_now = {}
+
+def import_submodule(mod, subname, fullname):
+ """m = import_submodule(mod, subname, fullname)"""
+ # Require:
+ # if mod == None: subname == fullname
+ # else: mod.__name__ + "." + subname == fullname
+
+ global found_now
+ if fullname in found_now and fullname in sys.modules:
+ m = sys.modules[fullname]
+ else:
+ print('Reloading', fullname)
+ found_now[fullname] = 1
+ oldm = sys.modules.get(fullname, None)
+
+ if mod is None:
+ path = None
+ elif hasattr(mod, '__path__'):
+ path = mod.__path__
+ else:
+ return None
+
+ try:
+ # This appears to be necessary on Python 3, because imp.find_module()
+ # tries to import standard libraries (like io) itself, and we don't
+ # want them to be processed by our deep_import_hook.
+ with replace_import_hook(original_import):
+ fp, filename, stuff = imp.find_module(subname, path)
+ except ImportError:
+ return None
+
+ try:
+ m = imp.load_module(fullname, fp, filename, stuff)
+ except:
+ # load_module probably removed name from modules because of
+ # the error. Put back the original module object.
+ if oldm:
+ sys.modules[fullname] = oldm
+ raise
+ finally:
+ if fp: fp.close()
+
+ add_submodule(mod, m, fullname, subname)
+
+ return m
+
+def add_submodule(mod, submod, fullname, subname):
+ """mod.{subname} = submod"""
+ if mod is None:
+ return #Nothing to do here.
+
+ if submod is None:
+ submod = sys.modules[fullname]
+
+ setattr(mod, subname, submod)
+
+ return
+
+def ensure_fromlist(mod, fromlist, buf, recursive):
+ """Handle 'from module import a, b, c' imports."""
+ if not hasattr(mod, '__path__'):
+ return
+ for item in fromlist:
+ if not hasattr(item, 'rindex'):
+ raise TypeError("Item in ``from list'' not a string")
+ if item == '*':
+ if recursive:
+ continue # avoid endless recursion
+ try:
+ all = mod.__all__
+ except AttributeError:
+ pass
+ else:
+ ret = ensure_fromlist(mod, all, buf, 1)
+ if not ret:
+ return 0
+ elif not hasattr(mod, item):
+ import_submodule(mod, item, buf + '.' + item)
+
+def deep_import_hook(name, globals=None, locals=None, fromlist=None, level=-1):
+ """Replacement for __import__()"""
+ parent, buf = get_parent(globals, level)
+
+ head, name, buf = load_next(parent, None if level < 0 else parent, name, buf)
+
+ tail = head
+ while name:
+ tail, name, buf = load_next(tail, tail, name, buf)
+
+ # If tail is None, both get_parent and load_next found
+ # an empty module name: someone called __import__("") or
+ # doctored faulty bytecode
+ if tail is None:
+ raise ValueError('Empty module name')
+
+ if not fromlist:
+ return head
+
+ ensure_fromlist(tail, fromlist, buf, 0)
+ return tail
+
+modules_reloading = {}
+
+def deep_reload_hook(m):
+ """Replacement for reload()."""
+ if not isinstance(m, ModuleType):
+ raise TypeError("reload() argument must be module")
+
+ name = m.__name__
+
+ if name not in sys.modules:
+ raise ImportError("reload(): module %.200s not in sys.modules" % name)
+
+ global modules_reloading
+ try:
+ return modules_reloading[name]
+ except:
+ modules_reloading[name] = m
+
+ dot = name.rfind('.')
+ if dot < 0:
+ subname = name
+ path = None
+ else:
+ try:
+ parent = sys.modules[name[:dot]]
+ except KeyError:
+ modules_reloading.clear()
+ raise ImportError("reload(): parent %.200s not in sys.modules" % name[:dot])
+ subname = name[dot+1:]
+ path = getattr(parent, "__path__", None)
+
+ try:
+ # This appears to be necessary on Python 3, because imp.find_module()
+ # tries to import standard libraries (like io) itself, and we don't
+ # want them to be processed by our deep_import_hook.
+ with replace_import_hook(original_import):
+ fp, filename, stuff = imp.find_module(subname, path)
+ finally:
+ modules_reloading.clear()
+
+ try:
+ newm = imp.load_module(name, fp, filename, stuff)
+ except:
+ # load_module probably removed name from modules because of
+ # the error. Put back the original module object.
+ sys.modules[name] = m
+ raise
+ finally:
+ if fp: fp.close()
+
+ modules_reloading.clear()
+ return newm
+
+# Save the original hooks
+try:
+ original_reload = builtin_mod.reload
+except AttributeError:
+ original_reload = imp.reload # Python 3
+
+# Replacement for reload()
def reload(module, exclude=('sys', 'os.path', builtin_mod_name, '__main__',
'numpy', 'numpy._globals')):
- """Recursively reload all modules used in the given module. Optionally
- takes a list of modules to exclude from reloading. The default exclude
- list contains sys, __main__, and __builtin__, to prevent, e.g., resetting
- display, exception, and io hooks.
- """
- global found_now
- for i in exclude:
- found_now[i] = 1
- try:
- with replace_import_hook(deep_import_hook):
- return deep_reload_hook(module)
- finally:
- found_now = {}
-
-
-def _dreload(module, **kwargs):
- """
- **deprecated**
-
- import reload explicitly from `IPython.lib.deepreload` to use it
-
- """
+ """Recursively reload all modules used in the given module. Optionally
+ takes a list of modules to exclude from reloading. The default exclude
+ list contains sys, __main__, and __builtin__, to prevent, e.g., resetting
+ display, exception, and io hooks.
+ """
+ global found_now
+ for i in exclude:
+ found_now[i] = 1
+ try:
+ with replace_import_hook(deep_import_hook):
+ return deep_reload_hook(module)
+ finally:
+ found_now = {}
+
+
+def _dreload(module, **kwargs):
+ """
+ **deprecated**
+
+ import reload explicitly from `IPython.lib.deepreload` to use it
+
+ """
# this was marked as deprecated and for 5.0 removal, but
# IPython.core_builtin_trap have a Deprecation warning for 6.0, so cannot
# remove that now.
- warn("""
+ warn("""
injecting `dreload` in interactive namespace is deprecated since IPython 4.0.
-Please import `reload` explicitly from `IPython.lib.deepreload`.
-""", DeprecationWarning, stacklevel=2)
- reload(module, **kwargs)
-
+Please import `reload` explicitly from `IPython.lib.deepreload`.
+""", DeprecationWarning, stacklevel=2)
+ reload(module, **kwargs)
+
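The reload() defined above is meant to be imported explicitly from IPython.lib.deepreload. A minimal usage sketch, assuming a hypothetical importable package named mypkg:

    from IPython.lib.deepreload import reload as dreload
    import mypkg  # hypothetical package under development

    # recursively reload mypkg and everything it imports, minus the default
    # exclude list (sys, os.path, the builtins module, __main__, numpy, ...)
    dreload(mypkg)

    # passing exclude replaces the default tuple entirely
    dreload(mypkg, exclude=('sys', 'os.path', '__builtin__', '__main__', 'logging'))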
diff --git a/contrib/python/ipython/py2/IPython/lib/demo.py b/contrib/python/ipython/py2/IPython/lib/demo.py
index d630db99f9..b0f3503ed7 100644
--- a/contrib/python/ipython/py2/IPython/lib/demo.py
+++ b/contrib/python/ipython/py2/IPython/lib/demo.py
@@ -1,583 +1,583 @@
-"""Module for interactive demos using IPython.
-
-This module implements a few classes for running Python scripts interactively
-in IPython for demonstrations. With very simple markup (a few tags in
-comments), you can control points where the script stops executing and returns
-control to IPython.
-
-
-Provided classes
-----------------
-
-The classes are (see their docstrings for further details):
-
- - Demo: pure python demos
-
- - IPythonDemo: demos with input to be processed by IPython as if it had been
- typed interactively (so magics work, as well as any other special syntax you
- may have added via input prefilters).
-
- - LineDemo: single-line version of the Demo class. These demos are executed
- one line at a time, and require no markup.
-
- - IPythonLineDemo: IPython version of the LineDemo class (the demo is
- executed a line at a time, but processed via IPython).
-
- - ClearMixin: mixin to make Demo classes with less visual clutter. It
- declares an empty marquee and a pre_cmd that clears the screen before each
- block (see Subclassing below).
-
- - ClearDemo, ClearIPDemo: mixin-enabled versions of the Demo and IPythonDemo
- classes.
-
-Inheritance diagram:
-
-.. inheritance-diagram:: IPython.lib.demo
- :parts: 3
-
-Subclassing
------------
-
-The classes here all include a few methods meant to make customization by
-subclassing more convenient. Their docstrings below have some more details:
-
- - marquee(): generates a marquee to provide visible on-screen markers at each
- block start and end.
-
- - pre_cmd(): run right before the execution of each block.
-
- - post_cmd(): run right after the execution of each block. If the block
- raises an exception, this is NOT called.
-
-
-Operation
----------
-
-The file is run in its own empty namespace (though you can pass it a string of
-arguments as if in a command line environment, and it will see those as
-sys.argv). But at each stop, the global IPython namespace is updated with the
-current internal demo namespace, so you can work interactively with the data
-accumulated so far.
-
-By default, each block of code is printed (with syntax highlighting) before
-executing it and you have to confirm execution. This is intended to show the
-code to an audience first so you can discuss it, and only proceed with
-execution once you agree. There are a few tags which allow you to modify this
-behavior.
-
-The supported tags are:
-
-# <demo> stop
-
- Defines block boundaries, the points where IPython stops execution of the
- file and returns to the interactive prompt.
-
- You can optionally mark the stop tag with extra dashes before and after the
- word 'stop', to help visually distinguish the blocks in a text editor:
-
- # <demo> --- stop ---
-
-
-# <demo> silent
-
- Make a block execute silently (and hence automatically). Typically used in
- cases where you have some boilerplate or initialization code which you need
- executed but do not want to be seen in the demo.
-
-# <demo> auto
-
- Make a block execute automatically, but still being printed. Useful for
- simple code which does not warrant discussion, since it avoids the extra
- manual confirmation.
-
-# <demo> auto_all
-
- This tag can _only_ be in the first block, and if given it overrides the
- individual auto tags to make the whole demo fully automatic (no block asks
- for confirmation). It can also be given at creation time (or the attribute
- set later) to override what's in the file.
-
-While _any_ python file can be run as a Demo instance, if there are no stop
-tags the whole file will run in a single block (no different that calling
-first %pycat and then %run). The minimal markup to make this useful is to
-place a set of stop tags; the other tags are only there to let you fine-tune
-the execution.
-
-This is probably best explained with the simple example file below. You can
-copy this into a file named ex_demo.py, and try running it via::
-
+"""Module for interactive demos using IPython.
+
+This module implements a few classes for running Python scripts interactively
+in IPython for demonstrations. With very simple markup (a few tags in
+comments), you can control points where the script stops executing and returns
+control to IPython.
+
+
+Provided classes
+----------------
+
+The classes are (see their docstrings for further details):
+
+ - Demo: pure python demos
+
+ - IPythonDemo: demos with input to be processed by IPython as if it had been
+ typed interactively (so magics work, as well as any other special syntax you
+ may have added via input prefilters).
+
+ - LineDemo: single-line version of the Demo class. These demos are executed
+ one line at a time, and require no markup.
+
+ - IPythonLineDemo: IPython version of the LineDemo class (the demo is
+ executed a line at a time, but processed via IPython).
+
+ - ClearMixin: mixin to make Demo classes with less visual clutter. It
+ declares an empty marquee and a pre_cmd that clears the screen before each
+ block (see Subclassing below).
+
+ - ClearDemo, ClearIPDemo: mixin-enabled versions of the Demo and IPythonDemo
+ classes.
+
+Inheritance diagram:
+
+.. inheritance-diagram:: IPython.lib.demo
+ :parts: 3
+
+Subclassing
+-----------
+
+The classes here all include a few methods meant to make customization by
+subclassing more convenient. Their docstrings below have some more details:
+
+ - marquee(): generates a marquee to provide visible on-screen markers at each
+ block start and end.
+
+ - pre_cmd(): run right before the execution of each block.
+
+ - post_cmd(): run right after the execution of each block. If the block
+ raises an exception, this is NOT called.
+
+
+Operation
+---------
+
+The file is run in its own empty namespace (though you can pass it a string of
+arguments as if in a command line environment, and it will see those as
+sys.argv). But at each stop, the global IPython namespace is updated with the
+current internal demo namespace, so you can work interactively with the data
+accumulated so far.
+
+By default, each block of code is printed (with syntax highlighting) before
+executing it and you have to confirm execution. This is intended to show the
+code to an audience first so you can discuss it, and only proceed with
+execution once you agree. There are a few tags which allow you to modify this
+behavior.
+
+The supported tags are:
+
+# <demo> stop
+
+ Defines block boundaries, the points where IPython stops execution of the
+ file and returns to the interactive prompt.
+
+ You can optionally mark the stop tag with extra dashes before and after the
+ word 'stop', to help visually distinguish the blocks in a text editor:
+
+ # <demo> --- stop ---
+
+
+# <demo> silent
+
+ Make a block execute silently (and hence automatically). Typically used in
+ cases where you have some boilerplate or initialization code which you need
+ executed but do not want to be seen in the demo.
+
+# <demo> auto
+
+ Make a block execute automatically while still being printed. Useful for
+ simple code which does not warrant discussion, since it avoids the extra
+ manual confirmation.
+
+# <demo> auto_all
+
+ This tag can _only_ be in the first block, and if given it overrides the
+ individual auto tags to make the whole demo fully automatic (no block asks
+ for confirmation). It can also be given at creation time (or the attribute
+ set later) to override what's in the file.
+
+While _any_ Python file can be run as a Demo instance, if there are no stop
+tags the whole file will run in a single block (no different than calling
+first %pycat and then %run). The minimal markup to make this useful is to
+place a set of stop tags; the other tags are only there to let you fine-tune
+the execution.
+
+This is probably best explained with the simple example file below. You can
+copy this into a file named ex_demo.py, and try running it via::
+
from IPython.lib.demo import Demo
- d = Demo('ex_demo.py')
- d()
-
-Each time you call the demo object, it runs the next block. The demo object
-has a few useful methods for navigation, like again(), edit(), jump(), seek()
-and back(). It can be reset for a new run via reset() or reloaded from disk
-(in case you've edited the source) via reload(). See their docstrings below.
-
-Note: To make this simpler to explore, a file called "demo-exercizer.py" has
-been added to the "docs/examples/core" directory. Just cd to this directory in
-an IPython session, and type::
-
- %run demo-exercizer.py
-
-and then follow the directions.
-
-Example
--------
-
-The following is a very simple example of a valid demo file.
-
-::
-
- #################### EXAMPLE DEMO <ex_demo.py> ###############################
- '''A simple interactive demo to illustrate the use of IPython's Demo class.'''
-
- print 'Hello, welcome to an interactive IPython demo.'
-
- # The mark below defines a block boundary, which is a point where IPython will
- # stop execution and return to the interactive prompt. The dashes are actually
- # optional and used only as a visual aid to clearly separate blocks while
- # editing the demo code.
- # <demo> stop
-
- x = 1
- y = 2
-
- # <demo> stop
-
- # the mark below makes this block as silent
- # <demo> silent
-
- print 'This is a silent block, which gets executed but not printed.'
-
- # <demo> stop
- # <demo> auto
- print 'This is an automatic block.'
- print 'It is executed without asking for confirmation, but printed.'
- z = x+y
-
- print 'z=',x
-
- # <demo> stop
- # This is just another normal block.
- print 'z is now:', z
-
- print 'bye!'
- ################### END EXAMPLE DEMO <ex_demo.py> ############################
-"""
-
-from __future__ import unicode_literals
-
-#*****************************************************************************
-# Copyright (C) 2005-2006 Fernando Perez. <Fernando.Perez@colorado.edu>
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#
-#*****************************************************************************
-from __future__ import print_function
-
-import os
-import re
-import shlex
-import sys
-
-from IPython.utils import io
-from IPython.utils.text import marquee
-from IPython.utils import openpy
-from IPython.utils import py3compat
-__all__ = ['Demo','IPythonDemo','LineDemo','IPythonLineDemo','DemoError']
-
-class DemoError(Exception): pass
-
-def re_mark(mark):
- return re.compile(r'^\s*#\s+<demo>\s+%s\s*$' % mark,re.MULTILINE)
-
-class Demo(object):
-
- re_stop = re_mark('-*\s?stop\s?-*')
- re_silent = re_mark('silent')
- re_auto = re_mark('auto')
- re_auto_all = re_mark('auto_all')
-
- def __init__(self,src,title='',arg_str='',auto_all=None):
- """Make a new demo object. To run the demo, simply call the object.
-
- See the module docstring for full details and an example (you can use
- IPython.Demo? in IPython to see it).
-
- Inputs:
-
- - src is either a file, or file-like object, or a
- string that can be resolved to a filename.
-
- Optional inputs:
-
- - title: a string to use as the demo name. Of most use when the demo
- you are making comes from an object that has no filename, or if you
- want an alternate denotation distinct from the filename.
-
- - arg_str(''): a string of arguments, internally converted to a list
- just like sys.argv, so the demo script can see a similar
- environment.
-
- - auto_all(None): global flag to run all blocks automatically without
- confirmation. This attribute overrides the block-level tags and
- applies to the whole demo. It is an attribute of the object, and
- can be changed at runtime simply by reassigning it to a boolean
- value.
- """
- if hasattr(src, "read"):
- # It seems to be a file or a file-like object
- self.fname = "from a file-like object"
- if title == '':
- self.title = "from a file-like object"
- else:
- self.title = title
- else:
- # Assume it's a string or something that can be converted to one
- self.fname = src
- if title == '':
- (filepath, filename) = os.path.split(src)
- self.title = filename
- else:
- self.title = title
- self.sys_argv = [src] + shlex.split(arg_str)
- self.auto_all = auto_all
- self.src = src
-
- # get a few things from ipython. While it's a bit ugly design-wise,
- # it ensures that things like color scheme and the like are always in
- # sync with the ipython mode being used. This class is only meant to
- # be used inside ipython anyways, so it's OK.
- ip = get_ipython() # this is in builtins whenever IPython is running
- self.ip_ns = ip.user_ns
- self.ip_colorize = ip.pycolorize
- self.ip_showtb = ip.showtraceback
- self.ip_run_cell = ip.run_cell
- self.shell = ip
-
- # load user data and initialize data structures
- self.reload()
-
- def fload(self):
- """Load file object."""
- # read data and parse into blocks
- if hasattr(self, 'fobj') and self.fobj is not None:
- self.fobj.close()
- if hasattr(self.src, "read"):
- # It seems to be a file or a file-like object
- self.fobj = self.src
- else:
- # Assume it's a string or something that can be converted to one
- self.fobj = openpy.open(self.fname)
-
- def reload(self):
- """Reload source from disk and initialize state."""
- self.fload()
-
- self.src = "".join(openpy.strip_encoding_cookie(self.fobj))
- src_b = [b.strip() for b in self.re_stop.split(self.src) if b]
- self._silent = [bool(self.re_silent.findall(b)) for b in src_b]
- self._auto = [bool(self.re_auto.findall(b)) for b in src_b]
-
- # if auto_all is not given (def. None), we read it from the file
- if self.auto_all is None:
- self.auto_all = bool(self.re_auto_all.findall(src_b[0]))
- else:
- self.auto_all = bool(self.auto_all)
-
- # Clean the sources from all markup so it doesn't get displayed when
- # running the demo
- src_blocks = []
- auto_strip = lambda s: self.re_auto.sub('',s)
- for i,b in enumerate(src_b):
- if self._auto[i]:
- src_blocks.append(auto_strip(b))
- else:
- src_blocks.append(b)
- # remove the auto_all marker
- src_blocks[0] = self.re_auto_all.sub('',src_blocks[0])
-
- self.nblocks = len(src_blocks)
- self.src_blocks = src_blocks
-
- # also build syntax-highlighted source
- self.src_blocks_colored = list(map(self.ip_colorize,self.src_blocks))
-
- # ensure clean namespace and seek offset
- self.reset()
-
- def reset(self):
- """Reset the namespace and seek pointer to restart the demo"""
- self.user_ns = {}
- self.finished = False
- self.block_index = 0
-
- def _validate_index(self,index):
- if index<0 or index>=self.nblocks:
- raise ValueError('invalid block index %s' % index)
-
- def _get_index(self,index):
- """Get the current block index, validating and checking status.
-
- Returns None if the demo is finished"""
-
- if index is None:
- if self.finished:
+ d = Demo('ex_demo.py')
+ d()
+
+Each time you call the demo object, it runs the next block. The demo object
+has a few useful methods for navigation, like again(), edit(), jump(), seek()
+and back(). It can be reset for a new run via reset() or reloaded from disk
+(in case you've edited the source) via reload(). See their docstrings below.
+
+Note: To make this simpler to explore, a file called "demo-exercizer.py" has
+been added to the "docs/examples/core" directory. Just cd to this directory in
+an IPython session, and type::
+
+ %run demo-exercizer.py
+
+and then follow the directions.
+
+Example
+-------
+
+The following is a very simple example of a valid demo file.
+
+::
+
+ #################### EXAMPLE DEMO <ex_demo.py> ###############################
+ '''A simple interactive demo to illustrate the use of IPython's Demo class.'''
+
+ print 'Hello, welcome to an interactive IPython demo.'
+
+ # The mark below defines a block boundary, which is a point where IPython will
+ # stop execution and return to the interactive prompt. The dashes are actually
+ # optional and used only as a visual aid to clearly separate blocks while
+ # editing the demo code.
+ # <demo> stop
+
+ x = 1
+ y = 2
+
+ # <demo> stop
+
+ # the mark below makes this block silent
+ # <demo> silent
+
+ print 'This is a silent block, which gets executed but not printed.'
+
+ # <demo> stop
+ # <demo> auto
+ print 'This is an automatic block.'
+ print 'It is executed without asking for confirmation, but printed.'
+ z = x+y
+
+ print 'z=',z
+
+ # <demo> stop
+ # This is just another normal block.
+ print 'z is now:', z
+
+ print 'bye!'
+ ################### END EXAMPLE DEMO <ex_demo.py> ############################
+"""
+
+from __future__ import unicode_literals
+
+#*****************************************************************************
+# Copyright (C) 2005-2006 Fernando Perez. <Fernando.Perez@colorado.edu>
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#
+#*****************************************************************************
+from __future__ import print_function
+
+import os
+import re
+import shlex
+import sys
+
+from IPython.utils import io
+from IPython.utils.text import marquee
+from IPython.utils import openpy
+from IPython.utils import py3compat
+__all__ = ['Demo','IPythonDemo','LineDemo','IPythonLineDemo','DemoError']
+
+class DemoError(Exception): pass
+
+def re_mark(mark):
+ return re.compile(r'^\s*#\s+<demo>\s+%s\s*$' % mark,re.MULTILINE)
+
+class Demo(object):
+
+ re_stop = re_mark(r'-*\s?stop\s?-*')
+ re_silent = re_mark('silent')
+ re_auto = re_mark('auto')
+ re_auto_all = re_mark('auto_all')
+
+ def __init__(self,src,title='',arg_str='',auto_all=None):
+ """Make a new demo object. To run the demo, simply call the object.
+
+ See the module docstring for full details and an example (you can use
+ IPython.Demo? in IPython to see it).
+
+ Inputs:
+
+ - src is either a file, or file-like object, or a
+ string that can be resolved to a filename.
+
+ Optional inputs:
+
+ - title: a string to use as the demo name. Of most use when the demo
+ you are making comes from an object that has no filename, or if you
+ want an alternate denotation distinct from the filename.
+
+ - arg_str(''): a string of arguments, internally converted to a list
+ just like sys.argv, so the demo script can see a similar
+ environment.
+
+ - auto_all(None): global flag to run all blocks automatically without
+ confirmation. This attribute overrides the block-level tags and
+ applies to the whole demo. It is an attribute of the object, and
+ can be changed at runtime simply by reassigning it to a boolean
+ value.
+ """
+ if hasattr(src, "read"):
+ # It seems to be a file or a file-like object
+ self.fname = "from a file-like object"
+ if title == '':
+ self.title = "from a file-like object"
+ else:
+ self.title = title
+ else:
+ # Assume it's a string or something that can be converted to one
+ self.fname = src
+ if title == '':
+ (filepath, filename) = os.path.split(src)
+ self.title = filename
+ else:
+ self.title = title
+ self.sys_argv = [src] + shlex.split(arg_str)
+ self.auto_all = auto_all
+ self.src = src
+
+ # get a few things from ipython. While it's a bit ugly design-wise,
+ # it ensures that things like color scheme and the like are always in
+ # sync with the ipython mode being used. This class is only meant to
+ # be used inside ipython anyways, so it's OK.
+ ip = get_ipython() # this is in builtins whenever IPython is running
+ self.ip_ns = ip.user_ns
+ self.ip_colorize = ip.pycolorize
+ self.ip_showtb = ip.showtraceback
+ self.ip_run_cell = ip.run_cell
+ self.shell = ip
+
+ # load user data and initialize data structures
+ self.reload()
+
+ def fload(self):
+ """Load file object."""
+ # read data and parse into blocks
+ if hasattr(self, 'fobj') and self.fobj is not None:
+ self.fobj.close()
+ if hasattr(self.src, "read"):
+ # It seems to be a file or a file-like object
+ self.fobj = self.src
+ else:
+ # Assume it's a string or something that can be converted to one
+ self.fobj = openpy.open(self.fname)
+
+ def reload(self):
+ """Reload source from disk and initialize state."""
+ self.fload()
+
+ self.src = "".join(openpy.strip_encoding_cookie(self.fobj))
+ src_b = [b.strip() for b in self.re_stop.split(self.src) if b]
+ self._silent = [bool(self.re_silent.findall(b)) for b in src_b]
+ self._auto = [bool(self.re_auto.findall(b)) for b in src_b]
+
+ # if auto_all is not given (def. None), we read it from the file
+ if self.auto_all is None:
+ self.auto_all = bool(self.re_auto_all.findall(src_b[0]))
+ else:
+ self.auto_all = bool(self.auto_all)
+
+ # Clean the sources from all markup so it doesn't get displayed when
+ # running the demo
+ src_blocks = []
+ auto_strip = lambda s: self.re_auto.sub('',s)
+ for i,b in enumerate(src_b):
+ if self._auto[i]:
+ src_blocks.append(auto_strip(b))
+ else:
+ src_blocks.append(b)
+ # remove the auto_all marker
+ src_blocks[0] = self.re_auto_all.sub('',src_blocks[0])
+
+ self.nblocks = len(src_blocks)
+ self.src_blocks = src_blocks
+
+ # also build syntax-highlighted source
+ self.src_blocks_colored = list(map(self.ip_colorize,self.src_blocks))
+
+ # ensure clean namespace and seek offset
+ self.reset()
+
+ def reset(self):
+ """Reset the namespace and seek pointer to restart the demo"""
+ self.user_ns = {}
+ self.finished = False
+ self.block_index = 0
+
+ def _validate_index(self,index):
+ if index<0 or index>=self.nblocks:
+ raise ValueError('invalid block index %s' % index)
+
+ def _get_index(self,index):
+ """Get the current block index, validating and checking status.
+
+ Returns None if the demo is finished"""
+
+ if index is None:
+ if self.finished:
print('Demo finished. Use <demo_name>.reset() if you want to rerun it.')
- return None
- index = self.block_index
- else:
- self._validate_index(index)
- return index
-
- def seek(self,index):
- """Move the current seek pointer to the given block.
-
- You can use negative indices to seek from the end, with identical
- semantics to those of Python lists."""
- if index<0:
- index = self.nblocks + index
- self._validate_index(index)
- self.block_index = index
- self.finished = False
-
- def back(self,num=1):
- """Move the seek pointer back num blocks (default is 1)."""
- self.seek(self.block_index-num)
-
- def jump(self,num=1):
- """Jump a given number of blocks relative to the current one.
-
- The offset can be positive or negative, defaults to 1."""
- self.seek(self.block_index+num)
-
- def again(self):
- """Move the seek pointer back one block and re-execute."""
- self.back(1)
- self()
-
- def edit(self,index=None):
- """Edit a block.
-
- If no number is given, use the last block executed.
-
- This edits the in-memory copy of the demo, it does NOT modify the
- original source file. If you want to do that, simply open the file in
- an editor and use reload() when you make changes to the file. This
- method is meant to let you change a block during a demonstration for
- explanatory purposes, without damaging your original script."""
-
- index = self._get_index(index)
- if index is None:
- return
- # decrease the index by one (unless we're at the very beginning), so
- # that the default demo.edit() call opens up the sblock we've last run
- if index>0:
- index -= 1
-
- filename = self.shell.mktempfile(self.src_blocks[index])
- self.shell.hooks.editor(filename,1)
- with open(filename, 'r') as f:
- new_block = f.read()
- # update the source and colored block
- self.src_blocks[index] = new_block
- self.src_blocks_colored[index] = self.ip_colorize(new_block)
- self.block_index = index
- # call to run with the newly edited index
- self()
-
- def show(self,index=None):
- """Show a single block on screen"""
-
- index = self._get_index(index)
- if index is None:
- return
-
- print(self.marquee('<%s> block # %s (%s remaining)' %
+ return None
+ index = self.block_index
+ else:
+ self._validate_index(index)
+ return index
+
+ def seek(self,index):
+ """Move the current seek pointer to the given block.
+
+ You can use negative indices to seek from the end, with identical
+ semantics to those of Python lists."""
+ if index<0:
+ index = self.nblocks + index
+ self._validate_index(index)
+ self.block_index = index
+ self.finished = False
+
+ def back(self,num=1):
+ """Move the seek pointer back num blocks (default is 1)."""
+ self.seek(self.block_index-num)
+
+ def jump(self,num=1):
+ """Jump a given number of blocks relative to the current one.
+
+ The offset can be positive or negative, defaults to 1."""
+ self.seek(self.block_index+num)
+
+ def again(self):
+ """Move the seek pointer back one block and re-execute."""
+ self.back(1)
+ self()
+
+ def edit(self,index=None):
+ """Edit a block.
+
+ If no number is given, use the last block executed.
+
+ This edits the in-memory copy of the demo, it does NOT modify the
+ original source file. If you want to do that, simply open the file in
+ an editor and use reload() when you make changes to the file. This
+ method is meant to let you change a block during a demonstration for
+ explanatory purposes, without damaging your original script."""
+
+ index = self._get_index(index)
+ if index is None:
+ return
+ # decrease the index by one (unless we're at the very beginning), so
+ # that the default demo.edit() call opens up the block we've last run
+ if index>0:
+ index -= 1
+
+ filename = self.shell.mktempfile(self.src_blocks[index])
+ self.shell.hooks.editor(filename,1)
+ with open(filename, 'r') as f:
+ new_block = f.read()
+ # update the source and colored block
+ self.src_blocks[index] = new_block
+ self.src_blocks_colored[index] = self.ip_colorize(new_block)
+ self.block_index = index
+ # call to run with the newly edited index
+ self()
+
+ def show(self,index=None):
+ """Show a single block on screen"""
+
+ index = self._get_index(index)
+ if index is None:
+ return
+
+ print(self.marquee('<%s> block # %s (%s remaining)' %
(self.title,index,self.nblocks-index-1)))
print(self.src_blocks_colored[index])
- sys.stdout.flush()
-
- def show_all(self):
- """Show entire demo on screen, block by block"""
-
- fname = self.title
- title = self.title
- nblocks = self.nblocks
- silent = self._silent
- marquee = self.marquee
- for index,block in enumerate(self.src_blocks_colored):
- if silent[index]:
- print(marquee('<%s> SILENT block # %s (%s remaining)' %
+ sys.stdout.flush()
+
+ def show_all(self):
+ """Show entire demo on screen, block by block"""
+
+ fname = self.title
+ title = self.title
+ nblocks = self.nblocks
+ silent = self._silent
+ marquee = self.marquee
+ for index,block in enumerate(self.src_blocks_colored):
+ if silent[index]:
+ print(marquee('<%s> SILENT block # %s (%s remaining)' %
(title,index,nblocks-index-1)))
- else:
- print(marquee('<%s> block # %s (%s remaining)' %
+ else:
+ print(marquee('<%s> block # %s (%s remaining)' %
(title,index,nblocks-index-1)))
print(block, end=' ')
- sys.stdout.flush()
-
- def run_cell(self,source):
- """Execute a string with one or more lines of code"""
-
- exec(source, self.user_ns)
-
- def __call__(self,index=None):
- """run a block of the demo.
-
- If index is given, it should be an integer >=1 and <= nblocks. This
- means that the calling convention is one off from typical Python
- lists. The reason for the inconsistency is that the demo always
- prints 'Block n/N, and N is the total, so it would be very odd to use
- zero-indexing here."""
-
- index = self._get_index(index)
- if index is None:
- return
- try:
- marquee = self.marquee
- next_block = self.src_blocks[index]
- self.block_index += 1
- if self._silent[index]:
- print(marquee('Executing silent block # %s (%s remaining)' %
+ sys.stdout.flush()
+
+ def run_cell(self,source):
+ """Execute a string with one or more lines of code"""
+
+ exec(source, self.user_ns)
+
+ def __call__(self,index=None):
+ """run a block of the demo.
+
+ If index is given, it should be an integer >=1 and <= nblocks. This
+ means that the calling convention is one off from typical Python
+ lists. The reason for the inconsistency is that the demo always
+ prints 'Block n/N', and N is the total, so it would be very odd to use
+ zero-indexing here."""
+
+ index = self._get_index(index)
+ if index is None:
+ return
+ try:
+ marquee = self.marquee
+ next_block = self.src_blocks[index]
+ self.block_index += 1
+ if self._silent[index]:
+ print(marquee('Executing silent block # %s (%s remaining)' %
(index,self.nblocks-index-1)))
- else:
- self.pre_cmd()
- self.show(index)
- if self.auto_all or self._auto[index]:
+ else:
+ self.pre_cmd()
+ self.show(index)
+ if self.auto_all or self._auto[index]:
print(marquee('output:'))
- else:
+ else:
print(marquee('Press <q> to quit, <Enter> to execute...'), end=' ')
- ans = py3compat.input().strip()
- if ans:
+ ans = py3compat.input().strip()
+ if ans:
print(marquee('Block NOT executed'))
- return
- try:
- save_argv = sys.argv
- sys.argv = self.sys_argv
- self.run_cell(next_block)
- self.post_cmd()
- finally:
- sys.argv = save_argv
-
- except:
- self.ip_showtb(filename=self.fname)
- else:
- self.ip_ns.update(self.user_ns)
-
- if self.block_index == self.nblocks:
- mq1 = self.marquee('END OF DEMO')
- if mq1:
+ return
+ try:
+ save_argv = sys.argv
+ sys.argv = self.sys_argv
+ self.run_cell(next_block)
+ self.post_cmd()
+ finally:
+ sys.argv = save_argv
+
+ except:
+ self.ip_showtb(filename=self.fname)
+ else:
+ self.ip_ns.update(self.user_ns)
+
+ if self.block_index == self.nblocks:
+ mq1 = self.marquee('END OF DEMO')
+ if mq1:
# avoid spurious print if empty marquees are used
print()
print(mq1)
print(self.marquee('Use <demo_name>.reset() if you want to rerun it.'))
- self.finished = True
-
- # These methods are meant to be overridden by subclasses who may wish to
- # customize the behavior of of their demos.
- def marquee(self,txt='',width=78,mark='*'):
- """Return the input string centered in a 'marquee'."""
- return marquee(txt,width,mark)
-
- def pre_cmd(self):
- """Method called before executing each block."""
- pass
-
- def post_cmd(self):
- """Method called after executing each block."""
- pass
-
-
-class IPythonDemo(Demo):
- """Class for interactive demos with IPython's input processing applied.
-
- This subclasses Demo, but instead of executing each block by the Python
- interpreter (via exec), it actually calls IPython on it, so that any input
- filters which may be in place are applied to the input block.
-
- If you have an interactive environment which exposes special input
- processing, you can use this class instead to write demo scripts which
- operate exactly as if you had typed them interactively. The default Demo
- class requires the input to be valid, pure Python code.
- """
-
- def run_cell(self,source):
- """Execute a string with one or more lines of code"""
-
- self.shell.run_cell(source)
-
-class LineDemo(Demo):
- """Demo where each line is executed as a separate block.
-
- The input script should be valid Python code.
-
- This class doesn't require any markup at all, and it's meant for simple
- scripts (with no nesting or any kind of indentation) which consist of
- multiple lines of input to be executed, one at a time, as if they had been
- typed in the interactive prompt.
-
- Note: the input can not have *any* indentation, which means that only
- single-lines of input are accepted, not even function definitions are
- valid."""
-
- def reload(self):
- """Reload source from disk and initialize state."""
- # read data and parse into blocks
- self.fload()
- lines = self.fobj.readlines()
- src_b = [l for l in lines if l.strip()]
- nblocks = len(src_b)
- self.src = ''.join(lines)
- self._silent = [False]*nblocks
- self._auto = [True]*nblocks
- self.auto_all = True
- self.nblocks = nblocks
- self.src_blocks = src_b
-
- # also build syntax-highlighted source
- self.src_blocks_colored = map(self.ip_colorize,self.src_blocks)
-
- # ensure clean namespace and seek offset
- self.reset()
-
-
-class IPythonLineDemo(IPythonDemo,LineDemo):
- """Variant of the LineDemo class whose input is processed by IPython."""
- pass
-
-
-class ClearMixin(object):
- """Use this mixin to make Demo classes with less visual clutter.
-
- Demos using this mixin will clear the screen before every block and use
- blank marquees.
-
- Note that in order for the methods defined here to actually override those
- of the classes it's mixed with, it must go /first/ in the inheritance
- tree. For example:
-
- class ClearIPDemo(ClearMixin,IPythonDemo): pass
-
- will provide an IPythonDemo class with the mixin's features.
- """
-
- def marquee(self,txt='',width=78,mark='*'):
- """Blank marquee that returns '' no matter what the input."""
- return ''
-
- def pre_cmd(self):
- """Method called before executing each block.
-
- This one simply clears the screen."""
- from IPython.utils.terminal import term_clear
- term_clear()
-
-class ClearDemo(ClearMixin,Demo):
- pass
-
-
-class ClearIPDemo(ClearMixin,IPythonDemo):
- pass
+ self.finished = True
+
+ # These methods are meant to be overridden by subclasses that may wish to
+ # customize the behavior of their demos.
+ def marquee(self,txt='',width=78,mark='*'):
+ """Return the input string centered in a 'marquee'."""
+ return marquee(txt,width,mark)
+
+ def pre_cmd(self):
+ """Method called before executing each block."""
+ pass
+
+ def post_cmd(self):
+ """Method called after executing each block."""
+ pass
+
+
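The marquee(), pre_cmd() and post_cmd() methods above are the hooks intended for subclassing. A minimal sketch of a custom Demo subclass; the class name and the timing behaviour are purely illustrative, not part of this module:

    import time
    from IPython.lib.demo import Demo
    from IPython.utils.text import marquee

    class TimedDemo(Demo):
        """Demo variant that reports how long each block took (illustrative only)."""
        def pre_cmd(self):
            self._t0 = time.time()

        def post_cmd(self):
            t0 = getattr(self, '_t0', None)
            if t0 is not None:
                print('block finished in %.3f s' % (time.time() - t0))

        def marquee(self, txt='', width=78, mark='-'):
            # same helper Demo uses, just with a different fill character
            return marquee(txt, width, mark)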
+class IPythonDemo(Demo):
+ """Class for interactive demos with IPython's input processing applied.
+
+ This subclasses Demo, but instead of executing each block by the Python
+ interpreter (via exec), it actually calls IPython on it, so that any input
+ filters which may be in place are applied to the input block.
+
+ If you have an interactive environment which exposes special input
+ processing, you can use this class instead to write demo scripts which
+ operate exactly as if you had typed them interactively. The default Demo
+ class requires the input to be valid, pure Python code.
+ """
+
+ def run_cell(self,source):
+ """Execute a string with one or more lines of code"""
+
+ self.shell.run_cell(source)
+
+class LineDemo(Demo):
+ """Demo where each line is executed as a separate block.
+
+ The input script should be valid Python code.
+
+ This class doesn't require any markup at all, and it's meant for simple
+ scripts (with no nesting or any kind of indentation) which consist of
+ multiple lines of input to be executed, one at a time, as if they had been
+ typed in the interactive prompt.
+
+ Note: the input cannot have *any* indentation, which means that only
+ single lines of input are accepted; not even function definitions are
+ valid."""
+
+ def reload(self):
+ """Reload source from disk and initialize state."""
+ # read data and parse into blocks
+ self.fload()
+ lines = self.fobj.readlines()
+ src_b = [l for l in lines if l.strip()]
+ nblocks = len(src_b)
+ self.src = ''.join(lines)
+ self._silent = [False]*nblocks
+ self._auto = [True]*nblocks
+ self.auto_all = True
+ self.nblocks = nblocks
+ self.src_blocks = src_b
+
+ # also build syntax-highlighted source
+ self.src_blocks_colored = map(self.ip_colorize,self.src_blocks)
+
+ # ensure clean namespace and seek offset
+ self.reset()
+
+
+class IPythonLineDemo(IPythonDemo,LineDemo):
+ """Variant of the LineDemo class whose input is processed by IPython."""
+ pass
+
+
+class ClearMixin(object):
+ """Use this mixin to make Demo classes with less visual clutter.
+
+ Demos using this mixin will clear the screen before every block and use
+ blank marquees.
+
+ Note that in order for the methods defined here to actually override those
+ of the classes it's mixed with, it must go /first/ in the inheritance
+ tree. For example:
+
+ class ClearIPDemo(ClearMixin,IPythonDemo): pass
+
+ will provide an IPythonDemo class with the mixin's features.
+ """
+
+ def marquee(self,txt='',width=78,mark='*'):
+ """Blank marquee that returns '' no matter what the input."""
+ return ''
+
+ def pre_cmd(self):
+ """Method called before executing each block.
+
+ This one simply clears the screen."""
+ from IPython.utils.terminal import term_clear
+ term_clear()
+
+class ClearDemo(ClearMixin,Demo):
+ pass
+
+
+class ClearIPDemo(ClearMixin,IPythonDemo):
+ pass
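A short sketch of driving one of these demo classes from an IPython session, using the navigation methods described in the docstrings above (ex_demo.py is the example file from the module docstring):

    from IPython.lib.demo import ClearIPDemo

    d = ClearIPDemo('ex_demo.py')   # blank marquees, clears the screen before each block
    d()                             # run the first block
    d()                             # run the next block
    d.back(1)                       # move the seek pointer back one block
    d()                             # ...and execute it again (equivalent to d.again())
    d.seek(0)                       # rewind to the start
    d.show_all()                    # print every block without executing anything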
diff --git a/contrib/python/ipython/py2/IPython/lib/display.py b/contrib/python/ipython/py2/IPython/lib/display.py
index bf55015ff4..9221e2e062 100644
--- a/contrib/python/ipython/py2/IPython/lib/display.py
+++ b/contrib/python/ipython/py2/IPython/lib/display.py
@@ -1,558 +1,558 @@
-"""Various display related classes.
-
-Authors : MinRK, gregcaporaso, dannystaple
-"""
-from os.path import exists, isfile, splitext, abspath, join, isdir
-from os import walk, sep
-
-from IPython.core.display import DisplayObject
-
-__all__ = ['Audio', 'IFrame', 'YouTubeVideo', 'VimeoVideo', 'ScribdDocument',
- 'FileLink', 'FileLinks']
-
-
-class Audio(DisplayObject):
- """Create an audio object.
-
- When this object is returned by an input cell or passed to the
- display function, it will result in Audio controls being displayed
- in the frontend (only works in the notebook).
-
- Parameters
- ----------
- data : numpy array, list, unicode, str or bytes
- Can be one of
-
- * Numpy 1d array containing the desired waveform (mono)
- * Numpy 2d array containing waveforms for each channel.
- Shape=(NCHAN, NSAMPLES). For the standard channel order, see
- http://msdn.microsoft.com/en-us/library/windows/hardware/dn653308(v=vs.85).aspx
- * List of float or integer representing the waveform (mono)
- * String containing the filename
- * Bytestring containing raw PCM data or
- * URL pointing to a file on the web.
-
- If the array option is used the waveform will be normalized.
-
- If a filename or url is used the format support will be browser
- dependent.
- url : unicode
- A URL to download the data from.
- filename : unicode
- Path to a local file to load the data from.
- embed : boolean
+"""Various display related classes.
+
+Authors : MinRK, gregcaporaso, dannystaple
+"""
+from os.path import exists, isfile, splitext, abspath, join, isdir
+from os import walk, sep
+
+from IPython.core.display import DisplayObject
+
+__all__ = ['Audio', 'IFrame', 'YouTubeVideo', 'VimeoVideo', 'ScribdDocument',
+ 'FileLink', 'FileLinks']
+
+
+class Audio(DisplayObject):
+ """Create an audio object.
+
+ When this object is returned by an input cell or passed to the
+ display function, it will result in Audio controls being displayed
+ in the frontend (only works in the notebook).
+
+ Parameters
+ ----------
+ data : numpy array, list, unicode, str or bytes
+ Can be one of
+
+ * Numpy 1d array containing the desired waveform (mono)
+ * Numpy 2d array containing waveforms for each channel.
+ Shape=(NCHAN, NSAMPLES). For the standard channel order, see
+ http://msdn.microsoft.com/en-us/library/windows/hardware/dn653308(v=vs.85).aspx
+ * List of float or integer representing the waveform (mono)
+ * String containing the filename
+ * Bytestring containing raw PCM data or
+ * URL pointing to a file on the web.
+
+ If the array option is used the waveform will be normalized.
+
+ If a filename or url is used the format support will be browser
+ dependent.
+ url : unicode
+ A URL to download the data from.
+ filename : unicode
+ Path to a local file to load the data from.
+ embed : boolean
Should the audio data be embedded using a data URI (True) or should
- the original source be referenced. Set this to True if you want the
- audio to playable later with no internet connection in the notebook.
-
- Default is `True`, unless the keyword argument `url` is set, then
- default value is `False`.
- rate : integer
- The sampling rate of the raw data.
- Only required when data parameter is being used as an array
- autoplay : bool
- Set to True if the audio should immediately start playing.
- Default is `False`.
-
- Examples
- --------
- ::
-
- # Generate a sound
- import numpy as np
- framerate = 44100
- t = np.linspace(0,5,framerate*5)
- data = np.sin(2*np.pi*220*t) + np.sin(2*np.pi*224*t))
- Audio(data,rate=framerate)
-
- # Can also do stereo or more channels
- dataleft = np.sin(2*np.pi*220*t)
- dataright = np.sin(2*np.pi*224*t)
- Audio([dataleft, dataright],rate=framerate)
-
- Audio("http://www.nch.com.au/acm/8k16bitpcm.wav") # From URL
- Audio(url="http://www.w3schools.com/html/horse.ogg")
-
- Audio('/path/to/sound.wav') # From file
- Audio(filename='/path/to/sound.ogg')
-
- Audio(b'RAW_WAV_DATA..) # From bytes
- Audio(data=b'RAW_WAV_DATA..)
-
- """
- _read_flags = 'rb'
-
- def __init__(self, data=None, filename=None, url=None, embed=None, rate=None, autoplay=False):
- if filename is None and url is None and data is None:
- raise ValueError("No image data found. Expecting filename, url, or data.")
- if embed is False and url is None:
- raise ValueError("No url found. Expecting url when embed=False")
-
- if url is not None and embed is not True:
- self.embed = False
- else:
- self.embed = True
- self.autoplay = autoplay
- super(Audio, self).__init__(data=data, url=url, filename=filename)
-
- if self.data is not None and not isinstance(self.data, bytes):
- self.data = self._make_wav(data,rate)
-
- def reload(self):
- """Reload the raw data from file or URL."""
- import mimetypes
- if self.embed:
- super(Audio, self).reload()
-
- if self.filename is not None:
- self.mimetype = mimetypes.guess_type(self.filename)[0]
- elif self.url is not None:
- self.mimetype = mimetypes.guess_type(self.url)[0]
- else:
- self.mimetype = "audio/wav"
-
- def _make_wav(self, data, rate):
- """ Transform a numpy array to a PCM bytestring """
- import struct
- from io import BytesIO
- import wave
-
- try:
- import numpy as np
-
- data = np.array(data, dtype=float)
- if len(data.shape) == 1:
- nchan = 1
- elif len(data.shape) == 2:
- # In wave files,channels are interleaved. E.g.,
- # "L1R1L2R2..." for stereo. See
- # http://msdn.microsoft.com/en-us/library/windows/hardware/dn653308(v=vs.85).aspx
- # for channel ordering
- nchan = data.shape[0]
- data = data.T.ravel()
- else:
- raise ValueError('Array audio input must be a 1D or 2D array')
- scaled = np.int16(data/np.max(np.abs(data))*32767).tolist()
- except ImportError:
- # check that it is a "1D" list
- idata = iter(data) # fails if not an iterable
- try:
- iter(idata.next())
- raise TypeError('Only lists of mono audio are '
- 'supported if numpy is not installed')
- except TypeError:
- # this means it's not a nested list, which is what we want
- pass
- maxabsvalue = float(max([abs(x) for x in data]))
- scaled = [int(x/maxabsvalue*32767) for x in data]
- nchan = 1
-
- fp = BytesIO()
- waveobj = wave.open(fp,mode='wb')
- waveobj.setnchannels(nchan)
- waveobj.setframerate(rate)
- waveobj.setsampwidth(2)
- waveobj.setcomptype('NONE','NONE')
- waveobj.writeframes(b''.join([struct.pack('<h',x) for x in scaled]))
- val = fp.getvalue()
- waveobj.close()
-
- return val
-
- def _data_and_metadata(self):
- """shortcut for returning metadata with url information, if defined"""
- md = {}
- if self.url:
- md['url'] = self.url
- if md:
- return self.data, md
- else:
- return self.data
-
- def _repr_html_(self):
- src = """
- <audio controls="controls" {autoplay}>
- <source src="{src}" type="{type}" />
- Your browser does not support the audio element.
- </audio>
- """
- return src.format(src=self.src_attr(),type=self.mimetype, autoplay=self.autoplay_attr())
-
- def src_attr(self):
- import base64
- if self.embed and (self.data is not None):
- data = base64=base64.b64encode(self.data).decode('ascii')
- return """data:{type};base64,{base64}""".format(type=self.mimetype,
- base64=data)
- elif self.url is not None:
- return self.url
- else:
- return ""
-
- def autoplay_attr(self):
- if(self.autoplay):
- return 'autoplay="autoplay"'
- else:
- return ''
-
-class IFrame(object):
- """
- Generic class to embed an iframe in an IPython notebook
- """
-
- iframe = """
- <iframe
- width="{width}"
- height="{height}"
- src="{src}{params}"
- frameborder="0"
- allowfullscreen
- ></iframe>
- """
-
- def __init__(self, src, width, height, **kwargs):
- self.src = src
- self.width = width
- self.height = height
- self.params = kwargs
-
- def _repr_html_(self):
- """return the embed iframe"""
- if self.params:
- try:
- from urllib.parse import urlencode # Py 3
- except ImportError:
- from urllib import urlencode
- params = "?" + urlencode(self.params)
- else:
- params = ""
- return self.iframe.format(src=self.src,
- width=self.width,
- height=self.height,
- params=params)
-
-class YouTubeVideo(IFrame):
- """Class for embedding a YouTube Video in an IPython session, based on its video id.
-
- e.g. to embed the video from https://www.youtube.com/watch?v=foo , you would
- do::
-
- vid = YouTubeVideo("foo")
- display(vid)
-
- To start from 30 seconds::
-
- vid = YouTubeVideo("abc", start=30)
- display(vid)
-
- To calculate seconds from time as hours, minutes, seconds use
- :class:`datetime.timedelta`::
-
- start=int(timedelta(hours=1, minutes=46, seconds=40).total_seconds())
-
- Other parameters can be provided as documented at
+ the original source be referenced. Set this to True if you want the
+ audio to be playable later with no internet connection in the notebook.
+
+ Default is `True`, unless the keyword argument `url` is set, then
+ default value is `False`.
+ rate : integer
+ The sampling rate of the raw data.
+ Only required when data parameter is being used as an array
+ autoplay : bool
+ Set to True if the audio should immediately start playing.
+ Default is `False`.
+
+ Examples
+ --------
+ ::
+
+ # Generate a sound
+ import numpy as np
+ framerate = 44100
+ t = np.linspace(0,5,framerate*5)
+ data = np.sin(2*np.pi*220*t) + np.sin(2*np.pi*224*t)
+ Audio(data,rate=framerate)
+
+ # Can also do stereo or more channels
+ dataleft = np.sin(2*np.pi*220*t)
+ dataright = np.sin(2*np.pi*224*t)
+ Audio([dataleft, dataright],rate=framerate)
+
+ Audio("http://www.nch.com.au/acm/8k16bitpcm.wav") # From URL
+ Audio(url="http://www.w3schools.com/html/horse.ogg")
+
+ Audio('/path/to/sound.wav') # From file
+ Audio(filename='/path/to/sound.ogg')
+
+ Audio(b'RAW_WAV_DATA..') # From bytes
+ Audio(data=b'RAW_WAV_DATA..')
+
+ """
+ _read_flags = 'rb'
+
+ def __init__(self, data=None, filename=None, url=None, embed=None, rate=None, autoplay=False):
+ if filename is None and url is None and data is None:
+ raise ValueError("No image data found. Expecting filename, url, or data.")
+ if embed is False and url is None:
+ raise ValueError("No url found. Expecting url when embed=False")
+
+ if url is not None and embed is not True:
+ self.embed = False
+ else:
+ self.embed = True
+ self.autoplay = autoplay
+ super(Audio, self).__init__(data=data, url=url, filename=filename)
+
+ if self.data is not None and not isinstance(self.data, bytes):
+ self.data = self._make_wav(data,rate)
+
+ def reload(self):
+ """Reload the raw data from file or URL."""
+ import mimetypes
+ if self.embed:
+ super(Audio, self).reload()
+
+ if self.filename is not None:
+ self.mimetype = mimetypes.guess_type(self.filename)[0]
+ elif self.url is not None:
+ self.mimetype = mimetypes.guess_type(self.url)[0]
+ else:
+ self.mimetype = "audio/wav"
+
+ def _make_wav(self, data, rate):
+ """ Transform a numpy array to a PCM bytestring """
+ import struct
+ from io import BytesIO
+ import wave
+
+ try:
+ import numpy as np
+
+ data = np.array(data, dtype=float)
+ if len(data.shape) == 1:
+ nchan = 1
+ elif len(data.shape) == 2:
+ # In wave files, channels are interleaved. E.g.,
+ # "L1R1L2R2..." for stereo. See
+ # http://msdn.microsoft.com/en-us/library/windows/hardware/dn653308(v=vs.85).aspx
+ # for channel ordering
+ nchan = data.shape[0]
+ data = data.T.ravel()
+ else:
+ raise ValueError('Array audio input must be a 1D or 2D array')
+ scaled = np.int16(data/np.max(np.abs(data))*32767).tolist()
+ except ImportError:
+ # check that it is a "1D" list
+ idata = iter(data) # fails if not an iterable
+ try:
+ iter(idata.next())
+ raise TypeError('Only lists of mono audio are '
+ 'supported if numpy is not installed')
+ except TypeError:
+ # this means it's not a nested list, which is what we want
+ pass
+ maxabsvalue = float(max([abs(x) for x in data]))
+ scaled = [int(x/maxabsvalue*32767) for x in data]
+ nchan = 1
+
+ fp = BytesIO()
+ waveobj = wave.open(fp,mode='wb')
+ waveobj.setnchannels(nchan)
+ waveobj.setframerate(rate)
+ waveobj.setsampwidth(2)
+ waveobj.setcomptype('NONE','NONE')
+ waveobj.writeframes(b''.join([struct.pack('<h',x) for x in scaled]))
+ val = fp.getvalue()
+ waveobj.close()
+
+ return val
+
+ def _data_and_metadata(self):
+ """shortcut for returning metadata with url information, if defined"""
+ md = {}
+ if self.url:
+ md['url'] = self.url
+ if md:
+ return self.data, md
+ else:
+ return self.data
+
+ def _repr_html_(self):
+ src = """
+ <audio controls="controls" {autoplay}>
+ <source src="{src}" type="{type}" />
+ Your browser does not support the audio element.
+ </audio>
+ """
+ return src.format(src=self.src_attr(),type=self.mimetype, autoplay=self.autoplay_attr())
+
+ def src_attr(self):
+ import base64
+ if self.embed and (self.data is not None):
+ data = base64.b64encode(self.data).decode('ascii')
+ return """data:{type};base64,{base64}""".format(type=self.mimetype,
+ base64=data)
+ elif self.url is not None:
+ return self.url
+ else:
+ return ""
+
+ def autoplay_attr(self):
+ if(self.autoplay):
+ return 'autoplay="autoplay"'
+ else:
+ return ''
+
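The _make_wav() helper above normalizes the waveform into the int16 range before packing it into a WAV container. A standalone sketch of the same scaling step, assuming numpy is available:

    import numpy as np

    def to_int16(samples):
        # scale an arbitrary float waveform into the int16 range used by PCM WAV
        samples = np.asarray(samples, dtype=float)
        return np.int16(samples / np.max(np.abs(samples)) * 32767)

    t = np.linspace(0, 1, 44100)
    pcm = to_int16(np.sin(2 * np.pi * 440 * t))  # one second of a 440 Hz tone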
+class IFrame(object):
+ """
+ Generic class to embed an iframe in an IPython notebook
+ """
+
+ iframe = """
+ <iframe
+ width="{width}"
+ height="{height}"
+ src="{src}{params}"
+ frameborder="0"
+ allowfullscreen
+ ></iframe>
+ """
+
+ def __init__(self, src, width, height, **kwargs):
+ self.src = src
+ self.width = width
+ self.height = height
+ self.params = kwargs
+
+ def _repr_html_(self):
+ """return the embed iframe"""
+ if self.params:
+ try:
+ from urllib.parse import urlencode # Py 3
+ except ImportError:
+ from urllib import urlencode
+ params = "?" + urlencode(self.params)
+ else:
+ params = ""
+ return self.iframe.format(src=self.src,
+ width=self.width,
+ height=self.height,
+ params=params)
+
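Any extra keyword arguments given to IFrame are urlencoded onto the src attribute by _repr_html_(). A small sketch; the URL and query parameters are made up for illustration:

    from IPython.lib.display import IFrame

    # the kwargs below end up as a query string such as "?theme=light&autoplay=1"
    frame = IFrame('https://example.org/embed', width=600, height=400,
                   theme='light', autoplay=1)
    frame  # in a notebook cell, this renders through _repr_html_()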
+class YouTubeVideo(IFrame):
+ """Class for embedding a YouTube Video in an IPython session, based on its video id.
+
+ e.g. to embed the video from https://www.youtube.com/watch?v=foo , you would
+ do::
+
+ vid = YouTubeVideo("foo")
+ display(vid)
+
+ To start from 30 seconds::
+
+ vid = YouTubeVideo("abc", start=30)
+ display(vid)
+
+ To calculate seconds from time as hours, minutes, seconds use
+ :class:`datetime.timedelta`::
+
+ start=int(timedelta(hours=1, minutes=46, seconds=40).total_seconds())
+
+ Other parameters can be provided as documented at
https://developers.google.com/youtube/player_parameters#Parameters
-
- When converting the notebook using nbconvert, a jpeg representation of the video
- will be inserted in the document.
- """
-
- def __init__(self, id, width=400, height=300, **kwargs):
- self.id=id
- src = "https://www.youtube.com/embed/{0}".format(id)
- super(YouTubeVideo, self).__init__(src, width, height, **kwargs)
-
- def _repr_jpeg_(self):
- try:
- from urllib.request import urlopen # Py3
- except ImportError:
- from urllib2 import urlopen
- try:
- return urlopen("https://img.youtube.com/vi/{id}/hqdefault.jpg".format(id=self.id)).read()
- except IOError:
- return None
-
-class VimeoVideo(IFrame):
- """
- Class for embedding a Vimeo video in an IPython session, based on its video id.
- """
-
- def __init__(self, id, width=400, height=300, **kwargs):
- src="https://player.vimeo.com/video/{0}".format(id)
- super(VimeoVideo, self).__init__(src, width, height, **kwargs)
-
-class ScribdDocument(IFrame):
- """
- Class for embedding a Scribd document in an IPython session
-
- Use the start_page params to specify a starting point in the document
- Use the view_mode params to specify display type one off scroll | slideshow | book
-
- e.g to Display Wes' foundational paper about PANDAS in book mode from page 3
-
- ScribdDocument(71048089, width=800, height=400, start_page=3, view_mode="book")
- """
-
- def __init__(self, id, width=400, height=300, **kwargs):
- src="https://www.scribd.com/embeds/{0}/content".format(id)
- super(ScribdDocument, self).__init__(src, width, height, **kwargs)
-
-class FileLink(object):
- """Class for embedding a local file link in an IPython session, based on path
-
- e.g. to embed a link that was generated in the IPython notebook as my/data.txt
-
- you would do::
-
- local_file = FileLink("my/data.txt")
- display(local_file)
-
- or in the HTML notebook, just::
-
- FileLink("my/data.txt")
- """
-
- html_link_str = "<a href='%s' target='_blank'>%s</a>"
-
- def __init__(self,
- path,
- url_prefix='',
- result_html_prefix='',
- result_html_suffix='<br>'):
- """
- Parameters
- ----------
- path : str
- path to the file or directory that should be formatted
+
+ When converting the notebook using nbconvert, a jpeg representation of the video
+ will be inserted in the document.
+ """
+
+ def __init__(self, id, width=400, height=300, **kwargs):
+ self.id=id
+ src = "https://www.youtube.com/embed/{0}".format(id)
+ super(YouTubeVideo, self).__init__(src, width, height, **kwargs)
+
+ def _repr_jpeg_(self):
+ try:
+ from urllib.request import urlopen # Py3
+ except ImportError:
+ from urllib2 import urlopen
+ try:
+ return urlopen("https://img.youtube.com/vi/{id}/hqdefault.jpg".format(id=self.id)).read()
+ except IOError:
+ return None
+
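The start-offset recipe from the YouTubeVideo docstring, spelled out (the video id "abc" is just a placeholder):

    from datetime import timedelta
    from IPython.lib.display import YouTubeVideo

    # 1:46:40 into the video = 6400 seconds
    start = int(timedelta(hours=1, minutes=46, seconds=40).total_seconds())
    YouTubeVideo("abc", start=start, width=560, height=315)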
+class VimeoVideo(IFrame):
+ """
+ Class for embedding a Vimeo video in an IPython session, based on its video id.
+ """
+
+ def __init__(self, id, width=400, height=300, **kwargs):
+ src="https://player.vimeo.com/video/{0}".format(id)
+ super(VimeoVideo, self).__init__(src, width, height, **kwargs)
+
+class ScribdDocument(IFrame):
+ """
+ Class for embedding a Scribd document in an IPython session
+
+ Use the start_page param to specify a starting point in the document
+ Use the view_mode param to specify the display type: one of scroll | slideshow | book
+
+ e.g. to display Wes' foundational paper about pandas in book mode, starting from page 3
+
+ ScribdDocument(71048089, width=800, height=400, start_page=3, view_mode="book")
+ """
+
+ def __init__(self, id, width=400, height=300, **kwargs):
+ src="https://www.scribd.com/embeds/{0}/content".format(id)
+ super(ScribdDocument, self).__init__(src, width, height, **kwargs)
+
+class FileLink(object):
+ """Class for embedding a local file link in an IPython session, based on path
+
+ e.g. to embed a link that was generated in the IPython notebook as my/data.txt
+
+ you would do::
+
+ local_file = FileLink("my/data.txt")
+ display(local_file)
+
+ or in the HTML notebook, just::
+
+ FileLink("my/data.txt")
+ """
+
+ html_link_str = "<a href='%s' target='_blank'>%s</a>"
+
+ def __init__(self,
+ path,
+ url_prefix='',
+ result_html_prefix='',
+ result_html_suffix='<br>'):
+ """
+ Parameters
+ ----------
+ path : str
+ path to the file or directory that should be formatted
url_prefix : str
- prefix to be prepended to all files to form a working link [default:
+ prefix to be prepended to all files to form a working link [default:
'']
- result_html_prefix : str
+ result_html_prefix : str
 text to prepend to the link [default: '']
- result_html_suffix : str
- text to append at the end of link [default: '<br>']
- """
- if isdir(path):
- raise ValueError("Cannot display a directory using FileLink. "
- "Use FileLinks to display '%s'." % path)
- self.path = path
- self.url_prefix = url_prefix
- self.result_html_prefix = result_html_prefix
- self.result_html_suffix = result_html_suffix
-
- def _format_path(self):
- fp = ''.join([self.url_prefix,self.path])
- return ''.join([self.result_html_prefix,
- self.html_link_str % (fp, self.path),
- self.result_html_suffix])
-
- def _repr_html_(self):
- """return html link to file
- """
- if not exists(self.path):
- return ("Path (<tt>%s</tt>) doesn't exist. "
- "It may still be in the process of "
- "being generated, or you may have the "
- "incorrect path." % self.path)
-
- return self._format_path()
-
- def __repr__(self):
- """return absolute path to file
- """
- return abspath(self.path)
-
-class FileLinks(FileLink):
- """Class for embedding local file links in an IPython session, based on path
-
- e.g. to embed links to files that were generated in the IPython notebook
- under ``my/data``, you would do::
-
- local_files = FileLinks("my/data")
- display(local_files)
-
- or in the HTML notebook, just::
-
- FileLinks("my/data")
- """
- def __init__(self,
- path,
- url_prefix='',
- included_suffixes=None,
- result_html_prefix='',
- result_html_suffix='<br>',
- notebook_display_formatter=None,
- terminal_display_formatter=None,
- recursive=True):
- """
- See :class:`FileLink` for the ``path``, ``url_prefix``,
- ``result_html_prefix`` and ``result_html_suffix`` parameters.
-
- included_suffixes : list
- Filename suffixes to include when formatting output [default: include
- all files]
-
- notebook_display_formatter : function
- Used to format links for display in the notebook. See discussion of
- formatter functions below.
-
- terminal_display_formatter : function
- Used to format links for display in the terminal. See discussion of
- formatter functions below.
-
- Formatter functions must be of the form::
-
- f(dirname, fnames, included_suffixes)
-
- dirname : str
- The name of a directory
- fnames : list
- The files in that directory
- included_suffixes : list
- The file suffixes that should be included in the output (passing None
-            means to include all suffixes in the output in the built-in formatters)
- recursive : boolean
- Whether to recurse into subdirectories. Default is True.
-
- The function should return a list of lines that will be printed in the
- notebook (if passing notebook_display_formatter) or the terminal (if
- passing terminal_display_formatter). This function is iterated over for
-        each directory in self.path. Default formatters are in place, but custom
-        formatters can be passed here to support alternative formatting.
-
- """
- if isfile(path):
- raise ValueError("Cannot display a file using FileLinks. "
- "Use FileLink to display '%s'." % path)
- self.included_suffixes = included_suffixes
-        # remove trailing slashes for more consistent output formatting
- path = path.rstrip('/')
-
- self.path = path
- self.url_prefix = url_prefix
- self.result_html_prefix = result_html_prefix
- self.result_html_suffix = result_html_suffix
-
- self.notebook_display_formatter = \
- notebook_display_formatter or self._get_notebook_display_formatter()
- self.terminal_display_formatter = \
- terminal_display_formatter or self._get_terminal_display_formatter()
-
- self.recursive = recursive
-
- def _get_display_formatter(self,
- dirname_output_format,
- fname_output_format,
- fp_format,
- fp_cleaner=None):
- """ generate built-in formatter function
-
- this is used to define both the notebook and terminal built-in
- formatters as they only differ by some wrapper text for each entry
-
- dirname_output_format: string to use for formatting directory
- names, dirname will be substituted for a single "%s" which
- must appear in this string
- fname_output_format: string to use for formatting file names,
- if a single "%s" appears in the string, fname will be substituted
- if two "%s" appear in the string, the path to fname will be
- substituted for the first and fname will be substituted for the
- second
- fp_format: string to use for formatting filepaths, must contain
-          exactly two "%s" and the dirname will be substituted for the first
- and fname will be substituted for the second
- """
- def f(dirname, fnames, included_suffixes=None):
- result = []
- # begin by figuring out which filenames, if any,
- # are going to be displayed
- display_fnames = []
- for fname in fnames:
- if (isfile(join(dirname,fname)) and
- (included_suffixes is None or
- splitext(fname)[1] in included_suffixes)):
- display_fnames.append(fname)
-
- if len(display_fnames) == 0:
- # if there are no filenames to display, don't print anything
- # (not even the directory name)
- pass
- else:
- # otherwise print the formatted directory name followed by
- # the formatted filenames
- dirname_output_line = dirname_output_format % dirname
- result.append(dirname_output_line)
- for fname in display_fnames:
- fp = fp_format % (dirname,fname)
- if fp_cleaner is not None:
- fp = fp_cleaner(fp)
- try:
- # output can include both a filepath and a filename...
- fname_output_line = fname_output_format % (fp, fname)
- except TypeError:
- # ... or just a single filepath
- fname_output_line = fname_output_format % fname
- result.append(fname_output_line)
- return result
- return f
-
- def _get_notebook_display_formatter(self,
- spacer="&nbsp;&nbsp;"):
- """ generate function to use for notebook formatting
- """
- dirname_output_format = \
- self.result_html_prefix + "%s/" + self.result_html_suffix
- fname_output_format = \
- self.result_html_prefix + spacer + self.html_link_str + self.result_html_suffix
- fp_format = self.url_prefix + '%s/%s'
- if sep == "\\":
- # Working on a platform where the path separator is "\", so
- # must convert these to "/" for generating a URI
- def fp_cleaner(fp):
-                # Replace all occurrences of backslash ("\") with a forward
- # slash ("/") - this is necessary on windows when a path is
- # provided as input, but we must link to a URI
- return fp.replace('\\','/')
- else:
- fp_cleaner = None
-
- return self._get_display_formatter(dirname_output_format,
- fname_output_format,
- fp_format,
- fp_cleaner)
-
- def _get_terminal_display_formatter(self,
- spacer=" "):
- """ generate function to use for terminal formatting
- """
- dirname_output_format = "%s/"
- fname_output_format = spacer + "%s"
- fp_format = '%s/%s'
-
- return self._get_display_formatter(dirname_output_format,
- fname_output_format,
- fp_format)
-
- def _format_path(self):
- result_lines = []
- if self.recursive:
- walked_dir = list(walk(self.path))
- else:
- walked_dir = [next(walk(self.path))]
- walked_dir.sort()
- for dirname, subdirs, fnames in walked_dir:
- result_lines += self.notebook_display_formatter(dirname, fnames, self.included_suffixes)
- return '\n'.join(result_lines)
-
- def __repr__(self):
- """return newline-separated absolute paths
- """
- result_lines = []
- if self.recursive:
- walked_dir = list(walk(self.path))
- else:
- walked_dir = [next(walk(self.path))]
- walked_dir.sort()
- for dirname, subdirs, fnames in walked_dir:
- result_lines += self.terminal_display_formatter(dirname, fnames, self.included_suffixes)
- return '\n'.join(result_lines)
+ result_html_suffix : str
+            text to append at the end of the link [default: '<br>']
+ """
+ if isdir(path):
+ raise ValueError("Cannot display a directory using FileLink. "
+ "Use FileLinks to display '%s'." % path)
+ self.path = path
+ self.url_prefix = url_prefix
+ self.result_html_prefix = result_html_prefix
+ self.result_html_suffix = result_html_suffix
+
+ def _format_path(self):
+ fp = ''.join([self.url_prefix,self.path])
+ return ''.join([self.result_html_prefix,
+ self.html_link_str % (fp, self.path),
+ self.result_html_suffix])
+
+ def _repr_html_(self):
+ """return html link to file
+ """
+ if not exists(self.path):
+ return ("Path (<tt>%s</tt>) doesn't exist. "
+ "It may still be in the process of "
+ "being generated, or you may have the "
+ "incorrect path." % self.path)
+
+ return self._format_path()
+
+ def __repr__(self):
+ """return absolute path to file
+ """
+ return abspath(self.path)
+
+class FileLinks(FileLink):
+ """Class for embedding local file links in an IPython session, based on path
+
+ e.g. to embed links to files that were generated in the IPython notebook
+ under ``my/data``, you would do::
+
+ local_files = FileLinks("my/data")
+ display(local_files)
+
+ or in the HTML notebook, just::
+
+ FileLinks("my/data")
+ """
+ def __init__(self,
+ path,
+ url_prefix='',
+ included_suffixes=None,
+ result_html_prefix='',
+ result_html_suffix='<br>',
+ notebook_display_formatter=None,
+ terminal_display_formatter=None,
+ recursive=True):
+ """
+ See :class:`FileLink` for the ``path``, ``url_prefix``,
+ ``result_html_prefix`` and ``result_html_suffix`` parameters.
+
+ included_suffixes : list
+ Filename suffixes to include when formatting output [default: include
+ all files]
+
+ notebook_display_formatter : function
+ Used to format links for display in the notebook. See discussion of
+ formatter functions below.
+
+ terminal_display_formatter : function
+ Used to format links for display in the terminal. See discussion of
+ formatter functions below.
+
+ Formatter functions must be of the form::
+
+ f(dirname, fnames, included_suffixes)
+
+ dirname : str
+ The name of a directory
+ fnames : list
+ The files in that directory
+ included_suffixes : list
+ The file suffixes that should be included in the output (passing None
+            means to include all suffixes in the output in the built-in formatters)
+ recursive : boolean
+ Whether to recurse into subdirectories. Default is True.
+
+ The function should return a list of lines that will be printed in the
+ notebook (if passing notebook_display_formatter) or the terminal (if
+ passing terminal_display_formatter). This function is iterated over for
+        each directory in self.path. Default formatters are in place, but custom
+        formatters can be passed here to support alternative formatting.
+
+ """
+ if isfile(path):
+ raise ValueError("Cannot display a file using FileLinks. "
+ "Use FileLink to display '%s'." % path)
+ self.included_suffixes = included_suffixes
+        # remove trailing slashes for more consistent output formatting
+ path = path.rstrip('/')
+
+ self.path = path
+ self.url_prefix = url_prefix
+ self.result_html_prefix = result_html_prefix
+ self.result_html_suffix = result_html_suffix
+
+ self.notebook_display_formatter = \
+ notebook_display_formatter or self._get_notebook_display_formatter()
+ self.terminal_display_formatter = \
+ terminal_display_formatter or self._get_terminal_display_formatter()
+
+ self.recursive = recursive
+
+ def _get_display_formatter(self,
+ dirname_output_format,
+ fname_output_format,
+ fp_format,
+ fp_cleaner=None):
+ """ generate built-in formatter function
+
+ this is used to define both the notebook and terminal built-in
+ formatters as they only differ by some wrapper text for each entry
+
+ dirname_output_format: string to use for formatting directory
+ names, dirname will be substituted for a single "%s" which
+ must appear in this string
+ fname_output_format: string to use for formatting file names,
+ if a single "%s" appears in the string, fname will be substituted
+ if two "%s" appear in the string, the path to fname will be
+ substituted for the first and fname will be substituted for the
+ second
+ fp_format: string to use for formatting filepaths, must contain
+          exactly two "%s" and the dirname will be substituted for the first
+ and fname will be substituted for the second
+ """
+ def f(dirname, fnames, included_suffixes=None):
+ result = []
+ # begin by figuring out which filenames, if any,
+ # are going to be displayed
+ display_fnames = []
+ for fname in fnames:
+ if (isfile(join(dirname,fname)) and
+ (included_suffixes is None or
+ splitext(fname)[1] in included_suffixes)):
+ display_fnames.append(fname)
+
+ if len(display_fnames) == 0:
+ # if there are no filenames to display, don't print anything
+ # (not even the directory name)
+ pass
+ else:
+ # otherwise print the formatted directory name followed by
+ # the formatted filenames
+ dirname_output_line = dirname_output_format % dirname
+ result.append(dirname_output_line)
+ for fname in display_fnames:
+ fp = fp_format % (dirname,fname)
+ if fp_cleaner is not None:
+ fp = fp_cleaner(fp)
+ try:
+ # output can include both a filepath and a filename...
+ fname_output_line = fname_output_format % (fp, fname)
+ except TypeError:
+ # ... or just a single filepath
+ fname_output_line = fname_output_format % fname
+ result.append(fname_output_line)
+ return result
+ return f
+
+ def _get_notebook_display_formatter(self,
+ spacer="&nbsp;&nbsp;"):
+ """ generate function to use for notebook formatting
+ """
+ dirname_output_format = \
+ self.result_html_prefix + "%s/" + self.result_html_suffix
+ fname_output_format = \
+ self.result_html_prefix + spacer + self.html_link_str + self.result_html_suffix
+ fp_format = self.url_prefix + '%s/%s'
+ if sep == "\\":
+ # Working on a platform where the path separator is "\", so
+ # must convert these to "/" for generating a URI
+ def fp_cleaner(fp):
+                # Replace all occurrences of backslash ("\") with a forward
+ # slash ("/") - this is necessary on windows when a path is
+ # provided as input, but we must link to a URI
+ return fp.replace('\\','/')
+ else:
+ fp_cleaner = None
+
+ return self._get_display_formatter(dirname_output_format,
+ fname_output_format,
+ fp_format,
+ fp_cleaner)
+
+ def _get_terminal_display_formatter(self,
+ spacer=" "):
+ """ generate function to use for terminal formatting
+ """
+ dirname_output_format = "%s/"
+ fname_output_format = spacer + "%s"
+ fp_format = '%s/%s'
+
+ return self._get_display_formatter(dirname_output_format,
+ fname_output_format,
+ fp_format)
+
+ def _format_path(self):
+ result_lines = []
+ if self.recursive:
+ walked_dir = list(walk(self.path))
+ else:
+ walked_dir = [next(walk(self.path))]
+ walked_dir.sort()
+ for dirname, subdirs, fnames in walked_dir:
+ result_lines += self.notebook_display_formatter(dirname, fnames, self.included_suffixes)
+ return '\n'.join(result_lines)
+
+ def __repr__(self):
+ """return newline-separated absolute paths
+ """
+ result_lines = []
+ if self.recursive:
+ walked_dir = list(walk(self.path))
+ else:
+ walked_dir = [next(walk(self.path))]
+ walked_dir.sort()
+ for dirname, subdirs, fnames in walked_dir:
+ result_lines += self.terminal_display_formatter(dirname, fnames, self.included_suffixes)
+ return '\n'.join(result_lines)
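
For reference, a short sketch of how FileLink and FileLinks are typically used in a notebook, assuming imports via IPython.display (the paths are hypothetical)::

    from IPython.display import display, FileLink, FileLinks
    # FileLink renders one <a> tag for a single file; FileLinks walks a
    # directory (recursively by default) and renders one link per file,
    # optionally filtered by suffix.
    display(FileLink("my/data.txt"))
    display(FileLinks("my/data", included_suffixes=[".csv", ".txt"]))
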
diff --git a/contrib/python/ipython/py2/IPython/lib/editorhooks.py b/contrib/python/ipython/py2/IPython/lib/editorhooks.py
index 31964fbe72..392557b509 100644
--- a/contrib/python/ipython/py2/IPython/lib/editorhooks.py
+++ b/contrib/python/ipython/py2/IPython/lib/editorhooks.py
@@ -1,129 +1,129 @@
-""" 'editor' hooks for common editors that work well with ipython
-
-They should honor the line number argument, at least.
-
-Contributions are *very* welcome.
-"""
-from __future__ import print_function
-
-import os
-import pipes
-import shlex
-import subprocess
-import sys
-
-from IPython import get_ipython
-from IPython.core.error import TryNext
-from IPython.utils import py3compat
-
-
-def install_editor(template, wait=False):
- """Installs the editor that is called by IPython for the %edit magic.
-
- This overrides the default editor, which is generally set by your EDITOR
- environment variable or is notepad (windows) or vi (linux). By supplying a
- template string `run_template`, you can control how the editor is invoked
- by IPython -- (e.g. the format in which it accepts command line options)
-
- Parameters
- ----------
- template : basestring
- run_template acts as a template for how your editor is invoked by
- the shell. It should contain '{filename}', which will be replaced on
-        invocation with the file name, and '{line}', which will be replaced
-        by the line number (or 0) at which to open the file.
- wait : bool
- If `wait` is true, wait until the user presses enter before returning,
- to facilitate non-blocking editors that exit immediately after
- the call.
- """
-
- # not all editors support $line, so we'll leave out this check
- # for substitution in ['$file', '$line']:
- # if not substitution in run_template:
- # raise ValueError(('run_template should contain %s'
- # ' for string substitution. You supplied "%s"' % (substitution,
- # run_template)))
-
- def call_editor(self, filename, line=0):
- if line is None:
- line = 0
- cmd = template.format(filename=pipes.quote(filename), line=line)
- print(">", cmd)
- # pipes.quote doesn't work right on Windows, but it does after splitting
- if sys.platform.startswith('win'):
- cmd = shlex.split(cmd)
- proc = subprocess.Popen(cmd, shell=True)
+""" 'editor' hooks for common editors that work well with ipython
+
+They should honor the line number argument, at least.
+
+Contributions are *very* welcome.
+"""
+from __future__ import print_function
+
+import os
+import pipes
+import shlex
+import subprocess
+import sys
+
+from IPython import get_ipython
+from IPython.core.error import TryNext
+from IPython.utils import py3compat
+
+
+def install_editor(template, wait=False):
+ """Installs the editor that is called by IPython for the %edit magic.
+
+ This overrides the default editor, which is generally set by your EDITOR
+ environment variable or is notepad (windows) or vi (linux). By supplying a
+ template string `run_template`, you can control how the editor is invoked
+ by IPython -- (e.g. the format in which it accepts command line options)
+
+ Parameters
+ ----------
+ template : basestring
+ run_template acts as a template for how your editor is invoked by
+ the shell. It should contain '{filename}', which will be replaced on
+        invocation with the file name, and '{line}', which will be replaced
+        by the line number (or 0) at which to open the file.
+ wait : bool
+ If `wait` is true, wait until the user presses enter before returning,
+ to facilitate non-blocking editors that exit immediately after
+ the call.
+ """
+
+ # not all editors support $line, so we'll leave out this check
+ # for substitution in ['$file', '$line']:
+ # if not substitution in run_template:
+ # raise ValueError(('run_template should contain %s'
+ # ' for string substitution. You supplied "%s"' % (substitution,
+ # run_template)))
+
+ def call_editor(self, filename, line=0):
+ if line is None:
+ line = 0
+ cmd = template.format(filename=pipes.quote(filename), line=line)
+ print(">", cmd)
+ # pipes.quote doesn't work right on Windows, but it does after splitting
+ if sys.platform.startswith('win'):
+ cmd = shlex.split(cmd)
+ proc = subprocess.Popen(cmd, shell=True)
if proc.wait() != 0:
- raise TryNext()
- if wait:
- py3compat.input("Press Enter when done editing:")
-
- get_ipython().set_hook('editor', call_editor)
- get_ipython().editor = template
-
-
-# in these, exe is always the path/name of the executable. Useful
-# if you don't have the editor directory in your path
-def komodo(exe=u'komodo'):
- """ Activestate Komodo [Edit] """
- install_editor(exe + u' -l {line} {filename}', wait=True)
-
-
-def scite(exe=u"scite"):
- """ SciTE or Sc1 """
- install_editor(exe + u' {filename} -goto:{line}')
-
-
-def notepadplusplus(exe=u'notepad++'):
- """ Notepad++ http://notepad-plus.sourceforge.net """
- install_editor(exe + u' -n{line} {filename}')
-
-
-def jed(exe=u'jed'):
- """ JED, the lightweight emacsish editor """
- install_editor(exe + u' +{line} {filename}')
-
-
-def idle(exe=u'idle'):
- """ Idle, the editor bundled with python
-
- Parameters
- ----------
- exe : str, None
-        If None, should be pretty smart about finding the executable.
- """
- if exe is None:
- import idlelib
-        p = os.path.dirname(idlelib.__file__)
-        # I'm not sure if this actually works. Is this idle.py script
-        # guaranteed to be executable?
- exe = os.path.join(p, 'idle.py')
- install_editor(exe + u' {filename}')
-
-
-def mate(exe=u'mate'):
- """ TextMate, the missing editor"""
- # wait=True is not required since we're using the -w flag to mate
- install_editor(exe + u' -w -l {line} {filename}')
-
-
-# ##########################################
-# these are untested, report any problems
-# ##########################################
-
-
-def emacs(exe=u'emacs'):
- install_editor(exe + u' +{line} {filename}')
-
-
-def gnuclient(exe=u'gnuclient'):
- install_editor(exe + u' -nw +{line} {filename}')
-
-
-def crimson_editor(exe=u'cedt.exe'):
- install_editor(exe + u' /L:{line} {filename}')
-
-
-def kate(exe=u'kate'):
- install_editor(exe + u' -u -l {line} {filename}')
+ raise TryNext()
+ if wait:
+ py3compat.input("Press Enter when done editing:")
+
+ get_ipython().set_hook('editor', call_editor)
+ get_ipython().editor = template
+
+
+# in these, exe is always the path/name of the executable. Useful
+# if you don't have the editor directory in your path
+def komodo(exe=u'komodo'):
+ """ Activestate Komodo [Edit] """
+ install_editor(exe + u' -l {line} {filename}', wait=True)
+
+
+def scite(exe=u"scite"):
+ """ SciTE or Sc1 """
+ install_editor(exe + u' {filename} -goto:{line}')
+
+
+def notepadplusplus(exe=u'notepad++'):
+ """ Notepad++ http://notepad-plus.sourceforge.net """
+ install_editor(exe + u' -n{line} {filename}')
+
+
+def jed(exe=u'jed'):
+ """ JED, the lightweight emacsish editor """
+ install_editor(exe + u' +{line} {filename}')
+
+
+def idle(exe=u'idle'):
+ """ Idle, the editor bundled with python
+
+ Parameters
+ ----------
+ exe : str, None
+        If None, should be pretty smart about finding the executable.
+ """
+ if exe is None:
+ import idlelib
+        p = os.path.dirname(idlelib.__file__)
+        # I'm not sure if this actually works. Is this idle.py script
+        # guaranteed to be executable?
+ exe = os.path.join(p, 'idle.py')
+ install_editor(exe + u' {filename}')
+
+
+def mate(exe=u'mate'):
+ """ TextMate, the missing editor"""
+ # wait=True is not required since we're using the -w flag to mate
+ install_editor(exe + u' -w -l {line} {filename}')
+
+
+# ##########################################
+# these are untested, report any problems
+# ##########################################
+
+
+def emacs(exe=u'emacs'):
+ install_editor(exe + u' +{line} {filename}')
+
+
+def gnuclient(exe=u'gnuclient'):
+ install_editor(exe + u' -nw +{line} {filename}')
+
+
+def crimson_editor(exe=u'cedt.exe'):
+ install_editor(exe + u' /L:{line} {filename}')
+
+
+def kate(exe=u'kate'):
+ install_editor(exe + u' -u -l {line} {filename}')
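
A hedged sketch of wiring up these hooks from an IPython session or startup file (the custom editor command below is an assumption)::

    from IPython.lib import editorhooks
    # Use a ready-made helper for a known editor...
    editorhooks.kate()
    # ...or install a custom template; install_editor() substitutes
    # {filename} and {line} before running the command for %edit.
    editorhooks.install_editor(u'myeditor +{line} {filename}', wait=True)
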
diff --git a/contrib/python/ipython/py2/IPython/lib/guisupport.py b/contrib/python/ipython/py2/IPython/lib/guisupport.py
index 57020b30b1..5e13d4343c 100644
--- a/contrib/python/ipython/py2/IPython/lib/guisupport.py
+++ b/contrib/python/ipython/py2/IPython/lib/guisupport.py
@@ -1,83 +1,83 @@
-# coding: utf-8
-"""
-Support for creating GUI apps and starting event loops.
-
-IPython's GUI integration allows interactive plotting and GUI usage in an IPython
-session. IPython has two different types of GUI integration:
-
-1. The terminal based IPython supports GUI event loops through Python's
- PyOS_InputHook. PyOS_InputHook is a hook that Python calls periodically
- whenever raw_input is waiting for a user to type code. We implement GUI
- support in the terminal by setting PyOS_InputHook to a function that
- iterates the event loop for a short while. It is important to note that
- in this situation, the real GUI event loop is NOT run in the normal
- manner, so you can't use the normal means to detect that it is running.
-2. In the two process IPython kernel/frontend, the GUI event loop is run in
- the kernel. In this case, the event loop is run in the normal manner by
- calling the function or method of the GUI toolkit that starts the event
- loop.
-
-In addition to starting the GUI event loops in one of these two ways, IPython
-will *always* create an appropriate GUI application object when GUI
-integration is enabled.
-
-If you want your GUI apps to run in IPython you need to do two things:
-
-1. Test to see if there is already an existing main application object. If
- there is, you should use it. If there is not an existing application object
- you should create one.
-2. Test to see if the GUI event loop is running. If it is, you should not
- start it. If the event loop is not running you may start it.
-
-This module contains functions for each toolkit that perform these things
-in a consistent manner. Because of how PyOS_InputHook runs the event loop
-you cannot detect if the event loop is running using the traditional calls
-(such as ``wx.GetApp().IsMainLoopRunning()`` in wxPython). If PyOS_InputHook is
-set, these methods will return a false negative. That is, they will say the
-event loop is not running, when it actually is. To work around this limitation
-we proposed the following informal protocol:
-
-* Whenever someone starts the event loop, they *must* set the ``_in_event_loop``
- attribute of the main application object to ``True``. This should be done
- regardless of how the event loop is actually run.
-* Whenever someone stops the event loop, they *must* set the ``_in_event_loop``
- attribute of the main application object to ``False``.
-* If you want to see if the event loop is running, you *must* use ``hasattr``
- to see if ``_in_event_loop`` attribute has been set. If it is set, you
- *must* use its value. If it has not been set, you can query the toolkit
- in the normal manner.
-* If you want GUI support and no one else has created an application or
- started the event loop you *must* do this. We don't want projects to
- attempt to defer these things to someone else if they themselves need it.
-
-The functions below implement this logic for each GUI toolkit. If you need
-to create custom application subclasses, you will likely have to modify this
-code for your own purposes. This code can be copied into your own project
-so you don't have to depend on IPython.
-
-"""
-
+# coding: utf-8
+"""
+Support for creating GUI apps and starting event loops.
+
+IPython's GUI integration allows interactive plotting and GUI usage in an IPython
+session. IPython has two different types of GUI integration:
+
+1. The terminal based IPython supports GUI event loops through Python's
+ PyOS_InputHook. PyOS_InputHook is a hook that Python calls periodically
+ whenever raw_input is waiting for a user to type code. We implement GUI
+ support in the terminal by setting PyOS_InputHook to a function that
+ iterates the event loop for a short while. It is important to note that
+ in this situation, the real GUI event loop is NOT run in the normal
+ manner, so you can't use the normal means to detect that it is running.
+2. In the two process IPython kernel/frontend, the GUI event loop is run in
+ the kernel. In this case, the event loop is run in the normal manner by
+ calling the function or method of the GUI toolkit that starts the event
+ loop.
+
+In addition to starting the GUI event loops in one of these two ways, IPython
+will *always* create an appropriate GUI application object when GUI
+integration is enabled.
+
+If you want your GUI apps to run in IPython you need to do two things:
+
+1. Test to see if there is already an existing main application object. If
+ there is, you should use it. If there is not an existing application object
+ you should create one.
+2. Test to see if the GUI event loop is running. If it is, you should not
+ start it. If the event loop is not running you may start it.
+
+This module contains functions for each toolkit that perform these things
+in a consistent manner. Because of how PyOS_InputHook runs the event loop
+you cannot detect if the event loop is running using the traditional calls
+(such as ``wx.GetApp().IsMainLoopRunning()`` in wxPython). If PyOS_InputHook is
+set, these methods will return a false negative. That is, they will say the
+event loop is not running, when it actually is. To work around this limitation
+we proposed the following informal protocol:
+
+* Whenever someone starts the event loop, they *must* set the ``_in_event_loop``
+ attribute of the main application object to ``True``. This should be done
+ regardless of how the event loop is actually run.
+* Whenever someone stops the event loop, they *must* set the ``_in_event_loop``
+ attribute of the main application object to ``False``.
+* If you want to see if the event loop is running, you *must* use ``hasattr``
+ to see if ``_in_event_loop`` attribute has been set. If it is set, you
+ *must* use its value. If it has not been set, you can query the toolkit
+ in the normal manner.
+* If you want GUI support and no one else has created an application or
+ started the event loop you *must* do this. We don't want projects to
+ attempt to defer these things to someone else if they themselves need it.
+
+The functions below implement this logic for each GUI toolkit. If you need
+to create custom application subclasses, you will likely have to modify this
+code for your own purposes. This code can be copied into your own project
+so you don't have to depend on IPython.
+
+"""
+
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
-
+
from IPython.core.getipython import get_ipython
-
-#-----------------------------------------------------------------------------
-# wx
-#-----------------------------------------------------------------------------
-
-def get_app_wx(*args, **kwargs):
- """Create a new wx app or return an exiting one."""
- import wx
- app = wx.GetApp()
- if app is None:
- if 'redirect' not in kwargs:
- kwargs['redirect'] = False
- app = wx.PySimpleApp(*args, **kwargs)
- return app
-
-def is_event_loop_running_wx(app=None):
- """Is the wx event loop running."""
+
+#-----------------------------------------------------------------------------
+# wx
+#-----------------------------------------------------------------------------
+
+def get_app_wx(*args, **kwargs):
+ """Create a new wx app or return an exiting one."""
+ import wx
+ app = wx.GetApp()
+ if app is None:
+ if 'redirect' not in kwargs:
+ kwargs['redirect'] = False
+ app = wx.PySimpleApp(*args, **kwargs)
+ return app
+
+def is_event_loop_running_wx(app=None):
+ """Is the wx event loop running."""
# New way: check attribute on shell instance
ip = get_ipython()
if ip is not None:
@@ -87,69 +87,69 @@ def is_event_loop_running_wx(app=None):
# to check if the event loop is running, unlike Qt.
# Old way: check Wx application
- if app is None:
- app = get_app_wx()
- if hasattr(app, '_in_event_loop'):
- return app._in_event_loop
- else:
- return app.IsMainLoopRunning()
-
-def start_event_loop_wx(app=None):
- """Start the wx event loop in a consistent manner."""
- if app is None:
- app = get_app_wx()
- if not is_event_loop_running_wx(app):
- app._in_event_loop = True
- app.MainLoop()
- app._in_event_loop = False
- else:
- app._in_event_loop = True
-
-#-----------------------------------------------------------------------------
-# qt4
-#-----------------------------------------------------------------------------
-
-def get_app_qt4(*args, **kwargs):
- """Create a new qt4 app or return an existing one."""
- from IPython.external.qt_for_kernel import QtGui
- app = QtGui.QApplication.instance()
- if app is None:
- if not args:
- args = ([''],)
- app = QtGui.QApplication(*args, **kwargs)
- return app
-
-def is_event_loop_running_qt4(app=None):
- """Is the qt4 event loop running."""
+ if app is None:
+ app = get_app_wx()
+ if hasattr(app, '_in_event_loop'):
+ return app._in_event_loop
+ else:
+ return app.IsMainLoopRunning()
+
+def start_event_loop_wx(app=None):
+ """Start the wx event loop in a consistent manner."""
+ if app is None:
+ app = get_app_wx()
+ if not is_event_loop_running_wx(app):
+ app._in_event_loop = True
+ app.MainLoop()
+ app._in_event_loop = False
+ else:
+ app._in_event_loop = True
+
+#-----------------------------------------------------------------------------
+# qt4
+#-----------------------------------------------------------------------------
+
+def get_app_qt4(*args, **kwargs):
+ """Create a new qt4 app or return an existing one."""
+ from IPython.external.qt_for_kernel import QtGui
+ app = QtGui.QApplication.instance()
+ if app is None:
+ if not args:
+ args = ([''],)
+ app = QtGui.QApplication(*args, **kwargs)
+ return app
+
+def is_event_loop_running_qt4(app=None):
+ """Is the qt4 event loop running."""
# New way: check attribute on shell instance
ip = get_ipython()
if ip is not None:
return ip.active_eventloop and ip.active_eventloop.startswith('qt')
# Old way: check attribute on QApplication singleton
- if app is None:
- app = get_app_qt4([''])
- if hasattr(app, '_in_event_loop'):
- return app._in_event_loop
- else:
-            # Does qt4 provide another way to detect this?
- return False
-
-def start_event_loop_qt4(app=None):
- """Start the qt4 event loop in a consistent manner."""
- if app is None:
- app = get_app_qt4([''])
- if not is_event_loop_running_qt4(app):
- app._in_event_loop = True
- app.exec_()
- app._in_event_loop = False
- else:
- app._in_event_loop = True
-
-#-----------------------------------------------------------------------------
-# Tk
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# gtk
-#-----------------------------------------------------------------------------
+ if app is None:
+ app = get_app_qt4([''])
+ if hasattr(app, '_in_event_loop'):
+ return app._in_event_loop
+ else:
+            # Does qt4 provide another way to detect this?
+ return False
+
+def start_event_loop_qt4(app=None):
+ """Start the qt4 event loop in a consistent manner."""
+ if app is None:
+ app = get_app_qt4([''])
+ if not is_event_loop_running_qt4(app):
+ app._in_event_loop = True
+ app.exec_()
+ app._in_event_loop = False
+ else:
+ app._in_event_loop = True
+
+#-----------------------------------------------------------------------------
+# Tk
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# gtk
+#-----------------------------------------------------------------------------
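
A minimal sketch of the informal protocol above for wx, assuming wxPython is installed::

    from IPython.lib.guisupport import get_app_wx, start_event_loop_wx
    # Reuse an existing wx.App if one exists, otherwise create one.
    app = get_app_wx()
    # Only starts MainLoop() if is_event_loop_running_wx() says it is not
    # already running; sets app._in_event_loop around the call.
    start_event_loop_wx(app)
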
diff --git a/contrib/python/ipython/py2/IPython/lib/inputhook.py b/contrib/python/ipython/py2/IPython/lib/inputhook.py
index 38ccda8831..e6e8f2dbbc 100644
--- a/contrib/python/ipython/py2/IPython/lib/inputhook.py
+++ b/contrib/python/ipython/py2/IPython/lib/inputhook.py
@@ -1,666 +1,666 @@
-# coding: utf-8
-"""
+# coding: utf-8
+"""
Deprecated since IPython 5.0
-Inputhook management for GUI event loop integration.
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-try:
- import ctypes
-except ImportError:
- ctypes = None
-except SystemError: # IronPython issue, 2/8/2014
- ctypes = None
-import os
-import platform
-import sys
-from distutils.version import LooseVersion as V
-
+Inputhook management for GUI event loop integration.
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+try:
+ import ctypes
+except ImportError:
+ ctypes = None
+except SystemError: # IronPython issue, 2/8/2014
+ ctypes = None
+import os
+import platform
+import sys
+from distutils.version import LooseVersion as V
+
from warnings import warn
-
+
warn("`IPython.lib.inputhook` is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
-#-----------------------------------------------------------------------------
-# Constants
-#-----------------------------------------------------------------------------
-
-# Constants for identifying the GUI toolkits.
-GUI_WX = 'wx'
-GUI_QT = 'qt'
-GUI_QT4 = 'qt4'
-GUI_GTK = 'gtk'
-GUI_TK = 'tk'
-GUI_OSX = 'osx'
-GUI_GLUT = 'glut'
-GUI_PYGLET = 'pyglet'
-GUI_GTK3 = 'gtk3'
-GUI_NONE = 'none' # i.e. disable
-
-#-----------------------------------------------------------------------------
-# Utilities
-#-----------------------------------------------------------------------------
-
-def _stdin_ready_posix():
- """Return True if there's something to read on stdin (posix version)."""
- infds, outfds, erfds = select.select([sys.stdin],[],[],0)
- return bool(infds)
-
-def _stdin_ready_nt():
- """Return True if there's something to read on stdin (nt version)."""
- return msvcrt.kbhit()
-
-def _stdin_ready_other():
- """Return True, assuming there's something to read on stdin."""
- return True
-
-def _use_appnope():
- """Should we use appnope for dealing with OS X app nap?
-
- Checks if we are on OS X 10.9 or greater.
- """
- return sys.platform == 'darwin' and V(platform.mac_ver()[0]) >= V('10.9')
-
-def _ignore_CTRL_C_posix():
- """Ignore CTRL+C (SIGINT)."""
- signal.signal(signal.SIGINT, signal.SIG_IGN)
-
-def _allow_CTRL_C_posix():
- """Take CTRL+C into account (SIGINT)."""
- signal.signal(signal.SIGINT, signal.default_int_handler)
-
-def _ignore_CTRL_C_other():
- """Ignore CTRL+C (not implemented)."""
- pass
-
-def _allow_CTRL_C_other():
- """Take CTRL+C into account (not implemented)."""
- pass
-
-if os.name == 'posix':
- import select
- import signal
- stdin_ready = _stdin_ready_posix
- ignore_CTRL_C = _ignore_CTRL_C_posix
- allow_CTRL_C = _allow_CTRL_C_posix
-elif os.name == 'nt':
- import msvcrt
- stdin_ready = _stdin_ready_nt
- ignore_CTRL_C = _ignore_CTRL_C_other
- allow_CTRL_C = _allow_CTRL_C_other
-else:
- stdin_ready = _stdin_ready_other
- ignore_CTRL_C = _ignore_CTRL_C_other
- allow_CTRL_C = _allow_CTRL_C_other
-
-
-#-----------------------------------------------------------------------------
-# Main InputHookManager class
-#-----------------------------------------------------------------------------
-
-
-class InputHookManager(object):
+#-----------------------------------------------------------------------------
+# Constants
+#-----------------------------------------------------------------------------
+
+# Constants for identifying the GUI toolkits.
+GUI_WX = 'wx'
+GUI_QT = 'qt'
+GUI_QT4 = 'qt4'
+GUI_GTK = 'gtk'
+GUI_TK = 'tk'
+GUI_OSX = 'osx'
+GUI_GLUT = 'glut'
+GUI_PYGLET = 'pyglet'
+GUI_GTK3 = 'gtk3'
+GUI_NONE = 'none' # i.e. disable
+
+#-----------------------------------------------------------------------------
+# Utilities
+#-----------------------------------------------------------------------------
+
+def _stdin_ready_posix():
+ """Return True if there's something to read on stdin (posix version)."""
+ infds, outfds, erfds = select.select([sys.stdin],[],[],0)
+ return bool(infds)
+
+def _stdin_ready_nt():
+ """Return True if there's something to read on stdin (nt version)."""
+ return msvcrt.kbhit()
+
+def _stdin_ready_other():
+ """Return True, assuming there's something to read on stdin."""
+ return True
+
+def _use_appnope():
+ """Should we use appnope for dealing with OS X app nap?
+
+ Checks if we are on OS X 10.9 or greater.
+ """
+ return sys.platform == 'darwin' and V(platform.mac_ver()[0]) >= V('10.9')
+
+def _ignore_CTRL_C_posix():
+ """Ignore CTRL+C (SIGINT)."""
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+def _allow_CTRL_C_posix():
+ """Take CTRL+C into account (SIGINT)."""
+ signal.signal(signal.SIGINT, signal.default_int_handler)
+
+def _ignore_CTRL_C_other():
+ """Ignore CTRL+C (not implemented)."""
+ pass
+
+def _allow_CTRL_C_other():
+ """Take CTRL+C into account (not implemented)."""
+ pass
+
+if os.name == 'posix':
+ import select
+ import signal
+ stdin_ready = _stdin_ready_posix
+ ignore_CTRL_C = _ignore_CTRL_C_posix
+ allow_CTRL_C = _allow_CTRL_C_posix
+elif os.name == 'nt':
+ import msvcrt
+ stdin_ready = _stdin_ready_nt
+ ignore_CTRL_C = _ignore_CTRL_C_other
+ allow_CTRL_C = _allow_CTRL_C_other
+else:
+ stdin_ready = _stdin_ready_other
+ ignore_CTRL_C = _ignore_CTRL_C_other
+ allow_CTRL_C = _allow_CTRL_C_other
+
+
+#-----------------------------------------------------------------------------
+# Main InputHookManager class
+#-----------------------------------------------------------------------------
+
+
+class InputHookManager(object):
"""DEPRECATED since IPython 5.0
-
+
Manage PyOS_InputHook for different GUI toolkits.
-    This class installs various hooks under ``PyOS_InputHook`` to handle
- GUI event loop integration.
- """
-
- def __init__(self):
- if ctypes is None:
- warn("IPython GUI event loop requires ctypes, %gui will not be available")
- else:
- self.PYFUNC = ctypes.PYFUNCTYPE(ctypes.c_int)
- self.guihooks = {}
- self.aliases = {}
- self.apps = {}
- self._reset()
-
- def _reset(self):
- self._callback_pyfunctype = None
- self._callback = None
- self._installed = False
- self._current_gui = None
-
- def get_pyos_inputhook(self):
+    This class installs various hooks under ``PyOS_InputHook`` to handle
+ GUI event loop integration.
+ """
+
+ def __init__(self):
+ if ctypes is None:
+ warn("IPython GUI event loop requires ctypes, %gui will not be available")
+ else:
+ self.PYFUNC = ctypes.PYFUNCTYPE(ctypes.c_int)
+ self.guihooks = {}
+ self.aliases = {}
+ self.apps = {}
+ self._reset()
+
+ def _reset(self):
+ self._callback_pyfunctype = None
+ self._callback = None
+ self._installed = False
+ self._current_gui = None
+
+ def get_pyos_inputhook(self):
"""DEPRECATED since IPython 5.0
Return the current PyOS_InputHook as a ctypes.c_void_p."""
warn("`get_pyos_inputhook` is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- return ctypes.c_void_p.in_dll(ctypes.pythonapi,"PyOS_InputHook")
-
- def get_pyos_inputhook_as_func(self):
+ return ctypes.c_void_p.in_dll(ctypes.pythonapi,"PyOS_InputHook")
+
+ def get_pyos_inputhook_as_func(self):
"""DEPRECATED since IPython 5.0
Return the current PyOS_InputHook as a ctypes.PYFUNCYPE."""
warn("`get_pyos_inputhook_as_func` is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- return self.PYFUNC.in_dll(ctypes.pythonapi,"PyOS_InputHook")
-
- def set_inputhook(self, callback):
+ return self.PYFUNC.in_dll(ctypes.pythonapi,"PyOS_InputHook")
+
+ def set_inputhook(self, callback):
"""DEPRECATED since IPython 5.0
Set PyOS_InputHook to callback and return the previous one."""
- # On platforms with 'readline' support, it's all too likely to
- # have a KeyboardInterrupt signal delivered *even before* an
- # initial ``try:`` clause in the callback can be executed, so
- # we need to disable CTRL+C in this situation.
- ignore_CTRL_C()
- self._callback = callback
- self._callback_pyfunctype = self.PYFUNC(callback)
- pyos_inputhook_ptr = self.get_pyos_inputhook()
- original = self.get_pyos_inputhook_as_func()
- pyos_inputhook_ptr.value = \
- ctypes.cast(self._callback_pyfunctype, ctypes.c_void_p).value
- self._installed = True
- return original
-
- def clear_inputhook(self, app=None):
+ # On platforms with 'readline' support, it's all too likely to
+ # have a KeyboardInterrupt signal delivered *even before* an
+ # initial ``try:`` clause in the callback can be executed, so
+ # we need to disable CTRL+C in this situation.
+ ignore_CTRL_C()
+ self._callback = callback
+ self._callback_pyfunctype = self.PYFUNC(callback)
+ pyos_inputhook_ptr = self.get_pyos_inputhook()
+ original = self.get_pyos_inputhook_as_func()
+ pyos_inputhook_ptr.value = \
+ ctypes.cast(self._callback_pyfunctype, ctypes.c_void_p).value
+ self._installed = True
+ return original
+
+ def clear_inputhook(self, app=None):
"""DEPRECATED since IPython 5.0
-
+
Set PyOS_InputHook to NULL and return the previous one.
- Parameters
- ----------
- app : optional, ignored
- This parameter is allowed only so that clear_inputhook() can be
- called with a similar interface as all the ``enable_*`` methods. But
- the actual value of the parameter is ignored. This uniform interface
- makes it easier to have user-level entry points in the main IPython
- app like :meth:`enable_gui`."""
+ Parameters
+ ----------
+ app : optional, ignored
+ This parameter is allowed only so that clear_inputhook() can be
+ called with a similar interface as all the ``enable_*`` methods. But
+ the actual value of the parameter is ignored. This uniform interface
+ makes it easier to have user-level entry points in the main IPython
+ app like :meth:`enable_gui`."""
warn("`clear_inputhook` is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- pyos_inputhook_ptr = self.get_pyos_inputhook()
- original = self.get_pyos_inputhook_as_func()
- pyos_inputhook_ptr.value = ctypes.c_void_p(None).value
- allow_CTRL_C()
- self._reset()
- return original
-
- def clear_app_refs(self, gui=None):
+ pyos_inputhook_ptr = self.get_pyos_inputhook()
+ original = self.get_pyos_inputhook_as_func()
+ pyos_inputhook_ptr.value = ctypes.c_void_p(None).value
+ allow_CTRL_C()
+ self._reset()
+ return original
+
+ def clear_app_refs(self, gui=None):
"""DEPRECATED since IPython 5.0
-
+
Clear IPython's internal reference to an application instance.
- Whenever we create an app for a user on qt4 or wx, we hold a
- reference to the app. This is needed because in some cases bad things
- can happen if a user doesn't hold a reference themselves. This
- method is provided to clear the references we are holding.
-
- Parameters
- ----------
- gui : None or str
-        If None, clear all app references. If one of ('wx', 'qt4'), clear
- the app for that toolkit. References are not held for gtk or tk
- as those toolkits don't have the notion of an app.
- """
+ Whenever we create an app for a user on qt4 or wx, we hold a
+ reference to the app. This is needed because in some cases bad things
+ can happen if a user doesn't hold a reference themselves. This
+ method is provided to clear the references we are holding.
+
+ Parameters
+ ----------
+ gui : None or str
+        If None, clear all app references. If one of ('wx', 'qt4'), clear
+ the app for that toolkit. References are not held for gtk or tk
+ as those toolkits don't have the notion of an app.
+ """
warn("`clear_app_refs` is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- if gui is None:
- self.apps = {}
- elif gui in self.apps:
- del self.apps[gui]
-
- def register(self, toolkitname, *aliases):
+ if gui is None:
+ self.apps = {}
+ elif gui in self.apps:
+ del self.apps[gui]
+
+ def register(self, toolkitname, *aliases):
"""DEPRECATED since IPython 5.0
Register a class to provide the event loop for a given GUI.
-
- This is intended to be used as a class decorator. It should be passed
- the names with which to register this GUI integration. The classes
- themselves should subclass :class:`InputHookBase`.
-
- ::
-
- @inputhook_manager.register('qt')
- class QtInputHook(InputHookBase):
- def enable(self, app=None):
- ...
- """
+
+ This is intended to be used as a class decorator. It should be passed
+ the names with which to register this GUI integration. The classes
+ themselves should subclass :class:`InputHookBase`.
+
+ ::
+
+ @inputhook_manager.register('qt')
+ class QtInputHook(InputHookBase):
+ def enable(self, app=None):
+ ...
+ """
warn("`register` is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- def decorator(cls):
- if ctypes is not None:
- inst = cls(self)
- self.guihooks[toolkitname] = inst
- for a in aliases:
- self.aliases[a] = toolkitname
- return cls
- return decorator
-
- def current_gui(self):
+ def decorator(cls):
+ if ctypes is not None:
+ inst = cls(self)
+ self.guihooks[toolkitname] = inst
+ for a in aliases:
+ self.aliases[a] = toolkitname
+ return cls
+ return decorator
+
+ def current_gui(self):
"""DEPRECATED since IPython 5.0
Return a string indicating the currently active GUI or None."""
warn("`current_gui` is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- return self._current_gui
-
- def enable_gui(self, gui=None, app=None):
+ return self._current_gui
+
+ def enable_gui(self, gui=None, app=None):
"""DEPRECATED since IPython 5.0
-
+
Switch amongst GUI input hooks by name.
- This is a higher level method than :meth:`set_inputhook` - it uses the
- GUI name to look up a registered object which enables the input hook
- for that GUI.
-
- Parameters
- ----------
- gui : optional, string or None
- If None (or 'none'), clears input hook, otherwise it must be one
- of the recognized GUI names (see ``GUI_*`` constants in module).
-
- app : optional, existing application object.
- For toolkits that have the concept of a global app, you can supply an
- existing one. If not given, the toolkit will be probed for one, and if
- none is found, a new one will be created. Note that GTK does not have
- this concept, and passing an app if ``gui=="GTK"`` will raise an error.
-
- Returns
- -------
- The output of the underlying gui switch routine, typically the actual
- PyOS_InputHook wrapper object or the GUI toolkit app created, if there was
- one.
- """
+ This is a higher level method than :meth:`set_inputhook` - it uses the
+ GUI name to look up a registered object which enables the input hook
+ for that GUI.
+
+ Parameters
+ ----------
+ gui : optional, string or None
+ If None (or 'none'), clears input hook, otherwise it must be one
+ of the recognized GUI names (see ``GUI_*`` constants in module).
+
+ app : optional, existing application object.
+ For toolkits that have the concept of a global app, you can supply an
+ existing one. If not given, the toolkit will be probed for one, and if
+ none is found, a new one will be created. Note that GTK does not have
+ this concept, and passing an app if ``gui=="GTK"`` will raise an error.
+
+ Returns
+ -------
+ The output of the underlying gui switch routine, typically the actual
+ PyOS_InputHook wrapper object or the GUI toolkit app created, if there was
+ one.
+ """
warn("`enable_gui` is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- if gui in (None, GUI_NONE):
- return self.disable_gui()
-
- if gui in self.aliases:
- return self.enable_gui(self.aliases[gui], app)
-
- try:
- gui_hook = self.guihooks[gui]
- except KeyError:
- e = "Invalid GUI request {!r}, valid ones are: {}"
- raise ValueError(e.format(gui, ', '.join(self.guihooks)))
- self._current_gui = gui
-
- app = gui_hook.enable(app)
- if app is not None:
- app._in_event_loop = True
+ if gui in (None, GUI_NONE):
+ return self.disable_gui()
+
+ if gui in self.aliases:
+ return self.enable_gui(self.aliases[gui], app)
+
+ try:
+ gui_hook = self.guihooks[gui]
+ except KeyError:
+ e = "Invalid GUI request {!r}, valid ones are: {}"
+ raise ValueError(e.format(gui, ', '.join(self.guihooks)))
+ self._current_gui = gui
+
+ app = gui_hook.enable(app)
+ if app is not None:
+ app._in_event_loop = True
self.apps[gui] = app
- return app
-
- def disable_gui(self):
+ return app
+
+ def disable_gui(self):
"""DEPRECATED since IPython 5.0
Disable GUI event loop integration.
-
- If an application was registered, this sets its ``_in_event_loop``
- attribute to False. It then calls :meth:`clear_inputhook`.
- """
+
+ If an application was registered, this sets its ``_in_event_loop``
+ attribute to False. It then calls :meth:`clear_inputhook`.
+ """
warn("`disable_gui` is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- gui = self._current_gui
- if gui in self.apps:
- self.apps[gui]._in_event_loop = False
- return self.clear_inputhook()
-
-class InputHookBase(object):
+ gui = self._current_gui
+ if gui in self.apps:
+ self.apps[gui]._in_event_loop = False
+ return self.clear_inputhook()
+
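
A hedged sketch of driving the deprecated manager above directly (historically this is what the %gui machinery did; every call now emits a DeprecationWarning)::

    from IPython.lib.inputhook import inputhook_manager
    # Installs the wx PyOS_InputHook and returns the created/reused wx.App.
    app = inputhook_manager.enable_gui('wx')
    # Later, restore the default input hook.
    inputhook_manager.disable_gui()
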
+class InputHookBase(object):
"""DEPRECATED since IPython 5.0
Base class for input hooks for specific toolkits.
-
- Subclasses should define an :meth:`enable` method with one argument, ``app``,
- which will either be an instance of the toolkit's application class, or None.
- They may also define a :meth:`disable` method with no arguments.
- """
- def __init__(self, manager):
- self.manager = manager
-
- def disable(self):
- pass
-
-inputhook_manager = InputHookManager()
-
-@inputhook_manager.register('osx')
-class NullInputHook(InputHookBase):
+
+ Subclasses should define an :meth:`enable` method with one argument, ``app``,
+ which will either be an instance of the toolkit's application class, or None.
+ They may also define a :meth:`disable` method with no arguments.
+ """
+ def __init__(self, manager):
+ self.manager = manager
+
+ def disable(self):
+ pass
+
+inputhook_manager = InputHookManager()
+
+@inputhook_manager.register('osx')
+class NullInputHook(InputHookBase):
"""DEPRECATED since IPython 5.0
A null inputhook that doesn't need to do anything"""
- def enable(self, app=None):
+ def enable(self, app=None):
warn("This function is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
-
-@inputhook_manager.register('wx')
-class WxInputHook(InputHookBase):
- def enable(self, app=None):
+
+@inputhook_manager.register('wx')
+class WxInputHook(InputHookBase):
+ def enable(self, app=None):
"""DEPRECATED since IPython 5.0
-
+
Enable event loop integration with wxPython.
- Parameters
- ----------
- app : WX Application, optional.
- Running application to use. If not given, we probe WX for an
- existing application object, and create a new one if none is found.
-
- Notes
- -----
-        This method sets the ``PyOS_InputHook`` for wxPython, which allows
-        wxPython to integrate with terminal based applications like
- IPython.
-
- If ``app`` is not given we probe for an existing one, and return it if
- found. If no existing app is found, we create an :class:`wx.App` as
- follows::
-
- import wx
- app = wx.App(redirect=False, clearSigInt=False)
- """
+ Parameters
+ ----------
+ app : WX Application, optional.
+ Running application to use. If not given, we probe WX for an
+ existing application object, and create a new one if none is found.
+
+ Notes
+ -----
+        This method sets the ``PyOS_InputHook`` for wxPython, which allows
+        wxPython to integrate with terminal based applications like
+ IPython.
+
+ If ``app`` is not given we probe for an existing one, and return it if
+ found. If no existing app is found, we create an :class:`wx.App` as
+ follows::
+
+ import wx
+ app = wx.App(redirect=False, clearSigInt=False)
+ """
warn("This function is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- import wx
-
- wx_version = V(wx.__version__).version
-
- if wx_version < [2, 8]:
- raise ValueError("requires wxPython >= 2.8, but you have %s" % wx.__version__)
-
- from IPython.lib.inputhookwx import inputhook_wx
- self.manager.set_inputhook(inputhook_wx)
- if _use_appnope():
- from appnope import nope
- nope()
-
- import wx
- if app is None:
- app = wx.GetApp()
- if app is None:
- app = wx.App(redirect=False, clearSigInt=False)
-
- return app
-
- def disable(self):
+ import wx
+
+ wx_version = V(wx.__version__).version
+
+ if wx_version < [2, 8]:
+ raise ValueError("requires wxPython >= 2.8, but you have %s" % wx.__version__)
+
+ from IPython.lib.inputhookwx import inputhook_wx
+ self.manager.set_inputhook(inputhook_wx)
+ if _use_appnope():
+ from appnope import nope
+ nope()
+
+ import wx
+ if app is None:
+ app = wx.GetApp()
+ if app is None:
+ app = wx.App(redirect=False, clearSigInt=False)
+
+ return app
+
+ def disable(self):
"""DEPRECATED since IPython 5.0
-
+
Disable event loop integration with wxPython.
-        This restores app nap on OS X
- """
+        This restores app nap on OS X
+ """
warn("This function is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- if _use_appnope():
- from appnope import nap
- nap()
-
-@inputhook_manager.register('qt', 'qt4')
-class Qt4InputHook(InputHookBase):
- def enable(self, app=None):
+ if _use_appnope():
+ from appnope import nap
+ nap()
+
+@inputhook_manager.register('qt', 'qt4')
+class Qt4InputHook(InputHookBase):
+ def enable(self, app=None):
"""DEPRECATED since IPython 5.0
Enable event loop integration with PyQt4.
-
- Parameters
- ----------
- app : Qt Application, optional.
- Running application to use. If not given, we probe Qt for an
- existing application object, and create a new one if none is found.
-
- Notes
- -----
-        This method sets the PyOS_InputHook for PyQt4, which allows
-        PyQt4 to integrate with terminal based applications like
- IPython.
-
- If ``app`` is not given we probe for an existing one, and return it if
- found. If no existing app is found, we create an :class:`QApplication`
- as follows::
-
-            from PyQt4 import QtGui
- app = QtGui.QApplication(sys.argv)
- """
+
+ Parameters
+ ----------
+ app : Qt Application, optional.
+ Running application to use. If not given, we probe Qt for an
+ existing application object, and create a new one if none is found.
+
+ Notes
+ -----
+        This method sets the PyOS_InputHook for PyQt4, which allows
+        PyQt4 to integrate with terminal based applications like
+ IPython.
+
+ If ``app`` is not given we probe for an existing one, and return it if
+ found. If no existing app is found, we create an :class:`QApplication`
+ as follows::
+
+            from PyQt4 import QtGui
+ app = QtGui.QApplication(sys.argv)
+ """
warn("This function is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- from IPython.lib.inputhookqt4 import create_inputhook_qt4
- app, inputhook_qt4 = create_inputhook_qt4(self.manager, app)
- self.manager.set_inputhook(inputhook_qt4)
- if _use_appnope():
- from appnope import nope
- nope()
-
- return app
-
- def disable_qt4(self):
+ from IPython.lib.inputhookqt4 import create_inputhook_qt4
+ app, inputhook_qt4 = create_inputhook_qt4(self.manager, app)
+ self.manager.set_inputhook(inputhook_qt4)
+ if _use_appnope():
+ from appnope import nope
+ nope()
+
+ return app
+
+ def disable_qt4(self):
"""DEPRECATED since IPython 5.0
-
+
Disable event loop integration with PyQt4.
-        This restores app nap on OS X
- """
+        This restores app nap on OS X
+ """
warn("This function is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- if _use_appnope():
- from appnope import nap
- nap()
-
-
-@inputhook_manager.register('qt5')
-class Qt5InputHook(Qt4InputHook):
- def enable(self, app=None):
+ if _use_appnope():
+ from appnope import nap
+ nap()
+
+
+@inputhook_manager.register('qt5')
+class Qt5InputHook(Qt4InputHook):
+ def enable(self, app=None):
warn("This function is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- os.environ['QT_API'] = 'pyqt5'
- return Qt4InputHook.enable(self, app)
-
-
-@inputhook_manager.register('gtk')
-class GtkInputHook(InputHookBase):
- def enable(self, app=None):
+ os.environ['QT_API'] = 'pyqt5'
+ return Qt4InputHook.enable(self, app)
+
+
+@inputhook_manager.register('gtk')
+class GtkInputHook(InputHookBase):
+ def enable(self, app=None):
"""DEPRECATED since IPython 5.0
-
+
Enable event loop integration with PyGTK.
- Parameters
- ----------
- app : ignored
- Ignored, it's only a placeholder to keep the call signature of all
- gui activation methods consistent, which simplifies the logic of
- supporting magics.
-
- Notes
- -----
- This methods sets the PyOS_InputHook for PyGTK, which allows
- the PyGTK to integrate with terminal based applications like
- IPython.
- """
+ Parameters
+ ----------
+ app : ignored
+ Ignored, it's only a placeholder to keep the call signature of all
+ gui activation methods consistent, which simplifies the logic of
+ supporting magics.
+
+ Notes
+ -----
+        This method sets PyOS_InputHook for PyGTK, which allows
+        PyGTK to integrate with terminal-based applications like
+        IPython.
+ """
warn("This function is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- import gtk
- try:
- gtk.set_interactive(True)
- except AttributeError:
- # For older versions of gtk, use our own ctypes version
- from IPython.lib.inputhookgtk import inputhook_gtk
- self.manager.set_inputhook(inputhook_gtk)
-
-
-@inputhook_manager.register('tk')
-class TkInputHook(InputHookBase):
- def enable(self, app=None):
+ import gtk
+ try:
+ gtk.set_interactive(True)
+ except AttributeError:
+ # For older versions of gtk, use our own ctypes version
+ from IPython.lib.inputhookgtk import inputhook_gtk
+ self.manager.set_inputhook(inputhook_gtk)
+
+
+@inputhook_manager.register('tk')
+class TkInputHook(InputHookBase):
+ def enable(self, app=None):
"""DEPRECATED since IPython 5.0
-
+
Enable event loop integration with Tk.
- Parameters
- ----------
- app : toplevel :class:`Tkinter.Tk` widget, optional.
- Running toplevel widget to use. If not given, we probe Tk for an
- existing one, and create a new one if none is found.
-
- Notes
- -----
- If you have already created a :class:`Tkinter.Tk` object, the only
- thing done by this method is to register with the
- :class:`InputHookManager`, since creating that object automatically
- sets ``PyOS_InputHook``.
- """
+ Parameters
+ ----------
+ app : toplevel :class:`Tkinter.Tk` widget, optional.
+ Running toplevel widget to use. If not given, we probe Tk for an
+ existing one, and create a new one if none is found.
+
+ Notes
+ -----
+ If you have already created a :class:`Tkinter.Tk` object, the only
+ thing done by this method is to register with the
+ :class:`InputHookManager`, since creating that object automatically
+ sets ``PyOS_InputHook``.
+ """
warn("This function is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- if app is None:
- try:
- from tkinter import Tk # Py 3
- except ImportError:
- from Tkinter import Tk # Py 2
- app = Tk()
- app.withdraw()
- self.manager.apps[GUI_TK] = app
- return app
-
-
-@inputhook_manager.register('glut')
-class GlutInputHook(InputHookBase):
- def enable(self, app=None):
+ if app is None:
+ try:
+ from tkinter import Tk # Py 3
+ except ImportError:
+ from Tkinter import Tk # Py 2
+ app = Tk()
+ app.withdraw()
+ self.manager.apps[GUI_TK] = app
+ return app
+
+
+@inputhook_manager.register('glut')
+class GlutInputHook(InputHookBase):
+ def enable(self, app=None):
"""DEPRECATED since IPython 5.0
-
+
Enable event loop integration with GLUT.
- Parameters
- ----------
-
- app : ignored
- Ignored, it's only a placeholder to keep the call signature of all
- gui activation methods consistent, which simplifies the logic of
- supporting magics.
-
- Notes
- -----
-
- This methods sets the PyOS_InputHook for GLUT, which allows the GLUT to
- integrate with terminal based applications like IPython. Due to GLUT
- limitations, it is currently not possible to start the event loop
- without first creating a window. You should thus not create another
- window but use instead the created one. See 'gui-glut.py' in the
- docs/examples/lib directory.
-
- The default screen mode is set to:
- glut.GLUT_DOUBLE | glut.GLUT_RGBA | glut.GLUT_DEPTH
- """
+ Parameters
+ ----------
+
+ app : ignored
+ Ignored, it's only a placeholder to keep the call signature of all
+ gui activation methods consistent, which simplifies the logic of
+ supporting magics.
+
+ Notes
+ -----
+
+        This method sets PyOS_InputHook for GLUT, which allows GLUT to
+        integrate with terminal-based applications like IPython. Due to GLUT
+        limitations, it is currently not possible to start the event loop
+        without first creating a window. You should therefore not create
+        another window, but use the one that is created instead. See
+        'gui-glut.py' in the docs/examples/lib directory.
+
+ The default screen mode is set to:
+ glut.GLUT_DOUBLE | glut.GLUT_RGBA | glut.GLUT_DEPTH
+ """
warn("This function is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
-
- import OpenGL.GLUT as glut
- from IPython.lib.inputhookglut import glut_display_mode, \
- glut_close, glut_display, \
- glut_idle, inputhook_glut
-
- if GUI_GLUT not in self.manager.apps:
- glut.glutInit( sys.argv )
- glut.glutInitDisplayMode( glut_display_mode )
- # This is specific to freeglut
- if bool(glut.glutSetOption):
- glut.glutSetOption( glut.GLUT_ACTION_ON_WINDOW_CLOSE,
- glut.GLUT_ACTION_GLUTMAINLOOP_RETURNS )
- glut.glutCreateWindow( sys.argv[0] )
- glut.glutReshapeWindow( 1, 1 )
- glut.glutHideWindow( )
- glut.glutWMCloseFunc( glut_close )
- glut.glutDisplayFunc( glut_display )
- glut.glutIdleFunc( glut_idle )
- else:
- glut.glutWMCloseFunc( glut_close )
- glut.glutDisplayFunc( glut_display )
- glut.glutIdleFunc( glut_idle)
- self.manager.set_inputhook( inputhook_glut )
-
-
- def disable(self):
+
+ import OpenGL.GLUT as glut
+ from IPython.lib.inputhookglut import glut_display_mode, \
+ glut_close, glut_display, \
+ glut_idle, inputhook_glut
+
+ if GUI_GLUT not in self.manager.apps:
+ glut.glutInit( sys.argv )
+ glut.glutInitDisplayMode( glut_display_mode )
+ # This is specific to freeglut
+ if bool(glut.glutSetOption):
+ glut.glutSetOption( glut.GLUT_ACTION_ON_WINDOW_CLOSE,
+ glut.GLUT_ACTION_GLUTMAINLOOP_RETURNS )
+ glut.glutCreateWindow( sys.argv[0] )
+ glut.glutReshapeWindow( 1, 1 )
+ glut.glutHideWindow( )
+ glut.glutWMCloseFunc( glut_close )
+ glut.glutDisplayFunc( glut_display )
+ glut.glutIdleFunc( glut_idle )
+ else:
+ glut.glutWMCloseFunc( glut_close )
+ glut.glutDisplayFunc( glut_display )
+ glut.glutIdleFunc( glut_idle)
+ self.manager.set_inputhook( inputhook_glut )
+
+
+ def disable(self):
"""DEPRECATED since IPython 5.0
Disable event loop integration with glut.
-
- This sets PyOS_InputHook to NULL and set the display function to a
- dummy one and set the timer to a dummy timer that will be triggered
- very far in the future.
- """
+
+        This sets PyOS_InputHook to NULL, sets the display function to a
+        dummy one, and sets the timer to a dummy timer that will be triggered
+        very far in the future.
+ """
warn("This function is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- import OpenGL.GLUT as glut
- from glut_support import glutMainLoopEvent
-
- glut.glutHideWindow() # This is an event to be processed below
- glutMainLoopEvent()
- super(GlutInputHook, self).disable()
-
-@inputhook_manager.register('pyglet')
-class PygletInputHook(InputHookBase):
- def enable(self, app=None):
+ import OpenGL.GLUT as glut
+ from glut_support import glutMainLoopEvent
+
+ glut.glutHideWindow() # This is an event to be processed below
+ glutMainLoopEvent()
+ super(GlutInputHook, self).disable()
+
+@inputhook_manager.register('pyglet')
+class PygletInputHook(InputHookBase):
+ def enable(self, app=None):
"""DEPRECATED since IPython 5.0
-
+
Enable event loop integration with pyglet.
- Parameters
- ----------
- app : ignored
- Ignored, it's only a placeholder to keep the call signature of all
- gui activation methods consistent, which simplifies the logic of
- supporting magics.
-
- Notes
- -----
- This methods sets the ``PyOS_InputHook`` for pyglet, which allows
- pyglet to integrate with terminal based applications like
- IPython.
-
- """
+ Parameters
+ ----------
+ app : ignored
+ Ignored, it's only a placeholder to keep the call signature of all
+ gui activation methods consistent, which simplifies the logic of
+ supporting magics.
+
+ Notes
+ -----
+        This method sets ``PyOS_InputHook`` for pyglet, which allows
+        pyglet to integrate with terminal-based applications like
+        IPython.
+
+ """
warn("This function is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- from IPython.lib.inputhookpyglet import inputhook_pyglet
- self.manager.set_inputhook(inputhook_pyglet)
- return app
-
-
-@inputhook_manager.register('gtk3')
-class Gtk3InputHook(InputHookBase):
- def enable(self, app=None):
+ from IPython.lib.inputhookpyglet import inputhook_pyglet
+ self.manager.set_inputhook(inputhook_pyglet)
+ return app
+
+
+@inputhook_manager.register('gtk3')
+class Gtk3InputHook(InputHookBase):
+ def enable(self, app=None):
"""DEPRECATED since IPython 5.0
-
+
Enable event loop integration with Gtk3 (gir bindings).
- Parameters
- ----------
- app : ignored
- Ignored, it's only a placeholder to keep the call signature of all
- gui activation methods consistent, which simplifies the logic of
- supporting magics.
-
- Notes
- -----
- This methods sets the PyOS_InputHook for Gtk3, which allows
- the Gtk3 to integrate with terminal based applications like
- IPython.
- """
+ Parameters
+ ----------
+ app : ignored
+ Ignored, it's only a placeholder to keep the call signature of all
+ gui activation methods consistent, which simplifies the logic of
+ supporting magics.
+
+ Notes
+ -----
+        This method sets PyOS_InputHook for Gtk3, which allows
+        Gtk3 to integrate with terminal-based applications like
+        IPython.
+ """
warn("This function is deprecated since IPython 5.0 and will be removed in future versions.",
DeprecationWarning, stacklevel=2)
- from IPython.lib.inputhookgtk3 import inputhook_gtk3
- self.manager.set_inputhook(inputhook_gtk3)
-
-
-clear_inputhook = inputhook_manager.clear_inputhook
-set_inputhook = inputhook_manager.set_inputhook
-current_gui = inputhook_manager.current_gui
-clear_app_refs = inputhook_manager.clear_app_refs
-enable_gui = inputhook_manager.enable_gui
-disable_gui = inputhook_manager.disable_gui
-register = inputhook_manager.register
-guis = inputhook_manager.guihooks
-
-
-def _deprecated_disable():
+ from IPython.lib.inputhookgtk3 import inputhook_gtk3
+ self.manager.set_inputhook(inputhook_gtk3)
+
+
+clear_inputhook = inputhook_manager.clear_inputhook
+set_inputhook = inputhook_manager.set_inputhook
+current_gui = inputhook_manager.current_gui
+clear_app_refs = inputhook_manager.clear_app_refs
+enable_gui = inputhook_manager.enable_gui
+disable_gui = inputhook_manager.disable_gui
+register = inputhook_manager.register
+guis = inputhook_manager.guihooks
+
+
+def _deprecated_disable():
warn("This function is deprecated since IPython 4.0 use disable_gui() instead",
DeprecationWarning, stacklevel=2)
- inputhook_manager.disable_gui()
+ inputhook_manager.disable_gui()
-disable_wx = disable_qt4 = disable_gtk = disable_gtk3 = disable_glut = \
- disable_pyglet = disable_osx = _deprecated_disable
+disable_wx = disable_qt4 = disable_gtk = disable_gtk3 = disable_glut = \
+ disable_pyglet = disable_osx = _deprecated_disable
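
The aliases just above re-export the InputHookManager methods at module level; enable_gui, disable_gui and current_gui are the usual entry points. A minimal usage sketch (hypothetical interactive session, assuming this deprecated IPython.lib.inputhook API is still importable)::

    from IPython.lib.inputhook import enable_gui, disable_gui, current_gui

    # Install the PyOS_InputHook for Qt4; returns the running (or newly
    # created) QApplication, as Qt4InputHook.enable does above.
    app = enable_gui('qt4')
    current_gui()   # expected to report 'qt4' while the hook is active

    # Remove the hook and return to plain terminal behaviour.
    disable_gui()
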
diff --git a/contrib/python/ipython/py2/IPython/lib/inputhookglut.py b/contrib/python/ipython/py2/IPython/lib/inputhookglut.py
index 5dd908f814..14bafe1632 100644
--- a/contrib/python/ipython/py2/IPython/lib/inputhookglut.py
+++ b/contrib/python/ipython/py2/IPython/lib/inputhookglut.py
@@ -1,173 +1,173 @@
-# coding: utf-8
-"""
-GLUT Inputhook support functions
-"""
-from __future__ import print_function
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-# GLUT is quite an old library and it is difficult to ensure proper
-# integration within IPython since original GLUT does not allow to handle
-# events one by one. Instead, it requires for the mainloop to be entered
-# and never returned (there is not even a function to exit he
-# mainloop). Fortunately, there are alternatives such as freeglut
-# (available for linux and windows) and the OSX implementation gives
-# access to a glutCheckLoop() function that blocks itself until a new
-# event is received. This means we have to setup the idle callback to
-# ensure we got at least one event that will unblock the function.
-#
-# Furthermore, it is not possible to install these handlers without a window
-# being first created. We choose to make this window invisible. This means that
-# display mode options are set at this level and user won't be able to change
-# them later without modifying the code. This should probably be made available
-# via IPython options system.
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-import os
-import sys
-import time
-import signal
-import OpenGL.GLUT as glut
-import OpenGL.platform as platform
-from timeit import default_timer as clock
-
-#-----------------------------------------------------------------------------
-# Constants
-#-----------------------------------------------------------------------------
-
-# Frame per second : 60
-# Should probably be an IPython option
-glut_fps = 60
-
-
-# Display mode : double buffeed + rgba + depth
-# Should probably be an IPython option
-glut_display_mode = (glut.GLUT_DOUBLE |
- glut.GLUT_RGBA |
- glut.GLUT_DEPTH)
-
-glutMainLoopEvent = None
-if sys.platform == 'darwin':
- try:
- glutCheckLoop = platform.createBaseFunction(
- 'glutCheckLoop', dll=platform.GLUT, resultType=None,
- argTypes=[],
- doc='glutCheckLoop( ) -> None',
- argNames=(),
- )
- except AttributeError:
- raise RuntimeError(
- '''Your glut implementation does not allow interactive sessions'''
- '''Consider installing freeglut.''')
- glutMainLoopEvent = glutCheckLoop
-elif glut.HAVE_FREEGLUT:
- glutMainLoopEvent = glut.glutMainLoopEvent
-else:
- raise RuntimeError(
- '''Your glut implementation does not allow interactive sessions. '''
- '''Consider installing freeglut.''')
-
-
-#-----------------------------------------------------------------------------
-# Platform-dependent imports and functions
-#-----------------------------------------------------------------------------
-
-if os.name == 'posix':
- import select
-
- def stdin_ready():
- infds, outfds, erfds = select.select([sys.stdin],[],[],0)
- if infds:
- return True
- else:
- return False
-
-elif sys.platform == 'win32':
- import msvcrt
-
- def stdin_ready():
- return msvcrt.kbhit()
-
-#-----------------------------------------------------------------------------
-# Callback functions
-#-----------------------------------------------------------------------------
-
-def glut_display():
- # Dummy display function
- pass
-
-def glut_idle():
- # Dummy idle function
- pass
-
-def glut_close():
- # Close function only hides the current window
- glut.glutHideWindow()
- glutMainLoopEvent()
-
-def glut_int_handler(signum, frame):
- # Catch sigint and print the defautl message
- signal.signal(signal.SIGINT, signal.default_int_handler)
- print('\nKeyboardInterrupt')
- # Need to reprint the prompt at this stage
-
-
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-def inputhook_glut():
- """Run the pyglet event loop by processing pending events only.
-
- This keeps processing pending events until stdin is ready. After
- processing all pending events, a call to time.sleep is inserted. This is
- needed, otherwise, CPU usage is at 100%. This sleep time should be tuned
- though for best performance.
- """
- # We need to protect against a user pressing Control-C when IPython is
- # idle and this is running. We trap KeyboardInterrupt and pass.
-
- signal.signal(signal.SIGINT, glut_int_handler)
-
- try:
- t = clock()
-
- # Make sure the default window is set after a window has been closed
- if glut.glutGetWindow() == 0:
- glut.glutSetWindow( 1 )
- glutMainLoopEvent()
- return 0
-
- while not stdin_ready():
- glutMainLoopEvent()
- # We need to sleep at this point to keep the idle CPU load
- # low. However, if sleep to long, GUI response is poor. As
- # a compromise, we watch how often GUI events are being processed
- # and switch between a short and long sleep time. Here are some
- # stats useful in helping to tune this.
- # time CPU load
- # 0.001 13%
- # 0.005 3%
- # 0.01 1.5%
- # 0.05 0.5%
- used_time = clock() - t
- if used_time > 10.0:
- # print 'Sleep for 1 s' # dbg
- time.sleep(1.0)
- elif used_time > 0.1:
- # Few GUI events coming in, so we can sleep longer
- # print 'Sleep for 0.05 s' # dbg
- time.sleep(0.05)
- else:
- # Many GUI events coming in, so sleep only very little
- time.sleep(0.001)
- except KeyboardInterrupt:
- pass
- return 0
+# coding: utf-8
+"""
+GLUT Inputhook support functions
+"""
+from __future__ import print_function
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+# GLUT is quite an old library and it is difficult to ensure proper
+# integration within IPython, since original GLUT does not allow handling
+# events one by one. Instead, it requires the mainloop to be entered and
+# never returned (there is not even a function to exit the mainloop).
+# Fortunately, there are alternatives such as freeglut (available for Linux
+# and Windows), and the OS X implementation gives access to a glutCheckLoop()
+# function that blocks until a new event is received. This means we have to
+# set up the idle callback to ensure we get at least one event that will
+# unblock the function.
+#
+# Furthermore, it is not possible to install these handlers without first
+# creating a window. We choose to make this window invisible. This means that
+# display mode options are set at this level and the user won't be able to
+# change them later without modifying the code. This should probably be made
+# available via the IPython options system.
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+import os
+import sys
+import time
+import signal
+import OpenGL.GLUT as glut
+import OpenGL.platform as platform
+from timeit import default_timer as clock
+
+#-----------------------------------------------------------------------------
+# Constants
+#-----------------------------------------------------------------------------
+
+# Frames per second: 60
+# Should probably be an IPython option
+glut_fps = 60
+
+
+# Display mode: double buffered + RGBA + depth
+# Should probably be an IPython option
+glut_display_mode = (glut.GLUT_DOUBLE |
+ glut.GLUT_RGBA |
+ glut.GLUT_DEPTH)
+
+glutMainLoopEvent = None
+if sys.platform == 'darwin':
+ try:
+ glutCheckLoop = platform.createBaseFunction(
+ 'glutCheckLoop', dll=platform.GLUT, resultType=None,
+ argTypes=[],
+ doc='glutCheckLoop( ) -> None',
+ argNames=(),
+ )
+ except AttributeError:
+ raise RuntimeError(
+            '''Your glut implementation does not allow interactive sessions. '''
+            '''Consider installing freeglut.''')
+ glutMainLoopEvent = glutCheckLoop
+elif glut.HAVE_FREEGLUT:
+ glutMainLoopEvent = glut.glutMainLoopEvent
+else:
+ raise RuntimeError(
+ '''Your glut implementation does not allow interactive sessions. '''
+ '''Consider installing freeglut.''')
+
+
+#-----------------------------------------------------------------------------
+# Platform-dependent imports and functions
+#-----------------------------------------------------------------------------
+
+if os.name == 'posix':
+ import select
+
+ def stdin_ready():
+ infds, outfds, erfds = select.select([sys.stdin],[],[],0)
+ if infds:
+ return True
+ else:
+ return False
+
+elif sys.platform == 'win32':
+ import msvcrt
+
+ def stdin_ready():
+ return msvcrt.kbhit()
+
+#-----------------------------------------------------------------------------
+# Callback functions
+#-----------------------------------------------------------------------------
+
+def glut_display():
+ # Dummy display function
+ pass
+
+def glut_idle():
+ # Dummy idle function
+ pass
+
+def glut_close():
+ # Close function only hides the current window
+ glut.glutHideWindow()
+ glutMainLoopEvent()
+
+def glut_int_handler(signum, frame):
+    # Catch SIGINT and print the default message
+ signal.signal(signal.SIGINT, signal.default_int_handler)
+ print('\nKeyboardInterrupt')
+ # Need to reprint the prompt at this stage
+
+
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+def inputhook_glut():
+    """Run the GLUT event loop by processing pending events only.
+
+    This keeps processing pending events until stdin is ready. After
+    processing all pending events, a call to time.sleep is inserted. This is
+    needed; otherwise, CPU usage is at 100%. This sleep time should be tuned
+    for best performance, though.
+ """
+ # We need to protect against a user pressing Control-C when IPython is
+ # idle and this is running. We trap KeyboardInterrupt and pass.
+
+ signal.signal(signal.SIGINT, glut_int_handler)
+
+ try:
+ t = clock()
+
+ # Make sure the default window is set after a window has been closed
+ if glut.glutGetWindow() == 0:
+ glut.glutSetWindow( 1 )
+ glutMainLoopEvent()
+ return 0
+
+ while not stdin_ready():
+ glutMainLoopEvent()
+ # We need to sleep at this point to keep the idle CPU load
+            # low. However, if we sleep too long, GUI response is poor. As
+ # a compromise, we watch how often GUI events are being processed
+ # and switch between a short and long sleep time. Here are some
+ # stats useful in helping to tune this.
+ # time CPU load
+ # 0.001 13%
+ # 0.005 3%
+ # 0.01 1.5%
+ # 0.05 0.5%
+ used_time = clock() - t
+ if used_time > 10.0:
+ # print 'Sleep for 1 s' # dbg
+ time.sleep(1.0)
+ elif used_time > 0.1:
+ # Few GUI events coming in, so we can sleep longer
+ # print 'Sleep for 0.05 s' # dbg
+ time.sleep(0.05)
+ else:
+ # Many GUI events coming in, so sleep only very little
+ time.sleep(0.001)
+ except KeyboardInterrupt:
+ pass
+ return 0
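
The loop in inputhook_glut above follows the same pattern as the other hooks in this package: pump GUI events until stdin has data, then back off with progressively longer sleeps while the GUI stays quiet. A generic sketch of that pattern, with hypothetical callables standing in for the toolkit-specific pieces::

    import time
    from timeit import default_timer as clock

    def pump_until_stdin(process_events, stdin_ready):
        """Call process_events() until stdin_ready() is true, sleeping
        longer the quieter the GUI event stream becomes."""
        t = clock()
        while not stdin_ready():
            process_events()
            idle = clock() - t
            if idle > 10.0:
                time.sleep(1.0)     # GUI idle for a long time
            elif idle > 0.1:
                time.sleep(0.05)    # few GUI events coming in
            else:
                time.sleep(0.001)   # busy GUI, sleep as little as possible
        return 0
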
diff --git a/contrib/python/ipython/py2/IPython/lib/inputhookgtk.py b/contrib/python/ipython/py2/IPython/lib/inputhookgtk.py
index 52dfac5f52..2b4b656f91 100644
--- a/contrib/python/ipython/py2/IPython/lib/inputhookgtk.py
+++ b/contrib/python/ipython/py2/IPython/lib/inputhookgtk.py
@@ -1,35 +1,35 @@
-# encoding: utf-8
-"""
-Enable pygtk to be used interacive by setting PyOS_InputHook.
-
-Authors: Brian Granger
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import sys
-import gtk, gobject
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-
-def _main_quit(*args, **kwargs):
- gtk.main_quit()
- return False
-
-def inputhook_gtk():
- gobject.io_add_watch(sys.stdin, gobject.IO_IN, _main_quit)
- gtk.main()
- return 0
-
+# encoding: utf-8
+"""
+Enable pygtk to be used interactively by setting PyOS_InputHook.
+
+Authors: Brian Granger
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import sys
+import gtk, gobject
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+
+def _main_quit(*args, **kwargs):
+ gtk.main_quit()
+ return False
+
+def inputhook_gtk():
+ gobject.io_add_watch(sys.stdin, gobject.IO_IN, _main_quit)
+ gtk.main()
+ return 0
+
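
The hook above works by asking GObject to watch sys.stdin: when input arrives, the callback calls gtk.main_quit(), the blocking gtk.main() call returns, and control goes back to the interpreter prompt. A standalone check of the same idea (assumes PyGTK is installed; not part of this module)::

    import sys
    import gtk, gobject

    def _quit_on_stdin(*args, **kwargs):
        gtk.main_quit()
        return False   # returning False removes the watch once it has fired

    gobject.io_add_watch(sys.stdin, gobject.IO_IN, _quit_on_stdin)
    gtk.main()   # blocks, but returns as soon as stdin becomes readable
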
diff --git a/contrib/python/ipython/py2/IPython/lib/inputhookgtk3.py b/contrib/python/ipython/py2/IPython/lib/inputhookgtk3.py
index 6998805145..531f5cae14 100644
--- a/contrib/python/ipython/py2/IPython/lib/inputhookgtk3.py
+++ b/contrib/python/ipython/py2/IPython/lib/inputhookgtk3.py
@@ -1,34 +1,34 @@
-# encoding: utf-8
-"""
-Enable Gtk3 to be used interacive by IPython.
-
-Authors: Thomi Richards
-"""
-#-----------------------------------------------------------------------------
-# Copyright (c) 2012, the IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import sys
-from gi.repository import Gtk, GLib
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-def _main_quit(*args, **kwargs):
- Gtk.main_quit()
- return False
-
-
-def inputhook_gtk3():
- GLib.io_add_watch(sys.stdin, GLib.IO_IN, _main_quit)
- Gtk.main()
- return 0
+# encoding: utf-8
+"""
+Enable Gtk3 to be used interactively by IPython.
+
+Authors: Thomi Richards
+"""
+#-----------------------------------------------------------------------------
+# Copyright (c) 2012, the IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import sys
+from gi.repository import Gtk, GLib
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+def _main_quit(*args, **kwargs):
+ Gtk.main_quit()
+ return False
+
+
+def inputhook_gtk3():
+ GLib.io_add_watch(sys.stdin, GLib.IO_IN, _main_quit)
+ Gtk.main()
+ return 0
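
In IPython/lib/inputhook.py, Gtk3InputHook.enable wires this function in with self.manager.set_inputhook(inputhook_gtk3); the module-level alias gives the same effect. A minimal sketch of that wiring (deprecated API, shown for illustration only)::

    from IPython.lib.inputhook import set_inputhook
    from IPython.lib.inputhookgtk3 import inputhook_gtk3

    # After this call the interpreter pumps the Gtk3 main loop whenever
    # it is waiting for interactive input.
    set_inputhook(inputhook_gtk3)
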
diff --git a/contrib/python/ipython/py2/IPython/lib/inputhookpyglet.py b/contrib/python/ipython/py2/IPython/lib/inputhookpyglet.py
index 3cd209946e..b82fcf5ea7 100644
--- a/contrib/python/ipython/py2/IPython/lib/inputhookpyglet.py
+++ b/contrib/python/ipython/py2/IPython/lib/inputhookpyglet.py
@@ -1,111 +1,111 @@
-# encoding: utf-8
-"""
-Enable pyglet to be used interacive by setting PyOS_InputHook.
-
-Authors
--------
-
-* Nicolas P. Rougier
-* Fernando Perez
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import os
-import sys
-import time
-from timeit import default_timer as clock
-import pyglet
-
-#-----------------------------------------------------------------------------
-# Platform-dependent imports and functions
-#-----------------------------------------------------------------------------
-
-if os.name == 'posix':
- import select
-
- def stdin_ready():
- infds, outfds, erfds = select.select([sys.stdin],[],[],0)
- if infds:
- return True
- else:
- return False
-
-elif sys.platform == 'win32':
- import msvcrt
-
- def stdin_ready():
- return msvcrt.kbhit()
-
-
-# On linux only, window.flip() has a bug that causes an AttributeError on
-# window close. For details, see:
-# http://groups.google.com/group/pyglet-users/browse_thread/thread/47c1aab9aa4a3d23/c22f9e819826799e?#c22f9e819826799e
-
-if sys.platform.startswith('linux'):
- def flip(window):
- try:
- window.flip()
- except AttributeError:
- pass
-else:
- def flip(window):
- window.flip()
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-def inputhook_pyglet():
- """Run the pyglet event loop by processing pending events only.
-
- This keeps processing pending events until stdin is ready. After
- processing all pending events, a call to time.sleep is inserted. This is
- needed, otherwise, CPU usage is at 100%. This sleep time should be tuned
- though for best performance.
- """
- # We need to protect against a user pressing Control-C when IPython is
- # idle and this is running. We trap KeyboardInterrupt and pass.
- try:
- t = clock()
- while not stdin_ready():
- pyglet.clock.tick()
- for window in pyglet.app.windows:
- window.switch_to()
- window.dispatch_events()
- window.dispatch_event('on_draw')
- flip(window)
-
- # We need to sleep at this point to keep the idle CPU load
- # low. However, if sleep to long, GUI response is poor. As
- # a compromise, we watch how often GUI events are being processed
- # and switch between a short and long sleep time. Here are some
- # stats useful in helping to tune this.
- # time CPU load
- # 0.001 13%
- # 0.005 3%
- # 0.01 1.5%
- # 0.05 0.5%
- used_time = clock() - t
- if used_time > 10.0:
- # print 'Sleep for 1 s' # dbg
- time.sleep(1.0)
- elif used_time > 0.1:
- # Few GUI events coming in, so we can sleep longer
- # print 'Sleep for 0.05 s' # dbg
- time.sleep(0.05)
- else:
- # Many GUI events coming in, so sleep only very little
- time.sleep(0.001)
- except KeyboardInterrupt:
- pass
- return 0
+# encoding: utf-8
+"""
+Enable pyglet to be used interactively by setting PyOS_InputHook.
+
+Authors
+-------
+
+* Nicolas P. Rougier
+* Fernando Perez
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import os
+import sys
+import time
+from timeit import default_timer as clock
+import pyglet
+
+#-----------------------------------------------------------------------------
+# Platform-dependent imports and functions
+#-----------------------------------------------------------------------------
+
+if os.name == 'posix':
+ import select
+
+ def stdin_ready():
+ infds, outfds, erfds = select.select([sys.stdin],[],[],0)
+ if infds:
+ return True
+ else:
+ return False
+
+elif sys.platform == 'win32':
+ import msvcrt
+
+ def stdin_ready():
+ return msvcrt.kbhit()
+
+
+# On linux only, window.flip() has a bug that causes an AttributeError on
+# window close. For details, see:
+# http://groups.google.com/group/pyglet-users/browse_thread/thread/47c1aab9aa4a3d23/c22f9e819826799e?#c22f9e819826799e
+
+if sys.platform.startswith('linux'):
+ def flip(window):
+ try:
+ window.flip()
+ except AttributeError:
+ pass
+else:
+ def flip(window):
+ window.flip()
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+def inputhook_pyglet():
+ """Run the pyglet event loop by processing pending events only.
+
+ This keeps processing pending events until stdin is ready. After
+ processing all pending events, a call to time.sleep is inserted. This is
+    needed; otherwise, CPU usage is at 100%. This sleep time should be tuned
+    for best performance, though.
+ """
+ # We need to protect against a user pressing Control-C when IPython is
+ # idle and this is running. We trap KeyboardInterrupt and pass.
+ try:
+ t = clock()
+ while not stdin_ready():
+ pyglet.clock.tick()
+ for window in pyglet.app.windows:
+ window.switch_to()
+ window.dispatch_events()
+ window.dispatch_event('on_draw')
+ flip(window)
+
+ # We need to sleep at this point to keep the idle CPU load
+            # low. However, if we sleep too long, GUI response is poor. As
+ # a compromise, we watch how often GUI events are being processed
+ # and switch between a short and long sleep time. Here are some
+ # stats useful in helping to tune this.
+ # time CPU load
+ # 0.001 13%
+ # 0.005 3%
+ # 0.01 1.5%
+ # 0.05 0.5%
+ used_time = clock() - t
+ if used_time > 10.0:
+ # print 'Sleep for 1 s' # dbg
+ time.sleep(1.0)
+ elif used_time > 0.1:
+ # Few GUI events coming in, so we can sleep longer
+ # print 'Sleep for 0.05 s' # dbg
+ time.sleep(0.05)
+ else:
+ # Many GUI events coming in, so sleep only very little
+ time.sleep(0.001)
+ except KeyboardInterrupt:
+ pass
+ return 0
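
The flip() wrapper defined above only exists to swallow the AttributeError that pyglet can raise on window close under Linux; elsewhere it is a straight pass-through to window.flip(). One pass of the hook's inner pump, pulled out of inputhook_pyglet for clarity (sketch only; assumes pyglet is installed)::

    import pyglet
    from IPython.lib.inputhookpyglet import flip

    def tick_all_windows():
        # One iteration of the pyglet pump: advance the clock, dispatch
        # pending events and redraw every open window.
        pyglet.clock.tick()
        for window in pyglet.app.windows:
            window.switch_to()
            window.dispatch_events()
            window.dispatch_event('on_draw')
            flip(window)   # platform-safe wrapper around window.flip()
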
diff --git a/contrib/python/ipython/py2/IPython/lib/inputhookqt4.py b/contrib/python/ipython/py2/IPython/lib/inputhookqt4.py
index a0d0f9e348..8a83902fc0 100644
--- a/contrib/python/ipython/py2/IPython/lib/inputhookqt4.py
+++ b/contrib/python/ipython/py2/IPython/lib/inputhookqt4.py
@@ -1,180 +1,180 @@
-# -*- coding: utf-8 -*-
-"""
-Qt4's inputhook support function
-
-Author: Christian Boos
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import os
-import signal
-import threading
-
-from IPython.core.interactiveshell import InteractiveShell
-from IPython.external.qt_for_kernel import QtCore, QtGui
-from IPython.lib.inputhook import allow_CTRL_C, ignore_CTRL_C, stdin_ready
-
-#-----------------------------------------------------------------------------
-# Module Globals
-#-----------------------------------------------------------------------------
-
-got_kbdint = False
-sigint_timer = None
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-def create_inputhook_qt4(mgr, app=None):
- """Create an input hook for running the Qt4 application event loop.
-
- Parameters
- ----------
- mgr : an InputHookManager
-
- app : Qt Application, optional.
- Running application to use. If not given, we probe Qt for an
- existing application object, and create a new one if none is found.
-
- Returns
- -------
- A pair consisting of a Qt Application (either the one given or the
- one found or created) and a inputhook.
-
- Notes
- -----
- We use a custom input hook instead of PyQt4's default one, as it
- interacts better with the readline packages (issue #481).
-
- The inputhook function works in tandem with a 'pre_prompt_hook'
- which automatically restores the hook as an inputhook in case the
- latter has been temporarily disabled after having intercepted a
- KeyboardInterrupt.
- """
-
- if app is None:
- app = QtCore.QCoreApplication.instance()
- if app is None:
- app = QtGui.QApplication([" "])
-
- # Re-use previously created inputhook if any
- ip = InteractiveShell.instance()
- if hasattr(ip, '_inputhook_qt4'):
- return app, ip._inputhook_qt4
-
- # Otherwise create the inputhook_qt4/preprompthook_qt4 pair of
- # hooks (they both share the got_kbdint flag)
-
- def inputhook_qt4():
- """PyOS_InputHook python hook for Qt4.
-
- Process pending Qt events and if there's no pending keyboard
- input, spend a short slice of time (50ms) running the Qt event
- loop.
-
- As a Python ctypes callback can't raise an exception, we catch
- the KeyboardInterrupt and temporarily deactivate the hook,
- which will let a *second* CTRL+C be processed normally and go
- back to a clean prompt line.
- """
- try:
- allow_CTRL_C()
- app = QtCore.QCoreApplication.instance()
- if not app: # shouldn't happen, but safer if it happens anyway...
- return 0
- app.processEvents(QtCore.QEventLoop.AllEvents, 300)
- if not stdin_ready():
- # Generally a program would run QCoreApplication::exec()
- # from main() to enter and process the Qt event loop until
- # quit() or exit() is called and the program terminates.
- #
- # For our input hook integration, we need to repeatedly
- # enter and process the Qt event loop for only a short
- # amount of time (say 50ms) to ensure that Python stays
- # responsive to other user inputs.
- #
- # A naive approach would be to repeatedly call
- # QCoreApplication::exec(), using a timer to quit after a
- # short amount of time. Unfortunately, QCoreApplication
- # emits an aboutToQuit signal before stopping, which has
- # the undesirable effect of closing all modal windows.
- #
- # To work around this problem, we instead create a
- # QEventLoop and call QEventLoop::exec(). Other than
- # setting some state variables which do not seem to be
- # used anywhere, the only thing QCoreApplication adds is
- # the aboutToQuit signal which is precisely what we are
- # trying to avoid.
- timer = QtCore.QTimer()
- event_loop = QtCore.QEventLoop()
- timer.timeout.connect(event_loop.quit)
- while not stdin_ready():
- timer.start(50)
- event_loop.exec_()
- timer.stop()
- except KeyboardInterrupt:
- global got_kbdint, sigint_timer
-
- ignore_CTRL_C()
- got_kbdint = True
- mgr.clear_inputhook()
-
- # This generates a second SIGINT so the user doesn't have to
- # press CTRL+C twice to get a clean prompt.
- #
- # Since we can't catch the resulting KeyboardInterrupt here
- # (because this is a ctypes callback), we use a timer to
- # generate the SIGINT after we leave this callback.
- #
- # Unfortunately this doesn't work on Windows (SIGINT kills
- # Python and CTRL_C_EVENT doesn't work).
- if(os.name == 'posix'):
- pid = os.getpid()
- if(not sigint_timer):
- sigint_timer = threading.Timer(.01, os.kill,
- args=[pid, signal.SIGINT] )
- sigint_timer.start()
- else:
- print("\nKeyboardInterrupt - Ctrl-C again for new prompt")
-
-
- except: # NO exceptions are allowed to escape from a ctypes callback
- ignore_CTRL_C()
- from traceback import print_exc
- print_exc()
- print("Got exception from inputhook_qt4, unregistering.")
- mgr.clear_inputhook()
- finally:
- allow_CTRL_C()
- return 0
-
- def preprompthook_qt4(ishell):
- """'pre_prompt_hook' used to restore the Qt4 input hook
-
- (in case the latter was temporarily deactivated after a
- CTRL+C)
- """
- global got_kbdint, sigint_timer
-
- if(sigint_timer):
- sigint_timer.cancel()
- sigint_timer = None
-
- if got_kbdint:
- mgr.set_inputhook(inputhook_qt4)
- got_kbdint = False
-
- ip._inputhook_qt4 = inputhook_qt4
- ip.set_hook('pre_prompt_hook', preprompthook_qt4)
-
- return app, inputhook_qt4
+# -*- coding: utf-8 -*-
+"""
+Qt4's inputhook support function
+
+Author: Christian Boos
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import os
+import signal
+import threading
+
+from IPython.core.interactiveshell import InteractiveShell
+from IPython.external.qt_for_kernel import QtCore, QtGui
+from IPython.lib.inputhook import allow_CTRL_C, ignore_CTRL_C, stdin_ready
+
+#-----------------------------------------------------------------------------
+# Module Globals
+#-----------------------------------------------------------------------------
+
+got_kbdint = False
+sigint_timer = None
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+def create_inputhook_qt4(mgr, app=None):
+ """Create an input hook for running the Qt4 application event loop.
+
+ Parameters
+ ----------
+ mgr : an InputHookManager
+
+ app : Qt Application, optional.
+ Running application to use. If not given, we probe Qt for an
+ existing application object, and create a new one if none is found.
+
+ Returns
+ -------
+ A pair consisting of a Qt Application (either the one given or the
+    one found or created) and an inputhook.
+
+ Notes
+ -----
+ We use a custom input hook instead of PyQt4's default one, as it
+ interacts better with the readline packages (issue #481).
+
+ The inputhook function works in tandem with a 'pre_prompt_hook'
+ which automatically restores the hook as an inputhook in case the
+ latter has been temporarily disabled after having intercepted a
+ KeyboardInterrupt.
+ """
+
+ if app is None:
+ app = QtCore.QCoreApplication.instance()
+ if app is None:
+ app = QtGui.QApplication([" "])
+
+ # Re-use previously created inputhook if any
+ ip = InteractiveShell.instance()
+ if hasattr(ip, '_inputhook_qt4'):
+ return app, ip._inputhook_qt4
+
+ # Otherwise create the inputhook_qt4/preprompthook_qt4 pair of
+ # hooks (they both share the got_kbdint flag)
+
+ def inputhook_qt4():
+ """PyOS_InputHook python hook for Qt4.
+
+ Process pending Qt events and if there's no pending keyboard
+ input, spend a short slice of time (50ms) running the Qt event
+ loop.
+
+ As a Python ctypes callback can't raise an exception, we catch
+ the KeyboardInterrupt and temporarily deactivate the hook,
+ which will let a *second* CTRL+C be processed normally and go
+ back to a clean prompt line.
+ """
+ try:
+ allow_CTRL_C()
+ app = QtCore.QCoreApplication.instance()
+ if not app: # shouldn't happen, but safer if it happens anyway...
+ return 0
+ app.processEvents(QtCore.QEventLoop.AllEvents, 300)
+ if not stdin_ready():
+ # Generally a program would run QCoreApplication::exec()
+ # from main() to enter and process the Qt event loop until
+ # quit() or exit() is called and the program terminates.
+ #
+ # For our input hook integration, we need to repeatedly
+ # enter and process the Qt event loop for only a short
+ # amount of time (say 50ms) to ensure that Python stays
+ # responsive to other user inputs.
+ #
+ # A naive approach would be to repeatedly call
+ # QCoreApplication::exec(), using a timer to quit after a
+ # short amount of time. Unfortunately, QCoreApplication
+ # emits an aboutToQuit signal before stopping, which has
+ # the undesirable effect of closing all modal windows.
+ #
+ # To work around this problem, we instead create a
+ # QEventLoop and call QEventLoop::exec(). Other than
+ # setting some state variables which do not seem to be
+ # used anywhere, the only thing QCoreApplication adds is
+ # the aboutToQuit signal which is precisely what we are
+ # trying to avoid.
+ timer = QtCore.QTimer()
+ event_loop = QtCore.QEventLoop()
+ timer.timeout.connect(event_loop.quit)
+ while not stdin_ready():
+ timer.start(50)
+ event_loop.exec_()
+ timer.stop()
+ except KeyboardInterrupt:
+ global got_kbdint, sigint_timer
+
+ ignore_CTRL_C()
+ got_kbdint = True
+ mgr.clear_inputhook()
+
+ # This generates a second SIGINT so the user doesn't have to
+ # press CTRL+C twice to get a clean prompt.
+ #
+ # Since we can't catch the resulting KeyboardInterrupt here
+ # (because this is a ctypes callback), we use a timer to
+ # generate the SIGINT after we leave this callback.
+ #
+ # Unfortunately this doesn't work on Windows (SIGINT kills
+ # Python and CTRL_C_EVENT doesn't work).
+ if(os.name == 'posix'):
+ pid = os.getpid()
+ if(not sigint_timer):
+ sigint_timer = threading.Timer(.01, os.kill,
+ args=[pid, signal.SIGINT] )
+ sigint_timer.start()
+ else:
+ print("\nKeyboardInterrupt - Ctrl-C again for new prompt")
+
+
+ except: # NO exceptions are allowed to escape from a ctypes callback
+ ignore_CTRL_C()
+ from traceback import print_exc
+ print_exc()
+ print("Got exception from inputhook_qt4, unregistering.")
+ mgr.clear_inputhook()
+ finally:
+ allow_CTRL_C()
+ return 0
+
+ def preprompthook_qt4(ishell):
+ """'pre_prompt_hook' used to restore the Qt4 input hook
+
+ (in case the latter was temporarily deactivated after a
+ CTRL+C)
+ """
+ global got_kbdint, sigint_timer
+
+ if(sigint_timer):
+ sigint_timer.cancel()
+ sigint_timer = None
+
+ if got_kbdint:
+ mgr.set_inputhook(inputhook_qt4)
+ got_kbdint = False
+
+ ip._inputhook_qt4 = inputhook_qt4
+ ip.set_hook('pre_prompt_hook', preprompthook_qt4)
+
+ return app, inputhook_qt4
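
The long comment inside inputhook_qt4 explains why the hook drives a bare QEventLoop from a QTimer rather than calling QCoreApplication.exec_(): exec_() emits aboutToQuit on its way out, which closes modal windows. Stripped of the stdin polling, that 50 ms slice looks roughly like this (sketch only; uses the same Qt bindings imported above)::

    from IPython.external.qt_for_kernel import QtCore

    def run_event_loop_slice(ms=50):
        # Process Qt events for about `ms` milliseconds without ever
        # triggering QCoreApplication's aboutToQuit signal.
        timer = QtCore.QTimer()
        event_loop = QtCore.QEventLoop()
        timer.timeout.connect(event_loop.quit)
        timer.start(ms)
        event_loop.exec_()   # returns when the timer fires
        timer.stop()
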
diff --git a/contrib/python/ipython/py2/IPython/lib/inputhookwx.py b/contrib/python/ipython/py2/IPython/lib/inputhookwx.py
index e054c6d915..3aac526131 100644
--- a/contrib/python/ipython/py2/IPython/lib/inputhookwx.py
+++ b/contrib/python/ipython/py2/IPython/lib/inputhookwx.py
@@ -1,167 +1,167 @@
-# encoding: utf-8
-
-"""
-Enable wxPython to be used interacive by setting PyOS_InputHook.
-
-Authors: Robin Dunn, Brian Granger, Ondrej Certik
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import sys
-import signal
-import time
-from timeit import default_timer as clock
-import wx
-
-from IPython.lib.inputhook import stdin_ready
-
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-def inputhook_wx1():
- """Run the wx event loop by processing pending events only.
-
- This approach seems to work, but its performance is not great as it
- relies on having PyOS_InputHook called regularly.
- """
- try:
- app = wx.GetApp()
- if app is not None:
- assert wx.Thread_IsMain()
-
- # Make a temporary event loop and process system events until
- # there are no more waiting, then allow idle events (which
- # will also deal with pending or posted wx events.)
- evtloop = wx.EventLoop()
- ea = wx.EventLoopActivator(evtloop)
- while evtloop.Pending():
- evtloop.Dispatch()
- app.ProcessIdle()
- del ea
- except KeyboardInterrupt:
- pass
- return 0
-
-class EventLoopTimer(wx.Timer):
-
- def __init__(self, func):
- self.func = func
- wx.Timer.__init__(self)
-
- def Notify(self):
- self.func()
-
-class EventLoopRunner(object):
-
- def Run(self, time):
- self.evtloop = wx.EventLoop()
- self.timer = EventLoopTimer(self.check_stdin)
- self.timer.Start(time)
- self.evtloop.Run()
-
- def check_stdin(self):
- if stdin_ready():
- self.timer.Stop()
- self.evtloop.Exit()
-
-def inputhook_wx2():
- """Run the wx event loop, polling for stdin.
-
- This version runs the wx eventloop for an undetermined amount of time,
- during which it periodically checks to see if anything is ready on
- stdin. If anything is ready on stdin, the event loop exits.
-
- The argument to elr.Run controls how often the event loop looks at stdin.
- This determines the responsiveness at the keyboard. A setting of 1000
- enables a user to type at most 1 char per second. I have found that a
- setting of 10 gives good keyboard response. We can shorten it further,
- but eventually performance would suffer from calling select/kbhit too
- often.
- """
- try:
- app = wx.GetApp()
- if app is not None:
- assert wx.Thread_IsMain()
- elr = EventLoopRunner()
- # As this time is made shorter, keyboard response improves, but idle
- # CPU load goes up. 10 ms seems like a good compromise.
- elr.Run(time=10) # CHANGE time here to control polling interval
- except KeyboardInterrupt:
- pass
- return 0
-
-def inputhook_wx3():
- """Run the wx event loop by processing pending events only.
-
- This is like inputhook_wx1, but it keeps processing pending events
- until stdin is ready. After processing all pending events, a call to
- time.sleep is inserted. This is needed, otherwise, CPU usage is at 100%.
- This sleep time should be tuned though for best performance.
- """
- # We need to protect against a user pressing Control-C when IPython is
- # idle and this is running. We trap KeyboardInterrupt and pass.
- try:
- app = wx.GetApp()
- if app is not None:
- assert wx.Thread_IsMain()
-
- # The import of wx on Linux sets the handler for signal.SIGINT
- # to 0. This is a bug in wx or gtk. We fix by just setting it
- # back to the Python default.
- if not callable(signal.getsignal(signal.SIGINT)):
- signal.signal(signal.SIGINT, signal.default_int_handler)
-
- evtloop = wx.EventLoop()
- ea = wx.EventLoopActivator(evtloop)
- t = clock()
- while not stdin_ready():
- while evtloop.Pending():
- t = clock()
- evtloop.Dispatch()
- app.ProcessIdle()
- # We need to sleep at this point to keep the idle CPU load
- # low. However, if sleep to long, GUI response is poor. As
- # a compromise, we watch how often GUI events are being processed
- # and switch between a short and long sleep time. Here are some
- # stats useful in helping to tune this.
- # time CPU load
- # 0.001 13%
- # 0.005 3%
- # 0.01 1.5%
- # 0.05 0.5%
- used_time = clock() - t
- if used_time > 10.0:
- # print 'Sleep for 1 s' # dbg
- time.sleep(1.0)
- elif used_time > 0.1:
- # Few GUI events coming in, so we can sleep longer
- # print 'Sleep for 0.05 s' # dbg
- time.sleep(0.05)
- else:
- # Many GUI events coming in, so sleep only very little
- time.sleep(0.001)
- del ea
- except KeyboardInterrupt:
- pass
- return 0
-
-if sys.platform == 'darwin':
- # On OSX, evtloop.Pending() always returns True, regardless of there being
- # any events pending. As such we can't use implementations 1 or 3 of the
- # inputhook as those depend on a pending/dispatch loop.
- inputhook_wx = inputhook_wx2
-else:
- # This is our default implementation
- inputhook_wx = inputhook_wx3
+# encoding: utf-8
+
+"""
+Enable wxPython to be used interactively by setting PyOS_InputHook.
+
+Authors: Robin Dunn, Brian Granger, Ondrej Certik
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import sys
+import signal
+import time
+from timeit import default_timer as clock
+import wx
+
+from IPython.lib.inputhook import stdin_ready
+
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+def inputhook_wx1():
+ """Run the wx event loop by processing pending events only.
+
+ This approach seems to work, but its performance is not great as it
+ relies on having PyOS_InputHook called regularly.
+ """
+ try:
+ app = wx.GetApp()
+ if app is not None:
+ assert wx.Thread_IsMain()
+
+ # Make a temporary event loop and process system events until
+ # there are no more waiting, then allow idle events (which
+ # will also deal with pending or posted wx events.)
+ evtloop = wx.EventLoop()
+ ea = wx.EventLoopActivator(evtloop)
+ while evtloop.Pending():
+ evtloop.Dispatch()
+ app.ProcessIdle()
+ del ea
+ except KeyboardInterrupt:
+ pass
+ return 0
+
+class EventLoopTimer(wx.Timer):
+
+ def __init__(self, func):
+ self.func = func
+ wx.Timer.__init__(self)
+
+ def Notify(self):
+ self.func()
+
+class EventLoopRunner(object):
+
+ def Run(self, time):
+ self.evtloop = wx.EventLoop()
+ self.timer = EventLoopTimer(self.check_stdin)
+ self.timer.Start(time)
+ self.evtloop.Run()
+
+ def check_stdin(self):
+ if stdin_ready():
+ self.timer.Stop()
+ self.evtloop.Exit()
+
+def inputhook_wx2():
+ """Run the wx event loop, polling for stdin.
+
+ This version runs the wx eventloop for an undetermined amount of time,
+ during which it periodically checks to see if anything is ready on
+ stdin. If anything is ready on stdin, the event loop exits.
+
+ The argument to elr.Run controls how often the event loop looks at stdin.
+ This determines the responsiveness at the keyboard. A setting of 1000
+ enables a user to type at most 1 char per second. I have found that a
+ setting of 10 gives good keyboard response. We can shorten it further,
+ but eventually performance would suffer from calling select/kbhit too
+ often.
+ """
+ try:
+ app = wx.GetApp()
+ if app is not None:
+ assert wx.Thread_IsMain()
+ elr = EventLoopRunner()
+ # As this time is made shorter, keyboard response improves, but idle
+ # CPU load goes up. 10 ms seems like a good compromise.
+ elr.Run(time=10) # CHANGE time here to control polling interval
+ except KeyboardInterrupt:
+ pass
+ return 0
+
+def inputhook_wx3():
+ """Run the wx event loop by processing pending events only.
+
+ This is like inputhook_wx1, but it keeps processing pending events
+ until stdin is ready. After processing all pending events, a call to
+    time.sleep is inserted. This is needed; otherwise, CPU usage is at 100%.
+    This sleep time should be tuned for best performance, though.
+ """
+ # We need to protect against a user pressing Control-C when IPython is
+ # idle and this is running. We trap KeyboardInterrupt and pass.
+ try:
+ app = wx.GetApp()
+ if app is not None:
+ assert wx.Thread_IsMain()
+
+ # The import of wx on Linux sets the handler for signal.SIGINT
+ # to 0. This is a bug in wx or gtk. We fix by just setting it
+ # back to the Python default.
+ if not callable(signal.getsignal(signal.SIGINT)):
+ signal.signal(signal.SIGINT, signal.default_int_handler)
+
+ evtloop = wx.EventLoop()
+ ea = wx.EventLoopActivator(evtloop)
+ t = clock()
+ while not stdin_ready():
+ while evtloop.Pending():
+ t = clock()
+ evtloop.Dispatch()
+ app.ProcessIdle()
+ # We need to sleep at this point to keep the idle CPU load
+ # low. However, if we sleep too long, GUI response is poor. As
+ # a compromise, we watch how often GUI events are being processed
+ # and switch between a short and long sleep time. Here are some
+ # stats useful in helping to tune this.
+ # time CPU load
+ # 0.001 13%
+ # 0.005 3%
+ # 0.01 1.5%
+ # 0.05 0.5%
+ used_time = clock() - t
+ if used_time > 10.0:
+ # print 'Sleep for 1 s' # dbg
+ time.sleep(1.0)
+ elif used_time > 0.1:
+ # Few GUI events coming in, so we can sleep longer
+ # print 'Sleep for 0.05 s' # dbg
+ time.sleep(0.05)
+ else:
+ # Many GUI events coming in, so sleep only very little
+ time.sleep(0.001)
+ del ea
+ except KeyboardInterrupt:
+ pass
+ return 0
+
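The adaptive back-off above boils down to a small pure function. This sketch simply restates the thresholds used by inputhook_wx3; the function name is illustrative:

    def _choose_sleep(idle_seconds):
        # Thresholds and sleep times match the loop in inputhook_wx3 above.
        if idle_seconds > 10.0:
            return 1.0     # no GUI events for a long time: sleep a full second
        elif idle_seconds > 0.1:
            return 0.05    # few GUI events: sleep longer
        else:
            return 0.001   # many GUI events: sleep very little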
+if sys.platform == 'darwin':
+ # On OSX, evtloop.Pending() always returns True, regardless of there being
+ # any events pending. As such we can't use implementations 1 or 3 of the
+ # inputhook as those depend on a pending/dispatch loop.
+ inputhook_wx = inputhook_wx2
+else:
+ # This is our default implementation
+ inputhook_wx = inputhook_wx3
diff --git a/contrib/python/ipython/py2/IPython/lib/kernel.py b/contrib/python/ipython/py2/IPython/lib/kernel.py
index 7de2ea4b12..af9827667f 100644
--- a/contrib/python/ipython/py2/IPython/lib/kernel.py
+++ b/contrib/python/ipython/py2/IPython/lib/kernel.py
@@ -1,13 +1,13 @@
-"""[DEPRECATED] Utilities for connecting to kernels
-
-Moved to IPython.kernel.connect
-"""
-
-import warnings
-warnings.warn("IPython.lib.kernel moved to IPython.kernel.connect in IPython 1.0,"
- " and will be removed in IPython 6.0.",
- DeprecationWarning
-)
-
-from ipykernel.connect import *
-
+"""[DEPRECATED] Utilities for connecting to kernels
+
+Moved to IPython.kernel.connect
+"""
+
+import warnings
+warnings.warn("IPython.lib.kernel moved to IPython.kernel.connect in IPython 1.0,"
+ " and will be removed in IPython 6.0.",
+ DeprecationWarning
+)
+
+from ipykernel.connect import *
+
diff --git a/contrib/python/ipython/py2/IPython/lib/latextools.py b/contrib/python/ipython/py2/IPython/lib/latextools.py
index 4df8e562ed..c3230dd489 100644
--- a/contrib/python/ipython/py2/IPython/lib/latextools.py
+++ b/contrib/python/ipython/py2/IPython/lib/latextools.py
@@ -1,111 +1,111 @@
-# -*- coding: utf-8 -*-
-"""Tools for handling LaTeX."""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from io import BytesIO, open
-import os
-import tempfile
-import shutil
-import subprocess
-
-from IPython.utils.process import find_cmd, FindCmdError
-from traitlets.config import get_config
-from traitlets.config.configurable import SingletonConfigurable
-from traitlets import List, Bool, Unicode
-from IPython.utils.py3compat import cast_unicode, cast_unicode_py2 as u, PY3
-
-try: # Py3
- from base64 import encodebytes
-except ImportError: # Py2
- from base64 import encodestring as encodebytes
-
-
-class LaTeXTool(SingletonConfigurable):
- """An object to store configuration of the LaTeX tool."""
- def _config_default(self):
- return get_config()
-
- backends = List(
- Unicode(), ["matplotlib", "dvipng"],
- help="Preferred backend to draw LaTeX math equations. "
- "Backends in the list are checked one by one and the first "
- "usable one is used. Note that `matplotlib` backend "
- "is usable only for inline style equations. To draw "
- "display style equations, `dvipng` backend must be specified. ",
- # It is a List instead of Enum, to make configuration more
- # flexible. For example, to use matplotlib mainly but dvipng
- # for display style, the default ["matplotlib", "dvipng"] can
- # be used. To NOT use dvipng so that other repr such as
- # unicode pretty printing is used, you can use ["matplotlib"].
+# -*- coding: utf-8 -*-
+"""Tools for handling LaTeX."""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from io import BytesIO, open
+import os
+import tempfile
+import shutil
+import subprocess
+
+from IPython.utils.process import find_cmd, FindCmdError
+from traitlets.config import get_config
+from traitlets.config.configurable import SingletonConfigurable
+from traitlets import List, Bool, Unicode
+from IPython.utils.py3compat import cast_unicode, cast_unicode_py2 as u, PY3
+
+try: # Py3
+ from base64 import encodebytes
+except ImportError: # Py2
+ from base64 import encodestring as encodebytes
+
+
+class LaTeXTool(SingletonConfigurable):
+ """An object to store configuration of the LaTeX tool."""
+ def _config_default(self):
+ return get_config()
+
+ backends = List(
+ Unicode(), ["matplotlib", "dvipng"],
+ help="Preferred backend to draw LaTeX math equations. "
+ "Backends in the list are checked one by one and the first "
+ "usable one is used. Note that `matplotlib` backend "
+ "is usable only for inline style equations. To draw "
+ "display style equations, `dvipng` backend must be specified. ",
+ # It is a List instead of Enum, to make configuration more
+ # flexible. For example, to use matplotlib mainly but dvipng
+ # for display style, the default ["matplotlib", "dvipng"] can
+ # be used. To NOT use dvipng so that other repr such as
+ # unicode pretty printing is used, you can use ["matplotlib"].
).tag(config=True)
-
- use_breqn = Bool(
- True,
- help="Use breqn.sty to automatically break long equations. "
- "This configuration takes effect only for dvipng backend.",
+
+ use_breqn = Bool(
+ True,
+ help="Use breqn.sty to automatically break long equations. "
+ "This configuration takes effect only for dvipng backend.",
).tag(config=True)
-
- packages = List(
- ['amsmath', 'amsthm', 'amssymb', 'bm'],
- help="A list of packages to use for dvipng backend. "
- "'breqn' will be automatically appended when use_breqn=True.",
+
+ packages = List(
+ ['amsmath', 'amsthm', 'amssymb', 'bm'],
+ help="A list of packages to use for dvipng backend. "
+ "'breqn' will be automatically appended when use_breqn=True.",
).tag(config=True)
-
- preamble = Unicode(
- help="Additional preamble to use when generating LaTeX source "
- "for dvipng backend.",
+
+ preamble = Unicode(
+ help="Additional preamble to use when generating LaTeX source "
+ "for dvipng backend.",
).tag(config=True)
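Because LaTeXTool is a SingletonConfigurable, these traits can be set through the usual traitlets configuration mechanism. A minimal sketch of an ipython_config.py fragment, with values chosen purely as an example:

    c = get_config()  # provided automatically inside IPython config files
    c.LaTeXTool.backends = ["dvipng", "matplotlib"]   # prefer dvipng for display math
    c.LaTeXTool.use_breqn = False                     # do not auto-break long equations
    c.LaTeXTool.packages = ['amsmath', 'amssymb']     # packages for the dvipng documents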
-
-
-def latex_to_png(s, encode=False, backend=None, wrap=False):
- """Render a LaTeX string to PNG.
-
- Parameters
- ----------
+
+
+def latex_to_png(s, encode=False, backend=None, wrap=False):
+ """Render a LaTeX string to PNG.
+
+ Parameters
+ ----------
s : str
- The raw string containing valid inline LaTeX.
- encode : bool, optional
- Should the PNG data base64 encoded to make it JSON'able.
- backend : {matplotlib, dvipng}
- Backend for producing PNG data.
- wrap : bool
- If true, Automatically wrap `s` as a LaTeX equation.
-
- None is returned when the backend cannot be used.
-
- """
- s = cast_unicode(s)
- allowed_backends = LaTeXTool.instance().backends
- if backend is None:
- backend = allowed_backends[0]
- if backend not in allowed_backends:
- return None
- if backend == 'matplotlib':
- f = latex_to_png_mpl
- elif backend == 'dvipng':
- f = latex_to_png_dvipng
- else:
- raise ValueError('No such backend {0}'.format(backend))
- bin_data = f(s, wrap)
- if encode and bin_data:
- bin_data = encodebytes(bin_data)
- return bin_data
-
-
-def latex_to_png_mpl(s, wrap):
- try:
- from matplotlib import mathtext
+ The raw string containing valid inline LaTeX.
+ encode : bool, optional
+ Whether the PNG data should be base64-encoded to make it JSON'able.
+ backend : {matplotlib, dvipng}
+ Backend for producing PNG data.
+ wrap : bool
+ If true, automatically wrap `s` as a LaTeX equation.
+
+ None is returned when the backend cannot be used.
+
+ """
+ s = cast_unicode(s)
+ allowed_backends = LaTeXTool.instance().backends
+ if backend is None:
+ backend = allowed_backends[0]
+ if backend not in allowed_backends:
+ return None
+ if backend == 'matplotlib':
+ f = latex_to_png_mpl
+ elif backend == 'dvipng':
+ f = latex_to_png_dvipng
+ else:
+ raise ValueError('No such backend {0}'.format(backend))
+ bin_data = f(s, wrap)
+ if encode and bin_data:
+ bin_data = encodebytes(bin_data)
+ return bin_data
+
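For reference, a minimal use of latex_to_png as defined above; it returns None when neither matplotlib nor latex/dvipng is available:

    from IPython.lib.latextools import latex_to_png

    png_bytes = latex_to_png(r'\frac{a}{b}', wrap=True)          # raw PNG bytes or None
    b64_bytes = latex_to_png(r'e^{i\pi} + 1 = 0', encode=True)   # base64-encoded bytes or None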
+
+def latex_to_png_mpl(s, wrap):
+ try:
+ from matplotlib import mathtext
from pyparsing import ParseFatalException
- except ImportError:
- return None
-
- # mpl mathtext doesn't support display math, force inline
- s = s.replace('$$', '$')
- if wrap:
- s = u'${0}$'.format(s)
-
+ except ImportError:
+ return None
+
+ # mpl mathtext doesn't support display math, force inline
+ s = s.replace('$$', '$')
+ if wrap:
+ s = u'${0}$'.format(s)
+
try:
mt = mathtext.MathTextParser('bitmap')
f = BytesIO()
@@ -113,93 +113,93 @@ def latex_to_png_mpl(s, wrap):
return f.getvalue()
except (ValueError, RuntimeError, ParseFatalException):
return None
-
-
-def latex_to_png_dvipng(s, wrap):
- try:
- find_cmd('latex')
- find_cmd('dvipng')
- except FindCmdError:
- return None
- try:
- workdir = tempfile.mkdtemp()
- tmpfile = os.path.join(workdir, "tmp.tex")
- dvifile = os.path.join(workdir, "tmp.dvi")
- outfile = os.path.join(workdir, "tmp.png")
-
- with open(tmpfile, "w", encoding='utf8') as f:
- f.writelines(genelatex(s, wrap))
-
- with open(os.devnull, 'wb') as devnull:
- subprocess.check_call(
- ["latex", "-halt-on-error", "-interaction", "batchmode", tmpfile],
- cwd=workdir, stdout=devnull, stderr=devnull)
-
- subprocess.check_call(
- ["dvipng", "-T", "tight", "-x", "1500", "-z", "9",
- "-bg", "transparent", "-o", outfile, dvifile], cwd=workdir,
- stdout=devnull, stderr=devnull)
-
- with open(outfile, "rb") as f:
- return f.read()
+
+
+def latex_to_png_dvipng(s, wrap):
+ try:
+ find_cmd('latex')
+ find_cmd('dvipng')
+ except FindCmdError:
+ return None
+ try:
+ workdir = tempfile.mkdtemp()
+ tmpfile = os.path.join(workdir, "tmp.tex")
+ dvifile = os.path.join(workdir, "tmp.dvi")
+ outfile = os.path.join(workdir, "tmp.png")
+
+ with open(tmpfile, "w", encoding='utf8') as f:
+ f.writelines(genelatex(s, wrap))
+
+ with open(os.devnull, 'wb') as devnull:
+ subprocess.check_call(
+ ["latex", "-halt-on-error", "-interaction", "batchmode", tmpfile],
+ cwd=workdir, stdout=devnull, stderr=devnull)
+
+ subprocess.check_call(
+ ["dvipng", "-T", "tight", "-x", "1500", "-z", "9",
+ "-bg", "transparent", "-o", outfile, dvifile], cwd=workdir,
+ stdout=devnull, stderr=devnull)
+
+ with open(outfile, "rb") as f:
+ return f.read()
except subprocess.CalledProcessError:
return None
- finally:
- shutil.rmtree(workdir)
-
-
-def kpsewhich(filename):
- """Invoke kpsewhich command with an argument `filename`."""
- try:
- find_cmd("kpsewhich")
- proc = subprocess.Popen(
- ["kpsewhich", filename],
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- (stdout, stderr) = proc.communicate()
- return stdout.strip().decode('utf8', 'replace')
- except FindCmdError:
- pass
-
-
-def genelatex(body, wrap):
- """Generate LaTeX document for dvipng backend."""
- lt = LaTeXTool.instance()
- breqn = wrap and lt.use_breqn and kpsewhich("breqn.sty")
- yield u(r'\documentclass{article}')
- packages = lt.packages
- if breqn:
- packages = packages + ['breqn']
- for pack in packages:
- yield u(r'\usepackage{{{0}}}'.format(pack))
- yield u(r'\pagestyle{empty}')
- if lt.preamble:
- yield lt.preamble
- yield u(r'\begin{document}')
- if breqn:
- yield u(r'\begin{dmath*}')
- yield body
- yield u(r'\end{dmath*}')
- elif wrap:
- yield u'$${0}$$'.format(body)
- else:
- yield body
- yield u'\end{document}'
-
-
-_data_uri_template_png = u"""<img src="data:image/png;base64,%s" alt=%s />"""
-
-def latex_to_html(s, alt='image'):
- """Render LaTeX to HTML with embedded PNG data using data URIs.
-
- Parameters
- ----------
- s : str
- The raw string containing valid inline LateX.
- alt : str
- The alt text to use for the HTML.
- """
- base64_data = latex_to_png(s, encode=True).decode('ascii')
- if base64_data:
- return _data_uri_template_png % (base64_data, alt)
-
-
+ finally:
+ shutil.rmtree(workdir)
+
+
+def kpsewhich(filename):
+ """Invoke kpsewhich command with an argument `filename`."""
+ try:
+ find_cmd("kpsewhich")
+ proc = subprocess.Popen(
+ ["kpsewhich", filename],
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (stdout, stderr) = proc.communicate()
+ return stdout.strip().decode('utf8', 'replace')
+ except FindCmdError:
+ pass
+
+
+def genelatex(body, wrap):
+ """Generate LaTeX document for dvipng backend."""
+ lt = LaTeXTool.instance()
+ breqn = wrap and lt.use_breqn and kpsewhich("breqn.sty")
+ yield u(r'\documentclass{article}')
+ packages = lt.packages
+ if breqn:
+ packages = packages + ['breqn']
+ for pack in packages:
+ yield u(r'\usepackage{{{0}}}'.format(pack))
+ yield u(r'\pagestyle{empty}')
+ if lt.preamble:
+ yield lt.preamble
+ yield u(r'\begin{document}')
+ if breqn:
+ yield u(r'\begin{dmath*}')
+ yield body
+ yield u(r'\end{dmath*}')
+ elif wrap:
+ yield u'$${0}$$'.format(body)
+ else:
+ yield body
+ yield u(r'\end{document}')
+
+
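To inspect exactly what genelatex feeds to latex, its lines can simply be joined; illustrative only:

    doc = u'\n'.join(genelatex(r'\sum_{k=1}^{n} k = \frac{n(n+1)}{2}', wrap=True))
    print(doc)   # \documentclass{article} ... \begin{document} ... \end{document}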
+_data_uri_template_png = u"""<img src="data:image/png;base64,%s" alt=%s />"""
+
+def latex_to_html(s, alt='image'):
+ """Render LaTeX to HTML with embedded PNG data using data URIs.
+
+ Parameters
+ ----------
+ s : str
+ The raw string containing valid inline LaTeX.
+ alt : str
+ The alt text to use for the HTML.
+ """
+ # latex_to_png returns None when no backend is usable; only decode real data.
+ bin_data = latex_to_png(s, encode=True)
+ if bin_data:
+ return _data_uri_template_png % (bin_data.decode('ascii'), alt)
+
+
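A hedged sketch of latex_to_html in use; IPython.display.HTML is just one convenient way to show the resulting fragment, and None is returned when no PNG could be produced:

    from IPython.display import HTML

    fragment = latex_to_html(r'x^2 + y^2 = r^2', alt='circle equation')
    if fragment is not None:
        HTML(fragment)   # in a notebook, displaying this renders the embedded data-URI image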
diff --git a/contrib/python/ipython/py2/IPython/lib/lexers.py b/contrib/python/ipython/py2/IPython/lib/lexers.py
index ec43856115..9160ae1245 100644
--- a/contrib/python/ipython/py2/IPython/lib/lexers.py
+++ b/contrib/python/ipython/py2/IPython/lib/lexers.py
@@ -1,517 +1,517 @@
-# -*- coding: utf-8 -*-
-"""
-Defines a variety of Pygments lexers for highlighting IPython code.
-
-This includes:
-
- IPythonLexer, IPython3Lexer
- Lexers for pure IPython (python + magic/shell commands)
-
- IPythonPartialTracebackLexer, IPythonTracebackLexer
- Supports 2.x and 3.x via keyword `python3`. The partial traceback
- lexer reads everything but the Python code appearing in a traceback.
- The full lexer combines the partial lexer with an IPython lexer.
-
- IPythonConsoleLexer
- A lexer for IPython console sessions, with support for tracebacks.
-
- IPyLexer
- A friendly lexer which examines the first line of text and from it,
- decides whether to use an IPython lexer or an IPython console lexer.
- This is probably the only lexer that needs to be explicitly added
- to Pygments.
-
-"""
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, the IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-# Standard library
-import re
-
-# Third party
+# -*- coding: utf-8 -*-
+"""
+Defines a variety of Pygments lexers for highlighting IPython code.
+
+This includes:
+
+ IPythonLexer, IPython3Lexer
+ Lexers for pure IPython (python + magic/shell commands)
+
+ IPythonPartialTracebackLexer, IPythonTracebackLexer
+ Supports 2.x and 3.x via keyword `python3`. The partial traceback
+ lexer reads everything but the Python code appearing in a traceback.
+ The full lexer combines the partial lexer with an IPython lexer.
+
+ IPythonConsoleLexer
+ A lexer for IPython console sessions, with support for tracebacks.
+
+ IPyLexer
+ A friendly lexer which examines the first line of text and from it,
+ decides whether to use an IPython lexer or an IPython console lexer.
+ This is probably the only lexer that needs to be explicitly added
+ to Pygments.
+
+"""
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, the IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+# Standard library
+import re
+
+# Third party
from pygments.lexers import BashLexer, Python3Lexer
try:
# PythonLexer was renamed to Python2Lexer in pygments 2.5
from pygments.lexers import Python2Lexer
except ImportError:
from pygments.lexers import PythonLexer as Python2Lexer
-from pygments.lexer import (
- Lexer, DelegatingLexer, RegexLexer, do_insertions, bygroups, using,
-)
-from pygments.token import (
+from pygments.lexer import (
+ Lexer, DelegatingLexer, RegexLexer, do_insertions, bygroups, using,
+)
+from pygments.token import (
Generic, Keyword, Literal, Name, Operator, Other, Text, Error,
-)
-from pygments.util import get_bool_opt
-
-# Local
-
-line_re = re.compile('.*?\n')
-
-__all__ = ['build_ipy_lexer', 'IPython3Lexer', 'IPythonLexer',
- 'IPythonPartialTracebackLexer', 'IPythonTracebackLexer',
- 'IPythonConsoleLexer', 'IPyLexer']
-
-ipython_tokens = [
- (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
- (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
- (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
- (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
- (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
- using(BashLexer), Text)),
- (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)),
- (r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
- (r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
- (r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)),
- (r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)),
-]
-
-def build_ipy_lexer(python3):
- """Builds IPython lexers depending on the value of `python3`.
-
- The lexer inherits from an appropriate Python lexer and then adds
- information about IPython specific keywords (i.e. magic commands,
- shell commands, etc.)
-
- Parameters
- ----------
- python3 : bool
- If `True`, then build an IPython lexer from a Python 3 lexer.
-
- """
- # It would be nice to have a single IPython lexer class which takes
- # a boolean `python3`. But since there are two Python lexer classes,
- # we will also have two IPython lexer classes.
- if python3:
- PyLexer = Python3Lexer
- name = 'IPython3'
- aliases = ['ipython3']
- doc = """IPython3 Lexer"""
- else:
+)
+from pygments.util import get_bool_opt
+
+# Local
+
+line_re = re.compile('.*?\n')
+
+__all__ = ['build_ipy_lexer', 'IPython3Lexer', 'IPythonLexer',
+ 'IPythonPartialTracebackLexer', 'IPythonTracebackLexer',
+ 'IPythonConsoleLexer', 'IPyLexer']
+
+ipython_tokens = [
+ (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
+ (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
+ (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
+ (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
+ (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
+ using(BashLexer), Text)),
+ (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)),
+ (r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
+ (r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
+ (r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)),
+ (r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)),
+]
+
+def build_ipy_lexer(python3):
+ """Builds IPython lexers depending on the value of `python3`.
+
+ The lexer inherits from an appropriate Python lexer and then adds
+ information about IPython specific keywords (i.e. magic commands,
+ shell commands, etc.)
+
+ Parameters
+ ----------
+ python3 : bool
+ If `True`, then build an IPython lexer from a Python 3 lexer.
+
+ """
+ # It would be nice to have a single IPython lexer class which takes
+ # a boolean `python3`. But since there are two Python lexer classes,
+ # we will also have two IPython lexer classes.
+ if python3:
+ PyLexer = Python3Lexer
+ name = 'IPython3'
+ aliases = ['ipython3']
+ doc = """IPython3 Lexer"""
+ else:
PyLexer = Python2Lexer
- name = 'IPython'
- aliases = ['ipython2', 'ipython']
- doc = """IPython Lexer"""
-
- tokens = PyLexer.tokens.copy()
- tokens['root'] = ipython_tokens + tokens['root']
-
- attrs = {'name': name, 'aliases': aliases, 'filenames': [],
- '__doc__': doc, 'tokens': tokens}
-
- return type(name, (PyLexer,), attrs)
-
-
-IPython3Lexer = build_ipy_lexer(python3=True)
-IPythonLexer = build_ipy_lexer(python3=False)
-
-
-class IPythonPartialTracebackLexer(RegexLexer):
- """
- Partial lexer for IPython tracebacks.
-
- Handles all the non-python output. This works for both Python 2.x and 3.x.
-
- """
- name = 'IPython Partial Traceback'
-
- tokens = {
- 'root': [
- # Tracebacks for syntax errors have a different style.
- # For both types of tracebacks, we mark the first line with
- # Generic.Traceback. For syntax errors, we mark the filename
- # as we mark the filenames for non-syntax tracebacks.
- #
- # These two regexps define how IPythonConsoleLexer finds a
- # traceback.
- #
- ## Non-syntax traceback
- (r'^(\^C)?(-+\n)', bygroups(Error, Generic.Traceback)),
- ## Syntax traceback
- (r'^( File)(.*)(, line )(\d+\n)',
- bygroups(Generic.Traceback, Name.Namespace,
- Generic.Traceback, Literal.Number.Integer)),
-
- # (Exception Identifier)(Whitespace)(Traceback Message)
- (r'(?u)(^[^\d\W]\w*)(\s*)(Traceback.*?\n)',
- bygroups(Name.Exception, Generic.Whitespace, Text)),
- # (Module/Filename)(Text)(Callee)(Function Signature)
- # Better options for callee and function signature?
- (r'(.*)( in )(.*)(\(.*\)\n)',
- bygroups(Name.Namespace, Text, Name.Entity, Name.Tag)),
- # Regular line: (Whitespace)(Line Number)(Python Code)
- (r'(\s*?)(\d+)(.*?\n)',
- bygroups(Generic.Whitespace, Literal.Number.Integer, Other)),
- # Emphasized line: (Arrow)(Line Number)(Python Code)
- # Using Exception token so arrow color matches the Exception.
- (r'(-*>?\s?)(\d+)(.*?\n)',
- bygroups(Name.Exception, Literal.Number.Integer, Other)),
- # (Exception Identifier)(Message)
- (r'(?u)(^[^\d\W]\w*)(:.*?\n)',
- bygroups(Name.Exception, Text)),
- # Tag everything else as Other, will be handled later.
- (r'.*\n', Other),
- ],
- }
-
-
-class IPythonTracebackLexer(DelegatingLexer):
- """
- IPython traceback lexer.
-
- For doctests, the tracebacks can be snipped as much as desired with the
- exception to the lines that designate a traceback. For non-syntax error
- tracebacks, this is the line of hyphens. For syntax error tracebacks,
- this is the line which lists the File and line number.
-
- """
- # The lexer inherits from DelegatingLexer. The "root" lexer is an
- # appropriate IPython lexer, which depends on the value of the boolean
- # `python3`. First, we parse with the partial IPython traceback lexer.
- # Then, any code marked with the "Other" token is delegated to the root
- # lexer.
- #
- name = 'IPython Traceback'
- aliases = ['ipythontb']
-
- def __init__(self, **options):
- self.python3 = get_bool_opt(options, 'python3', False)
- if self.python3:
- self.aliases = ['ipython3tb']
- else:
- self.aliases = ['ipython2tb', 'ipythontb']
-
- if self.python3:
- IPyLexer = IPython3Lexer
- else:
- IPyLexer = IPythonLexer
-
- DelegatingLexer.__init__(self, IPyLexer,
- IPythonPartialTracebackLexer, **options)
-
-class IPythonConsoleLexer(Lexer):
- """
- An IPython console lexer for IPython code-blocks and doctests, such as:
-
- .. code-block:: rst
-
- .. code-block:: ipythonconsole
-
- In [1]: a = 'foo'
-
- In [2]: a
- Out[2]: 'foo'
-
- In [3]: print a
- foo
-
- In [4]: 1 / 0
-
-
- Support is also provided for IPython exceptions:
-
- .. code-block:: rst
-
- .. code-block:: ipythonconsole
-
- In [1]: raise Exception
-
- ---------------------------------------------------------------------------
- Exception Traceback (most recent call last)
- <ipython-input-1-fca2ab0ca76b> in <module>()
- ----> 1 raise Exception
-
- Exception:
-
- """
- name = 'IPython console session'
- aliases = ['ipythonconsole']
- mimetypes = ['text/x-ipython-console']
-
- # The regexps used to determine what is input and what is output.
- # The default prompts for IPython are:
- #
+ name = 'IPython'
+ aliases = ['ipython2', 'ipython']
+ doc = """IPython Lexer"""
+
+ tokens = PyLexer.tokens.copy()
+ tokens['root'] = ipython_tokens + tokens['root']
+
+ attrs = {'name': name, 'aliases': aliases, 'filenames': [],
+ '__doc__': doc, 'tokens': tokens}
+
+ return type(name, (PyLexer,), attrs)
+
+
+IPython3Lexer = build_ipy_lexer(python3=True)
+IPythonLexer = build_ipy_lexer(python3=False)
+
+
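The generated classes are ordinary Pygments lexers, so they plug into pygments.highlight like any other. A small illustrative use:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    code = "%timeit sum(range(10))\n!ls -l\n"
    print(highlight(code, IPython3Lexer(), TerminalFormatter()))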
+class IPythonPartialTracebackLexer(RegexLexer):
+ """
+ Partial lexer for IPython tracebacks.
+
+ Handles all the non-python output. This works for both Python 2.x and 3.x.
+
+ """
+ name = 'IPython Partial Traceback'
+
+ tokens = {
+ 'root': [
+ # Tracebacks for syntax errors have a different style.
+ # For both types of tracebacks, we mark the first line with
+ # Generic.Traceback. For syntax errors, we mark the filename
+ # as we mark the filenames for non-syntax tracebacks.
+ #
+ # These two regexps define how IPythonConsoleLexer finds a
+ # traceback.
+ #
+ ## Non-syntax traceback
+ (r'^(\^C)?(-+\n)', bygroups(Error, Generic.Traceback)),
+ ## Syntax traceback
+ (r'^( File)(.*)(, line )(\d+\n)',
+ bygroups(Generic.Traceback, Name.Namespace,
+ Generic.Traceback, Literal.Number.Integer)),
+
+ # (Exception Identifier)(Whitespace)(Traceback Message)
+ (r'(?u)(^[^\d\W]\w*)(\s*)(Traceback.*?\n)',
+ bygroups(Name.Exception, Generic.Whitespace, Text)),
+ # (Module/Filename)(Text)(Callee)(Function Signature)
+ # Better options for callee and function signature?
+ (r'(.*)( in )(.*)(\(.*\)\n)',
+ bygroups(Name.Namespace, Text, Name.Entity, Name.Tag)),
+ # Regular line: (Whitespace)(Line Number)(Python Code)
+ (r'(\s*?)(\d+)(.*?\n)',
+ bygroups(Generic.Whitespace, Literal.Number.Integer, Other)),
+ # Emphasized line: (Arrow)(Line Number)(Python Code)
+ # Using Exception token so arrow color matches the Exception.
+ (r'(-*>?\s?)(\d+)(.*?\n)',
+ bygroups(Name.Exception, Literal.Number.Integer, Other)),
+ # (Exception Identifier)(Message)
+ (r'(?u)(^[^\d\W]\w*)(:.*?\n)',
+ bygroups(Name.Exception, Text)),
+ # Tag everything else as Other, will be handled later.
+ (r'.*\n', Other),
+ ],
+ }
+
+
+class IPythonTracebackLexer(DelegatingLexer):
+ """
+ IPython traceback lexer.
+
+ For doctests, the tracebacks can be snipped as much as desired with the
+ exception of the lines that designate a traceback. For non-syntax error
+ tracebacks, this is the line of hyphens. For syntax error tracebacks,
+ this is the line which lists the File and line number.
+
+ """
+ # The lexer inherits from DelegatingLexer. The "root" lexer is an
+ # appropriate IPython lexer, which depends on the value of the boolean
+ # `python3`. First, we parse with the partial IPython traceback lexer.
+ # Then, any code marked with the "Other" token is delegated to the root
+ # lexer.
+ #
+ name = 'IPython Traceback'
+ aliases = ['ipythontb']
+
+ def __init__(self, **options):
+ self.python3 = get_bool_opt(options, 'python3', False)
+ if self.python3:
+ self.aliases = ['ipython3tb']
+ else:
+ self.aliases = ['ipython2tb', 'ipythontb']
+
+ if self.python3:
+ IPyLexer = IPython3Lexer
+ else:
+ IPyLexer = IPythonLexer
+
+ DelegatingLexer.__init__(self, IPyLexer,
+ IPythonPartialTracebackLexer, **options)
+
+class IPythonConsoleLexer(Lexer):
+ """
+ An IPython console lexer for IPython code-blocks and doctests, such as:
+
+ .. code-block:: rst
+
+ .. code-block:: ipythonconsole
+
+ In [1]: a = 'foo'
+
+ In [2]: a
+ Out[2]: 'foo'
+
+ In [3]: print a
+ foo
+
+ In [4]: 1 / 0
+
+
+ Support is also provided for IPython exceptions:
+
+ .. code-block:: rst
+
+ .. code-block:: ipythonconsole
+
+ In [1]: raise Exception
+
+ ---------------------------------------------------------------------------
+ Exception Traceback (most recent call last)
+ <ipython-input-1-fca2ab0ca76b> in <module>()
+ ----> 1 raise Exception
+
+ Exception:
+
+ """
+ name = 'IPython console session'
+ aliases = ['ipythonconsole']
+ mimetypes = ['text/x-ipython-console']
+
+ # The regexps used to determine what is input and what is output.
+ # The default prompts for IPython are:
+ #
# in = 'In [#]: '
# continuation = ' .D.: '
# template = 'Out[#]: '
- #
+ #
# Where '#' is the 'prompt number' or 'execution count' and 'D' is
# a number of dots matching the width of the execution count.
#
- in1_regex = r'In \[[0-9]+\]: '
- in2_regex = r' \.\.+\.: '
- out_regex = r'Out\[[0-9]+\]: '
-
- #: The regex to determine when a traceback starts.
- ipytb_start = re.compile(r'^(\^C)?(-+\n)|^( File)(.*)(, line )(\d+\n)')
-
- def __init__(self, **options):
- """Initialize the IPython console lexer.
-
- Parameters
- ----------
- python3 : bool
- If `True`, then the console inputs are parsed using a Python 3
- lexer. Otherwise, they are parsed using a Python 2 lexer.
- in1_regex : RegexObject
- The compiled regular expression used to detect the start
- of inputs. Although the IPython configuration setting may have a
- trailing whitespace, do not include it in the regex. If `None`,
- then the default input prompt is assumed.
- in2_regex : RegexObject
- The compiled regular expression used to detect the continuation
- of inputs. Although the IPython configuration setting may have a
- trailing whitespace, do not include it in the regex. If `None`,
- then the default input prompt is assumed.
- out_regex : RegexObject
- The compiled regular expression used to detect outputs. If `None`,
- then the default output prompt is assumed.
-
- """
- self.python3 = get_bool_opt(options, 'python3', False)
- if self.python3:
- self.aliases = ['ipython3console']
- else:
- self.aliases = ['ipython2console', 'ipythonconsole']
-
- in1_regex = options.get('in1_regex', self.in1_regex)
- in2_regex = options.get('in2_regex', self.in2_regex)
- out_regex = options.get('out_regex', self.out_regex)
-
- # So that we can work with input and output prompts which have been
- # rstrip'd (possibly by editors) we also need rstrip'd variants. If
- # we do not do this, then such prompts will be tagged as 'output'.
- # The reason can't just use the rstrip'd variants instead is because
- # we want any whitespace associated with the prompt to be inserted
- # with the token. This allows formatted code to be modified so as hide
- # the appearance of prompts, with the whitespace included. One example
- # use of this is in copybutton.js from the standard lib Python docs.
- in1_regex_rstrip = in1_regex.rstrip() + '\n'
- in2_regex_rstrip = in2_regex.rstrip() + '\n'
- out_regex_rstrip = out_regex.rstrip() + '\n'
-
- # Compile and save them all.
- attrs = ['in1_regex', 'in2_regex', 'out_regex',
- 'in1_regex_rstrip', 'in2_regex_rstrip', 'out_regex_rstrip']
- for attr in attrs:
- self.__setattr__(attr, re.compile(locals()[attr]))
-
- Lexer.__init__(self, **options)
-
- if self.python3:
- pylexer = IPython3Lexer
- tblexer = IPythonTracebackLexer
- else:
- pylexer = IPythonLexer
- tblexer = IPythonTracebackLexer
-
- self.pylexer = pylexer(**options)
- self.tblexer = tblexer(**options)
-
- self.reset()
-
- def reset(self):
- self.mode = 'output'
- self.index = 0
- self.buffer = u''
- self.insertions = []
-
- def buffered_tokens(self):
- """
- Generator of unprocessed tokens after doing insertions and before
- changing to a new state.
-
- """
- if self.mode == 'output':
- tokens = [(0, Generic.Output, self.buffer)]
- elif self.mode == 'input':
- tokens = self.pylexer.get_tokens_unprocessed(self.buffer)
- else: # traceback
- tokens = self.tblexer.get_tokens_unprocessed(self.buffer)
-
- for i, t, v in do_insertions(self.insertions, tokens):
- # All token indexes are relative to the buffer.
- yield self.index + i, t, v
-
- # Clear it all
- self.index += len(self.buffer)
- self.buffer = u''
- self.insertions = []
-
- def get_mci(self, line):
- """
- Parses the line and returns a 3-tuple: (mode, code, insertion).
-
- `mode` is the next mode (or state) of the lexer, and is always equal
- to 'input', 'output', or 'tb'.
-
- `code` is a portion of the line that should be added to the buffer
- corresponding to the next mode and eventually lexed by another lexer.
- For example, `code` could be Python code if `mode` were 'input'.
-
- `insertion` is a 3-tuple (index, token, text) representing an
- unprocessed "token" that will be inserted into the stream of tokens
- that are created from the buffer once we change modes. This is usually
- the input or output prompt.
-
- In general, the next mode depends on current mode and on the contents
- of `line`.
-
- """
- # To reduce the number of regex match checks, we have multiple
- # 'if' blocks instead of 'if-elif' blocks.
-
- # Check for possible end of input
- in2_match = self.in2_regex.match(line)
- in2_match_rstrip = self.in2_regex_rstrip.match(line)
- if (in2_match and in2_match.group().rstrip() == line.rstrip()) or \
- in2_match_rstrip:
- end_input = True
- else:
- end_input = False
- if end_input and self.mode != 'tb':
- # Only look for an end of input when not in tb mode.
- # An ellipsis could appear within the traceback.
- mode = 'output'
- code = u''
- insertion = (0, Generic.Prompt, line)
- return mode, code, insertion
-
- # Check for output prompt
- out_match = self.out_regex.match(line)
- out_match_rstrip = self.out_regex_rstrip.match(line)
- if out_match or out_match_rstrip:
- mode = 'output'
- if out_match:
- idx = out_match.end()
- else:
- idx = out_match_rstrip.end()
- code = line[idx:]
- # Use the 'heading' token for output. We cannot use Generic.Error
- # since it would conflict with exceptions.
- insertion = (0, Generic.Heading, line[:idx])
- return mode, code, insertion
-
-
- # Check for input or continuation prompt (non stripped version)
- in1_match = self.in1_regex.match(line)
- if in1_match or (in2_match and self.mode != 'tb'):
- # New input or when not in tb, continued input.
- # We do not check for continued input when in tb since it is
- # allowable to replace a long stack with an ellipsis.
- mode = 'input'
- if in1_match:
- idx = in1_match.end()
- else: # in2_match
- idx = in2_match.end()
- code = line[idx:]
- insertion = (0, Generic.Prompt, line[:idx])
- return mode, code, insertion
-
- # Check for input or continuation prompt (stripped version)
- in1_match_rstrip = self.in1_regex_rstrip.match(line)
- if in1_match_rstrip or (in2_match_rstrip and self.mode != 'tb'):
- # New input or when not in tb, continued input.
- # We do not check for continued input when in tb since it is
- # allowable to replace a long stack with an ellipsis.
- mode = 'input'
- if in1_match_rstrip:
- idx = in1_match_rstrip.end()
- else: # in2_match
- idx = in2_match_rstrip.end()
- code = line[idx:]
- insertion = (0, Generic.Prompt, line[:idx])
- return mode, code, insertion
-
- # Check for traceback
- if self.ipytb_start.match(line):
- mode = 'tb'
- code = line
- insertion = None
- return mode, code, insertion
-
- # All other stuff...
- if self.mode in ('input', 'output'):
- # We assume all other text is output. Multiline input that
- # does not use the continuation marker cannot be detected.
- # For example, the 3 in the following is clearly output:
- #
- # In [1]: print 3
- # 3
- #
- # But the following second line is part of the input:
- #
- # In [2]: while True:
- # print True
- #
- # In both cases, the 2nd line will be 'output'.
- #
- mode = 'output'
- else:
- mode = 'tb'
-
- code = line
- insertion = None
-
- return mode, code, insertion
-
- def get_tokens_unprocessed(self, text):
- self.reset()
- for match in line_re.finditer(text):
- line = match.group()
- mode, code, insertion = self.get_mci(line)
-
- if mode != self.mode:
- # Yield buffered tokens before transitioning to new mode.
- for token in self.buffered_tokens():
- yield token
- self.mode = mode
-
- if insertion:
- self.insertions.append((len(self.buffer), [insertion]))
- self.buffer += code
-
+ in1_regex = r'In \[[0-9]+\]: '
+ in2_regex = r' \.\.+\.: '
+ out_regex = r'Out\[[0-9]+\]: '
+
+ #: The regex to determine when a traceback starts.
+ ipytb_start = re.compile(r'^(\^C)?(-+\n)|^( File)(.*)(, line )(\d+\n)')
+
+ def __init__(self, **options):
+ """Initialize the IPython console lexer.
+
+ Parameters
+ ----------
+ python3 : bool
+ If `True`, then the console inputs are parsed using a Python 3
+ lexer. Otherwise, they are parsed using a Python 2 lexer.
+ in1_regex : RegexObject
+ The compiled regular expression used to detect the start
+ of inputs. Although the IPython configuration setting may have a
+ trailing whitespace, do not include it in the regex. If `None`,
+ then the default input prompt is assumed.
+ in2_regex : RegexObject
+ The compiled regular expression used to detect the continuation
+ of inputs. Although the IPython configuration setting may have a
+ trailing whitespace, do not include it in the regex. If `None`,
+ then the default input prompt is assumed.
+ out_regex : RegexObject
+ The compiled regular expression used to detect outputs. If `None`,
+ then the default output prompt is assumed.
+
+ """
+ self.python3 = get_bool_opt(options, 'python3', False)
+ if self.python3:
+ self.aliases = ['ipython3console']
+ else:
+ self.aliases = ['ipython2console', 'ipythonconsole']
+
+ in1_regex = options.get('in1_regex', self.in1_regex)
+ in2_regex = options.get('in2_regex', self.in2_regex)
+ out_regex = options.get('out_regex', self.out_regex)
+
+ # So that we can work with input and output prompts which have been
+ # rstrip'd (possibly by editors) we also need rstrip'd variants. If
+ # we do not do this, then such prompts will be tagged as 'output'.
+ # The reason we can't just use the rstrip'd variants instead is that
+ # we want any whitespace associated with the prompt to be inserted
+ # with the token. This allows formatted code to be modified so as to hide
+ # the appearance of prompts, with the whitespace included. One example
+ # use of this is in copybutton.js from the standard lib Python docs.
+ in1_regex_rstrip = in1_regex.rstrip() + '\n'
+ in2_regex_rstrip = in2_regex.rstrip() + '\n'
+ out_regex_rstrip = out_regex.rstrip() + '\n'
+
+ # Compile and save them all.
+ attrs = ['in1_regex', 'in2_regex', 'out_regex',
+ 'in1_regex_rstrip', 'in2_regex_rstrip', 'out_regex_rstrip']
+ for attr in attrs:
+ self.__setattr__(attr, re.compile(locals()[attr]))
+
+ Lexer.__init__(self, **options)
+
+ if self.python3:
+ pylexer = IPython3Lexer
+ tblexer = IPythonTracebackLexer
+ else:
+ pylexer = IPythonLexer
+ tblexer = IPythonTracebackLexer
+
+ self.pylexer = pylexer(**options)
+ self.tblexer = tblexer(**options)
+
+ self.reset()
+
+ def reset(self):
+ self.mode = 'output'
+ self.index = 0
+ self.buffer = u''
+ self.insertions = []
+
+ def buffered_tokens(self):
+ """
+ Generator of unprocessed tokens after doing insertions and before
+ changing to a new state.
+
+ """
+ if self.mode == 'output':
+ tokens = [(0, Generic.Output, self.buffer)]
+ elif self.mode == 'input':
+ tokens = self.pylexer.get_tokens_unprocessed(self.buffer)
+ else: # traceback
+ tokens = self.tblexer.get_tokens_unprocessed(self.buffer)
+
+ for i, t, v in do_insertions(self.insertions, tokens):
+ # All token indexes are relative to the buffer.
+ yield self.index + i, t, v
+
+ # Clear it all
+ self.index += len(self.buffer)
+ self.buffer = u''
+ self.insertions = []
+
+ def get_mci(self, line):
+ """
+ Parses the line and returns a 3-tuple: (mode, code, insertion).
+
+ `mode` is the next mode (or state) of the lexer, and is always equal
+ to 'input', 'output', or 'tb'.
+
+ `code` is a portion of the line that should be added to the buffer
+ corresponding to the next mode and eventually lexed by another lexer.
+ For example, `code` could be Python code if `mode` were 'input'.
+
+ `insertion` is a 3-tuple (index, token, text) representing an
+ unprocessed "token" that will be inserted into the stream of tokens
+ that are created from the buffer once we change modes. This is usually
+ the input or output prompt.
+
+ In general, the next mode depends on current mode and on the contents
+ of `line`.
+
+ """
+ # To reduce the number of regex match checks, we have multiple
+ # 'if' blocks instead of 'if-elif' blocks.
+
+ # Check for possible end of input
+ in2_match = self.in2_regex.match(line)
+ in2_match_rstrip = self.in2_regex_rstrip.match(line)
+ if (in2_match and in2_match.group().rstrip() == line.rstrip()) or \
+ in2_match_rstrip:
+ end_input = True
+ else:
+ end_input = False
+ if end_input and self.mode != 'tb':
+ # Only look for an end of input when not in tb mode.
+ # An ellipsis could appear within the traceback.
+ mode = 'output'
+ code = u''
+ insertion = (0, Generic.Prompt, line)
+ return mode, code, insertion
+
+ # Check for output prompt
+ out_match = self.out_regex.match(line)
+ out_match_rstrip = self.out_regex_rstrip.match(line)
+ if out_match or out_match_rstrip:
+ mode = 'output'
+ if out_match:
+ idx = out_match.end()
+ else:
+ idx = out_match_rstrip.end()
+ code = line[idx:]
+ # Use the 'heading' token for output. We cannot use Generic.Error
+ # since it would conflict with exceptions.
+ insertion = (0, Generic.Heading, line[:idx])
+ return mode, code, insertion
+
+
+ # Check for input or continuation prompt (non stripped version)
+ in1_match = self.in1_regex.match(line)
+ if in1_match or (in2_match and self.mode != 'tb'):
+ # New input or when not in tb, continued input.
+ # We do not check for continued input when in tb since it is
+ # allowable to replace a long stack with an ellipsis.
+ mode = 'input'
+ if in1_match:
+ idx = in1_match.end()
+ else: # in2_match
+ idx = in2_match.end()
+ code = line[idx:]
+ insertion = (0, Generic.Prompt, line[:idx])
+ return mode, code, insertion
+
+ # Check for input or continuation prompt (stripped version)
+ in1_match_rstrip = self.in1_regex_rstrip.match(line)
+ if in1_match_rstrip or (in2_match_rstrip and self.mode != 'tb'):
+ # New input or when not in tb, continued input.
+ # We do not check for continued input when in tb since it is
+ # allowable to replace a long stack with an ellipsis.
+ mode = 'input'
+ if in1_match_rstrip:
+ idx = in1_match_rstrip.end()
+ else: # in2_match
+ idx = in2_match_rstrip.end()
+ code = line[idx:]
+ insertion = (0, Generic.Prompt, line[:idx])
+ return mode, code, insertion
+
+ # Check for traceback
+ if self.ipytb_start.match(line):
+ mode = 'tb'
+ code = line
+ insertion = None
+ return mode, code, insertion
+
+ # All other stuff...
+ if self.mode in ('input', 'output'):
+ # We assume all other text is output. Multiline input that
+ # does not use the continuation marker cannot be detected.
+ # For example, the 3 in the following is clearly output:
+ #
+ # In [1]: print 3
+ # 3
+ #
+ # But the following second line is part of the input:
+ #
+ # In [2]: while True:
+ # print True
+ #
+ # In both cases, the 2nd line will be 'output'.
+ #
+ mode = 'output'
+ else:
+ mode = 'tb'
+
+ code = line
+ insertion = None
+
+ return mode, code, insertion
+
+ def get_tokens_unprocessed(self, text):
+ self.reset()
+ for match in line_re.finditer(text):
+ line = match.group()
+ mode, code, insertion = self.get_mci(line)
+
+ if mode != self.mode:
+ # Yield buffered tokens before transitioning to new mode.
+ for token in self.buffered_tokens():
+ yield token
+ self.mode = mode
+
+ if insertion:
+ self.insertions.append((len(self.buffer), [insertion]))
+ self.buffer += code
+
for token in self.buffered_tokens():
yield token
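A short, illustrative tokenisation of a console transcript with the lexer above; get_tokens_unprocessed yields (index, token, value) triples:

    session = (
        "In [1]: a = 'foo'\n"
        "\n"
        "In [2]: a\n"
        "Out[2]: 'foo'\n"
    )
    for index, token, value in IPythonConsoleLexer(python3=True).get_tokens_unprocessed(session):
        print(index, token, repr(value))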
-class IPyLexer(Lexer):
- """
- Primary lexer for all IPython-like code.
-
- This is a simple helper lexer. If the first line of the text begins with
- "In \[[0-9]+\]:", then the entire text is parsed with an IPython console
- lexer. If not, then the entire text is parsed with an IPython lexer.
-
- The goal is to reduce the number of lexers that are registered
- with Pygments.
-
- """
- name = 'IPy session'
- aliases = ['ipy']
-
- def __init__(self, **options):
- self.python3 = get_bool_opt(options, 'python3', False)
- if self.python3:
- self.aliases = ['ipy3']
- else:
- self.aliases = ['ipy2', 'ipy']
-
- Lexer.__init__(self, **options)
-
- self.IPythonLexer = IPythonLexer(**options)
- self.IPythonConsoleLexer = IPythonConsoleLexer(**options)
-
- def get_tokens_unprocessed(self, text):
- # Search for the input prompt anywhere...this allows code blocks to
- # begin with comments as well.
- if re.match(r'.*(In \[[0-9]+\]:)', text.strip(), re.DOTALL):
- lex = self.IPythonConsoleLexer
- else:
- lex = self.IPythonLexer
- for token in lex.get_tokens_unprocessed(text):
- yield token
-
+class IPyLexer(Lexer):
+ """
+ Primary lexer for all IPython-like code.
+
+ This is a simple helper lexer. If an input prompt matching
+ "In \[[0-9]+\]:" appears in the text, the entire text is parsed with an
+ IPython console lexer. If not, it is parsed with an IPython lexer.
+
+ The goal is to reduce the number of lexers that are registered
+ with Pygments.
+
+ """
+ name = 'IPy session'
+ aliases = ['ipy']
+
+ def __init__(self, **options):
+ self.python3 = get_bool_opt(options, 'python3', False)
+ if self.python3:
+ self.aliases = ['ipy3']
+ else:
+ self.aliases = ['ipy2', 'ipy']
+
+ Lexer.__init__(self, **options)
+
+ self.IPythonLexer = IPythonLexer(**options)
+ self.IPythonConsoleLexer = IPythonConsoleLexer(**options)
+
+ def get_tokens_unprocessed(self, text):
+ # Search for the input prompt anywhere...this allows code blocks to
+ # begin with comments as well.
+ if re.match(r'.*(In \[[0-9]+\]:)', text.strip(), re.DOTALL):
+ lex = self.IPythonConsoleLexer
+ else:
+ lex = self.IPythonLexer
+ for token in lex.get_tokens_unprocessed(text):
+ yield token
+
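IPyLexer simply dispatches between the two lexers above. The sketch below highlights a plain snippet and a console snippet with the same lexer instance; TerminalFormatter is just a convenient formatter for the example:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    lexer = IPyLexer(python3=True)
    print(highlight("x = 1", lexer, TerminalFormatter()))            # plain IPython code path
    print(highlight("In [1]: x = 1", lexer, TerminalFormatter()))    # console session path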
diff --git a/contrib/python/ipython/py2/IPython/lib/pretty.py b/contrib/python/ipython/py2/IPython/lib/pretty.py
index 385365fc3c..28eee523c5 100644
--- a/contrib/python/ipython/py2/IPython/lib/pretty.py
+++ b/contrib/python/ipython/py2/IPython/lib/pretty.py
@@ -1,127 +1,127 @@
-# -*- coding: utf-8 -*-
-"""
-Python advanced pretty printer. This pretty printer is intended to
-replace the old `pprint` python module which does not allow developers
-to provide their own pretty print callbacks.
-
-This module is based on ruby's `prettyprint.rb` library by `Tanaka Akira`.
-
-
-Example Usage
--------------
-
-To directly print the representation of an object use `pprint`::
-
- from pretty import pprint
- pprint(complex_object)
-
-To get a string of the output use `pretty`::
-
- from pretty import pretty
- string = pretty(complex_object)
-
-
-Extending
----------
-
-The pretty library allows developers to add pretty printing rules for their
-own objects. This process is straightforward. All you have to do is to
-add a `_repr_pretty_` method to your object and call the methods on the
-pretty printer passed::
-
- class MyObject(object):
-
- def _repr_pretty_(self, p, cycle):
- ...
-
-Here is an example implementation of a `_repr_pretty_` method for a list
-subclass::
-
- class MyList(list):
-
- def _repr_pretty_(self, p, cycle):
- if cycle:
- p.text('MyList(...)')
- else:
- with p.group(8, 'MyList([', '])'):
- for idx, item in enumerate(self):
- if idx:
- p.text(',')
- p.breakable()
- p.pretty(item)
-
-The `cycle` parameter is `True` if pretty detected a cycle. You *have* to
-react to that or the result is an infinite loop. `p.text()` just adds
-non breaking text to the output, `p.breakable()` either adds a whitespace
-or breaks here. If you pass it an argument it's used instead of the
-default space. `p.pretty` prettyprints another object using the pretty print
-method.
-
-The first parameter to the `group` function specifies the extra indentation
-of the next line. In this example the next item will either be on the same
-line (if the items are short enough) or aligned with the right edge of the
-opening bracket of `MyList`.
-
-If you just want to indent something you can use the group function
-without open / close parameters. You can also use this code::
-
- with p.indent(2):
- ...
-
-Inheritance diagram:
-
-.. inheritance-diagram:: IPython.lib.pretty
- :parts: 3
-
-:copyright: 2007 by Armin Ronacher.
- Portions (c) 2009 by Robert Kern.
-:license: BSD License.
-"""
-from __future__ import print_function
-from contextlib import contextmanager
-import sys
-import types
-import re
-import datetime
-from collections import deque
-
+# -*- coding: utf-8 -*-
+"""
+Python advanced pretty printer. This pretty printer is intended to
+replace the old `pprint` python module which does not allow developers
+to provide their own pretty print callbacks.
+
+This module is based on ruby's `prettyprint.rb` library by `Tanaka Akira`.
+
+
+Example Usage
+-------------
+
+To directly print the representation of an object use `pprint`::
+
+ from pretty import pprint
+ pprint(complex_object)
+
+To get a string of the output use `pretty`::
+
+ from pretty import pretty
+ string = pretty(complex_object)
+
+
+Extending
+---------
+
+The pretty library allows developers to add pretty printing rules for their
+own objects. This process is straightforward. All you have to do is to
+add a `_repr_pretty_` method to your object and call the methods on the
+pretty printer passed::
+
+ class MyObject(object):
+
+ def _repr_pretty_(self, p, cycle):
+ ...
+
+Here is an example implementation of a `_repr_pretty_` method for a list
+subclass::
+
+ class MyList(list):
+
+ def _repr_pretty_(self, p, cycle):
+ if cycle:
+ p.text('MyList(...)')
+ else:
+ with p.group(8, 'MyList([', '])'):
+ for idx, item in enumerate(self):
+ if idx:
+ p.text(',')
+ p.breakable()
+ p.pretty(item)
+
+The `cycle` parameter is `True` if pretty detected a cycle. You *have* to
+ react to that or the result is an infinite loop. `p.text()` just adds
+ non-breaking text to the output; `p.breakable()` either adds a space
+ or breaks here. If you pass it an argument, it is used instead of the
+default space. `p.pretty` prettyprints another object using the pretty print
+method.
+
+The first parameter to the `group` function specifies the extra indentation
+of the next line. In this example the next item will either be on the same
+line (if the items are short enough) or aligned with the right edge of the
+opening bracket of `MyList`.
+
+If you just want to indent something you can use the group function
+without open / close parameters. You can also use this code::
+
+ with p.indent(2):
+ ...
+
+Inheritance diagram:
+
+.. inheritance-diagram:: IPython.lib.pretty
+ :parts: 3
+
+:copyright: 2007 by Armin Ronacher.
+ Portions (c) 2009 by Robert Kern.
+:license: BSD License.
+"""
+from __future__ import print_function
+from contextlib import contextmanager
+import sys
+import types
+import re
+import datetime
+from collections import deque
+
from IPython.utils.py3compat import PY3, PYPY, cast_unicode, string_types
-from IPython.utils.encoding import get_stream_enc
-
-from io import StringIO
-
-
-__all__ = ['pretty', 'pprint', 'PrettyPrinter', 'RepresentationPrinter',
- 'for_type', 'for_type_by_name']
-
-
-MAX_SEQ_LENGTH = 1000
+from IPython.utils.encoding import get_stream_enc
+
+from io import StringIO
+
+
+__all__ = ['pretty', 'pprint', 'PrettyPrinter', 'RepresentationPrinter',
+ 'for_type', 'for_type_by_name']
+
+
+MAX_SEQ_LENGTH = 1000
# The language spec says that dicts preserve order from 3.7, but CPython
# does so from 3.6, so it seems likely that people will expect that.
DICT_IS_ORDERED = sys.version_info >= (3, 6)
-_re_pattern_type = type(re.compile(''))
-
-def _safe_getattr(obj, attr, default=None):
- """Safe version of getattr.
-
- Same as getattr, but will return ``default`` on any Exception,
- rather than raising.
- """
- try:
- return getattr(obj, attr, default)
- except Exception:
- return default
-
-if PY3:
- CUnicodeIO = StringIO
-else:
- class CUnicodeIO(StringIO):
- """StringIO that casts str to unicode on Python 2"""
- def write(self, text):
- return super(CUnicodeIO, self).write(
- cast_unicode(text, encoding=get_stream_enc(sys.stdout)))
-
-
+_re_pattern_type = type(re.compile(''))
+
+def _safe_getattr(obj, attr, default=None):
+ """Safe version of getattr.
+
+ Same as getattr, but will return ``default`` on any Exception,
+ rather than raising.
+ """
+ try:
+ return getattr(obj, attr, default)
+ except Exception:
+ return default
+
+if PY3:
+ CUnicodeIO = StringIO
+else:
+ class CUnicodeIO(StringIO):
+ """StringIO that casts str to unicode on Python 2"""
+ def write(self, text):
+ return super(CUnicodeIO, self).write(
+ cast_unicode(text, encoding=get_stream_enc(sys.stdout)))
+
+
def _sorted_for_pprint(items):
"""
Sort the given items for pretty printing. Since some predictable
@@ -137,734 +137,734 @@ def _sorted_for_pprint(items):
except Exception:
return items
-def pretty(obj, verbose=False, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH):
- """
- Pretty print the object's representation.
- """
- stream = CUnicodeIO()
- printer = RepresentationPrinter(stream, verbose, max_width, newline, max_seq_length=max_seq_length)
- printer.pretty(obj)
- printer.flush()
- return stream.getvalue()
-
-
-def pprint(obj, verbose=False, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH):
- """
- Like `pretty` but print to stdout.
- """
- printer = RepresentationPrinter(sys.stdout, verbose, max_width, newline, max_seq_length=max_seq_length)
- printer.pretty(obj)
- printer.flush()
- sys.stdout.write(newline)
- sys.stdout.flush()
-
-class _PrettyPrinterBase(object):
-
- @contextmanager
- def indent(self, indent):
- """with statement support for indenting/dedenting."""
- self.indentation += indent
- try:
- yield
- finally:
- self.indentation -= indent
-
- @contextmanager
- def group(self, indent=0, open='', close=''):
- """like begin_group / end_group but for the with statement."""
- self.begin_group(indent, open)
- try:
- yield
- finally:
- self.end_group(indent, close)
-
-class PrettyPrinter(_PrettyPrinterBase):
- """
- Baseclass for the `RepresentationPrinter` prettyprinter that is used to
- generate pretty reprs of objects. Contrary to the `RepresentationPrinter`
- this printer knows nothing about the default pprinters or the `_repr_pretty_`
- callback method.
- """
-
- def __init__(self, output, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH):
- self.output = output
- self.max_width = max_width
- self.newline = newline
- self.max_seq_length = max_seq_length
- self.output_width = 0
- self.buffer_width = 0
- self.buffer = deque()
-
- root_group = Group(0)
- self.group_stack = [root_group]
- self.group_queue = GroupQueue(root_group)
- self.indentation = 0
-
- def _break_outer_groups(self):
- while self.max_width < self.output_width + self.buffer_width:
- group = self.group_queue.deq()
- if not group:
- return
- while group.breakables:
- x = self.buffer.popleft()
- self.output_width = x.output(self.output, self.output_width)
- self.buffer_width -= x.width
- while self.buffer and isinstance(self.buffer[0], Text):
- x = self.buffer.popleft()
- self.output_width = x.output(self.output, self.output_width)
- self.buffer_width -= x.width
-
- def text(self, obj):
- """Add literal text to the output."""
- width = len(obj)
- if self.buffer:
- text = self.buffer[-1]
- if not isinstance(text, Text):
- text = Text()
- self.buffer.append(text)
- text.add(obj, width)
- self.buffer_width += width
- self._break_outer_groups()
- else:
- self.output.write(obj)
- self.output_width += width
-
- def breakable(self, sep=' '):
- """
- Add a breakable separator to the output. This does not mean that it
- will automatically break here. If no breaking on this position takes
- place the `sep` is inserted which default to one space.
- """
- width = len(sep)
- group = self.group_stack[-1]
- if group.want_break:
- self.flush()
- self.output.write(self.newline)
- self.output.write(' ' * self.indentation)
- self.output_width = self.indentation
- self.buffer_width = 0
- else:
- self.buffer.append(Breakable(sep, width, self))
- self.buffer_width += width
- self._break_outer_groups()
-
- def break_(self):
- """
- Explicitly insert a newline into the output, maintaining correct indentation.
- """
- self.flush()
- self.output.write(self.newline)
- self.output.write(' ' * self.indentation)
- self.output_width = self.indentation
- self.buffer_width = 0
-
-
- def begin_group(self, indent=0, open=''):
- """
- Begin a group. If you want support for python < 2.5 which doesn't has
- the with statement this is the preferred way:
-
- p.begin_group(1, '{')
- ...
- p.end_group(1, '}')
-
- The python 2.5 expression would be this:
-
- with p.group(1, '{', '}'):
- ...
-
- The first parameter specifies the indentation for the next line (usually
- the width of the opening text), the second the opening text. All
- parameters are optional.
- """
- if open:
- self.text(open)
- group = Group(self.group_stack[-1].depth + 1)
- self.group_stack.append(group)
- self.group_queue.enq(group)
- self.indentation += indent
-
- def _enumerate(self, seq):
- """like enumerate, but with an upper limit on the number of items"""
- for idx, x in enumerate(seq):
- if self.max_seq_length and idx >= self.max_seq_length:
- self.text(',')
- self.breakable()
- self.text('...')
- return
- yield idx, x
-
- def end_group(self, dedent=0, close=''):
- """End a group. See `begin_group` for more details."""
- self.indentation -= dedent
- group = self.group_stack.pop()
- if not group.breakables:
- self.group_queue.remove(group)
- if close:
- self.text(close)
-
- def flush(self):
- """Flush data that is left in the buffer."""
- for data in self.buffer:
- self.output_width += data.output(self.output, self.output_width)
- self.buffer.clear()
- self.buffer_width = 0
-
-
-def _get_mro(obj_class):
- """ Get a reasonable method resolution order of a class and its superclasses
- for both old-style and new-style classes.
- """
- if not hasattr(obj_class, '__mro__'):
- # Old-style class. Mix in object to make a fake new-style class.
- try:
- obj_class = type(obj_class.__name__, (obj_class, object), {})
- except TypeError:
- # Old-style extension type that does not descend from object.
- # FIXME: try to construct a more thorough MRO.
- mro = [obj_class]
- else:
- mro = obj_class.__mro__[1:-1]
- else:
- mro = obj_class.__mro__
- return mro
-
-
-class RepresentationPrinter(PrettyPrinter):
- """
- Special pretty printer that has a `pretty` method that calls the pretty
- printer for a python object.
-
- This class stores processing data on `self` so you must *never* use
- this class in a threaded environment. Always lock it or reinstantiate
- it.
-
- Instances also have a verbose flag that callbacks can access to control
- their output. For example, the default instance repr prints all attributes and
- methods that are not prefixed by an underscore if the printer is in
- verbose mode.
- """
-
- def __init__(self, output, verbose=False, max_width=79, newline='\n',
- singleton_pprinters=None, type_pprinters=None, deferred_pprinters=None,
- max_seq_length=MAX_SEQ_LENGTH):
-
- PrettyPrinter.__init__(self, output, max_width, newline, max_seq_length=max_seq_length)
- self.verbose = verbose
- self.stack = []
- if singleton_pprinters is None:
- singleton_pprinters = _singleton_pprinters.copy()
- self.singleton_pprinters = singleton_pprinters
- if type_pprinters is None:
- type_pprinters = _type_pprinters.copy()
- self.type_pprinters = type_pprinters
- if deferred_pprinters is None:
- deferred_pprinters = _deferred_type_pprinters.copy()
- self.deferred_pprinters = deferred_pprinters
-
- def pretty(self, obj):
- """Pretty print the given object."""
- obj_id = id(obj)
- cycle = obj_id in self.stack
- self.stack.append(obj_id)
- self.begin_group()
- try:
- obj_class = _safe_getattr(obj, '__class__', None) or type(obj)
- # First try to find registered singleton printers for the type.
- try:
- printer = self.singleton_pprinters[obj_id]
- except (TypeError, KeyError):
- pass
- else:
- return printer(obj, self, cycle)
- # Next walk the mro and check for either:
- # 1) a registered printer
- # 2) a _repr_pretty_ method
- for cls in _get_mro(obj_class):
- if cls in self.type_pprinters:
- # printer registered in self.type_pprinters
- return self.type_pprinters[cls](obj, self, cycle)
- else:
- # deferred printer
- printer = self._in_deferred_types(cls)
- if printer is not None:
- return printer(obj, self, cycle)
- else:
- # Finally look for special method names.
- # Some objects automatically create any requested
- # attribute. Try to ignore most of them by checking for
- # callability.
- if '_repr_pretty_' in cls.__dict__:
- meth = cls._repr_pretty_
- if callable(meth):
- return meth(obj, self, cycle)
+def pretty(obj, verbose=False, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH):
+ """
+ Pretty print the object's representation.
+ """
+ stream = CUnicodeIO()
+ printer = RepresentationPrinter(stream, verbose, max_width, newline, max_seq_length=max_seq_length)
+ printer.pretty(obj)
+ printer.flush()
+ return stream.getvalue()
+
+
+def pprint(obj, verbose=False, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH):
+ """
+ Like `pretty` but print to stdout.
+ """
+ printer = RepresentationPrinter(sys.stdout, verbose, max_width, newline, max_seq_length=max_seq_length)
+ printer.pretty(obj)
+ printer.flush()
+ sys.stdout.write(newline)
+ sys.stdout.flush()
+
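# Illustrative use of the two module-level helpers above.  The import path is
# an assumption (in IPython itself this module ships as IPython.lib.pretty);
# adjust it to wherever the module lives in your tree.
from IPython.lib.pretty import pretty, pprint

data = {'values': list(range(50)), 'nested': {'a': 1, 'b': 2}}

# pretty() returns the formatted representation as a string ...
s = pretty(data, max_width=40, max_seq_length=10)

# ... while pprint() writes the same text to sys.stdout plus a trailing newline.
pprint(data, max_width=40, max_seq_length=10)

# max_seq_length=10 truncates the 50-element list with an ellipsis after ten
# items, and max_width=40 makes the printer break at breakable separators.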
+class _PrettyPrinterBase(object):
+
+ @contextmanager
+ def indent(self, indent):
+ """with statement support for indenting/dedenting."""
+ self.indentation += indent
+ try:
+ yield
+ finally:
+ self.indentation -= indent
+
+ @contextmanager
+ def group(self, indent=0, open='', close=''):
+ """like begin_group / end_group but for the with statement."""
+ self.begin_group(indent, open)
+ try:
+ yield
+ finally:
+ self.end_group(indent, close)
+
+class PrettyPrinter(_PrettyPrinterBase):
+ """
+ Base class for the `RepresentationPrinter` pretty printer that is used to
+ generate pretty reprs of objects. Unlike the `RepresentationPrinter`,
+ this printer knows nothing about the default pprinters or the `_repr_pretty_`
+ callback method.
+ """
+
+ def __init__(self, output, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH):
+ self.output = output
+ self.max_width = max_width
+ self.newline = newline
+ self.max_seq_length = max_seq_length
+ self.output_width = 0
+ self.buffer_width = 0
+ self.buffer = deque()
+
+ root_group = Group(0)
+ self.group_stack = [root_group]
+ self.group_queue = GroupQueue(root_group)
+ self.indentation = 0
+
+ def _break_outer_groups(self):
+ while self.max_width < self.output_width + self.buffer_width:
+ group = self.group_queue.deq()
+ if not group:
+ return
+ while group.breakables:
+ x = self.buffer.popleft()
+ self.output_width = x.output(self.output, self.output_width)
+ self.buffer_width -= x.width
+ while self.buffer and isinstance(self.buffer[0], Text):
+ x = self.buffer.popleft()
+ self.output_width = x.output(self.output, self.output_width)
+ self.buffer_width -= x.width
+
+ def text(self, obj):
+ """Add literal text to the output."""
+ width = len(obj)
+ if self.buffer:
+ text = self.buffer[-1]
+ if not isinstance(text, Text):
+ text = Text()
+ self.buffer.append(text)
+ text.add(obj, width)
+ self.buffer_width += width
+ self._break_outer_groups()
+ else:
+ self.output.write(obj)
+ self.output_width += width
+
+ def breakable(self, sep=' '):
+ """
+ Add a breakable separator to the output. This does not mean that the
+ output will automatically break here. If no break occurs at this
+ position, `sep` is inserted instead; it defaults to a single space.
+ """
+ width = len(sep)
+ group = self.group_stack[-1]
+ if group.want_break:
+ self.flush()
+ self.output.write(self.newline)
+ self.output.write(' ' * self.indentation)
+ self.output_width = self.indentation
+ self.buffer_width = 0
+ else:
+ self.buffer.append(Breakable(sep, width, self))
+ self.buffer_width += width
+ self._break_outer_groups()
+
+ def break_(self):
+ """
+ Explicitly insert a newline into the output, maintaining correct indentation.
+ """
+ self.flush()
+ self.output.write(self.newline)
+ self.output.write(' ' * self.indentation)
+ self.output_width = self.indentation
+ self.buffer_width = 0
+
+
+ def begin_group(self, indent=0, open=''):
+ """
+ Begin a group. If you need to support Python < 2.5, which doesn't have
+ the with statement, this is the preferred way:
+
+ p.begin_group(1, '{')
+ ...
+ p.end_group(1, '}')
+
+ The equivalent using the with statement (Python 2.5 and later) is:
+
+ with p.group(1, '{', '}'):
+ ...
+
+ The first parameter specifies the indentation for the next line (usually
+ the width of the opening text), the second the opening text. All
+ parameters are optional.
+ """
+ if open:
+ self.text(open)
+ group = Group(self.group_stack[-1].depth + 1)
+ self.group_stack.append(group)
+ self.group_queue.enq(group)
+ self.indentation += indent
+
+ def _enumerate(self, seq):
+ """like enumerate, but with an upper limit on the number of items"""
+ for idx, x in enumerate(seq):
+ if self.max_seq_length and idx >= self.max_seq_length:
+ self.text(',')
+ self.breakable()
+ self.text('...')
+ return
+ yield idx, x
+
+ def end_group(self, dedent=0, close=''):
+ """End a group. See `begin_group` for more details."""
+ self.indentation -= dedent
+ group = self.group_stack.pop()
+ if not group.breakables:
+ self.group_queue.remove(group)
+ if close:
+ self.text(close)
+
+ def flush(self):
+ """Flush data that is left in the buffer."""
+ for data in self.buffer:
+ self.output_width += data.output(self.output, self.output_width)
+ self.buffer.clear()
+ self.buffer_width = 0
+
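# A minimal sketch of driving the low-level PrettyPrinter API directly,
# mirroring the begin_group()/end_group() docstring above.  io.StringIO is
# used for brevity (fine on Python 3; under Python 2 the module's own
# CUnicodeIO stream is the safer choice), and the import path is assumed to
# be IPython.lib.pretty.
import io
from IPython.lib.pretty import PrettyPrinter

out = io.StringIO()
p = PrettyPrinter(out, max_width=20)
p.begin_group(1, '[')
for i in range(6):
    if i:
        p.text(',')
        p.breakable()          # rendered as ' ' or as a newline + indentation
    p.text(str(i * 11111))
p.end_group(1, ']')
p.flush()
# With max_width=20 the group cannot fit on one line, so each buffered
# breakable is emitted as a newline indented by one column.
print(out.getvalue())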
+
+def _get_mro(obj_class):
+ """ Get a reasonable method resolution order of a class and its superclasses
+ for both old-style and new-style classes.
+ """
+ if not hasattr(obj_class, '__mro__'):
+ # Old-style class. Mix in object to make a fake new-style class.
+ try:
+ obj_class = type(obj_class.__name__, (obj_class, object), {})
+ except TypeError:
+ # Old-style extension type that does not descend from object.
+ # FIXME: try to construct a more thorough MRO.
+ mro = [obj_class]
+ else:
+ mro = obj_class.__mro__[1:-1]
+ else:
+ mro = obj_class.__mro__
+ return mro
+
+
+class RepresentationPrinter(PrettyPrinter):
+ """
+ Special pretty printer that has a `pretty` method that calls the pretty
+ printer for a python object.
+
+ This class stores processing data on `self` so you must *never* use
+ this class in a threaded environment. Always lock it or reinstantiate
+ it.
+
+ Instances also have a verbose flag that callbacks can access to control
+ their output. For example, the default instance repr prints all attributes and
+ methods that are not prefixed by an underscore if the printer is in
+ verbose mode.
+ """
+
+ def __init__(self, output, verbose=False, max_width=79, newline='\n',
+ singleton_pprinters=None, type_pprinters=None, deferred_pprinters=None,
+ max_seq_length=MAX_SEQ_LENGTH):
+
+ PrettyPrinter.__init__(self, output, max_width, newline, max_seq_length=max_seq_length)
+ self.verbose = verbose
+ self.stack = []
+ if singleton_pprinters is None:
+ singleton_pprinters = _singleton_pprinters.copy()
+ self.singleton_pprinters = singleton_pprinters
+ if type_pprinters is None:
+ type_pprinters = _type_pprinters.copy()
+ self.type_pprinters = type_pprinters
+ if deferred_pprinters is None:
+ deferred_pprinters = _deferred_type_pprinters.copy()
+ self.deferred_pprinters = deferred_pprinters
+
+ def pretty(self, obj):
+ """Pretty print the given object."""
+ obj_id = id(obj)
+ cycle = obj_id in self.stack
+ self.stack.append(obj_id)
+ self.begin_group()
+ try:
+ obj_class = _safe_getattr(obj, '__class__', None) or type(obj)
+ # First try to find registered singleton printers for the type.
+ try:
+ printer = self.singleton_pprinters[obj_id]
+ except (TypeError, KeyError):
+ pass
+ else:
+ return printer(obj, self, cycle)
+ # Next walk the mro and check for either:
+ # 1) a registered printer
+ # 2) a _repr_pretty_ method
+ for cls in _get_mro(obj_class):
+ if cls in self.type_pprinters:
+ # printer registered in self.type_pprinters
+ return self.type_pprinters[cls](obj, self, cycle)
+ else:
+ # deferred printer
+ printer = self._in_deferred_types(cls)
+ if printer is not None:
+ return printer(obj, self, cycle)
+ else:
+ # Finally look for special method names.
+ # Some objects automatically create any requested
+ # attribute. Try to ignore most of them by checking for
+ # callability.
+ if '_repr_pretty_' in cls.__dict__:
+ meth = cls._repr_pretty_
+ if callable(meth):
+ return meth(obj, self, cycle)
if cls is not object \
and callable(cls.__dict__.get('__repr__')):
return _repr_pprint(obj, self, cycle)
- return _default_pprint(obj, self, cycle)
- finally:
- self.end_group()
- self.stack.pop()
-
- def _in_deferred_types(self, cls):
- """
- Check if the given class is specified in the deferred type registry.
-
- Returns the printer from the registry if it exists, and None if the
- class is not in the registry. Successful matches will be moved to the
- regular type registry for future use.
- """
- mod = _safe_getattr(cls, '__module__', None)
- name = _safe_getattr(cls, '__name__', None)
- key = (mod, name)
- printer = None
- if key in self.deferred_pprinters:
- # Move the printer over to the regular registry.
- printer = self.deferred_pprinters.pop(key)
- self.type_pprinters[cls] = printer
- return printer
-
-
-class Printable(object):
-
- def output(self, stream, output_width):
- return output_width
-
-
-class Text(Printable):
-
- def __init__(self):
- self.objs = []
- self.width = 0
-
- def output(self, stream, output_width):
- for obj in self.objs:
- stream.write(obj)
- return output_width + self.width
-
- def add(self, obj, width):
- self.objs.append(obj)
- self.width += width
-
-
-class Breakable(Printable):
-
- def __init__(self, seq, width, pretty):
- self.obj = seq
- self.width = width
- self.pretty = pretty
- self.indentation = pretty.indentation
- self.group = pretty.group_stack[-1]
- self.group.breakables.append(self)
-
- def output(self, stream, output_width):
- self.group.breakables.popleft()
- if self.group.want_break:
- stream.write(self.pretty.newline)
- stream.write(' ' * self.indentation)
- return self.indentation
- if not self.group.breakables:
- self.pretty.group_queue.remove(self.group)
- stream.write(self.obj)
- return output_width + self.width
-
-
-class Group(Printable):
-
- def __init__(self, depth):
- self.depth = depth
- self.breakables = deque()
- self.want_break = False
-
-
-class GroupQueue(object):
-
- def __init__(self, *groups):
- self.queue = []
- for group in groups:
- self.enq(group)
-
- def enq(self, group):
- depth = group.depth
- while depth > len(self.queue) - 1:
- self.queue.append([])
- self.queue[depth].append(group)
-
- def deq(self):
- for stack in self.queue:
- for idx, group in enumerate(reversed(stack)):
- if group.breakables:
- del stack[idx]
- group.want_break = True
- return group
- for group in stack:
- group.want_break = True
- del stack[:]
-
- def remove(self, group):
- try:
- self.queue[group.depth].remove(group)
- except ValueError:
- pass
-
-try:
- _baseclass_reprs = (object.__repr__, types.InstanceType.__repr__)
-except AttributeError: # Python 3
- _baseclass_reprs = (object.__repr__,)
-
-
-def _default_pprint(obj, p, cycle):
- """
- The default print function, used if an object does not provide its own
- pretty printer and is not one of the builtin types.
- """
- klass = _safe_getattr(obj, '__class__', None) or type(obj)
- if _safe_getattr(klass, '__repr__', None) not in _baseclass_reprs:
- # A user-provided repr. Find newlines and replace them with p.break_()
- _repr_pprint(obj, p, cycle)
- return
- p.begin_group(1, '<')
- p.pretty(klass)
- p.text(' at 0x%x' % id(obj))
- if cycle:
- p.text(' ...')
- elif p.verbose:
- first = True
- for key in dir(obj):
- if not key.startswith('_'):
- try:
- value = getattr(obj, key)
- except AttributeError:
- continue
- if isinstance(value, types.MethodType):
- continue
- if not first:
- p.text(',')
- p.breakable()
- p.text(key)
- p.text('=')
- step = len(key) + 1
- p.indentation += step
- p.pretty(value)
- p.indentation -= step
- first = False
- p.end_group(1, '>')
-
-
+ return _default_pprint(obj, self, cycle)
+ finally:
+ self.end_group()
+ self.stack.pop()
+
+ def _in_deferred_types(self, cls):
+ """
+ Check if the given class is specified in the deferred type registry.
+
+ Returns the printer from the registry if it exists, and None if the
+ class is not in the registry. Successful matches will be moved to the
+ regular type registry for future use.
+ """
+ mod = _safe_getattr(cls, '__module__', None)
+ name = _safe_getattr(cls, '__name__', None)
+ key = (mod, name)
+ printer = None
+ if key in self.deferred_pprinters:
+ # Move the printer over to the regular registry.
+ printer = self.deferred_pprinters.pop(key)
+ self.type_pprinters[cls] = printer
+ return printer
+
+
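# Sketch of the _repr_pretty_ hook that RepresentationPrinter.pretty() looks
# up on the class above: the method receives the printer `p` and a `cycle`
# flag that is True when the object is already being printed further up the
# stack (MyList is a made-up class used only for illustration).
class MyList(object):
    def __init__(self, *items):
        self.items = list(items)

    def _repr_pretty_(self, p, cycle):
        if cycle:
            p.text('MyList(...)')      # guard against self-referencing objects
            return
        with p.group(7, 'MyList(', ')'):
            for idx, item in enumerate(self.items):
                if idx:
                    p.text(',')
                    p.breakable()
                p.pretty(item)

# pretty(MyList(1, 2, 3)) then yields 'MyList(1, 2, 3)', breaking the items
# across indented lines once they exceed max_width.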
+class Printable(object):
+
+ def output(self, stream, output_width):
+ return output_width
+
+
+class Text(Printable):
+
+ def __init__(self):
+ self.objs = []
+ self.width = 0
+
+ def output(self, stream, output_width):
+ for obj in self.objs:
+ stream.write(obj)
+ return output_width + self.width
+
+ def add(self, obj, width):
+ self.objs.append(obj)
+ self.width += width
+
+
+class Breakable(Printable):
+
+ def __init__(self, seq, width, pretty):
+ self.obj = seq
+ self.width = width
+ self.pretty = pretty
+ self.indentation = pretty.indentation
+ self.group = pretty.group_stack[-1]
+ self.group.breakables.append(self)
+
+ def output(self, stream, output_width):
+ self.group.breakables.popleft()
+ if self.group.want_break:
+ stream.write(self.pretty.newline)
+ stream.write(' ' * self.indentation)
+ return self.indentation
+ if not self.group.breakables:
+ self.pretty.group_queue.remove(self.group)
+ stream.write(self.obj)
+ return output_width + self.width
+
+
+class Group(Printable):
+
+ def __init__(self, depth):
+ self.depth = depth
+ self.breakables = deque()
+ self.want_break = False
+
+
+class GroupQueue(object):
+
+ def __init__(self, *groups):
+ self.queue = []
+ for group in groups:
+ self.enq(group)
+
+ def enq(self, group):
+ depth = group.depth
+ while depth > len(self.queue) - 1:
+ self.queue.append([])
+ self.queue[depth].append(group)
+
+ def deq(self):
+ for stack in self.queue:
+ for idx, group in enumerate(reversed(stack)):
+ if group.breakables:
+ del stack[idx]
+ group.want_break = True
+ return group
+ for group in stack:
+ group.want_break = True
+ del stack[:]
+
+ def remove(self, group):
+ try:
+ self.queue[group.depth].remove(group)
+ except ValueError:
+ pass
+
+try:
+ _baseclass_reprs = (object.__repr__, types.InstanceType.__repr__)
+except AttributeError: # Python 3
+ _baseclass_reprs = (object.__repr__,)
+
+
+def _default_pprint(obj, p, cycle):
+ """
+ The default print function, used if an object does not provide its own
+ pretty printer and is not one of the builtin types.
+ """
+ klass = _safe_getattr(obj, '__class__', None) or type(obj)
+ if _safe_getattr(klass, '__repr__', None) not in _baseclass_reprs:
+ # A user-provided repr. Find newlines and replace them with p.break_()
+ _repr_pprint(obj, p, cycle)
+ return
+ p.begin_group(1, '<')
+ p.pretty(klass)
+ p.text(' at 0x%x' % id(obj))
+ if cycle:
+ p.text(' ...')
+ elif p.verbose:
+ first = True
+ for key in dir(obj):
+ if not key.startswith('_'):
+ try:
+ value = getattr(obj, key)
+ except AttributeError:
+ continue
+ if isinstance(value, types.MethodType):
+ continue
+ if not first:
+ p.text(',')
+ p.breakable()
+ p.text(key)
+ p.text('=')
+ step = len(key) + 1
+ p.indentation += step
+ p.pretty(value)
+ p.indentation -= step
+ first = False
+ p.end_group(1, '>')
+
+
def _seq_pprinter_factory(start, end):
- """
- Factory that returns a pprint function useful for sequences. Used by
- the default pprint for tuples, dicts, and lists.
- """
- def inner(obj, p, cycle):
- if cycle:
- return p.text(start + '...' + end)
- step = len(start)
- p.begin_group(step, start)
- for idx, x in p._enumerate(obj):
- if idx:
- p.text(',')
- p.breakable()
- p.pretty(x)
- if len(obj) == 1 and type(obj) is tuple:
- # Special case for 1-item tuples.
- p.text(',')
- p.end_group(step, end)
- return inner
-
-
+ """
+ Factory that returns a pprint function useful for sequences. Used by
+ the default pprint for tuples, dicts, and lists.
+ """
+ def inner(obj, p, cycle):
+ if cycle:
+ return p.text(start + '...' + end)
+ step = len(start)
+ p.begin_group(step, start)
+ for idx, x in p._enumerate(obj):
+ if idx:
+ p.text(',')
+ p.breakable()
+ p.pretty(x)
+ if len(obj) == 1 and type(obj) is tuple:
+ # Special case for 1-item tuples.
+ p.text(',')
+ p.end_group(step, end)
+ return inner
+
+
def _set_pprinter_factory(start, end):
- """
- Factory that returns a pprint function useful for sets and frozensets.
- """
- def inner(obj, p, cycle):
- if cycle:
- return p.text(start + '...' + end)
- if len(obj) == 0:
- # Special case.
+ """
+ Factory that returns a pprint function useful for sets and frozensets.
+ """
+ def inner(obj, p, cycle):
+ if cycle:
+ return p.text(start + '...' + end)
+ if len(obj) == 0:
+ # Special case.
p.text(type(obj).__name__ + '()')
- else:
- step = len(start)
- p.begin_group(step, start)
- # Like dictionary keys, we will try to sort the items if there aren't too many
- if not (p.max_seq_length and len(obj) >= p.max_seq_length):
+ else:
+ step = len(start)
+ p.begin_group(step, start)
+ # Like dictionary keys, we will try to sort the items if there aren't too many
+ if not (p.max_seq_length and len(obj) >= p.max_seq_length):
items = _sorted_for_pprint(obj)
else:
items = obj
- for idx, x in p._enumerate(items):
- if idx:
- p.text(',')
- p.breakable()
- p.pretty(x)
- p.end_group(step, end)
- return inner
-
-
+ for idx, x in p._enumerate(items):
+ if idx:
+ p.text(',')
+ p.breakable()
+ p.pretty(x)
+ p.end_group(step, end)
+ return inner
+
+
def _dict_pprinter_factory(start, end):
- """
- Factory that returns a pprint function used by the default pprint of
- dicts and dict proxies.
- """
- def inner(obj, p, cycle):
- if cycle:
- return p.text('{...}')
+ """
+ Factory that returns a pprint function used by the default pprint of
+ dicts and dict proxies.
+ """
+ def inner(obj, p, cycle):
+ if cycle:
+ return p.text('{...}')
step = len(start)
p.begin_group(step, start)
- keys = obj.keys()
- # if dict isn't large enough to be truncated, sort keys before displaying
+ keys = obj.keys()
+ # if dict isn't large enough to be truncated, sort keys before displaying
# From Python 3.7, dicts preserve order by definition, so we don't sort.
if not DICT_IS_ORDERED \
and not (p.max_seq_length and len(obj) >= p.max_seq_length):
keys = _sorted_for_pprint(keys)
- for idx, key in p._enumerate(keys):
- if idx:
- p.text(',')
- p.breakable()
- p.pretty(key)
- p.text(': ')
- p.pretty(obj[key])
+ for idx, key in p._enumerate(keys):
+ if idx:
+ p.text(',')
+ p.breakable()
+ p.pretty(key)
+ p.text(': ')
+ p.pretty(obj[key])
p.end_group(step, end)
- return inner
-
-
-def _super_pprint(obj, p, cycle):
- """The pprint for the super type."""
- p.begin_group(8, '<super: ')
- p.pretty(obj.__thisclass__)
- p.text(',')
- p.breakable()
+ return inner
+
+
+def _super_pprint(obj, p, cycle):
+ """The pprint for the super type."""
+ p.begin_group(8, '<super: ')
+ p.pretty(obj.__thisclass__)
+ p.text(',')
+ p.breakable()
if PYPY: # In PyPy, super() objects don't have __self__ attributes
dself = obj.__repr__.__self__
p.pretty(None if dself is obj else dself)
else:
p.pretty(obj.__self__)
- p.end_group(8, '>')
-
-
-def _re_pattern_pprint(obj, p, cycle):
- """The pprint function for regular expression patterns."""
- p.text('re.compile(')
- pattern = repr(obj.pattern)
- if pattern[:1] in 'uU':
- pattern = pattern[1:]
- prefix = 'ur'
- else:
- prefix = 'r'
- pattern = prefix + pattern.replace('\\\\', '\\')
- p.text(pattern)
- if obj.flags:
- p.text(',')
- p.breakable()
- done_one = False
- for flag in ('TEMPLATE', 'IGNORECASE', 'LOCALE', 'MULTILINE', 'DOTALL',
- 'UNICODE', 'VERBOSE', 'DEBUG'):
- if obj.flags & getattr(re, flag):
- if done_one:
- p.text('|')
- p.text('re.' + flag)
- done_one = True
- p.text(')')
-
-
-def _type_pprint(obj, p, cycle):
- """The pprint for classes and types."""
- # Heap allocated types might not have the module attribute,
- # and others may set it to None.
-
+ p.end_group(8, '>')
+
+
+def _re_pattern_pprint(obj, p, cycle):
+ """The pprint function for regular expression patterns."""
+ p.text('re.compile(')
+ pattern = repr(obj.pattern)
+ if pattern[:1] in 'uU':
+ pattern = pattern[1:]
+ prefix = 'ur'
+ else:
+ prefix = 'r'
+ pattern = prefix + pattern.replace('\\\\', '\\')
+ p.text(pattern)
+ if obj.flags:
+ p.text(',')
+ p.breakable()
+ done_one = False
+ for flag in ('TEMPLATE', 'IGNORECASE', 'LOCALE', 'MULTILINE', 'DOTALL',
+ 'UNICODE', 'VERBOSE', 'DEBUG'):
+ if obj.flags & getattr(re, flag):
+ if done_one:
+ p.text('|')
+ p.text('re.' + flag)
+ done_one = True
+ p.text(')')
+
+
+def _type_pprint(obj, p, cycle):
+ """The pprint for classes and types."""
+ # Heap allocated types might not have the module attribute,
+ # and others may set it to None.
+
# Checks for a __repr__ override in the metaclass. Can't compare the
# type(obj).__repr__ directly because in PyPy the representation function
# inherited from type isn't the same type.__repr__
if [m for m in _get_mro(type(obj)) if "__repr__" in vars(m)][:1] != [type]:
- _repr_pprint(obj, p, cycle)
- return
-
- mod = _safe_getattr(obj, '__module__', None)
- try:
- name = obj.__qualname__
- if not isinstance(name, string_types):
- # This can happen if the type implements __qualname__ as a property
- # or other descriptor in Python 2.
- raise Exception("Try __name__")
- except Exception:
- name = obj.__name__
- if not isinstance(name, string_types):
- name = '<unknown type>'
-
- if mod in (None, '__builtin__', 'builtins', 'exceptions'):
- p.text(name)
- else:
- p.text(mod + '.' + name)
-
-
-def _repr_pprint(obj, p, cycle):
- """A pprint that just redirects to the normal repr function."""
- # Find newlines and replace them with p.break_()
- output = repr(obj)
- for idx, output_line in enumerate(output.splitlines()):
- if idx:
- p.break_()
- p.text(output_line)
-
-
-def _function_pprint(obj, p, cycle):
- """Base pprint for all functions and builtin functions."""
- name = _safe_getattr(obj, '__qualname__', obj.__name__)
- mod = obj.__module__
- if mod and mod not in ('__builtin__', 'builtins', 'exceptions'):
- name = mod + '.' + name
- p.text('<function %s>' % name)
-
-
-def _exception_pprint(obj, p, cycle):
- """Base pprint for all exceptions."""
- name = getattr(obj.__class__, '__qualname__', obj.__class__.__name__)
- if obj.__class__.__module__ not in ('exceptions', 'builtins'):
- name = '%s.%s' % (obj.__class__.__module__, name)
- step = len(name) + 1
- p.begin_group(step, name + '(')
- for idx, arg in enumerate(getattr(obj, 'args', ())):
- if idx:
- p.text(',')
- p.breakable()
- p.pretty(arg)
- p.end_group(step, ')')
-
-
-#: the exception base
-try:
- _exception_base = BaseException
-except NameError:
- _exception_base = Exception
-
-
-#: printers for builtin types
-_type_pprinters = {
- int: _repr_pprint,
- float: _repr_pprint,
- str: _repr_pprint,
+ _repr_pprint(obj, p, cycle)
+ return
+
+ mod = _safe_getattr(obj, '__module__', None)
+ try:
+ name = obj.__qualname__
+ if not isinstance(name, string_types):
+ # This can happen if the type implements __qualname__ as a property
+ # or other descriptor in Python 2.
+ raise Exception("Try __name__")
+ except Exception:
+ name = obj.__name__
+ if not isinstance(name, string_types):
+ name = '<unknown type>'
+
+ if mod in (None, '__builtin__', 'builtins', 'exceptions'):
+ p.text(name)
+ else:
+ p.text(mod + '.' + name)
+
+
+def _repr_pprint(obj, p, cycle):
+ """A pprint that just redirects to the normal repr function."""
+ # Find newlines and replace them with p.break_()
+ output = repr(obj)
+ for idx, output_line in enumerate(output.splitlines()):
+ if idx:
+ p.break_()
+ p.text(output_line)
+
+
+def _function_pprint(obj, p, cycle):
+ """Base pprint for all functions and builtin functions."""
+ name = _safe_getattr(obj, '__qualname__', obj.__name__)
+ mod = obj.__module__
+ if mod and mod not in ('__builtin__', 'builtins', 'exceptions'):
+ name = mod + '.' + name
+ p.text('<function %s>' % name)
+
+
+def _exception_pprint(obj, p, cycle):
+ """Base pprint for all exceptions."""
+ name = getattr(obj.__class__, '__qualname__', obj.__class__.__name__)
+ if obj.__class__.__module__ not in ('exceptions', 'builtins'):
+ name = '%s.%s' % (obj.__class__.__module__, name)
+ step = len(name) + 1
+ p.begin_group(step, name + '(')
+ for idx, arg in enumerate(getattr(obj, 'args', ())):
+ if idx:
+ p.text(',')
+ p.breakable()
+ p.pretty(arg)
+ p.end_group(step, ')')
+
+
+#: the exception base
+try:
+ _exception_base = BaseException
+except NameError:
+ _exception_base = Exception
+
+
+#: printers for builtin types
+_type_pprinters = {
+ int: _repr_pprint,
+ float: _repr_pprint,
+ str: _repr_pprint,
tuple: _seq_pprinter_factory('(', ')'),
list: _seq_pprinter_factory('[', ']'),
dict: _dict_pprinter_factory('{', '}'),
-
+
set: _set_pprinter_factory('{', '}'),
frozenset: _set_pprinter_factory('frozenset({', '})'),
- super: _super_pprint,
- _re_pattern_type: _re_pattern_pprint,
- type: _type_pprint,
- types.FunctionType: _function_pprint,
- types.BuiltinFunctionType: _function_pprint,
- types.MethodType: _repr_pprint,
-
- datetime.datetime: _repr_pprint,
- datetime.timedelta: _repr_pprint,
- _exception_base: _exception_pprint
-}
-
-try:
+ super: _super_pprint,
+ _re_pattern_type: _re_pattern_pprint,
+ type: _type_pprint,
+ types.FunctionType: _function_pprint,
+ types.BuiltinFunctionType: _function_pprint,
+ types.MethodType: _repr_pprint,
+
+ datetime.datetime: _repr_pprint,
+ datetime.timedelta: _repr_pprint,
+ _exception_base: _exception_pprint
+}
+
+try:
# In PyPy, types.DictProxyType is dict, setting the dictproxy printer
# using dict.setdefault avoids overwriting the dict printer
_type_pprinters.setdefault(types.DictProxyType,
_dict_pprinter_factory('dict_proxy({', '})'))
- _type_pprinters[types.ClassType] = _type_pprint
- _type_pprinters[types.SliceType] = _repr_pprint
-except AttributeError: # Python 3
+ _type_pprinters[types.ClassType] = _type_pprint
+ _type_pprinters[types.SliceType] = _repr_pprint
+except AttributeError: # Python 3
_type_pprinters[types.MappingProxyType] = \
_dict_pprinter_factory('mappingproxy({', '})')
- _type_pprinters[slice] = _repr_pprint
-
-try:
- _type_pprinters[xrange] = _repr_pprint
- _type_pprinters[long] = _repr_pprint
- _type_pprinters[unicode] = _repr_pprint
-except NameError:
- _type_pprinters[range] = _repr_pprint
- _type_pprinters[bytes] = _repr_pprint
-
-#: printers for types specified by name
-_deferred_type_pprinters = {
-}
-
-def for_type(typ, func):
- """
- Add a pretty printer for a given type.
- """
- oldfunc = _type_pprinters.get(typ, None)
- if func is not None:
- # To support easy restoration of old pprinters, we need to ignore Nones.
- _type_pprinters[typ] = func
- return oldfunc
-
-def for_type_by_name(type_module, type_name, func):
- """
- Add a pretty printer for a type specified by the module and name of a type
- rather than the type object itself.
- """
- key = (type_module, type_name)
- oldfunc = _deferred_type_pprinters.get(key, None)
- if func is not None:
- # To support easy restoration of old pprinters, we need to ignore Nones.
- _deferred_type_pprinters[key] = func
- return oldfunc
-
-
-#: printers for the default singletons
-_singleton_pprinters = dict.fromkeys(map(id, [None, True, False, Ellipsis,
- NotImplemented]), _repr_pprint)
-
-
-def _defaultdict_pprint(obj, p, cycle):
- name = obj.__class__.__name__
- with p.group(len(name) + 1, name + '(', ')'):
- if cycle:
- p.text('...')
- else:
- p.pretty(obj.default_factory)
- p.text(',')
- p.breakable()
- p.pretty(dict(obj))
-
-def _ordereddict_pprint(obj, p, cycle):
- name = obj.__class__.__name__
- with p.group(len(name) + 1, name + '(', ')'):
- if cycle:
- p.text('...')
- elif len(obj):
- p.pretty(list(obj.items()))
-
-def _deque_pprint(obj, p, cycle):
- name = obj.__class__.__name__
- with p.group(len(name) + 1, name + '(', ')'):
- if cycle:
- p.text('...')
- else:
- p.pretty(list(obj))
-
-
-def _counter_pprint(obj, p, cycle):
- name = obj.__class__.__name__
- with p.group(len(name) + 1, name + '(', ')'):
- if cycle:
- p.text('...')
- elif len(obj):
- p.pretty(dict(obj))
-
-for_type_by_name('collections', 'defaultdict', _defaultdict_pprint)
-for_type_by_name('collections', 'OrderedDict', _ordereddict_pprint)
-for_type_by_name('collections', 'deque', _deque_pprint)
-for_type_by_name('collections', 'Counter', _counter_pprint)
-
-if __name__ == '__main__':
- from random import randrange
- class Foo(object):
- def __init__(self):
- self.foo = 1
- self.bar = re.compile(r'\s+')
- self.blub = dict.fromkeys(range(30), randrange(1, 40))
- self.hehe = 23424.234234
- self.list = ["blub", "blah", self]
-
- def get_foo(self):
- print("foo")
-
- pprint(Foo(), verbose=True)
+ _type_pprinters[slice] = _repr_pprint
+
+try:
+ _type_pprinters[xrange] = _repr_pprint
+ _type_pprinters[long] = _repr_pprint
+ _type_pprinters[unicode] = _repr_pprint
+except NameError:
+ _type_pprinters[range] = _repr_pprint
+ _type_pprinters[bytes] = _repr_pprint
+
+#: printers for types specified by name
+_deferred_type_pprinters = {
+}
+
+def for_type(typ, func):
+ """
+ Add a pretty printer for a given type.
+ """
+ oldfunc = _type_pprinters.get(typ, None)
+ if func is not None:
+ # To support easy restoration of old pprinters, we need to ignore Nones.
+ _type_pprinters[typ] = func
+ return oldfunc
+
+def for_type_by_name(type_module, type_name, func):
+ """
+ Add a pretty printer for a type specified by the module and name of a type
+ rather than the type object itself.
+ """
+ key = (type_module, type_name)
+ oldfunc = _deferred_type_pprinters.get(key, None)
+ if func is not None:
+ # To support easy restoration of old pprinters, we need to ignore Nones.
+ _deferred_type_pprinters[key] = func
+ return oldfunc
+
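# Sketch of registering a custom printer for an already-registered type via
# for_type() (import path assumed to be IPython.lib.pretty).  Printers use
# the same (obj, p, cycle) signature as everywhere in this module, and
# for_type() returns the previous printer so it can be restored.
import datetime
from IPython.lib.pretty import for_type, pretty

def _dt_pprint(obj, p, cycle):
    p.text(obj.isoformat())            # render datetimes as ISO-8601 text

old = for_type(datetime.datetime, _dt_pprint)
assert pretty(datetime.datetime(2022, 2, 10, 16, 45)) == '2022-02-10T16:45:00'
for_type(datetime.datetime, old)       # put the default _repr_pprint back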
+
+#: printers for the default singletons
+_singleton_pprinters = dict.fromkeys(map(id, [None, True, False, Ellipsis,
+ NotImplemented]), _repr_pprint)
+
+
+def _defaultdict_pprint(obj, p, cycle):
+ name = obj.__class__.__name__
+ with p.group(len(name) + 1, name + '(', ')'):
+ if cycle:
+ p.text('...')
+ else:
+ p.pretty(obj.default_factory)
+ p.text(',')
+ p.breakable()
+ p.pretty(dict(obj))
+
+def _ordereddict_pprint(obj, p, cycle):
+ name = obj.__class__.__name__
+ with p.group(len(name) + 1, name + '(', ')'):
+ if cycle:
+ p.text('...')
+ elif len(obj):
+ p.pretty(list(obj.items()))
+
+def _deque_pprint(obj, p, cycle):
+ name = obj.__class__.__name__
+ with p.group(len(name) + 1, name + '(', ')'):
+ if cycle:
+ p.text('...')
+ else:
+ p.pretty(list(obj))
+
+
+def _counter_pprint(obj, p, cycle):
+ name = obj.__class__.__name__
+ with p.group(len(name) + 1, name + '(', ')'):
+ if cycle:
+ p.text('...')
+ elif len(obj):
+ p.pretty(dict(obj))
+
+for_type_by_name('collections', 'defaultdict', _defaultdict_pprint)
+for_type_by_name('collections', 'OrderedDict', _ordereddict_pprint)
+for_type_by_name('collections', 'deque', _deque_pprint)
+for_type_by_name('collections', 'Counter', _counter_pprint)
+
+if __name__ == '__main__':
+ from random import randrange
+ class Foo(object):
+ def __init__(self):
+ self.foo = 1
+ self.bar = re.compile(r'\s+')
+ self.blub = dict.fromkeys(range(30), randrange(1, 40))
+ self.hehe = 23424.234234
+ self.list = ["blub", "blah", self]
+
+ def get_foo(self):
+ print("foo")
+
+ pprint(Foo(), verbose=True)
diff --git a/contrib/python/ipython/py2/IPython/lib/security.py b/contrib/python/ipython/py2/IPython/lib/security.py
index a20fcc132e..8429c2a4be 100644
--- a/contrib/python/ipython/py2/IPython/lib/security.py
+++ b/contrib/python/ipython/py2/IPython/lib/security.py
@@ -1,114 +1,114 @@
-"""
-Password generation for the IPython notebook.
-"""
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-# Stdlib
-import getpass
-import hashlib
-import random
-
-# Our own
-from IPython.core.error import UsageError
-from IPython.utils.py3compat import cast_bytes, str_to_bytes
-
-#-----------------------------------------------------------------------------
-# Globals
-#-----------------------------------------------------------------------------
-
-# Length of the salt in number of hex chars, which implies salt_len * 4
-# bits of randomness.
-salt_len = 12
-
-#-----------------------------------------------------------------------------
-# Functions
-#-----------------------------------------------------------------------------
-
-def passwd(passphrase=None, algorithm='sha1'):
- """Generate hashed password and salt for use in notebook configuration.
-
- In the notebook configuration, set `c.NotebookApp.password` to
- the generated string.
-
- Parameters
- ----------
- passphrase : str
- Password to hash. If unspecified, the user is asked to input
- and verify a password.
- algorithm : str
- Hashing algorithm to use (e.g., 'sha1' or any argument supported
- by :func:`hashlib.new`).
-
- Returns
- -------
- hashed_passphrase : str
- Hashed password, in the format 'hash_algorithm:salt:passphrase_hash'.
-
- Examples
- --------
- >>> passwd('mypassword')
- 'sha1:7cf3:b7d6da294ea9592a9480c8f52e63cd42cfb9dd12'
-
- """
- if passphrase is None:
- for i in range(3):
- p0 = getpass.getpass('Enter password: ')
- p1 = getpass.getpass('Verify password: ')
- if p0 == p1:
- passphrase = p0
- break
- else:
- print('Passwords do not match.')
- else:
- raise UsageError('No matching passwords found. Giving up.')
-
- h = hashlib.new(algorithm)
- salt = ('%0' + str(salt_len) + 'x') % random.getrandbits(4 * salt_len)
- h.update(cast_bytes(passphrase, 'utf-8') + str_to_bytes(salt, 'ascii'))
-
- return ':'.join((algorithm, salt, h.hexdigest()))
-
-
-def passwd_check(hashed_passphrase, passphrase):
- """Verify that a given passphrase matches its hashed version.
-
- Parameters
- ----------
- hashed_passphrase : str
- Hashed password, in the format returned by `passwd`.
- passphrase : str
- Passphrase to validate.
-
- Returns
- -------
- valid : bool
- True if the passphrase matches the hash.
-
- Examples
- --------
- >>> from IPython.lib.security import passwd_check
- >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a',
- ... 'mypassword')
- True
-
- >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a',
- ... 'anotherpassword')
- False
- """
- try:
- algorithm, salt, pw_digest = hashed_passphrase.split(':', 2)
- except (ValueError, TypeError):
- return False
-
- try:
- h = hashlib.new(algorithm)
- except ValueError:
- return False
-
- if len(pw_digest) == 0:
- return False
-
- h.update(cast_bytes(passphrase, 'utf-8') + cast_bytes(salt, 'ascii'))
-
- return h.hexdigest() == pw_digest
+"""
+Password generation for the IPython notebook.
+"""
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+# Stdlib
+import getpass
+import hashlib
+import random
+
+# Our own
+from IPython.core.error import UsageError
+from IPython.utils.py3compat import cast_bytes, str_to_bytes
+
+#-----------------------------------------------------------------------------
+# Globals
+#-----------------------------------------------------------------------------
+
+# Length of the salt in number of hex chars, which implies salt_len * 4
+# bits of randomness.
+salt_len = 12
+
+#-----------------------------------------------------------------------------
+# Functions
+#-----------------------------------------------------------------------------
+
+def passwd(passphrase=None, algorithm='sha1'):
+ """Generate hashed password and salt for use in notebook configuration.
+
+ In the notebook configuration, set `c.NotebookApp.password` to
+ the generated string.
+
+ Parameters
+ ----------
+ passphrase : str
+ Password to hash. If unspecified, the user is asked to input
+ and verify a password.
+ algorithm : str
+ Hashing algorithm to use (e.g., 'sha1' or any argument supported
+ by :func:`hashlib.new`).
+
+ Returns
+ -------
+ hashed_passphrase : str
+ Hashed password, in the format 'hash_algorithm:salt:passphrase_hash'.
+
+ Examples
+ --------
+ >>> passwd('mypassword')
+ 'sha1:7cf3:b7d6da294ea9592a9480c8f52e63cd42cfb9dd12'
+
+ """
+ if passphrase is None:
+ for i in range(3):
+ p0 = getpass.getpass('Enter password: ')
+ p1 = getpass.getpass('Verify password: ')
+ if p0 == p1:
+ passphrase = p0
+ break
+ else:
+ print('Passwords do not match.')
+ else:
+ raise UsageError('No matching passwords found. Giving up.')
+
+ h = hashlib.new(algorithm)
+ salt = ('%0' + str(salt_len) + 'x') % random.getrandbits(4 * salt_len)
+ h.update(cast_bytes(passphrase, 'utf-8') + str_to_bytes(salt, 'ascii'))
+
+ return ':'.join((algorithm, salt, h.hexdigest()))
+
+
+def passwd_check(hashed_passphrase, passphrase):
+ """Verify that a given passphrase matches its hashed version.
+
+ Parameters
+ ----------
+ hashed_passphrase : str
+ Hashed password, in the format returned by `passwd`.
+ passphrase : str
+ Passphrase to validate.
+
+ Returns
+ -------
+ valid : bool
+ True if the passphrase matches the hash.
+
+ Examples
+ --------
+ >>> from IPython.lib.security import passwd_check
+ >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a',
+ ... 'mypassword')
+ True
+
+ >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a',
+ ... 'anotherpassword')
+ False
+ """
+ try:
+ algorithm, salt, pw_digest = hashed_passphrase.split(':', 2)
+ except (ValueError, TypeError):
+ return False
+
+ try:
+ h = hashlib.new(algorithm)
+ except ValueError:
+ return False
+
+ if len(pw_digest) == 0:
+ return False
+
+ h.update(cast_bytes(passphrase, 'utf-8') + cast_bytes(salt, 'ascii'))
+
+ return h.hexdigest() == pw_digest
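# Round-trip sketch tying the two helpers together: hash a passphrase, then
# verify it.  The salt is salt_len (12) hex digits, i.e. 12 * 4 = 48 random
# bits, so the hashed string differs on every call even for the same input.
from IPython.lib.security import passwd, passwd_check

hashed = passwd('mypassword')          # e.g. 'sha1:<12-hex-salt>:<digest>'
assert passwd_check(hashed, 'mypassword') is True
assert passwd_check(hashed, 'wrong password') is False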
diff --git a/contrib/python/ipython/py2/IPython/nbconvert.py b/contrib/python/ipython/py2/IPython/nbconvert.py
index 3d0a6c5076..2de4ee50bc 100644
--- a/contrib/python/ipython/py2/IPython/nbconvert.py
+++ b/contrib/python/ipython/py2/IPython/nbconvert.py
@@ -1,19 +1,19 @@
-"""
-Shim to maintain backwards compatibility with old IPython.nbconvert imports.
-"""
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import sys
-from warnings import warn
-
-from IPython.utils.shimmodule import ShimModule, ShimWarning
-
+"""
+Shim to maintain backwards compatibility with old IPython.nbconvert imports.
+"""
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import sys
+from warnings import warn
+
+from IPython.utils.shimmodule import ShimModule, ShimWarning
+
warn("The `IPython.nbconvert` package has been deprecated since IPython 4.0. "
- "You should import from nbconvert instead.", ShimWarning)
-
-# Unconditionally insert the shim into sys.modules so that further import calls
-# trigger the custom attribute access above
-
-sys.modules['IPython.nbconvert'] = ShimModule(
- src='IPython.nbconvert', mirror='nbconvert')
+ "You should import from nbconvert instead.", ShimWarning)
+
+# Unconditionally insert the shim into sys.modules so that further import calls
+# trigger the custom attribute access above
+
+sys.modules['IPython.nbconvert'] = ShimModule(
+ src='IPython.nbconvert', mirror='nbconvert')
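# Illustrative effect of the shim above (assumes the real `nbconvert` package
# is installed): the deprecated import path keeps working, the first import
# emits the ShimWarning, and lookups are mirrored onto nbconvert by the
# ShimModule placed into sys.modules.
import warnings
warnings.simplefilter('default')       # make sure the ShimWarning is visible
import IPython.nbconvert               # old name, now backed by `nbconvert`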
diff --git a/contrib/python/ipython/py2/IPython/nbformat.py b/contrib/python/ipython/py2/IPython/nbformat.py
index 2626d50e0c..310277de00 100644
--- a/contrib/python/ipython/py2/IPython/nbformat.py
+++ b/contrib/python/ipython/py2/IPython/nbformat.py
@@ -1,19 +1,19 @@
-"""
-Shim to maintain backwards compatibility with old IPython.nbformat imports.
-"""
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import sys
-from warnings import warn
-
-from IPython.utils.shimmodule import ShimModule, ShimWarning
-
+"""
+Shim to maintain backwards compatibility with old IPython.nbformat imports.
+"""
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import sys
+from warnings import warn
+
+from IPython.utils.shimmodule import ShimModule, ShimWarning
+
warn("The `IPython.nbformat` package has been deprecated since IPython 4.0. "
- "You should import from nbformat instead.", ShimWarning)
-
-# Unconditionally insert the shim into sys.modules so that further import calls
-# trigger the custom attribute access above
-
-sys.modules['IPython.nbformat'] = ShimModule(
- src='IPython.nbformat', mirror='nbformat')
+ "You should import from nbformat instead.", ShimWarning)
+
+# Unconditionally insert the shim into sys.modules so that further import calls
+# trigger the custom attribute access above
+
+sys.modules['IPython.nbformat'] = ShimModule(
+ src='IPython.nbformat', mirror='nbformat')
diff --git a/contrib/python/ipython/py2/IPython/parallel.py b/contrib/python/ipython/py2/IPython/parallel.py
index ea25050476..0f10012783 100644
--- a/contrib/python/ipython/py2/IPython/parallel.py
+++ b/contrib/python/ipython/py2/IPython/parallel.py
@@ -1,20 +1,20 @@
-"""
-Shim to maintain backwards compatibility with old IPython.parallel imports.
-"""
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import sys
-from warnings import warn
-
-from IPython.utils.shimmodule import ShimModule, ShimWarning
-
+"""
+Shim to maintain backwards compatibility with old IPython.parallel imports.
+"""
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import sys
+from warnings import warn
+
+from IPython.utils.shimmodule import ShimModule, ShimWarning
+
warn("The `IPython.parallel` package has been deprecated since IPython 4.0. "
- "You should import from ipyparallel instead.", ShimWarning)
-
-# Unconditionally insert the shim into sys.modules so that further import calls
-# trigger the custom attribute access above
-
-sys.modules['IPython.parallel'] = ShimModule(
- src='IPython.parallel', mirror='ipyparallel')
-
+ "You should import from ipyparallel instead.", ShimWarning)
+
+# Unconditionally insert the shim into sys.modules so that further import calls
+# trigger the custom attribute access above
+
+sys.modules['IPython.parallel'] = ShimModule(
+ src='IPython.parallel', mirror='ipyparallel')
+
diff --git a/contrib/python/ipython/py2/IPython/paths.py b/contrib/python/ipython/py2/IPython/paths.py
index d14f26c1fb..59787722a5 100644
--- a/contrib/python/ipython/py2/IPython/paths.py
+++ b/contrib/python/ipython/py2/IPython/paths.py
@@ -1,120 +1,120 @@
-"""Find files and directories which IPython uses.
-"""
-import os.path
-import shutil
-import tempfile
-from warnings import warn
-
-import IPython
-from IPython.utils.importstring import import_item
-from IPython.utils.path import (
- get_home_dir, get_xdg_dir, get_xdg_cache_dir, compress_user, _writable_dir,
- ensure_dir_exists, fs_encoding, filefind
-)
-from IPython.utils import py3compat
-
-def get_ipython_dir():
- """Get the IPython directory for this platform and user.
-
- This uses the logic in `get_home_dir` to find the home directory
- and then adds .ipython to the end of the path.
- """
-
- env = os.environ
- pjoin = os.path.join
-
-
- ipdir_def = '.ipython'
-
- home_dir = get_home_dir()
- xdg_dir = get_xdg_dir()
-
- # import pdb; pdb.set_trace() # dbg
- if 'IPYTHON_DIR' in env:
- warn('The environment variable IPYTHON_DIR is deprecated. '
- 'Please use IPYTHONDIR instead.')
- ipdir = env.get('IPYTHONDIR', env.get('IPYTHON_DIR', None))
- if ipdir is None:
- # not set explicitly, use ~/.ipython
- ipdir = pjoin(home_dir, ipdir_def)
- if xdg_dir:
- # Several IPython versions (up to 1.x) defaulted to .config/ipython
- # on Linux. We have decided to go back to using .ipython everywhere
- xdg_ipdir = pjoin(xdg_dir, 'ipython')
-
- if _writable_dir(xdg_ipdir):
- cu = compress_user
- if os.path.exists(ipdir):
- warn(('Ignoring {0} in favour of {1}. Remove {0} to '
- 'get rid of this message').format(cu(xdg_ipdir), cu(ipdir)))
- elif os.path.islink(xdg_ipdir):
- warn(('{0} is deprecated. Move link to {1} to '
- 'get rid of this message').format(cu(xdg_ipdir), cu(ipdir)))
- else:
- warn('Moving {0} to {1}'.format(cu(xdg_ipdir), cu(ipdir)))
- shutil.move(xdg_ipdir, ipdir)
-
- ipdir = os.path.normpath(os.path.expanduser(ipdir))
-
- if os.path.exists(ipdir) and not _writable_dir(ipdir):
- # ipdir exists, but is not writable
- warn("IPython dir '{0}' is not a writable location,"
- " using a temp directory.".format(ipdir))
- ipdir = tempfile.mkdtemp()
- elif not os.path.exists(ipdir):
- parent = os.path.dirname(ipdir)
- if not _writable_dir(parent):
- # ipdir does not exist and parent isn't writable
- warn("IPython parent '{0}' is not a writable location,"
- " using a temp directory.".format(parent))
- ipdir = tempfile.mkdtemp()
-
- return py3compat.cast_unicode(ipdir, fs_encoding)
-
-
-def get_ipython_cache_dir():
- """Get the cache directory; it is created if it does not exist."""
- xdgdir = get_xdg_cache_dir()
- if xdgdir is None:
- return get_ipython_dir()
- ipdir = os.path.join(xdgdir, "ipython")
- if not os.path.exists(ipdir) and _writable_dir(xdgdir):
- ensure_dir_exists(ipdir)
- elif not _writable_dir(xdgdir):
- return get_ipython_dir()
-
- return py3compat.cast_unicode(ipdir, fs_encoding)
-
-
-def get_ipython_package_dir():
- """Get the base directory where IPython itself is installed."""
- ipdir = os.path.dirname(IPython.__file__)
- return py3compat.cast_unicode(ipdir, fs_encoding)
-
-
-def get_ipython_module_path(module_str):
- """Find the path to an IPython module in this version of IPython.
-
- This will always find the version of the module that is in this importable
- IPython package. This will always return the path to the ``.py``
- version of the module.
- """
- if module_str == 'IPython':
- return os.path.join(get_ipython_package_dir(), '__init__.py')
- mod = import_item(module_str)
- the_path = mod.__file__.replace('.pyc', '.py')
- the_path = the_path.replace('.pyo', '.py')
- return py3compat.cast_unicode(the_path, fs_encoding)
-
-def locate_profile(profile='default'):
- """Find the path to the folder associated with a given profile.
-
- I.e. find $IPYTHONDIR/profile_whatever.
- """
- from IPython.core.profiledir import ProfileDir, ProfileDirError
- try:
- pd = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile)
- except ProfileDirError:
- # IOError makes more sense when people are expecting a path
- raise IOError("Couldn't find profile %r" % profile)
- return pd.location
+"""Find files and directories which IPython uses.
+"""
+import os.path
+import shutil
+import tempfile
+from warnings import warn
+
+import IPython
+from IPython.utils.importstring import import_item
+from IPython.utils.path import (
+ get_home_dir, get_xdg_dir, get_xdg_cache_dir, compress_user, _writable_dir,
+ ensure_dir_exists, fs_encoding, filefind
+)
+from IPython.utils import py3compat
+
+def get_ipython_dir():
+ """Get the IPython directory for this platform and user.
+
+ This uses the logic in `get_home_dir` to find the home directory
+ and then adds .ipython to the end of the path.
+ """
+
+ env = os.environ
+ pjoin = os.path.join
+
+
+ ipdir_def = '.ipython'
+
+ home_dir = get_home_dir()
+ xdg_dir = get_xdg_dir()
+
+ # import pdb; pdb.set_trace() # dbg
+ if 'IPYTHON_DIR' in env:
+ warn('The environment variable IPYTHON_DIR is deprecated. '
+ 'Please use IPYTHONDIR instead.')
+ ipdir = env.get('IPYTHONDIR', env.get('IPYTHON_DIR', None))
+ if ipdir is None:
+ # not set explicitly, use ~/.ipython
+ ipdir = pjoin(home_dir, ipdir_def)
+ if xdg_dir:
+ # Several IPython versions (up to 1.x) defaulted to .config/ipython
+ # on Linux. We have decided to go back to using .ipython everywhere
+ xdg_ipdir = pjoin(xdg_dir, 'ipython')
+
+ if _writable_dir(xdg_ipdir):
+ cu = compress_user
+ if os.path.exists(ipdir):
+ warn(('Ignoring {0} in favour of {1}. Remove {0} to '
+ 'get rid of this message').format(cu(xdg_ipdir), cu(ipdir)))
+ elif os.path.islink(xdg_ipdir):
+ warn(('{0} is deprecated. Move link to {1} to '
+ 'get rid of this message').format(cu(xdg_ipdir), cu(ipdir)))
+ else:
+ warn('Moving {0} to {1}'.format(cu(xdg_ipdir), cu(ipdir)))
+ shutil.move(xdg_ipdir, ipdir)
+
+ ipdir = os.path.normpath(os.path.expanduser(ipdir))
+
+ if os.path.exists(ipdir) and not _writable_dir(ipdir):
+ # ipdir exists, but is not writable
+ warn("IPython dir '{0}' is not a writable location,"
+ " using a temp directory.".format(ipdir))
+ ipdir = tempfile.mkdtemp()
+ elif not os.path.exists(ipdir):
+ parent = os.path.dirname(ipdir)
+ if not _writable_dir(parent):
+ # ipdir does not exist and parent isn't writable
+ warn("IPython parent '{0}' is not a writable location,"
+ " using a temp directory.".format(parent))
+ ipdir = tempfile.mkdtemp()
+
+ return py3compat.cast_unicode(ipdir, fs_encoding)
+
+
+def get_ipython_cache_dir():
+ """Get the cache directory; it is created if it does not exist."""
+ xdgdir = get_xdg_cache_dir()
+ if xdgdir is None:
+ return get_ipython_dir()
+ ipdir = os.path.join(xdgdir, "ipython")
+ if not os.path.exists(ipdir) and _writable_dir(xdgdir):
+ ensure_dir_exists(ipdir)
+ elif not _writable_dir(xdgdir):
+ return get_ipython_dir()
+
+ return py3compat.cast_unicode(ipdir, fs_encoding)
+
+
+def get_ipython_package_dir():
+ """Get the base directory where IPython itself is installed."""
+ ipdir = os.path.dirname(IPython.__file__)
+ return py3compat.cast_unicode(ipdir, fs_encoding)
+
+
+def get_ipython_module_path(module_str):
+ """Find the path to an IPython module in this version of IPython.
+
+ This will always find the version of the module that is in this importable
+ IPython package. This will always return the path to the ``.py``
+ version of the module.
+ """
+ if module_str == 'IPython':
+ return os.path.join(get_ipython_package_dir(), '__init__.py')
+ mod = import_item(module_str)
+ the_path = mod.__file__.replace('.pyc', '.py')
+ the_path = the_path.replace('.pyo', '.py')
+ return py3compat.cast_unicode(the_path, fs_encoding)
+
+def locate_profile(profile='default'):
+ """Find the path to the folder associated with a given profile.
+
+ I.e. find $IPYTHONDIR/profile_whatever.
+ """
+ from IPython.core.profiledir import ProfileDir, ProfileDirError
+ try:
+ pd = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile)
+ except ProfileDirError:
+ # IOError makes more sense when people are expecting a path
+ raise IOError("Couldn't find profile %r" % profile)
+ return pd.location
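# Quick illustration of the path helpers above.  The printed values depend on
# the platform, IPYTHONDIR and the XDG settings, so the paths shown in the
# comments are only examples; locate_profile() raises IOError if the profile
# directory has never been created.
from IPython.paths import (get_ipython_dir, get_ipython_cache_dir,
                           locate_profile)

print(get_ipython_dir())          # e.g. /home/user/.ipython
print(get_ipython_cache_dir())    # e.g. /home/user/.cache/ipython
print(locate_profile('default'))  # e.g. /home/user/.ipython/profile_default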
diff --git a/contrib/python/ipython/py2/IPython/qt.py b/contrib/python/ipython/py2/IPython/qt.py
index 9b032e271e..7557a3f329 100644
--- a/contrib/python/ipython/py2/IPython/qt.py
+++ b/contrib/python/ipython/py2/IPython/qt.py
@@ -1,24 +1,24 @@
-"""
-Shim to maintain backwards compatibility with old IPython.qt imports.
-"""
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import sys
-from warnings import warn
-
-from IPython.utils.shimmodule import ShimModule, ShimWarning
-
+"""
+Shim to maintain backwards compatibility with old IPython.qt imports.
+"""
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import sys
+from warnings import warn
+
+from IPython.utils.shimmodule import ShimModule, ShimWarning
+
warn("The `IPython.qt` package has been deprecated since IPython 4.0. "
- "You should import from qtconsole instead.", ShimWarning)
-
-# Unconditionally insert the shim into sys.modules so that further import calls
-# trigger the custom attribute access above
-
-_console = sys.modules['IPython.qt.console'] = ShimModule(
- src='IPython.qt.console', mirror='qtconsole')
-
-_qt = ShimModule(src='IPython.qt', mirror='qtconsole')
-
-_qt.console = _console
-sys.modules['IPython.qt'] = _qt
+ "You should import from qtconsole instead.", ShimWarning)
+
+# Unconditionally insert the shim into sys.modules so that further import calls
+# trigger the custom attribute access above
+
+_console = sys.modules['IPython.qt.console'] = ShimModule(
+ src='IPython.qt.console', mirror='qtconsole')
+
+_qt = ShimModule(src='IPython.qt', mirror='qtconsole')
+
+_qt.console = _console
+sys.modules['IPython.qt'] = _qt
diff --git a/contrib/python/ipython/py2/IPython/sphinxext/custom_doctests.py b/contrib/python/ipython/py2/IPython/sphinxext/custom_doctests.py
index 65d7051f4e..7678fd6801 100644
--- a/contrib/python/ipython/py2/IPython/sphinxext/custom_doctests.py
+++ b/contrib/python/ipython/py2/IPython/sphinxext/custom_doctests.py
@@ -1,155 +1,155 @@
-"""
-Handlers for IPythonDirective's @doctest pseudo-decorator.
-
-The Sphinx extension that provides support for embedded IPython code provides
-a pseudo-decorator @doctest, which treats the input/output block as a
-doctest, raising a RuntimeError during doc generation if the actual output
-(after running the input) does not match the expected output.
-
-An example usage is:
-
-.. code-block:: rst
-
- .. ipython::
-
- In [1]: x = 1
-
- @doctest
- In [2]: x + 2
- Out[3]: 3
-
-One can also provide arguments to the decorator. The first argument should be
-the name of a custom handler. The specification of any other arguments is
-determined by the handler. For example,
-
-.. code-block:: rst
-
- .. ipython::
-
- @doctest float
- In [154]: 0.1 + 0.2
- Out[154]: 0.3
-
-allows the actual output ``0.30000000000000004`` to match the expected output
-due to a comparison with `np.allclose`.
-
-This module contains handlers for the @doctest pseudo-decorator. Handlers
-should have the following function signature::
-
- handler(sphinx_shell, args, input_lines, found, submitted)
-
-where `sphinx_shell` is the embedded Sphinx shell, `args` contains the list
-of arguments that follow: '@doctest handler_name', `input_lines` contains
-a list of the lines relevant to the current doctest, `found` is a string
-containing the output from the IPython shell, and `submitted` is a string
-containing the expected output from the IPython shell.
-
-Handlers must be registered in the `doctests` dict at the end of this module.
-
-"""
-
-def str_to_array(s):
- """
- Simplistic converter of strings from repr to float NumPy arrays.
-
- If the repr string has an ellipsis in it, then this will fail.
-
- Parameters
- ----------
- s : str
- The repr version of a NumPy array.
-
- Examples
- --------
- >>> s = "array([ 0.3, inf, nan])"
- >>> a = str_to_array(s)
-
- """
- import numpy as np
-
- # Need to make sure eval() knows about inf and nan.
- # This also assumes default printoptions for NumPy.
- from numpy import inf, nan
-
- if s.startswith(u'array'):
- # Remove array( and )
- s = s[6:-1]
-
- if s.startswith(u'['):
- a = np.array(eval(s), dtype=float)
- else:
- # Assume it's a regular float. Force 1D so we can index into it.
- a = np.atleast_1d(float(s))
- return a
-
-def float_doctest(sphinx_shell, args, input_lines, found, submitted):
- """
- Doctest which allows the actual output to vary slightly from the submitted output.
-
- Here is how it might appear in an rst file:
-
- .. code-block:: rst
-
- .. ipython::
-
- @doctest float
- In [1]: 0.1 + 0.2
- Out[1]: 0.3
-
- """
- import numpy as np
-
- if len(args) == 2:
- rtol = 1e-05
- atol = 1e-08
- else:
- # Both must be specified if any are specified.
- try:
- rtol = float(args[2])
- atol = float(args[3])
- except IndexError:
- e = ("Both `rtol` and `atol` must be specified "
- "if either are specified: {0}".format(args))
- raise IndexError(e)
-
- try:
- submitted = str_to_array(submitted)
- found = str_to_array(found)
- except:
- # For example, if the array is huge and there is an ellipsis in it.
- error = True
- else:
- found_isnan = np.isnan(found)
- submitted_isnan = np.isnan(submitted)
- error = not np.allclose(found_isnan, submitted_isnan)
- error |= not np.allclose(found[~found_isnan],
- submitted[~submitted_isnan],
- rtol=rtol, atol=atol)
-
- TAB = ' ' * 4
- directive = sphinx_shell.directive
- if directive is None:
- source = 'Unavailable'
- content = 'Unavailable'
- else:
- source = directive.state.document.current_source
- # Add tabs and make into a single string.
- content = '\n'.join([TAB + line for line in directive.content])
-
- if error:
-
- e = ('doctest float comparison failure\n\n'
- 'Document source: {0}\n\n'
- 'Raw content: \n{1}\n\n'
- 'On input line(s):\n{TAB}{2}\n\n'
- 'we found output:\n{TAB}{3}\n\n'
- 'instead of the expected:\n{TAB}{4}\n\n')
- e = e.format(source, content, '\n'.join(input_lines), repr(found),
- repr(submitted), TAB=TAB)
- raise RuntimeError(e)
-
-# dict of allowable doctest handlers. The key represents the first argument
-# that must be given to @doctest in order to activate the handler.
-doctests = {
- 'float': float_doctest,
-}
+"""
+Handlers for IPythonDirective's @doctest pseudo-decorator.
+
+The Sphinx extension that provides support for embedded IPython code provides
+a pseudo-decorator @doctest, which treats the input/output block as a
+doctest, raising a RuntimeError during doc generation if the actual output
+(after running the input) does not match the expected output.
+
+An example usage is:
+
+.. code-block:: rst
+
+ .. ipython::
+
+ In [1]: x = 1
+
+ @doctest
+ In [2]: x + 2
+ Out[2]: 3
+
+One can also provide arguments to the decorator. The first argument should be
+the name of a custom handler. The specification of any other arguments is
+determined by the handler. For example,
+
+.. code-block:: rst
+
+ .. ipython::
+
+ @doctest float
+ In [154]: 0.1 + 0.2
+ Out[154]: 0.3
+
+allows the actual output ``0.30000000000000004`` to match the expected output
+due to a comparison with `np.allclose`.
+
+This module contains handlers for the @doctest pseudo-decorator. Handlers
+should have the following function signature::
+
+ handler(sphinx_shell, args, input_lines, found, submitted)
+
+where `sphinx_shell` is the embedded Sphinx shell, `args` contains the list
+of arguments that follow: '@doctest handler_name', `input_lines` contains
+a list of the lines relevant to the current doctest, `found` is a string
+containing the output from the IPython shell, and `submitted` is a string
+containing the expected output from the IPython shell.
+
+Handlers must be registered in the `doctests` dict at the end of this module.
+
+"""
+
+def str_to_array(s):
+ """
+ Simplistic converter of strings from repr to float NumPy arrays.
+
+ If the repr string has an ellipsis in it, then this will fail.
+
+ Parameters
+ ----------
+ s : str
+ The repr version of a NumPy array.
+
+ Examples
+ --------
+ >>> s = "array([ 0.3, inf, nan])"
+ >>> a = str_to_array(s)
+
+ """
+ import numpy as np
+
+ # Need to make sure eval() knows about inf and nan.
+ # This also assumes default printoptions for NumPy.
+ from numpy import inf, nan
+
+ if s.startswith(u'array'):
+ # Remove array( and )
+ s = s[6:-1]
+
+ if s.startswith(u'['):
+ a = np.array(eval(s), dtype=float)
+ else:
+ # Assume it's a regular float. Force 1D so we can index into it.
+ a = np.atleast_1d(float(s))
+ return a
+
+def float_doctest(sphinx_shell, args, input_lines, found, submitted):
+ """
+ Doctest which allows the actual output to vary slightly from the submitted output.
+
+ Here is how it might appear in an rst file:
+
+ .. code-block:: rst
+
+ .. ipython::
+
+ @doctest float
+ In [1]: 0.1 + 0.2
+ Out[1]: 0.3
+
+ """
+ import numpy as np
+
+ if len(args) == 2:
+ rtol = 1e-05
+ atol = 1e-08
+ else:
+ # Both must be specified if any are specified.
+ try:
+ rtol = float(args[2])
+ atol = float(args[3])
+ except IndexError:
+ e = ("Both `rtol` and `atol` must be specified "
+ "if either are specified: {0}".format(args))
+ raise IndexError(e)
+
+ try:
+ submitted = str_to_array(submitted)
+ found = str_to_array(found)
+ except:
+ # For example, if the array is huge and there is an ellipsis in it.
+ error = True
+ else:
+ found_isnan = np.isnan(found)
+ submitted_isnan = np.isnan(submitted)
+ error = not np.allclose(found_isnan, submitted_isnan)
+ error |= not np.allclose(found[~found_isnan],
+ submitted[~submitted_isnan],
+ rtol=rtol, atol=atol)
+
+ TAB = ' ' * 4
+ directive = sphinx_shell.directive
+ if directive is None:
+ source = 'Unavailable'
+ content = 'Unavailable'
+ else:
+ source = directive.state.document.current_source
+ # Add tabs and make into a single string.
+ content = '\n'.join([TAB + line for line in directive.content])
+
+ if error:
+
+ e = ('doctest float comparison failure\n\n'
+ 'Document source: {0}\n\n'
+ 'Raw content: \n{1}\n\n'
+ 'On input line(s):\n{TAB}{2}\n\n'
+ 'we found output:\n{TAB}{3}\n\n'
+ 'instead of the expected:\n{TAB}{4}\n\n')
+ e = e.format(source, content, '\n'.join(input_lines), repr(found),
+ repr(submitted), TAB=TAB)
+ raise RuntimeError(e)
+
+# dict of allowable doctest handlers. The key represents the first argument
+# that must be given to @doctest in order to activate the handler.
+doctests = {
+ 'float': float_doctest,
+}
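Given the handler contract documented at the top of the module, a sketch of what a third-party handler and its registration could look like follows; the 'whitespace' handler is hypothetical and assumes the code is appended to custom_doctests.py (or that its `doctests` dict is imported):

    def whitespace_doctest(sphinx_shell, args, input_lines, found, submitted):
        # Compare actual vs. expected output while ignoring all whitespace.
        if ''.join(found.split()) != ''.join(submitted.split()):
            raise RuntimeError(
                'doctest whitespace comparison failure\n'
                'found:    %r\n'
                'expected: %r' % (found, submitted))

    # Registration mirrors the existing 'float' entry:
    doctests['whitespace'] = whitespace_doctest

It would then be activated in reST with "@doctest whitespace" above the input line, exactly like "@doctest float" in the examples above.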
diff --git a/contrib/python/ipython/py2/IPython/sphinxext/ipython_console_highlighting.py b/contrib/python/ipython/py2/IPython/sphinxext/ipython_console_highlighting.py
index bc64087797..b93a151fb3 100644
--- a/contrib/python/ipython/py2/IPython/sphinxext/ipython_console_highlighting.py
+++ b/contrib/python/ipython/py2/IPython/sphinxext/ipython_console_highlighting.py
@@ -1,28 +1,28 @@
-"""
-reST directive for syntax-highlighting ipython interactive sessions.
-
-"""
-
-from sphinx import highlighting
-from IPython.lib.lexers import IPyLexer
-
-def setup(app):
- """Setup as a sphinx extension."""
-
- # This is only a lexer, so adding it below to pygments appears sufficient.
- # But if somebody knows what the right API usage should be to do that via
- # sphinx, by all means fix it here. At least having this setup() function
- # suppresses the sphinx warning we'd get without it.
- metadata = {'parallel_read_safe': True, 'parallel_write_safe': True}
- return metadata
-
-# Register the extension as a valid pygments lexer.
-# Alternatively, we could register the lexer with pygments instead. This would
-# require using setuptools entrypoints: http://pygments.org/docs/plugins
-
-ipy2 = IPyLexer(python3=False)
-ipy3 = IPyLexer(python3=True)
-
-highlighting.lexers['ipython'] = ipy2
-highlighting.lexers['ipython2'] = ipy2
-highlighting.lexers['ipython3'] = ipy3
+"""
+reST directive for syntax-highlighting ipython interactive sessions.
+
+"""
+
+from sphinx import highlighting
+from IPython.lib.lexers import IPyLexer
+
+def setup(app):
+ """Setup as a sphinx extension."""
+
+ # This is only a lexer, so adding it below to pygments appears sufficient.
+ # But if somebody knows what the right API usage should be to do that via
+ # sphinx, by all means fix it here. At least having this setup() function
+ # suppresses the sphinx warning we'd get without it.
+ metadata = {'parallel_read_safe': True, 'parallel_write_safe': True}
+ return metadata
+
+# Register the extension as a valid pygments lexer.
+# Alternatively, we could register the lexer with pygments instead. This would
+# require using setuptools entrypoints: http://pygments.org/docs/plugins
+
+ipy2 = IPyLexer(python3=False)
+ipy3 = IPyLexer(python3=True)
+
+highlighting.lexers['ipython'] = ipy2
+highlighting.lexers['ipython2'] = ipy2
+highlighting.lexers['ipython3'] = ipy3
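For reference, enabling both sphinxext modules is a one-line change in a project's conf.py; this only restates what the module docstrings already say:

    # conf.py (sketch)
    extensions = [
        'IPython.sphinxext.ipython_console_highlighting',  # registers the 'ipython*' lexers
        'IPython.sphinxext.ipython_directive',             # provides the .. ipython:: directive
    ]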
diff --git a/contrib/python/ipython/py2/IPython/sphinxext/ipython_directive.py b/contrib/python/ipython/py2/IPython/sphinxext/ipython_directive.py
index a9e9b65108..8df9ace1f3 100644
--- a/contrib/python/ipython/py2/IPython/sphinxext/ipython_directive.py
+++ b/contrib/python/ipython/py2/IPython/sphinxext/ipython_directive.py
@@ -1,1178 +1,1178 @@
-# -*- coding: utf-8 -*-
-"""
-Sphinx directive to support embedded IPython code.
-
-This directive allows pasting of entire interactive IPython sessions, prompts
-and all, and their code will actually get re-executed at doc build time, with
-all prompts renumbered sequentially. It also allows you to input code as a pure
-python input by giving the argument python to the directive. The output looks
-like an interactive ipython session.
-
-To enable this directive, simply list it in your Sphinx ``conf.py`` file
-(making sure the directory where you placed it is visible to sphinx, as is
-needed for all Sphinx directives). For example, to enable syntax highlighting
-and the IPython directive::
-
- extensions = ['IPython.sphinxext.ipython_console_highlighting',
- 'IPython.sphinxext.ipython_directive']
-
-The IPython directive outputs code-blocks with the language 'ipython'. So
-if you do not have the syntax highlighting extension enabled as well, then
-all rendered code-blocks will be uncolored. By default this directive assumes
-that your prompts are unchanged IPython ones, but this can be customized.
-The configurable options that can be placed in conf.py are:
-
-ipython_savefig_dir:
- The directory in which to save the figures. This is relative to the
- Sphinx source directory. The default is `html_static_path`.
-ipython_rgxin:
- The compiled regular expression to denote the start of IPython input
- lines. The default is re.compile('In \[(\d+)\]:\s?(.*)\s*'). You
- shouldn't need to change this.
-ipython_rgxout:
- The compiled regular expression to denote the start of IPython output
- lines. The default is re.compile('Out\[(\d+)\]:\s?(.*)\s*'). You
- shouldn't need to change this.
-ipython_promptin:
- The string to represent the IPython input prompt in the generated ReST.
- The default is 'In [%d]:'. This expects that the line numbers are used
- in the prompt.
-ipython_promptout:
- The string to represent the IPython output prompt in the generated ReST. The
- default is 'Out [%d]:'. This expects that the line numbers are used
- in the prompt.
-ipython_mplbackend:
- The string which specifies if the embedded Sphinx shell should import
- Matplotlib and set the backend. The value specifies a backend that is
- passed to `matplotlib.use()` before any lines in `ipython_execlines` are
- executed. If not specified in conf.py, then the default value of 'agg' is
- used. To use the IPython directive without matplotlib as a dependency, set
- the value to `None`. It may end up that matplotlib is still imported
- if the user specifies so in `ipython_execlines` or makes use of the
- @savefig pseudo decorator.
-ipython_execlines:
- A list of strings to be exec'd in the embedded Sphinx shell. Typical
- usage is to make certain packages always available. Set this to an empty
- list if you wish to have no imports always available. If specified in
- conf.py as `None`, then it has the effect of making no imports available.
- If omitted from conf.py altogether, then the default value of
- ['import numpy as np', 'import matplotlib.pyplot as plt'] is used.
-ipython_holdcount:
- When the @suppress pseudo-decorator is used, the execution count can be
- incremented or not. The default behavior is to hold the execution count,
- corresponding to a value of `True`. Set this to `False` to increment
- the execution count after each suppressed command.
-
-As an example, to use the IPython directive when `matplotlib` is not available,
-one sets the backend to `None`::
-
- ipython_mplbackend = None
-
-An example usage of the directive is:
-
-.. code-block:: rst
-
- .. ipython::
-
- In [1]: x = 1
-
- In [2]: y = x**2
-
- In [3]: print(y)
-
-See http://matplotlib.org/sampledoc/ipython_directive.html for additional
-documentation.
-
-Pseudo-Decorators
-=================
-
-Note: Only one decorator is supported per input. If more than one decorator
-is specified, then only the last one is used.
-
-In addition to the Pseudo-Decorators/options described at the above link,
-several enhancements have been made. The directive will emit a message to the
-console at build-time if code-execution resulted in an exception or warning.
-You can suppress these on a per-block basis by specifying the :okexcept:
-or :okwarning: options:
-
-.. code-block:: rst
-
- .. ipython::
- :okexcept:
- :okwarning:
-
- In [1]: 1/0
- In [2]: # raise warning.
-
+# -*- coding: utf-8 -*-
+"""
+Sphinx directive to support embedded IPython code.
+
+This directive allows pasting of entire interactive IPython sessions, prompts
+and all, and their code will actually get re-executed at doc build time, with
+all prompts renumbered sequentially. It also allows you to input code as a pure
+python input by giving the argument python to the directive. The output looks
+like an interactive ipython session.
+
+To enable this directive, simply list it in your Sphinx ``conf.py`` file
+(making sure the directory where you placed it is visible to sphinx, as is
+needed for all Sphinx directives). For example, to enable syntax highlighting
+and the IPython directive::
+
+ extensions = ['IPython.sphinxext.ipython_console_highlighting',
+ 'IPython.sphinxext.ipython_directive']
+
+The IPython directive outputs code-blocks with the language 'ipython'. So
+if you do not have the syntax highlighting extension enabled as well, then
+all rendered code-blocks will be uncolored. By default this directive assumes
+that your prompts are unchanged IPython ones, but this can be customized.
+The configurable options that can be placed in conf.py are:
+
+ipython_savefig_dir:
+ The directory in which to save the figures. This is relative to the
+ Sphinx source directory. The default is `html_static_path`.
+ipython_rgxin:
+ The compiled regular expression to denote the start of IPython input
+ lines. The default is re.compile('In \[(\d+)\]:\s?(.*)\s*'). You
+ shouldn't need to change this.
+ipython_rgxout:
+ The compiled regular expression to denote the start of IPython output
+ lines. The default is re.compile('Out\[(\d+)\]:\s?(.*)\s*'). You
+ shouldn't need to change this.
+ipython_promptin:
+ The string to represent the IPython input prompt in the generated ReST.
+ The default is 'In [%d]:'. This expects that the line numbers are used
+ in the prompt.
+ipython_promptout:
+ The string to represent the IPython output prompt in the generated ReST. The
+ default is 'Out [%d]:'. This expects that the line numbers are used
+ in the prompt.
+ipython_mplbackend:
+ The string which specifies if the embedded Sphinx shell should import
+ Matplotlib and set the backend. The value specifies a backend that is
+ passed to `matplotlib.use()` before any lines in `ipython_execlines` are
+ executed. If not specified in conf.py, then the default value of 'agg' is
+ used. To use the IPython directive without matplotlib as a dependency, set
+ the value to `None`. It may end up that matplotlib is still imported
+ if the user specifies so in `ipython_execlines` or makes use of the
+ @savefig pseudo decorator.
+ipython_execlines:
+ A list of strings to be exec'd in the embedded Sphinx shell. Typical
+ usage is to make certain packages always available. Set this to an empty
+ list if you wish to have no imports always available. If specified in
+ conf.py as `None`, then it has the effect of making no imports available.
+ If omitted from conf.py altogether, then the default value of
+ ['import numpy as np', 'import matplotlib.pyplot as plt'] is used.
+ipython_holdcount:
+ When the @suppress pseudo-decorator is used, the execution count can be
+ incremented or not. The default behavior is to hold the execution count,
+ corresponding to a value of `True`. Set this to `False` to increment
+ the execution count after each suppressed command.
+
+As an example, to use the IPython directive when `matplotlib` is not available,
+one sets the backend to `None`::
+
+ ipython_mplbackend = None
+
+An example usage of the directive is:
+
+.. code-block:: rst
+
+ .. ipython::
+
+ In [1]: x = 1
+
+ In [2]: y = x**2
+
+ In [3]: print(y)
+
+See http://matplotlib.org/sampledoc/ipython_directive.html for additional
+documentation.
+
+Pseudo-Decorators
+=================
+
+Note: Only one decorator is supported per input. If more than one decorator
+is specified, then only the last one is used.
+
+In addition to the Pseudo-Decorators/options described at the above link,
+several enhancements have been made. The directive will emit a message to the
+console at build-time if code-execution resulted in an exception or warning.
+You can suppress these on a per-block basis by specifying the :okexcept:
+or :okwarning: options:
+
+.. code-block:: rst
+
+ .. ipython::
+ :okexcept:
+ :okwarning:
+
+ In [1]: 1/0
+ In [2]: # raise warning.
+
To Do
-----
-
-- Turn the ad-hoc test() function into a real test suite.
-- Break up ipython-specific functionality from matplotlib stuff into better
- separated code.
-
-Authors
--------
-
-- John D Hunter: original author.
-- Fernando Perez: refactoring, documentation, cleanups, port to 0.11.
-- Václav Šmilauer <eudoxos-AT-arcig.cz>: Prompt generalizations.
-- Skipper Seabold: refactoring, cleanups, pure python addition.
-"""
-from __future__ import print_function
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-# Stdlib
-import atexit
+
+- Turn the ad-hoc test() function into a real test suite.
+- Break up ipython-specific functionality from matplotlib stuff into better
+ separated code.
+
+Authors
+-------
+
+- John D Hunter: original author.
+- Fernando Perez: refactoring, documentation, cleanups, port to 0.11.
+- Václav Šmilauer <eudoxos-AT-arcig.cz>: Prompt generalizations.
+- Skipper Seabold: refactoring, cleanups, pure python addition.
+"""
+from __future__ import print_function
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+# Stdlib
+import atexit
import errno
-import os
-import re
-import sys
-import tempfile
-import ast
-import warnings
-import shutil
-
-
-# Third-party
-from docutils.parsers.rst import directives
+import os
+import re
+import sys
+import tempfile
+import ast
+import warnings
+import shutil
+
+
+# Third-party
+from docutils.parsers.rst import directives
from docutils.parsers.rst import Directive
-
-# Our own
-from traitlets.config import Config
-from IPython import InteractiveShell
-from IPython.core.profiledir import ProfileDir
-from IPython.utils import io
-from IPython.utils.py3compat import PY3
-
-if PY3:
- from io import StringIO
-else:
- from StringIO import StringIO
-
-#-----------------------------------------------------------------------------
-# Globals
-#-----------------------------------------------------------------------------
-# for tokenizing blocks
-COMMENT, INPUT, OUTPUT = range(3)
-
-#-----------------------------------------------------------------------------
-# Functions and class declarations
-#-----------------------------------------------------------------------------
-
-def block_parser(part, rgxin, rgxout, fmtin, fmtout):
- """
- part is a string of ipython text, comprised of at most one
- input, one output, comments, and blank lines. The block parser
- parses the text into a list of::
-
- blocks = [ (TOKEN0, data0), (TOKEN1, data1), ...]
-
- where TOKEN is one of [COMMENT | INPUT | OUTPUT ] and
- data is, depending on the type of token::
-
- COMMENT : the comment string
-
- INPUT: the (DECORATOR, INPUT_LINE, REST) where
- DECORATOR: the input decorator (or None)
- INPUT_LINE: the input as string (possibly multi-line)
- REST : any stdout generated by the input line (not OUTPUT)
-
- OUTPUT: the output string, possibly multi-line
-
- """
- block = []
- lines = part.split('\n')
- N = len(lines)
- i = 0
- decorator = None
- while 1:
-
- if i==N:
- # nothing left to parse -- the last line
- break
-
- line = lines[i]
- i += 1
- line_stripped = line.strip()
- if line_stripped.startswith('#'):
- block.append((COMMENT, line))
- continue
-
- if line_stripped.startswith('@'):
- # Here is where we assume there is, at most, one decorator.
- # Might need to rethink this.
- decorator = line_stripped
- continue
-
- # does this look like an input line?
- matchin = rgxin.match(line)
- if matchin:
- lineno, inputline = int(matchin.group(1)), matchin.group(2)
-
- # the ....: continuation string
- continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2))
- Nc = len(continuation)
- # input lines can continue on for more than one line, if
- # we have a '\' line continuation char or a function call
- # echo line 'print'. The input line can only be
- # terminated by the end of the block or an output line, so
- # we parse out the rest of the input line if it is
- # multiline as well as any echo text
-
- rest = []
- while i<N:
-
- # look ahead; if the next line is blank, or a comment, or
- # an output line, we're done
-
- nextline = lines[i]
- matchout = rgxout.match(nextline)
- #print "nextline=%s, continuation=%s, starts=%s"%(nextline, continuation, nextline.startswith(continuation))
- if matchout or nextline.startswith('#'):
- break
- elif nextline.startswith(continuation):
- # The default ipython_rgx* treat the space following the colon as optional.
- # However, if the space is there we must consume it or code
- # employing the cython_magic extension will fail to execute.
- #
- # This works with the default ipython_rgx* patterns,
- # If you modify them, YMMV.
- nextline = nextline[Nc:]
- if nextline and nextline[0] == ' ':
- nextline = nextline[1:]
-
- inputline += '\n' + nextline
- else:
- rest.append(nextline)
- i+= 1
-
- block.append((INPUT, (decorator, inputline, '\n'.join(rest))))
- continue
-
- # if it looks like an output line grab all the text to the end
- # of the block
- matchout = rgxout.match(line)
- if matchout:
- lineno, output = int(matchout.group(1)), matchout.group(2)
- if i<N-1:
- output = '\n'.join([output] + lines[i:])
-
- block.append((OUTPUT, output))
- break
-
- return block
-
-
-class EmbeddedSphinxShell(object):
- """An embedded IPython instance to run inside Sphinx"""
-
- def __init__(self, exec_lines=None):
-
- self.cout = StringIO()
-
- if exec_lines is None:
- exec_lines = []
-
- # Create config object for IPython
- config = Config()
- config.HistoryManager.hist_file = ':memory:'
- config.InteractiveShell.autocall = False
- config.InteractiveShell.autoindent = False
- config.InteractiveShell.colors = 'NoColor'
-
- # create a profile so instance history isn't saved
- tmp_profile_dir = tempfile.mkdtemp(prefix='profile_')
- profname = 'auto_profile_sphinx_build'
- pdir = os.path.join(tmp_profile_dir,profname)
- profile = ProfileDir.create_profile_dir(pdir)
-
- # Create and initialize global ipython, but don't start its mainloop.
- # This will persist across different EmbeddedSphinxShell instances.
- IP = InteractiveShell.instance(config=config, profile_dir=profile)
- atexit.register(self.cleanup)
-
- # Store a few parts of IPython we'll need.
- self.IP = IP
- self.user_ns = self.IP.user_ns
- self.user_global_ns = self.IP.user_global_ns
-
- self.input = ''
- self.output = ''
- self.tmp_profile_dir = tmp_profile_dir
-
- self.is_verbatim = False
- self.is_doctest = False
- self.is_suppress = False
-
- # Optionally, provide more detailed information to shell.
- # this is assigned by the SetUp method of IPythonDirective
- # to point at itself.
- #
- # So, you can access handy things at self.directive.state
- self.directive = None
-
- # on the first call to the savefig decorator, we'll import
- # pyplot as plt so we can make a call to the plt.gcf().savefig
- self._pyplot_imported = False
-
- # Prepopulate the namespace.
- for line in exec_lines:
- self.process_input_line(line, store_history=False)
-
- def cleanup(self):
- shutil.rmtree(self.tmp_profile_dir, ignore_errors=True)
-
- def clear_cout(self):
- self.cout.seek(0)
- self.cout.truncate(0)
-
- def process_input_line(self, line, store_history=True):
- """process the input, capturing stdout"""
-
- stdout = sys.stdout
- splitter = self.IP.input_splitter
- try:
- sys.stdout = self.cout
- splitter.push(line)
- more = splitter.push_accepts_more()
- if not more:
- source_raw = splitter.raw_reset()
- self.IP.run_cell(source_raw, store_history=store_history)
- finally:
- sys.stdout = stdout
-
- def process_image(self, decorator):
- """
- # build out an image directive like
- # .. image:: somefile.png
- # :width 4in
- #
- # from an input like
- # savefig somefile.png width=4in
- """
- savefig_dir = self.savefig_dir
- source_dir = self.source_dir
- saveargs = decorator.split(' ')
- filename = saveargs[1]
+
+# Our own
+from traitlets.config import Config
+from IPython import InteractiveShell
+from IPython.core.profiledir import ProfileDir
+from IPython.utils import io
+from IPython.utils.py3compat import PY3
+
+if PY3:
+ from io import StringIO
+else:
+ from StringIO import StringIO
+
+#-----------------------------------------------------------------------------
+# Globals
+#-----------------------------------------------------------------------------
+# for tokenizing blocks
+COMMENT, INPUT, OUTPUT = range(3)
+
+#-----------------------------------------------------------------------------
+# Functions and class declarations
+#-----------------------------------------------------------------------------
+
+def block_parser(part, rgxin, rgxout, fmtin, fmtout):
+ """
+ part is a string of ipython text, comprised of at most one
+ input, one output, comments, and blank lines. The block parser
+ parses the text into a list of::
+
+ blocks = [ (TOKEN0, data0), (TOKEN1, data1), ...]
+
+ where TOKEN is one of [COMMENT | INPUT | OUTPUT ] and
+ data is, depending on the type of token::
+
+ COMMENT : the comment string
+
+ INPUT: the (DECORATOR, INPUT_LINE, REST) where
+ DECORATOR: the input decorator (or None)
+ INPUT_LINE: the input as string (possibly multi-line)
+ REST : any stdout generated by the input line (not OUTPUT)
+
+ OUTPUT: the output string, possibly multi-line
+
+ """
+ block = []
+ lines = part.split('\n')
+ N = len(lines)
+ i = 0
+ decorator = None
+ while 1:
+
+ if i==N:
+ # nothing left to parse -- the last line
+ break
+
+ line = lines[i]
+ i += 1
+ line_stripped = line.strip()
+ if line_stripped.startswith('#'):
+ block.append((COMMENT, line))
+ continue
+
+ if line_stripped.startswith('@'):
+ # Here is where we assume there is, at most, one decorator.
+ # Might need to rethink this.
+ decorator = line_stripped
+ continue
+
+ # does this look like an input line?
+ matchin = rgxin.match(line)
+ if matchin:
+ lineno, inputline = int(matchin.group(1)), matchin.group(2)
+
+ # the ....: continuation string
+ continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2))
+ Nc = len(continuation)
+ # input lines can continue on for more than one line, if
+ # we have a '\' line continuation char or a function call
+ # echo line 'print'. The input line can only be
+ # terminated by the end of the block or an output line, so
+ # we parse out the rest of the input line if it is
+ # multiline as well as any echo text
+
+ rest = []
+ while i<N:
+
+ # look ahead; if the next line is blank, or a comment, or
+ # an output line, we're done
+
+ nextline = lines[i]
+ matchout = rgxout.match(nextline)
+ #print "nextline=%s, continuation=%s, starts=%s"%(nextline, continuation, nextline.startswith(continuation))
+ if matchout or nextline.startswith('#'):
+ break
+ elif nextline.startswith(continuation):
+ # The default ipython_rgx* treat the space following the colon as optional.
+ # However, if the space is there we must consume it or code
+ # employing the cython_magic extension will fail to execute.
+ #
+ # This works with the default ipython_rgx* patterns,
+ # If you modify them, YMMV.
+ nextline = nextline[Nc:]
+ if nextline and nextline[0] == ' ':
+ nextline = nextline[1:]
+
+ inputline += '\n' + nextline
+ else:
+ rest.append(nextline)
+ i+= 1
+
+ block.append((INPUT, (decorator, inputline, '\n'.join(rest))))
+ continue
+
+ # if it looks like an output line grab all the text to the end
+ # of the block
+ matchout = rgxout.match(line)
+ if matchout:
+ lineno, output = int(matchout.group(1)), matchout.group(2)
+ if i<N-1:
+ output = '\n'.join([output] + lines[i:])
+
+ block.append((OUTPUT, output))
+ break
+
+ return block
+
+
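As a quick illustration of the token stream block_parser produces, here is a hedged driver using the default prompt regexes from the module docstring; the sample block is made up and the snippet assumes block_parser is importable from this module:

    import re
    from IPython.sphinxext.ipython_directive import block_parser, COMMENT, INPUT, OUTPUT

    rgxin = re.compile(r'In \[(\d+)\]:\s?(.*)\s*')
    rgxout = re.compile(r'Out\[(\d+)\]:\s?(.*)\s*')

    part = '\n'.join(['# a comment',
                      'In [1]: 1 + 1',
                      'Out[1]: 2'])

    for token, data in block_parser(part, rgxin, rgxout, 'In [%d]:', 'Out[%d]:'):
        # yields (COMMENT, '# a comment'), (INPUT, (None, '1 + 1', '')), (OUTPUT, '2')
        print((token, data))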
+class EmbeddedSphinxShell(object):
+ """An embedded IPython instance to run inside Sphinx"""
+
+ def __init__(self, exec_lines=None):
+
+ self.cout = StringIO()
+
+ if exec_lines is None:
+ exec_lines = []
+
+ # Create config object for IPython
+ config = Config()
+ config.HistoryManager.hist_file = ':memory:'
+ config.InteractiveShell.autocall = False
+ config.InteractiveShell.autoindent = False
+ config.InteractiveShell.colors = 'NoColor'
+
+ # create a profile so instance history isn't saved
+ tmp_profile_dir = tempfile.mkdtemp(prefix='profile_')
+ profname = 'auto_profile_sphinx_build'
+ pdir = os.path.join(tmp_profile_dir,profname)
+ profile = ProfileDir.create_profile_dir(pdir)
+
+ # Create and initialize global ipython, but don't start its mainloop.
+ # This will persist across different EmbeddedSphinxShell instances.
+ IP = InteractiveShell.instance(config=config, profile_dir=profile)
+ atexit.register(self.cleanup)
+
+ # Store a few parts of IPython we'll need.
+ self.IP = IP
+ self.user_ns = self.IP.user_ns
+ self.user_global_ns = self.IP.user_global_ns
+
+ self.input = ''
+ self.output = ''
+ self.tmp_profile_dir = tmp_profile_dir
+
+ self.is_verbatim = False
+ self.is_doctest = False
+ self.is_suppress = False
+
+ # Optionally, provide more detailed information to shell.
+ # this is assigned by the SetUp method of IPythonDirective
+ # to point at itself.
+ #
+ # So, you can access handy things at self.directive.state
+ self.directive = None
+
+ # on the first call to the savefig decorator, we'll import
+ # pyplot as plt so we can make a call to the plt.gcf().savefig
+ self._pyplot_imported = False
+
+ # Prepopulate the namespace.
+ for line in exec_lines:
+ self.process_input_line(line, store_history=False)
+
+ def cleanup(self):
+ shutil.rmtree(self.tmp_profile_dir, ignore_errors=True)
+
+ def clear_cout(self):
+ self.cout.seek(0)
+ self.cout.truncate(0)
+
+ def process_input_line(self, line, store_history=True):
+ """process the input, capturing stdout"""
+
+ stdout = sys.stdout
+ splitter = self.IP.input_splitter
+ try:
+ sys.stdout = self.cout
+ splitter.push(line)
+ more = splitter.push_accepts_more()
+ if not more:
+ source_raw = splitter.raw_reset()
+ self.IP.run_cell(source_raw, store_history=store_history)
+ finally:
+ sys.stdout = stdout
+
+ def process_image(self, decorator):
+ """
+ # build out an image directive like
+ # .. image:: somefile.png
+ # :width 4in
+ #
+ # from an input like
+ # savefig somefile.png width=4in
+ """
+ savefig_dir = self.savefig_dir
+ source_dir = self.source_dir
+ saveargs = decorator.split(' ')
+ filename = saveargs[1]
# insert relative path to image file in source (as absolute path for Sphinx)
outfile = '/' + os.path.relpath(os.path.join(savefig_dir,filename),
source_dir)
-
- imagerows = ['.. image:: %s'%outfile]
-
- for kwarg in saveargs[2:]:
- arg, val = kwarg.split('=')
- arg = arg.strip()
- val = val.strip()
- imagerows.append(' :%s: %s'%(arg, val))
-
- image_file = os.path.basename(outfile) # only return file name
- image_directive = '\n'.join(imagerows)
- return image_file, image_directive
-
- # Callbacks for each type of token
- def process_input(self, data, input_prompt, lineno):
- """
- Process data block for INPUT token.
-
- """
- decorator, input, rest = data
- image_file = None
- image_directive = None
-
- is_verbatim = decorator=='@verbatim' or self.is_verbatim
- is_doctest = (decorator is not None and \
- decorator.startswith('@doctest')) or self.is_doctest
- is_suppress = decorator=='@suppress' or self.is_suppress
- is_okexcept = decorator=='@okexcept' or self.is_okexcept
- is_okwarning = decorator=='@okwarning' or self.is_okwarning
- is_savefig = decorator is not None and \
- decorator.startswith('@savefig')
-
- input_lines = input.split('\n')
- if len(input_lines) > 1:
- if input_lines[-1] != "":
- input_lines.append('') # make sure there's a blank line
- # so splitter buffer gets reset
-
- continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2))
-
- if is_savefig:
- image_file, image_directive = self.process_image(decorator)
-
- ret = []
- is_semicolon = False
-
- # Hold the execution count, if requested to do so.
- if is_suppress and self.hold_count:
- store_history = False
- else:
- store_history = True
-
- # Note: catch_warnings is not thread safe
- with warnings.catch_warnings(record=True) as ws:
- for i, line in enumerate(input_lines):
- if line.endswith(';'):
- is_semicolon = True
-
- if i == 0:
- # process the first input line
- if is_verbatim:
- self.process_input_line('')
- self.IP.execution_count += 1 # increment it anyway
- else:
- # only submit the line in non-verbatim mode
- self.process_input_line(line, store_history=store_history)
- formatted_line = '%s %s'%(input_prompt, line)
- else:
- # process a continuation line
- if not is_verbatim:
- self.process_input_line(line, store_history=store_history)
-
- formatted_line = '%s %s'%(continuation, line)
-
- if not is_suppress:
- ret.append(formatted_line)
-
- if not is_suppress and len(rest.strip()) and is_verbatim:
- # The "rest" is the standard output of the input. This needs to be
- # added when in verbatim mode. If there is no "rest", then we don't
- # add it, as the new line will be added by the processed output.
- ret.append(rest)
-
- # Fetch the processed output. (This is not the submitted output.)
- self.cout.seek(0)
- processed_output = self.cout.read()
- if not is_suppress and not is_semicolon:
- #
- # In IPythonDirective.run, the elements of `ret` are eventually
- # combined such that '' entries correspond to newlines. So if
- # `processed_output` is equal to '', then adding it to `ret`
- # ensures that there is a blank line between consecutive inputs
- # that have no outputs, as in:
- #
- # In [1]: x = 4
- #
- # In [2]: x = 5
- #
- # When there is processed output, it has a '\n' at the tail end. So
- # adding the output to `ret` will provide the necessary spacing
- # between consecutive input/output blocks, as in:
- #
- # In [1]: x
- # Out[1]: 5
- #
- # In [2]: x
- # Out[2]: 5
- #
- # When there is stdout from the input, it also has a '\n' at the
- # tail end, and so this ensures proper spacing as well. E.g.:
- #
- # In [1]: print x
- # 5
- #
- # In [2]: x = 5
- #
- # When in verbatim mode, `processed_output` is empty (because
- # nothing was passed to IP). Sometimes the submitted code block has
- # an Out[] portion and sometimes it does not. When it does not, we
- # need to ensure proper spacing, so we have to add '' to `ret`.
- # However, if there is an Out[] in the submitted code, then we do
- # not want to add a newline as `process_output` has stuff to add.
- # The difficulty is that `process_input` doesn't know if
- # `process_output` will be called---so it doesn't know if there is
- # Out[] in the code block. This requires that we include a hack in
- # `process_block`. See the comments there.
- #
- ret.append(processed_output)
- elif is_semicolon:
- # Make sure there is a newline after the semicolon.
- ret.append('')
-
- # context information
- filename = "Unknown"
- lineno = 0
- if self.directive.state:
- filename = self.directive.state.document.current_source
- lineno = self.directive.state.document.current_line
-
- # output any exceptions raised during execution to stdout
- # unless :okexcept: has been specified.
- if not is_okexcept and "Traceback" in processed_output:
- s = "\nException in %s at block ending on line %s\n" % (filename, lineno)
- s += "Specify :okexcept: as an option in the ipython:: block to suppress this message\n"
- sys.stdout.write('\n\n>>>' + ('-' * 73))
- sys.stdout.write(s)
- sys.stdout.write(processed_output)
- sys.stdout.write('<<<' + ('-' * 73) + '\n\n')
-
- # output any warning raised during execution to stdout
- # unless :okwarning: has been specified.
- if not is_okwarning:
- for w in ws:
- s = "\nWarning in %s at block ending on line %s\n" % (filename, lineno)
- s += "Specify :okwarning: as an option in the ipython:: block to suppress this message\n"
- sys.stdout.write('\n\n>>>' + ('-' * 73))
- sys.stdout.write(s)
- sys.stdout.write(('-' * 76) + '\n')
- s=warnings.formatwarning(w.message, w.category,
- w.filename, w.lineno, w.line)
- sys.stdout.write(s)
- sys.stdout.write('<<<' + ('-' * 73) + '\n')
-
- self.cout.truncate(0)
-
- return (ret, input_lines, processed_output,
- is_doctest, decorator, image_file, image_directive)
-
-
- def process_output(self, data, output_prompt, input_lines, output,
- is_doctest, decorator, image_file):
- """
- Process data block for OUTPUT token.
-
- """
- # Recall: `data` is the submitted output, and `output` is the processed
- # output from `input_lines`.
-
- TAB = ' ' * 4
-
- if is_doctest and output is not None:
-
- found = output # This is the processed output
- found = found.strip()
- submitted = data.strip()
-
- if self.directive is None:
- source = 'Unavailable'
- content = 'Unavailable'
- else:
- source = self.directive.state.document.current_source
- content = self.directive.content
- # Add tabs and join into a single string.
- content = '\n'.join([TAB + line for line in content])
-
- # Make sure the output contains the output prompt.
- ind = found.find(output_prompt)
- if ind < 0:
- e = ('output does not contain output prompt\n\n'
- 'Document source: {0}\n\n'
- 'Raw content: \n{1}\n\n'
- 'Input line(s):\n{TAB}{2}\n\n'
- 'Output line(s):\n{TAB}{3}\n\n')
- e = e.format(source, content, '\n'.join(input_lines),
- repr(found), TAB=TAB)
- raise RuntimeError(e)
- found = found[len(output_prompt):].strip()
-
- # Handle the actual doctest comparison.
- if decorator.strip() == '@doctest':
- # Standard doctest
- if found != submitted:
- e = ('doctest failure\n\n'
- 'Document source: {0}\n\n'
- 'Raw content: \n{1}\n\n'
- 'On input line(s):\n{TAB}{2}\n\n'
- 'we found output:\n{TAB}{3}\n\n'
- 'instead of the expected:\n{TAB}{4}\n\n')
- e = e.format(source, content, '\n'.join(input_lines),
- repr(found), repr(submitted), TAB=TAB)
- raise RuntimeError(e)
- else:
- self.custom_doctest(decorator, input_lines, found, submitted)
-
- # When in verbatim mode, this holds additional submitted output
- # to be written in the final Sphinx output.
- # https://github.com/ipython/ipython/issues/5776
- out_data = []
-
- is_verbatim = decorator=='@verbatim' or self.is_verbatim
- if is_verbatim and data.strip():
- # Note that `ret` in `process_block` has '' as its last element if
- # the code block was in verbatim mode. So if there is no submitted
- # output, then we will have proper spacing only if we do not add
- # an additional '' to `out_data`. This is why we condition on
- # `and data.strip()`.
-
- # The submitted output has no output prompt. If we want the
- # prompt and the code to appear, we need to join them now
- # instead of adding them separately---as this would create an
- # undesired newline. How we do this ultimately depends on the
- # format of the output regex. I'll do what works for the default
- # prompt for now, and we might have to adjust if it doesn't work
- # in other cases. Finally, the submitted output does not have
- # a trailing newline, so we must add it manually.
- out_data.append("{0} {1}\n".format(output_prompt, data))
-
- return out_data
-
- def process_comment(self, data):
- """Process data fPblock for COMMENT token."""
- if not self.is_suppress:
- return [data]
-
- def save_image(self, image_file):
- """
- Saves the image file to disk.
- """
- self.ensure_pyplot()
- command = 'plt.gcf().savefig("%s")'%image_file
- #print 'SAVEFIG', command # dbg
- self.process_input_line('bookmark ipy_thisdir', store_history=False)
- self.process_input_line('cd -b ipy_savedir', store_history=False)
- self.process_input_line(command, store_history=False)
- self.process_input_line('cd -b ipy_thisdir', store_history=False)
- self.process_input_line('bookmark -d ipy_thisdir', store_history=False)
- self.clear_cout()
-
- def process_block(self, block):
- """
- process block from the block_parser and return a list of processed lines
- """
- ret = []
- output = None
- input_lines = None
- lineno = self.IP.execution_count
-
- input_prompt = self.promptin % lineno
- output_prompt = self.promptout % lineno
- image_file = None
- image_directive = None
-
- found_input = False
- for token, data in block:
- if token == COMMENT:
- out_data = self.process_comment(data)
- elif token == INPUT:
- found_input = True
- (out_data, input_lines, output, is_doctest,
- decorator, image_file, image_directive) = \
- self.process_input(data, input_prompt, lineno)
- elif token == OUTPUT:
- if not found_input:
-
- TAB = ' ' * 4
- linenumber = 0
- source = 'Unavailable'
- content = 'Unavailable'
- if self.directive:
- linenumber = self.directive.state.document.current_line
- source = self.directive.state.document.current_source
- content = self.directive.content
- # Add tabs and join into a single string.
- content = '\n'.join([TAB + line for line in content])
-
- e = ('\n\nInvalid block: Block contains an output prompt '
- 'without an input prompt.\n\n'
- 'Document source: {0}\n\n'
- 'Content begins at line {1}: \n\n{2}\n\n'
- 'Problematic block within content: \n\n{TAB}{3}\n\n')
- e = e.format(source, linenumber, content, block, TAB=TAB)
-
- # Write, rather than include in exception, since Sphinx
- # will truncate tracebacks.
- sys.stdout.write(e)
- raise RuntimeError('An invalid block was detected.')
-
- out_data = \
- self.process_output(data, output_prompt, input_lines,
- output, is_doctest, decorator,
- image_file)
- if out_data:
- # Then there was user submitted output in verbatim mode.
- # We need to remove the last element of `ret` that was
- # added in `process_input`, as it is '' and would introduce
- # an undesirable newline.
- assert(ret[-1] == '')
- del ret[-1]
-
- if out_data:
- ret.extend(out_data)
-
- # save the image files
- if image_file is not None:
- self.save_image(image_file)
-
- return ret, image_directive
-
- def ensure_pyplot(self):
- """
- Ensures that pyplot has been imported into the embedded IPython shell.
-
- Also, makes sure to set the backend appropriately if not set already.
-
- """
- # We are here if the @figure pseudo decorator was used. Thus, it's
- # possible that we could be here even if ipython_mplbackend were set to
- # `None`. That's also strange and perhaps worthy of raising an
- # exception, but for now, we just set the backend to 'agg'.
-
- if not self._pyplot_imported:
- if 'matplotlib.backends' not in sys.modules:
- # Then ipython_mplbackend was set to None but there was a
- # call to the @figure decorator (and ipython_execlines did
- # not set a backend).
- #raise Exception("No backend was set, but @figure was used!")
- import matplotlib
- matplotlib.use('agg')
-
- # Always import pyplot into embedded shell.
- self.process_input_line('import matplotlib.pyplot as plt',
- store_history=False)
- self._pyplot_imported = True
-
- def process_pure_python(self, content):
- """
- content is a list of strings. it is unedited directive content
-
- This runs it line by line in the InteractiveShell, prepends
- prompts as needed capturing stderr and stdout, then returns
- the content as a list as if it were ipython code
- """
- output = []
- savefig = False # keep up with this to clear figure
- multiline = False # to handle line continuation
- multiline_start = None
- fmtin = self.promptin
-
- ct = 0
-
- for lineno, line in enumerate(content):
-
- line_stripped = line.strip()
- if not len(line):
- output.append(line)
- continue
-
- # handle decorators
- if line_stripped.startswith('@'):
- output.extend([line])
- if 'savefig' in line:
- savefig = True # and need to clear figure
- continue
-
- # handle comments
- if line_stripped.startswith('#'):
- output.extend([line])
- continue
-
- # deal with lines checking for multiline
- continuation = u' %s:'% ''.join(['.']*(len(str(ct))+2))
- if not multiline:
- modified = u"%s %s" % (fmtin % ct, line_stripped)
- output.append(modified)
- ct += 1
- try:
- ast.parse(line_stripped)
- output.append(u'')
- except Exception: # on a multiline
- multiline = True
- multiline_start = lineno
- else: # still on a multiline
- modified = u'%s %s' % (continuation, line)
- output.append(modified)
-
- # if the next line is indented, it should be part of multiline
- if len(content) > lineno + 1:
- nextline = content[lineno + 1]
- if len(nextline) - len(nextline.lstrip()) > 3:
- continue
- try:
- mod = ast.parse(
- '\n'.join(content[multiline_start:lineno+1]))
- if isinstance(mod.body[0], ast.FunctionDef):
- # check to see if we have the whole function
- for element in mod.body[0].body:
- if isinstance(element, ast.Return):
- multiline = False
- else:
- output.append(u'')
- multiline = False
- except Exception:
- pass
-
- if savefig: # clear figure if plotted
- self.ensure_pyplot()
- self.process_input_line('plt.clf()', store_history=False)
- self.clear_cout()
- savefig = False
-
- return output
-
- def custom_doctest(self, decorator, input_lines, found, submitted):
- """
- Perform a specialized doctest.
-
- """
- from .custom_doctests import doctests
-
- args = decorator.split()
- doctest_type = args[1]
- if doctest_type in doctests:
- doctests[doctest_type](self, args, input_lines, found, submitted)
- else:
- e = "Invalid option to @doctest: {0}".format(doctest_type)
- raise Exception(e)
-
-
-class IPythonDirective(Directive):
-
- has_content = True
- required_arguments = 0
- optional_arguments = 4 # python, suppress, verbatim, doctest
- final_argumuent_whitespace = True
- option_spec = { 'python': directives.unchanged,
- 'suppress' : directives.flag,
- 'verbatim' : directives.flag,
- 'doctest' : directives.flag,
- 'okexcept': directives.flag,
- 'okwarning': directives.flag
- }
-
- shell = None
-
- seen_docs = set()
-
- def get_config_options(self):
- # contains sphinx configuration variables
- config = self.state.document.settings.env.config
-
- # get config variables to set figure output directory
- savefig_dir = config.ipython_savefig_dir
+
+ imagerows = ['.. image:: %s'%outfile]
+
+ for kwarg in saveargs[2:]:
+ arg, val = kwarg.split('=')
+ arg = arg.strip()
+ val = val.strip()
+ imagerows.append(' :%s: %s'%(arg, val))
+
+ image_file = os.path.basename(outfile) # only return file name
+ image_directive = '\n'.join(imagerows)
+ return image_file, image_directive
+
+ # Callbacks for each type of token
+ def process_input(self, data, input_prompt, lineno):
+ """
+ Process data block for INPUT token.
+
+ """
+ decorator, input, rest = data
+ image_file = None
+ image_directive = None
+
+ is_verbatim = decorator=='@verbatim' or self.is_verbatim
+ is_doctest = (decorator is not None and \
+ decorator.startswith('@doctest')) or self.is_doctest
+ is_suppress = decorator=='@suppress' or self.is_suppress
+ is_okexcept = decorator=='@okexcept' or self.is_okexcept
+ is_okwarning = decorator=='@okwarning' or self.is_okwarning
+ is_savefig = decorator is not None and \
+ decorator.startswith('@savefig')
+
+ input_lines = input.split('\n')
+ if len(input_lines) > 1:
+ if input_lines[-1] != "":
+ input_lines.append('') # make sure there's a blank line
+ # so splitter buffer gets reset
+
+ continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2))
+
+ if is_savefig:
+ image_file, image_directive = self.process_image(decorator)
+
+ ret = []
+ is_semicolon = False
+
+ # Hold the execution count, if requested to do so.
+ if is_suppress and self.hold_count:
+ store_history = False
+ else:
+ store_history = True
+
+ # Note: catch_warnings is not thread safe
+ with warnings.catch_warnings(record=True) as ws:
+ for i, line in enumerate(input_lines):
+ if line.endswith(';'):
+ is_semicolon = True
+
+ if i == 0:
+ # process the first input line
+ if is_verbatim:
+ self.process_input_line('')
+ self.IP.execution_count += 1 # increment it anyway
+ else:
+ # only submit the line in non-verbatim mode
+ self.process_input_line(line, store_history=store_history)
+ formatted_line = '%s %s'%(input_prompt, line)
+ else:
+ # process a continuation line
+ if not is_verbatim:
+ self.process_input_line(line, store_history=store_history)
+
+ formatted_line = '%s %s'%(continuation, line)
+
+ if not is_suppress:
+ ret.append(formatted_line)
+
+ if not is_suppress and len(rest.strip()) and is_verbatim:
+ # The "rest" is the standard output of the input. This needs to be
+ # added when in verbatim mode. If there is no "rest", then we don't
+ # add it, as the new line will be added by the processed output.
+ ret.append(rest)
+
+ # Fetch the processed output. (This is not the submitted output.)
+ self.cout.seek(0)
+ processed_output = self.cout.read()
+ if not is_suppress and not is_semicolon:
+ #
+ # In IPythonDirective.run, the elements of `ret` are eventually
+ # combined such that '' entries correspond to newlines. So if
+ # `processed_output` is equal to '', then adding it to `ret`
+ # ensures that there is a blank line between consecutive inputs
+ # that have no outputs, as in:
+ #
+ # In [1]: x = 4
+ #
+ # In [2]: x = 5
+ #
+ # When there is processed output, it has a '\n' at the tail end. So
+ # adding the output to `ret` will provide the necessary spacing
+ # between consecutive input/output blocks, as in:
+ #
+ # In [1]: x
+ # Out[1]: 5
+ #
+ # In [2]: x
+ # Out[2]: 5
+ #
+ # When there is stdout from the input, it also has a '\n' at the
+ # tail end, and so this ensures proper spacing as well. E.g.:
+ #
+ # In [1]: print x
+ # 5
+ #
+ # In [2]: x = 5
+ #
+ # When in verbatim mode, `processed_output` is empty (because
+ # nothing was passed to IP). Sometimes the submitted code block has
+ # an Out[] portion and sometimes it does not. When it does not, we
+ # need to ensure proper spacing, so we have to add '' to `ret`.
+ # However, if there is an Out[] in the submitted code, then we do
+ # not want to add a newline as `process_output` has stuff to add.
+ # The difficulty is that `process_input` doesn't know if
+ # `process_output` will be called---so it doesn't know if there is
+ # Out[] in the code block. This requires that we include a hack in
+ # `process_block`. See the comments there.
+ #
+ ret.append(processed_output)
+ elif is_semicolon:
+ # Make sure there is a newline after the semicolon.
+ ret.append('')
+
+ # context information
+ filename = "Unknown"
+ lineno = 0
+ if self.directive.state:
+ filename = self.directive.state.document.current_source
+ lineno = self.directive.state.document.current_line
+
+ # output any exceptions raised during execution to stdout
+ # unless :okexcept: has been specified.
+ if not is_okexcept and "Traceback" in processed_output:
+ s = "\nException in %s at block ending on line %s\n" % (filename, lineno)
+ s += "Specify :okexcept: as an option in the ipython:: block to suppress this message\n"
+ sys.stdout.write('\n\n>>>' + ('-' * 73))
+ sys.stdout.write(s)
+ sys.stdout.write(processed_output)
+ sys.stdout.write('<<<' + ('-' * 73) + '\n\n')
+
+ # output any warning raised during execution to stdout
+ # unless :okwarning: has been specified.
+ if not is_okwarning:
+ for w in ws:
+ s = "\nWarning in %s at block ending on line %s\n" % (filename, lineno)
+ s += "Specify :okwarning: as an option in the ipython:: block to suppress this message\n"
+ sys.stdout.write('\n\n>>>' + ('-' * 73))
+ sys.stdout.write(s)
+ sys.stdout.write(('-' * 76) + '\n')
+ s=warnings.formatwarning(w.message, w.category,
+ w.filename, w.lineno, w.line)
+ sys.stdout.write(s)
+ sys.stdout.write('<<<' + ('-' * 73) + '\n')
+
+ self.cout.truncate(0)
+
+ return (ret, input_lines, processed_output,
+ is_doctest, decorator, image_file, image_directive)
+
+
+ def process_output(self, data, output_prompt, input_lines, output,
+ is_doctest, decorator, image_file):
+ """
+ Process data block for OUTPUT token.
+
+ """
+ # Recall: `data` is the submitted output, and `output` is the processed
+ # output from `input_lines`.
+
+ TAB = ' ' * 4
+
+ if is_doctest and output is not None:
+
+ found = output # This is the processed output
+ found = found.strip()
+ submitted = data.strip()
+
+ if self.directive is None:
+ source = 'Unavailable'
+ content = 'Unavailable'
+ else:
+ source = self.directive.state.document.current_source
+ content = self.directive.content
+ # Add tabs and join into a single string.
+ content = '\n'.join([TAB + line for line in content])
+
+ # Make sure the output contains the output prompt.
+ ind = found.find(output_prompt)
+ if ind < 0:
+ e = ('output does not contain output prompt\n\n'
+ 'Document source: {0}\n\n'
+ 'Raw content: \n{1}\n\n'
+ 'Input line(s):\n{TAB}{2}\n\n'
+ 'Output line(s):\n{TAB}{3}\n\n')
+ e = e.format(source, content, '\n'.join(input_lines),
+ repr(found), TAB=TAB)
+ raise RuntimeError(e)
+ found = found[len(output_prompt):].strip()
+
+ # Handle the actual doctest comparison.
+ if decorator.strip() == '@doctest':
+ # Standard doctest
+ if found != submitted:
+ e = ('doctest failure\n\n'
+ 'Document source: {0}\n\n'
+ 'Raw content: \n{1}\n\n'
+ 'On input line(s):\n{TAB}{2}\n\n'
+ 'we found output:\n{TAB}{3}\n\n'
+ 'instead of the expected:\n{TAB}{4}\n\n')
+ e = e.format(source, content, '\n'.join(input_lines),
+ repr(found), repr(submitted), TAB=TAB)
+ raise RuntimeError(e)
+ else:
+ self.custom_doctest(decorator, input_lines, found, submitted)
+
+ # When in verbatim mode, this holds additional submitted output
+ # to be written in the final Sphinx output.
+ # https://github.com/ipython/ipython/issues/5776
+ out_data = []
+
+ is_verbatim = decorator=='@verbatim' or self.is_verbatim
+ if is_verbatim and data.strip():
+ # Note that `ret` in `process_block` has '' as its last element if
+ # the code block was in verbatim mode. So if there is no submitted
+ # output, then we will have proper spacing only if we do not add
+ # an additional '' to `out_data`. This is why we condition on
+ # `and data.strip()`.
+
+ # The submitted output has no output prompt. If we want the
+ # prompt and the code to appear, we need to join them now
+ # instead of adding them separately---as this would create an
+ # undesired newline. How we do this ultimately depends on the
+ # format of the output regex. I'll do what works for the default
+ # prompt for now, and we might have to adjust if it doesn't work
+ # in other cases. Finally, the submitted output does not have
+ # a trailing newline, so we must add it manually.
+ out_data.append("{0} {1}\n".format(output_prompt, data))
+
+ return out_data
+
+ def process_comment(self, data):
+ """Process data fPblock for COMMENT token."""
+ if not self.is_suppress:
+ return [data]
+
+ def save_image(self, image_file):
+ """
+ Saves the image file to disk.
+ """
+ self.ensure_pyplot()
+ command = 'plt.gcf().savefig("%s")'%image_file
+ #print 'SAVEFIG', command # dbg
+ self.process_input_line('bookmark ipy_thisdir', store_history=False)
+ self.process_input_line('cd -b ipy_savedir', store_history=False)
+ self.process_input_line(command, store_history=False)
+ self.process_input_line('cd -b ipy_thisdir', store_history=False)
+ self.process_input_line('bookmark -d ipy_thisdir', store_history=False)
+ self.clear_cout()
+
+ def process_block(self, block):
+ """
+        Process a block from the block_parser and return a list of processed lines.
+ """
+ ret = []
+ output = None
+ input_lines = None
+ lineno = self.IP.execution_count
+
+ input_prompt = self.promptin % lineno
+ output_prompt = self.promptout % lineno
+ image_file = None
+ image_directive = None
+
+ found_input = False
+ for token, data in block:
+ if token == COMMENT:
+ out_data = self.process_comment(data)
+ elif token == INPUT:
+ found_input = True
+ (out_data, input_lines, output, is_doctest,
+ decorator, image_file, image_directive) = \
+ self.process_input(data, input_prompt, lineno)
+ elif token == OUTPUT:
+ if not found_input:
+
+ TAB = ' ' * 4
+ linenumber = 0
+ source = 'Unavailable'
+ content = 'Unavailable'
+ if self.directive:
+ linenumber = self.directive.state.document.current_line
+ source = self.directive.state.document.current_source
+ content = self.directive.content
+ # Add tabs and join into a single string.
+ content = '\n'.join([TAB + line for line in content])
+
+ e = ('\n\nInvalid block: Block contains an output prompt '
+ 'without an input prompt.\n\n'
+ 'Document source: {0}\n\n'
+ 'Content begins at line {1}: \n\n{2}\n\n'
+ 'Problematic block within content: \n\n{TAB}{3}\n\n')
+ e = e.format(source, linenumber, content, block, TAB=TAB)
+
+ # Write, rather than include in exception, since Sphinx
+ # will truncate tracebacks.
+ sys.stdout.write(e)
+ raise RuntimeError('An invalid block was detected.')
+
+ out_data = \
+ self.process_output(data, output_prompt, input_lines,
+ output, is_doctest, decorator,
+ image_file)
+ if out_data:
+ # Then there was user submitted output in verbatim mode.
+ # We need to remove the last element of `ret` that was
+ # added in `process_input`, as it is '' and would introduce
+ # an undesirable newline.
+ assert(ret[-1] == '')
+ del ret[-1]
+
+ if out_data:
+ ret.extend(out_data)
+
+ # save the image files
+ if image_file is not None:
+ self.save_image(image_file)
+
+ return ret, image_directive
+
+ def ensure_pyplot(self):
+ """
+ Ensures that pyplot has been imported into the embedded IPython shell.
+
+ Also, makes sure to set the backend appropriately if not set already.
+
+ """
+ # We are here if the @figure pseudo decorator was used. Thus, it's
+ # possible that we could be here even if python_mplbackend were set to
+ # `None`. That's also strange and perhaps worthy of raising an
+ # exception, but for now, we just set the backend to 'agg'.
+
+ if not self._pyplot_imported:
+ if 'matplotlib.backends' not in sys.modules:
+ # Then ipython_matplotlib was set to None but there was a
+ # call to the @figure decorator (and ipython_execlines did
+ # not set a backend).
+ #raise Exception("No backend was set, but @figure was used!")
+ import matplotlib
+ matplotlib.use('agg')
+
+ # Always import pyplot into embedded shell.
+ self.process_input_line('import matplotlib.pyplot as plt',
+ store_history=False)
+ self._pyplot_imported = True
+
+ def process_pure_python(self, content):
+ """
+        content is a list of strings; it is the unedited directive content.
+
+        This runs it line by line in the InteractiveShell, prepending
+        prompts as needed and capturing stderr and stdout, then returns
+        the content as a list as if it were IPython code.
+ """
+ output = []
+ savefig = False # keep up with this to clear figure
+ multiline = False # to handle line continuation
+ multiline_start = None
+ fmtin = self.promptin
+
+ ct = 0
+
+ for lineno, line in enumerate(content):
+
+ line_stripped = line.strip()
+ if not len(line):
+ output.append(line)
+ continue
+
+ # handle decorators
+ if line_stripped.startswith('@'):
+ output.extend([line])
+ if 'savefig' in line:
+ savefig = True # and need to clear figure
+ continue
+
+ # handle comments
+ if line_stripped.startswith('#'):
+ output.extend([line])
+ continue
+
+ # deal with lines checking for multiline
+ continuation = u' %s:'% ''.join(['.']*(len(str(ct))+2))
+ if not multiline:
+ modified = u"%s %s" % (fmtin % ct, line_stripped)
+ output.append(modified)
+ ct += 1
+ try:
+ ast.parse(line_stripped)
+ output.append(u'')
+ except Exception: # on a multiline
+ multiline = True
+ multiline_start = lineno
+ else: # still on a multiline
+ modified = u'%s %s' % (continuation, line)
+ output.append(modified)
+
+ # if the next line is indented, it should be part of multiline
+ if len(content) > lineno + 1:
+ nextline = content[lineno + 1]
+ if len(nextline) - len(nextline.lstrip()) > 3:
+ continue
+ try:
+ mod = ast.parse(
+ '\n'.join(content[multiline_start:lineno+1]))
+ if isinstance(mod.body[0], ast.FunctionDef):
+ # check to see if we have the whole function
+ for element in mod.body[0].body:
+ if isinstance(element, ast.Return):
+ multiline = False
+ else:
+ output.append(u'')
+ multiline = False
+ except Exception:
+ pass
+
+ if savefig: # clear figure if plotted
+ self.ensure_pyplot()
+ self.process_input_line('plt.clf()', store_history=False)
+ self.clear_cout()
+ savefig = False
+
+ return output
+
+ def custom_doctest(self, decorator, input_lines, found, submitted):
+ """
+ Perform a specialized doctest.
+
+ """
+ from .custom_doctests import doctests
+
+ args = decorator.split()
+ doctest_type = args[1]
+ if doctest_type in doctests:
+ doctests[doctest_type](self, args, input_lines, found, submitted)
+ else:
+ e = "Invalid option to @doctest: {0}".format(doctest_type)
+ raise Exception(e)
+
+
+class IPythonDirective(Directive):
+
+ has_content = True
+ required_arguments = 0
+ optional_arguments = 4 # python, suppress, verbatim, doctest
+    final_argument_whitespace = True
+ option_spec = { 'python': directives.unchanged,
+ 'suppress' : directives.flag,
+ 'verbatim' : directives.flag,
+ 'doctest' : directives.flag,
+ 'okexcept': directives.flag,
+ 'okwarning': directives.flag
+ }
+
+ shell = None
+
+ seen_docs = set()
+
+ def get_config_options(self):
+ # contains sphinx configuration variables
+ config = self.state.document.settings.env.config
+
+ # get config variables to set figure output directory
+ savefig_dir = config.ipython_savefig_dir
source_dir = self.state.document.settings.env.srcdir
savefig_dir = os.path.join(source_dir, savefig_dir)
-
- # get regex and prompt stuff
- rgxin = config.ipython_rgxin
- rgxout = config.ipython_rgxout
- promptin = config.ipython_promptin
- promptout = config.ipython_promptout
- mplbackend = config.ipython_mplbackend
- exec_lines = config.ipython_execlines
- hold_count = config.ipython_holdcount
-
- return (savefig_dir, source_dir, rgxin, rgxout,
- promptin, promptout, mplbackend, exec_lines, hold_count)
-
- def setup(self):
- # Get configuration values.
- (savefig_dir, source_dir, rgxin, rgxout, promptin, promptout,
- mplbackend, exec_lines, hold_count) = self.get_config_options()
-
+
+ # get regex and prompt stuff
+ rgxin = config.ipython_rgxin
+ rgxout = config.ipython_rgxout
+ promptin = config.ipython_promptin
+ promptout = config.ipython_promptout
+ mplbackend = config.ipython_mplbackend
+ exec_lines = config.ipython_execlines
+ hold_count = config.ipython_holdcount
+
+ return (savefig_dir, source_dir, rgxin, rgxout,
+ promptin, promptout, mplbackend, exec_lines, hold_count)
+
+ def setup(self):
+ # Get configuration values.
+ (savefig_dir, source_dir, rgxin, rgxout, promptin, promptout,
+ mplbackend, exec_lines, hold_count) = self.get_config_options()
+
try:
os.makedirs(savefig_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
- if self.shell is None:
- # We will be here many times. However, when the
- # EmbeddedSphinxShell is created, its interactive shell member
- # is the same for each instance.
-
+ if self.shell is None:
+ # We will be here many times. However, when the
+ # EmbeddedSphinxShell is created, its interactive shell member
+ # is the same for each instance.
+
if mplbackend and 'matplotlib.backends' not in sys.modules:
- import matplotlib
- matplotlib.use(mplbackend)
-
- # Must be called after (potentially) importing matplotlib and
- # setting its backend since exec_lines might import pylab.
- self.shell = EmbeddedSphinxShell(exec_lines)
-
- # Store IPython directive to enable better error messages
- self.shell.directive = self
-
- # reset the execution count if we haven't processed this doc
- #NOTE: this may be borked if there are multiple seen_doc tmp files
- #check time stamp?
- if not self.state.document.current_source in self.seen_docs:
- self.shell.IP.history_manager.reset()
- self.shell.IP.execution_count = 1
- self.seen_docs.add(self.state.document.current_source)
-
- # and attach to shell so we don't have to pass them around
- self.shell.rgxin = rgxin
- self.shell.rgxout = rgxout
- self.shell.promptin = promptin
- self.shell.promptout = promptout
- self.shell.savefig_dir = savefig_dir
- self.shell.source_dir = source_dir
- self.shell.hold_count = hold_count
-
- # setup bookmark for saving figures directory
- self.shell.process_input_line('bookmark ipy_savedir %s'%savefig_dir,
- store_history=False)
- self.shell.clear_cout()
-
- return rgxin, rgxout, promptin, promptout
-
- def teardown(self):
- # delete last bookmark
- self.shell.process_input_line('bookmark -d ipy_savedir',
- store_history=False)
- self.shell.clear_cout()
-
- def run(self):
- debug = False
-
- #TODO, any reason block_parser can't be a method of embeddable shell
- # then we wouldn't have to carry these around
- rgxin, rgxout, promptin, promptout = self.setup()
-
- options = self.options
- self.shell.is_suppress = 'suppress' in options
- self.shell.is_doctest = 'doctest' in options
- self.shell.is_verbatim = 'verbatim' in options
- self.shell.is_okexcept = 'okexcept' in options
- self.shell.is_okwarning = 'okwarning' in options
-
- # handle pure python code
- if 'python' in self.arguments:
- content = self.content
- self.content = self.shell.process_pure_python(content)
-
- # parts consists of all text within the ipython-block.
- # Each part is an input/output block.
- parts = '\n'.join(self.content).split('\n\n')
-
- lines = ['.. code-block:: ipython', '']
- figures = []
-
- for part in parts:
- block = block_parser(part, rgxin, rgxout, promptin, promptout)
- if len(block):
- rows, figure = self.shell.process_block(block)
- for row in rows:
- lines.extend([' {0}'.format(line)
- for line in row.split('\n')])
-
- if figure is not None:
- figures.append(figure)
-
- for figure in figures:
- lines.append('')
- lines.extend(figure.split('\n'))
- lines.append('')
-
- if len(lines) > 2:
- if debug:
- print('\n'.join(lines))
- else:
- # This has to do with input, not output. But if we comment
- # these lines out, then no IPython code will appear in the
- # final output.
- self.state_machine.insert_input(
- lines, self.state_machine.input_lines.source(0))
-
- # cleanup
- self.teardown()
-
- return []
-
-# Enable as a proper Sphinx directive
-def setup(app):
- setup.app = app
-
- app.add_directive('ipython', IPythonDirective)
+ import matplotlib
+ matplotlib.use(mplbackend)
+
+ # Must be called after (potentially) importing matplotlib and
+ # setting its backend since exec_lines might import pylab.
+ self.shell = EmbeddedSphinxShell(exec_lines)
+
+ # Store IPython directive to enable better error messages
+ self.shell.directive = self
+
+ # reset the execution count if we haven't processed this doc
+ #NOTE: this may be borked if there are multiple seen_doc tmp files
+ #check time stamp?
+ if not self.state.document.current_source in self.seen_docs:
+ self.shell.IP.history_manager.reset()
+ self.shell.IP.execution_count = 1
+ self.seen_docs.add(self.state.document.current_source)
+
+ # and attach to shell so we don't have to pass them around
+ self.shell.rgxin = rgxin
+ self.shell.rgxout = rgxout
+ self.shell.promptin = promptin
+ self.shell.promptout = promptout
+ self.shell.savefig_dir = savefig_dir
+ self.shell.source_dir = source_dir
+ self.shell.hold_count = hold_count
+
+ # setup bookmark for saving figures directory
+ self.shell.process_input_line('bookmark ipy_savedir %s'%savefig_dir,
+ store_history=False)
+ self.shell.clear_cout()
+
+ return rgxin, rgxout, promptin, promptout
+
+ def teardown(self):
+ # delete last bookmark
+ self.shell.process_input_line('bookmark -d ipy_savedir',
+ store_history=False)
+ self.shell.clear_cout()
+
+ def run(self):
+ debug = False
+
+ #TODO, any reason block_parser can't be a method of embeddable shell
+ # then we wouldn't have to carry these around
+ rgxin, rgxout, promptin, promptout = self.setup()
+
+ options = self.options
+ self.shell.is_suppress = 'suppress' in options
+ self.shell.is_doctest = 'doctest' in options
+ self.shell.is_verbatim = 'verbatim' in options
+ self.shell.is_okexcept = 'okexcept' in options
+ self.shell.is_okwarning = 'okwarning' in options
+
+ # handle pure python code
+ if 'python' in self.arguments:
+ content = self.content
+ self.content = self.shell.process_pure_python(content)
+
+ # parts consists of all text within the ipython-block.
+ # Each part is an input/output block.
+ parts = '\n'.join(self.content).split('\n\n')
+
+ lines = ['.. code-block:: ipython', '']
+ figures = []
+
+ for part in parts:
+ block = block_parser(part, rgxin, rgxout, promptin, promptout)
+ if len(block):
+ rows, figure = self.shell.process_block(block)
+ for row in rows:
+ lines.extend([' {0}'.format(line)
+ for line in row.split('\n')])
+
+ if figure is not None:
+ figures.append(figure)
+
+ for figure in figures:
+ lines.append('')
+ lines.extend(figure.split('\n'))
+ lines.append('')
+
+ if len(lines) > 2:
+ if debug:
+ print('\n'.join(lines))
+ else:
+ # This has to do with input, not output. But if we comment
+ # these lines out, then no IPython code will appear in the
+ # final output.
+ self.state_machine.insert_input(
+ lines, self.state_machine.input_lines.source(0))
+
+ # cleanup
+ self.teardown()
+
+ return []
+
+# Enable as a proper Sphinx directive
+def setup(app):
+ setup.app = app
+
+ app.add_directive('ipython', IPythonDirective)
app.add_config_value('ipython_savefig_dir', 'savefig', 'env')
- app.add_config_value('ipython_rgxin',
- re.compile('In \[(\d+)\]:\s?(.*)\s*'), 'env')
- app.add_config_value('ipython_rgxout',
- re.compile('Out\[(\d+)\]:\s?(.*)\s*'), 'env')
- app.add_config_value('ipython_promptin', 'In [%d]:', 'env')
- app.add_config_value('ipython_promptout', 'Out[%d]:', 'env')
-
- # We could just let matplotlib pick whatever is specified as the default
- # backend in the matplotlibrc file, but this would cause issues if the
- # backend didn't work in headless environments. For this reason, 'agg'
- # is a good default backend choice.
- app.add_config_value('ipython_mplbackend', 'agg', 'env')
-
- # If the user sets this config value to `None`, then EmbeddedSphinxShell's
- # __init__ method will treat it as [].
- execlines = ['import numpy as np', 'import matplotlib.pyplot as plt']
- app.add_config_value('ipython_execlines', execlines, 'env')
-
- app.add_config_value('ipython_holdcount', True, 'env')
-
- metadata = {'parallel_read_safe': True, 'parallel_write_safe': True}
- return metadata
-
-# Simple smoke test, needs to be converted to a proper automatic test.
-def test():
-
- examples = [
- r"""
-In [9]: pwd
-Out[9]: '/home/jdhunter/py4science/book'
-
-In [10]: cd bookdata/
-/home/jdhunter/py4science/book/bookdata
-
-In [2]: from pylab import *
-
-In [2]: ion()
-
-In [3]: im = imread('stinkbug.png')
-
-@savefig mystinkbug.png width=4in
-In [4]: imshow(im)
-Out[4]: <matplotlib.image.AxesImage object at 0x39ea850>
-
-""",
- r"""
-
-In [1]: x = 'hello world'
-
-# string methods can be
-# used to alter the string
-@doctest
-In [2]: x.upper()
-Out[2]: 'HELLO WORLD'
-
-@verbatim
-In [3]: x.st<TAB>
-x.startswith x.strip
-""",
- r"""
-
-In [130]: url = 'http://ichart.finance.yahoo.com/table.csv?s=CROX\
- .....: &d=9&e=22&f=2009&g=d&a=1&br=8&c=2006&ignore=.csv'
-
-In [131]: print url.split('&')
-['http://ichart.finance.yahoo.com/table.csv?s=CROX', 'd=9', 'e=22', 'f=2009', 'g=d', 'a=1', 'b=8', 'c=2006', 'ignore=.csv']
-
-In [60]: import urllib
-
-""",
- r"""\
-
-In [133]: import numpy.random
-
-@suppress
-In [134]: numpy.random.seed(2358)
-
-@doctest
-In [135]: numpy.random.rand(10,2)
-Out[135]:
-array([[ 0.64524308, 0.59943846],
- [ 0.47102322, 0.8715456 ],
- [ 0.29370834, 0.74776844],
- [ 0.99539577, 0.1313423 ],
- [ 0.16250302, 0.21103583],
- [ 0.81626524, 0.1312433 ],
- [ 0.67338089, 0.72302393],
- [ 0.7566368 , 0.07033696],
- [ 0.22591016, 0.77731835],
- [ 0.0072729 , 0.34273127]])
-
-""",
-
- r"""
-In [106]: print x
-jdh
-
-In [109]: for i in range(10):
- .....: print i
- .....:
- .....:
-0
-1
-2
-3
-4
-5
-6
-7
-8
-9
-""",
-
- r"""
-
-In [144]: from pylab import *
-
-In [145]: ion()
-
-# use a semicolon to suppress the output
-@savefig test_hist.png width=4in
-In [151]: hist(np.random.randn(10000), 100);
-
-
-@savefig test_plot.png width=4in
-In [151]: plot(np.random.randn(10000), 'o');
- """,
-
- r"""
-# use a semicolon to suppress the output
-In [151]: plt.clf()
-
-@savefig plot_simple.png width=4in
-In [151]: plot([1,2,3])
-
-@savefig hist_simple.png width=4in
-In [151]: hist(np.random.randn(10000), 100);
-
-""",
- r"""
-# update the current fig
-In [151]: ylabel('number')
-
-In [152]: title('normal distribution')
-
-
-@savefig hist_with_text.png
-In [153]: grid(True)
-
-@doctest float
-In [154]: 0.1 + 0.2
-Out[154]: 0.3
-
-@doctest float
-In [155]: np.arange(16).reshape(4,4)
-Out[155]:
-array([[ 0, 1, 2, 3],
- [ 4, 5, 6, 7],
- [ 8, 9, 10, 11],
- [12, 13, 14, 15]])
-
-In [1]: x = np.arange(16, dtype=float).reshape(4,4)
-
-In [2]: x[0,0] = np.inf
-
-In [3]: x[0,1] = np.nan
-
-@doctest float
-In [4]: x
-Out[4]:
-array([[ inf, nan, 2., 3.],
- [ 4., 5., 6., 7.],
- [ 8., 9., 10., 11.],
- [ 12., 13., 14., 15.]])
-
-
- """,
- ]
- # skip local-file depending first example:
- examples = examples[1:]
-
- #ipython_directive.DEBUG = True # dbg
- #options = dict(suppress=True) # dbg
- options = dict()
- for example in examples:
- content = example.split('\n')
- IPythonDirective('debug', arguments=None, options=options,
- content=content, lineno=0,
- content_offset=None, block_text=None,
- state=None, state_machine=None,
- )
-
-# Run test suite as a script
-if __name__=='__main__':
- if not os.path.isdir('_static'):
- os.mkdir('_static')
- test()
- print('All OK? Check figures in _static/')
+ app.add_config_value('ipython_rgxin',
+ re.compile('In \[(\d+)\]:\s?(.*)\s*'), 'env')
+ app.add_config_value('ipython_rgxout',
+ re.compile('Out\[(\d+)\]:\s?(.*)\s*'), 'env')
+ app.add_config_value('ipython_promptin', 'In [%d]:', 'env')
+ app.add_config_value('ipython_promptout', 'Out[%d]:', 'env')
+
+ # We could just let matplotlib pick whatever is specified as the default
+ # backend in the matplotlibrc file, but this would cause issues if the
+ # backend didn't work in headless environments. For this reason, 'agg'
+ # is a good default backend choice.
+ app.add_config_value('ipython_mplbackend', 'agg', 'env')
+
+ # If the user sets this config value to `None`, then EmbeddedSphinxShell's
+ # __init__ method will treat it as [].
+ execlines = ['import numpy as np', 'import matplotlib.pyplot as plt']
+ app.add_config_value('ipython_execlines', execlines, 'env')
+
+ app.add_config_value('ipython_holdcount', True, 'env')
+
+ metadata = {'parallel_read_safe': True, 'parallel_write_safe': True}
+ return metadata
+
+# Simple smoke test, needs to be converted to a proper automatic test.
+def test():
+
+ examples = [
+ r"""
+In [9]: pwd
+Out[9]: '/home/jdhunter/py4science/book'
+
+In [10]: cd bookdata/
+/home/jdhunter/py4science/book/bookdata
+
+In [2]: from pylab import *
+
+In [2]: ion()
+
+In [3]: im = imread('stinkbug.png')
+
+@savefig mystinkbug.png width=4in
+In [4]: imshow(im)
+Out[4]: <matplotlib.image.AxesImage object at 0x39ea850>
+
+""",
+ r"""
+
+In [1]: x = 'hello world'
+
+# string methods can be
+# used to alter the string
+@doctest
+In [2]: x.upper()
+Out[2]: 'HELLO WORLD'
+
+@verbatim
+In [3]: x.st<TAB>
+x.startswith x.strip
+""",
+ r"""
+
+In [130]: url = 'http://ichart.finance.yahoo.com/table.csv?s=CROX\
+ .....: &d=9&e=22&f=2009&g=d&a=1&br=8&c=2006&ignore=.csv'
+
+In [131]: print url.split('&')
+['http://ichart.finance.yahoo.com/table.csv?s=CROX', 'd=9', 'e=22', 'f=2009', 'g=d', 'a=1', 'b=8', 'c=2006', 'ignore=.csv']
+
+In [60]: import urllib
+
+""",
+ r"""\
+
+In [133]: import numpy.random
+
+@suppress
+In [134]: numpy.random.seed(2358)
+
+@doctest
+In [135]: numpy.random.rand(10,2)
+Out[135]:
+array([[ 0.64524308, 0.59943846],
+ [ 0.47102322, 0.8715456 ],
+ [ 0.29370834, 0.74776844],
+ [ 0.99539577, 0.1313423 ],
+ [ 0.16250302, 0.21103583],
+ [ 0.81626524, 0.1312433 ],
+ [ 0.67338089, 0.72302393],
+ [ 0.7566368 , 0.07033696],
+ [ 0.22591016, 0.77731835],
+ [ 0.0072729 , 0.34273127]])
+
+""",
+
+ r"""
+In [106]: print x
+jdh
+
+In [109]: for i in range(10):
+ .....: print i
+ .....:
+ .....:
+0
+1
+2
+3
+4
+5
+6
+7
+8
+9
+""",
+
+ r"""
+
+In [144]: from pylab import *
+
+In [145]: ion()
+
+# use a semicolon to suppress the output
+@savefig test_hist.png width=4in
+In [151]: hist(np.random.randn(10000), 100);
+
+
+@savefig test_plot.png width=4in
+In [151]: plot(np.random.randn(10000), 'o');
+ """,
+
+ r"""
+# use a semicolon to suppress the output
+In [151]: plt.clf()
+
+@savefig plot_simple.png width=4in
+In [151]: plot([1,2,3])
+
+@savefig hist_simple.png width=4in
+In [151]: hist(np.random.randn(10000), 100);
+
+""",
+ r"""
+# update the current fig
+In [151]: ylabel('number')
+
+In [152]: title('normal distribution')
+
+
+@savefig hist_with_text.png
+In [153]: grid(True)
+
+@doctest float
+In [154]: 0.1 + 0.2
+Out[154]: 0.3
+
+@doctest float
+In [155]: np.arange(16).reshape(4,4)
+Out[155]:
+array([[ 0, 1, 2, 3],
+ [ 4, 5, 6, 7],
+ [ 8, 9, 10, 11],
+ [12, 13, 14, 15]])
+
+In [1]: x = np.arange(16, dtype=float).reshape(4,4)
+
+In [2]: x[0,0] = np.inf
+
+In [3]: x[0,1] = np.nan
+
+@doctest float
+In [4]: x
+Out[4]:
+array([[ inf, nan, 2., 3.],
+ [ 4., 5., 6., 7.],
+ [ 8., 9., 10., 11.],
+ [ 12., 13., 14., 15.]])
+
+
+ """,
+ ]
+ # skip local-file depending first example:
+ examples = examples[1:]
+
+ #ipython_directive.DEBUG = True # dbg
+ #options = dict(suppress=True) # dbg
+ options = dict()
+ for example in examples:
+ content = example.split('\n')
+ IPythonDirective('debug', arguments=None, options=options,
+ content=content, lineno=0,
+ content_offset=None, block_text=None,
+ state=None, state_machine=None,
+ )
+
+# Run test suite as a script
+if __name__=='__main__':
+ if not os.path.isdir('_static'):
+ os.mkdir('_static')
+ test()
+ print('All OK? Check figures in _static/')
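As a rough illustration of how the setup(app) hook above is consumed, the sketch below shows a Sphinx conf.py that enables the directive and overrides a few of the registered config values. The extension path reflects the usual upstream location of this module; treat the directory name and exec-line choices as placeholders rather than required values.

    # conf.py -- hypothetical Sphinx configuration for the ipython directive.
    # 'IPython.sphinxext.ipython_directive' is where this module normally lives
    # upstream; adjust the dotted path if it is vendored elsewhere.
    extensions = ['IPython.sphinxext.ipython_directive']

    # Override values declared via app.add_config_value() in setup(app); the
    # defaults ('savefig', 'agg', numpy/pyplot exec lines, holdcount=True)
    # apply when these are omitted.
    ipython_savefig_dir = 'generated_figs'
    ipython_mplbackend = 'agg'
    ipython_execlines = ['import numpy as np', 'import matplotlib.pyplot as plt']
    ipython_holdcount = True

In a .rst source the block is then written as ".. ipython::" with the options declared in option_spec (:suppress:, :verbatim:, :doctest:, :okexcept:, :okwarning:) and, inside the block, the @savefig/@doctest/@suppress/@verbatim pseudo-decorators exercised by the test() examples above.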
diff --git a/contrib/python/ipython/py2/IPython/terminal/console.py b/contrib/python/ipython/py2/IPython/terminal/console.py
index 981ee46b17..65571a7572 100644
--- a/contrib/python/ipython/py2/IPython/terminal/console.py
+++ b/contrib/python/ipython/py2/IPython/terminal/console.py
@@ -1,19 +1,19 @@
-"""
-Shim to maintain backwards compatibility with old IPython.terminal.console imports.
-"""
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import sys
-from warnings import warn
-
-from IPython.utils.shimmodule import ShimModule, ShimWarning
-
+"""
+Shim to maintain backwards compatibility with old IPython.terminal.console imports.
+"""
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import sys
+from warnings import warn
+
+from IPython.utils.shimmodule import ShimModule, ShimWarning
+
warn("The `IPython.terminal.console` package has been deprecated since IPython 4.0. "
- "You should import from jupyter_console instead.", ShimWarning)
-
-# Unconditionally insert the shim into sys.modules so that further import calls
-# trigger the custom attribute access above
-
-sys.modules['IPython.terminal.console'] = ShimModule(
- src='IPython.terminal.console', mirror='jupyter_console')
+ "You should import from jupyter_console instead.", ShimWarning)
+
+# Unconditionally insert the shim into sys.modules so that further import calls
+# trigger the custom attribute access above
+
+sys.modules['IPython.terminal.console'] = ShimModule(
+ src='IPython.terminal.console', mirror='jupyter_console')
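The shim above works by swapping a ShimModule into sys.modules, so the deprecated import path keeps resolving while attribute access is forwarded to jupyter_console. A small sketch of what a caller observes, assuming jupyter_console is installed; the ShimWarning only fires on the first import of the module.

    import sys
    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        import IPython.terminal.console   # first import emits the ShimWarning
    print([str(w.message) for w in caught])

    shim = sys.modules['IPython.terminal.console']   # the installed ShimModule
    # Attribute lookups are proxied to the mirror package, e.g. shim.app is
    # expected to resolve against jupyter_console.app when that package exists.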
diff --git a/contrib/python/ipython/py2/IPython/terminal/embed.py b/contrib/python/ipython/py2/IPython/terminal/embed.py
index 8ff15313dd..5ad70431e4 100644
--- a/contrib/python/ipython/py2/IPython/terminal/embed.py
+++ b/contrib/python/ipython/py2/IPython/terminal/embed.py
@@ -1,33 +1,33 @@
-# encoding: utf-8
-"""
-An embedded IPython shell.
-"""
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import with_statement
-from __future__ import print_function
-
-import sys
-import warnings
-
-from IPython.core import ultratb, compilerop
+# encoding: utf-8
+"""
+An embedded IPython shell.
+"""
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import with_statement
+from __future__ import print_function
+
+import sys
+import warnings
+
+from IPython.core import ultratb, compilerop
from IPython.core import magic_arguments
-from IPython.core.magic import Magics, magics_class, line_magic
+from IPython.core.magic import Magics, magics_class, line_magic
from IPython.core.interactiveshell import DummyMod, InteractiveShell
-from IPython.terminal.interactiveshell import TerminalInteractiveShell
-from IPython.terminal.ipapp import load_default_config
-
-from traitlets import Bool, CBool, Unicode
-from IPython.utils.io import ask_yes_no
-
-class KillEmbeded(Exception):pass
-
-# This is an additional magic that is exposed in embedded shells.
-@magics_class
-class EmbeddedMagics(Magics):
-
- @line_magic
+from IPython.terminal.interactiveshell import TerminalInteractiveShell
+from IPython.terminal.ipapp import load_default_config
+
+from traitlets import Bool, CBool, Unicode
+from IPython.utils.io import ask_yes_no
+
+class KillEmbeded(Exception):pass
+
+# This is an additional magic that is exposed in embedded shells.
+@magics_class
+class EmbeddedMagics(Magics):
+
+ @line_magic
@magic_arguments.magic_arguments()
@magic_arguments.argument('-i', '--instance', action='store_true',
help='Kill instance instead of call location')
@@ -35,10 +35,10 @@ class EmbeddedMagics(Magics):
help='Also exit the current session')
@magic_arguments.argument('-y', '--yes', action='store_true',
help='Do not ask confirmation')
- def kill_embedded(self, parameter_s=''):
+ def kill_embedded(self, parameter_s=''):
"""%kill_embedded : deactivate for good the current embedded IPython
-
- This function (after asking for confirmation) sets an internal flag so
+
+ This function (after asking for confirmation) sets an internal flag so
that an embedded IPython will never activate again for the given call
location. This is useful to permanently disable a shell that is being
called inside a loop: once you've figured out what you needed from it,
@@ -60,8 +60,8 @@ class EmbeddedMagics(Magics):
This was the default behavior before IPython 5.2
- """
-
+ """
+
args = magic_arguments.parse_argstring(self.kill_embedded, parameter_s)
print(args)
if args.instance:
@@ -85,38 +85,38 @@ class EmbeddedMagics(Magics):
self.shell.embedded_active = False
print("This embedded IPython call location will not reactivate anymore "
"once you exit.")
-
+
if args.exit:
# Ask-exit does not really ask, it just set internals flags to exit
# on next loop.
self.shell.ask_exit()
-
-
- @line_magic
- def exit_raise(self, parameter_s=''):
- """%exit_raise Make the current embedded kernel exit and raise and exception.
-
- This function sets an internal flag so that an embedded IPython will
- raise a `IPython.terminal.embed.KillEmbeded` Exception on exit, and then exit the current I. This is
- useful to permanently exit a loop that create IPython embed instance.
- """
-
- self.shell.should_raise = True
- self.shell.ask_exit()
-
-
-
-class InteractiveShellEmbed(TerminalInteractiveShell):
-
- dummy_mode = Bool(False)
- exit_msg = Unicode('')
- embedded = CBool(True)
- should_raise = CBool(False)
- # Like the base class display_banner is not configurable, but here it
- # is True by default.
- display_banner = CBool(True)
- exit_msg = Unicode()
-
+
+
+ @line_magic
+ def exit_raise(self, parameter_s=''):
+        """%exit_raise Make the current embedded kernel exit and raise an exception.
+
+        This function sets an internal flag so that an embedded IPython will
+        raise an `IPython.terminal.embed.KillEmbeded` exception on exit, and then
+        exit the current instance. This is useful for permanently exiting a loop
+        that creates IPython embed instances.
+ """
+
+ self.shell.should_raise = True
+ self.shell.ask_exit()
+
+
+
+class InteractiveShellEmbed(TerminalInteractiveShell):
+
+ dummy_mode = Bool(False)
+ exit_msg = Unicode('')
+ embedded = CBool(True)
+ should_raise = CBool(False)
+ # Like the base class display_banner is not configurable, but here it
+ # is True by default.
+ display_banner = CBool(True)
+ exit_msg = Unicode()
+
# When embedding, by default we don't change the terminal title
term_title = Bool(False,
help="Automatically set the terminal title"
@@ -144,140 +144,140 @@ class InteractiveShellEmbed(TerminalInteractiveShell):
InteractiveShellEmbed._inactive_locations.add(
self._call_location_id)
- def __init__(self, **kw):
- if kw.get('user_global_ns', None) is not None:
+ def __init__(self, **kw):
+ if kw.get('user_global_ns', None) is not None:
raise DeprecationWarning(
"Key word argument `user_global_ns` has been replaced by `user_module` since IPython 4.0.")
-
+
clid = kw.pop('_init_location_id', None)
if not clid:
frame = sys._getframe(1)
clid = '%s:%s' % (frame.f_code.co_filename, frame.f_lineno)
self._init_location_id = clid
- super(InteractiveShellEmbed,self).__init__(**kw)
-
- # don't use the ipython crash handler so that user exceptions aren't
- # trapped
- sys.excepthook = ultratb.FormattedTB(color_scheme=self.colors,
- mode=self.xmode,
- call_pdb=self.pdb)
-
- def init_sys_modules(self):
+ super(InteractiveShellEmbed,self).__init__(**kw)
+
+ # don't use the ipython crash handler so that user exceptions aren't
+ # trapped
+ sys.excepthook = ultratb.FormattedTB(color_scheme=self.colors,
+ mode=self.xmode,
+ call_pdb=self.pdb)
+
+ def init_sys_modules(self):
"""
Explicitly overwrite :mod:`IPython.core.interactiveshell` to do nothing.
"""
- pass
-
- def init_magics(self):
- super(InteractiveShellEmbed, self).init_magics()
- self.register_magics(EmbeddedMagics)
-
- def __call__(self, header='', local_ns=None, module=None, dummy=None,
+ pass
+
+ def init_magics(self):
+ super(InteractiveShellEmbed, self).init_magics()
+ self.register_magics(EmbeddedMagics)
+
+ def __call__(self, header='', local_ns=None, module=None, dummy=None,
stack_depth=1, global_ns=None, compile_flags=None, **kw):
- """Activate the interactive interpreter.
-
- __call__(self,header='',local_ns=None,module=None,dummy=None) -> Start
- the interpreter shell with the given local and global namespaces, and
- optionally print a header string at startup.
-
- The shell can be globally activated/deactivated using the
- dummy_mode attribute. This allows you to turn off a shell used
- for debugging globally.
-
- However, *each* time you call the shell you can override the current
- state of dummy_mode with the optional keyword parameter 'dummy'. For
- example, if you set dummy mode on with IPShell.dummy_mode = True, you
- can still have a specific call work by making it as IPShell(dummy=False).
- """
-
+ """Activate the interactive interpreter.
+
+ __call__(self,header='',local_ns=None,module=None,dummy=None) -> Start
+ the interpreter shell with the given local and global namespaces, and
+ optionally print a header string at startup.
+
+ The shell can be globally activated/deactivated using the
+ dummy_mode attribute. This allows you to turn off a shell used
+ for debugging globally.
+
+ However, *each* time you call the shell you can override the current
+ state of dummy_mode with the optional keyword parameter 'dummy'. For
+ example, if you set dummy mode on with IPShell.dummy_mode = True, you
+        can still have a specific call work by calling it as IPShell(dummy=False).
+ """
+
# we are called, set the underlying interactiveshell not to exit.
self.keep_running = True
- # If the user has turned it off, go away
+ # If the user has turned it off, go away
clid = kw.pop('_call_location_id', None)
if not clid:
frame = sys._getframe(1)
clid = '%s:%s' % (frame.f_code.co_filename, frame.f_lineno)
self._call_location_id = clid
- if not self.embedded_active:
- return
-
- # Normal exits from interactive mode set this flag, so the shell can't
- # re-enter (it checks this variable at the start of interactive mode).
- self.exit_now = False
-
- # Allow the dummy parameter to override the global __dummy_mode
- if dummy or (dummy != 0 and self.dummy_mode):
- return
-
- # self.banner is auto computed
- if header:
- self.old_banner2 = self.banner2
- self.banner2 = self.banner2 + '\n' + header + '\n'
- else:
- self.old_banner2 = ''
-
+ if not self.embedded_active:
+ return
+
+ # Normal exits from interactive mode set this flag, so the shell can't
+ # re-enter (it checks this variable at the start of interactive mode).
+ self.exit_now = False
+
+ # Allow the dummy parameter to override the global __dummy_mode
+ if dummy or (dummy != 0 and self.dummy_mode):
+ return
+
+ # self.banner is auto computed
+ if header:
+ self.old_banner2 = self.banner2
+ self.banner2 = self.banner2 + '\n' + header + '\n'
+ else:
+ self.old_banner2 = ''
+
if self.display_banner:
self.show_banner()
- # Call the embedding code with a stack depth of 1 so it can skip over
- # our call and get the original caller's namespaces.
- self.mainloop(local_ns, module, stack_depth=stack_depth,
- global_ns=global_ns, compile_flags=compile_flags)
-
- self.banner2 = self.old_banner2
-
- if self.exit_msg is not None:
- print(self.exit_msg)
-
- if self.should_raise:
- raise KillEmbeded('Embedded IPython raising error, as user requested.')
-
-
- def mainloop(self, local_ns=None, module=None, stack_depth=0,
- display_banner=None, global_ns=None, compile_flags=None):
- """Embeds IPython into a running python program.
-
- Parameters
- ----------
-
- local_ns, module
- Working local namespace (a dict) and module (a module or similar
- object). If given as None, they are automatically taken from the scope
- where the shell was called, so that program variables become visible.
-
- stack_depth : int
- How many levels in the stack to go to looking for namespaces (when
- local_ns or module is None). This allows an intermediate caller to
- make sure that this function gets the namespace from the intended
- level in the stack. By default (0) it will get its locals and globals
- from the immediate caller.
-
- compile_flags
- A bit field identifying the __future__ features
- that are enabled, as passed to the builtin :func:`compile` function.
- If given as None, they are automatically taken from the scope where
- the shell was called.
-
- """
-
- if (global_ns is not None) and (module is None):
+ # Call the embedding code with a stack depth of 1 so it can skip over
+ # our call and get the original caller's namespaces.
+ self.mainloop(local_ns, module, stack_depth=stack_depth,
+ global_ns=global_ns, compile_flags=compile_flags)
+
+ self.banner2 = self.old_banner2
+
+ if self.exit_msg is not None:
+ print(self.exit_msg)
+
+ if self.should_raise:
+ raise KillEmbeded('Embedded IPython raising error, as user requested.')
+
+
+ def mainloop(self, local_ns=None, module=None, stack_depth=0,
+ display_banner=None, global_ns=None, compile_flags=None):
+ """Embeds IPython into a running python program.
+
+ Parameters
+ ----------
+
+ local_ns, module
+ Working local namespace (a dict) and module (a module or similar
+ object). If given as None, they are automatically taken from the scope
+ where the shell was called, so that program variables become visible.
+
+ stack_depth : int
+ How many levels in the stack to go to looking for namespaces (when
+ local_ns or module is None). This allows an intermediate caller to
+ make sure that this function gets the namespace from the intended
+ level in the stack. By default (0) it will get its locals and globals
+ from the immediate caller.
+
+ compile_flags
+ A bit field identifying the __future__ features
+ that are enabled, as passed to the builtin :func:`compile` function.
+ If given as None, they are automatically taken from the scope where
+ the shell was called.
+
+ """
+
+ if (global_ns is not None) and (module is None):
raise DeprecationWarning("'global_ns' keyword argument is deprecated, and has been removed in IPython 5.0 use `module` keyword argument instead.")
-
+
if (display_banner is not None):
warnings.warn("The display_banner parameter is deprecated since IPython 4.0", DeprecationWarning)
- # Get locals and globals from caller
- if ((local_ns is None or module is None or compile_flags is None)
- and self.default_user_namespaces):
- call_frame = sys._getframe(stack_depth).f_back
-
- if local_ns is None:
- local_ns = call_frame.f_locals
- if module is None:
- global_ns = call_frame.f_globals
+ # Get locals and globals from caller
+ if ((local_ns is None or module is None or compile_flags is None)
+ and self.default_user_namespaces):
+ call_frame = sys._getframe(stack_depth).f_back
+
+ if local_ns is None:
+ local_ns = call_frame.f_locals
+ if module is None:
+ global_ns = call_frame.f_globals
try:
module = sys.modules[global_ns['__name__']]
except KeyError:
@@ -286,110 +286,110 @@ class InteractiveShellEmbed(TerminalInteractiveShell):
)
module = DummyMod()
module.__dict__ = global_ns
- if compile_flags is None:
- compile_flags = (call_frame.f_code.co_flags &
- compilerop.PyCF_MASK)
-
- # Save original namespace and module so we can restore them after
- # embedding; otherwise the shell doesn't shut down correctly.
- orig_user_module = self.user_module
- orig_user_ns = self.user_ns
- orig_compile_flags = self.compile.flags
-
- # Update namespaces and fire up interpreter
-
- # The global one is easy, we can just throw it in
- if module is not None:
- self.user_module = module
-
- # But the user/local one is tricky: ipython needs it to store internal
- # data, but we also need the locals. We'll throw our hidden variables
- # like _ih and get_ipython() into the local namespace, but delete them
- # later.
- if local_ns is not None:
- reentrant_local_ns = {k: v for (k, v) in local_ns.items() if k not in self.user_ns_hidden.keys()}
- self.user_ns = reentrant_local_ns
- self.init_user_ns()
-
- # Compiler flags
- if compile_flags is not None:
- self.compile.flags = compile_flags
-
- # make sure the tab-completer has the correct frame information, so it
- # actually completes using the frame's locals/globals
- self.set_completer_frame()
-
- with self.builtin_trap, self.display_trap:
+ if compile_flags is None:
+ compile_flags = (call_frame.f_code.co_flags &
+ compilerop.PyCF_MASK)
+
+ # Save original namespace and module so we can restore them after
+ # embedding; otherwise the shell doesn't shut down correctly.
+ orig_user_module = self.user_module
+ orig_user_ns = self.user_ns
+ orig_compile_flags = self.compile.flags
+
+ # Update namespaces and fire up interpreter
+
+ # The global one is easy, we can just throw it in
+ if module is not None:
+ self.user_module = module
+
+ # But the user/local one is tricky: ipython needs it to store internal
+ # data, but we also need the locals. We'll throw our hidden variables
+ # like _ih and get_ipython() into the local namespace, but delete them
+ # later.
+ if local_ns is not None:
+ reentrant_local_ns = {k: v for (k, v) in local_ns.items() if k not in self.user_ns_hidden.keys()}
+ self.user_ns = reentrant_local_ns
+ self.init_user_ns()
+
+ # Compiler flags
+ if compile_flags is not None:
+ self.compile.flags = compile_flags
+
+ # make sure the tab-completer has the correct frame information, so it
+ # actually completes using the frame's locals/globals
+ self.set_completer_frame()
+
+ with self.builtin_trap, self.display_trap:
self.interact()
-
- # now, purge out the local namespace of IPython's hidden variables.
- if local_ns is not None:
- local_ns.update({k: v for (k, v) in self.user_ns.items() if k not in self.user_ns_hidden.keys()})
-
-
- # Restore original namespace so shell can shut down when we exit.
- self.user_module = orig_user_module
- self.user_ns = orig_user_ns
- self.compile.flags = orig_compile_flags
-
-
-def embed(**kwargs):
- """Call this to embed IPython at the current point in your program.
-
- The first invocation of this will create an :class:`InteractiveShellEmbed`
- instance and then call it. Consecutive calls just call the already
- created instance.
-
- If you don't want the kernel to initialize the namespace
- from the scope of the surrounding function,
- and/or you want to load full IPython configuration,
- you probably want `IPython.start_ipython()` instead.
-
- Here is a simple example::
-
- from IPython import embed
- a = 10
- b = 20
- embed(header='First time')
- c = 30
- d = 40
- embed()
-
- Full customization can be done by passing a :class:`Config` in as the
- config argument.
- """
- config = kwargs.get('config')
- header = kwargs.pop('header', u'')
- compile_flags = kwargs.pop('compile_flags', None)
- if config is None:
- config = load_default_config()
- config.InteractiveShellEmbed = config.TerminalInteractiveShell
- kwargs['config'] = config
- #save ps1/ps2 if defined
- ps1 = None
- ps2 = None
- try:
- ps1 = sys.ps1
- ps2 = sys.ps2
- except AttributeError:
- pass
- #save previous instance
- saved_shell_instance = InteractiveShell._instance
- if saved_shell_instance is not None:
- cls = type(saved_shell_instance)
- cls.clear_instance()
+
+ # now, purge out the local namespace of IPython's hidden variables.
+ if local_ns is not None:
+ local_ns.update({k: v for (k, v) in self.user_ns.items() if k not in self.user_ns_hidden.keys()})
+
+
+ # Restore original namespace so shell can shut down when we exit.
+ self.user_module = orig_user_module
+ self.user_ns = orig_user_ns
+ self.compile.flags = orig_compile_flags
+
+
+def embed(**kwargs):
+ """Call this to embed IPython at the current point in your program.
+
+ The first invocation of this will create an :class:`InteractiveShellEmbed`
+ instance and then call it. Consecutive calls just call the already
+ created instance.
+
+ If you don't want the kernel to initialize the namespace
+ from the scope of the surrounding function,
+ and/or you want to load full IPython configuration,
+ you probably want `IPython.start_ipython()` instead.
+
+ Here is a simple example::
+
+ from IPython import embed
+ a = 10
+ b = 20
+ embed(header='First time')
+ c = 30
+ d = 40
+ embed()
+
+ Full customization can be done by passing a :class:`Config` in as the
+ config argument.
+ """
+ config = kwargs.get('config')
+ header = kwargs.pop('header', u'')
+ compile_flags = kwargs.pop('compile_flags', None)
+ if config is None:
+ config = load_default_config()
+ config.InteractiveShellEmbed = config.TerminalInteractiveShell
+ kwargs['config'] = config
+ #save ps1/ps2 if defined
+ ps1 = None
+ ps2 = None
+ try:
+ ps1 = sys.ps1
+ ps2 = sys.ps2
+ except AttributeError:
+ pass
+ #save previous instance
+ saved_shell_instance = InteractiveShell._instance
+ if saved_shell_instance is not None:
+ cls = type(saved_shell_instance)
+ cls.clear_instance()
frame = sys._getframe(1)
shell = InteractiveShellEmbed.instance(_init_location_id='%s:%s' % (
frame.f_code.co_filename, frame.f_lineno), **kwargs)
shell(header=header, stack_depth=2, compile_flags=compile_flags,
_call_location_id='%s:%s' % (frame.f_code.co_filename, frame.f_lineno))
- InteractiveShellEmbed.clear_instance()
- #restore previous instance
- if saved_shell_instance is not None:
- cls = type(saved_shell_instance)
- cls.clear_instance()
- for subclass in cls._walk_mro():
- subclass._instance = saved_shell_instance
- if ps1 is not None:
- sys.ps1 = ps1
- sys.ps2 = ps2
+ InteractiveShellEmbed.clear_instance()
+ #restore previous instance
+ if saved_shell_instance is not None:
+ cls = type(saved_shell_instance)
+ cls.clear_instance()
+ for subclass in cls._walk_mro():
+ subclass._instance = saved_shell_instance
+ if ps1 is not None:
+ sys.ps1 = ps1
+ sys.ps2 = ps2
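The embed() helper and the EmbeddedMagics above combine into the following usage pattern, a minimal sketch that mirrors the example in the embed() docstring: call embed() inside a loop, then disable it from the prompt once it has served its purpose.

    # Minimal sketch: the loop body and variable names are illustrative only.
    from IPython import embed

    for i in range(3):
        # Locals such as `i` are visible inside the embedded shell because
        # mainloop() pulls the caller's locals/globals when none are passed.
        embed(header='iteration %d' % i)

From inside the shell, %kill_embedded -y marks this call location inactive so later iterations skip the prompt, and %exit_raise additionally makes the shell raise KillEmbeded on exit so the surrounding program can abort the loop.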
diff --git a/contrib/python/ipython/py2/IPython/terminal/interactiveshell.py b/contrib/python/ipython/py2/IPython/terminal/interactiveshell.py
index e80f8c1503..f67cc6b502 100644
--- a/contrib/python/ipython/py2/IPython/terminal/interactiveshell.py
+++ b/contrib/python/ipython/py2/IPython/terminal/interactiveshell.py
@@ -1,18 +1,18 @@
"""IPython terminal interface using prompt_toolkit"""
-from __future__ import print_function
-
-import os
-import sys
+from __future__ import print_function
+
+import os
+import sys
import warnings
from warnings import warn
-
-from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
+
+from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
from IPython.utils import io
from IPython.utils.py3compat import PY3, cast_unicode_py2, input, string_types
-from IPython.utils.terminal import toggle_set_term_title, set_term_title
-from IPython.utils.process import abbrev_cwd
+from IPython.utils.terminal import toggle_set_term_title, set_term_title
+from IPython.utils.process import abbrev_cwd
from traitlets import Bool, Unicode, Dict, Integer, observe, Instance, Type, default, Enum, Union
-
+
from prompt_toolkit.document import Document
from prompt_toolkit.enums import DEFAULT_BUFFER, EditingMode
from prompt_toolkit.filters import (HasFocus, Condition, IsDone)
@@ -22,7 +22,7 @@ from prompt_toolkit.interface import CommandLineInterface
from prompt_toolkit.key_binding.manager import KeyBindingManager
from prompt_toolkit.layout.processors import ConditionalProcessor, HighlightMatchingBracketProcessor
from prompt_toolkit.styles import PygmentsStyle, DynamicStyle
-
+
from pygments.styles import get_style_by_name
from pygments.style import Style
from pygments.token import Token
@@ -59,23 +59,23 @@ _style_overrides_linux = {
-def get_default_editor():
- try:
- ed = os.environ['EDITOR']
+def get_default_editor():
+ try:
+ ed = os.environ['EDITOR']
if not PY3:
- ed = ed.decode()
- return ed
- except KeyError:
- pass
- except UnicodeError:
- warn("$EDITOR environment variable is not pure ASCII. Using platform "
- "default editor.")
-
- if os.name == 'posix':
- return 'vi' # the only one guaranteed to be there!
- else:
- return 'notepad' # same in Windows!
-
+ ed = ed.decode()
+ return ed
+ except KeyError:
+ pass
+ except UnicodeError:
+ warn("$EDITOR environment variable is not pure ASCII. Using platform "
+ "default editor.")
+
+ if os.name == 'posix':
+ return 'vi' # the only one guaranteed to be there!
+ else:
+ return 'notepad' # same in Windows!
+
# conservatively check for tty
# overridden streams can result in things like:
# - sys.stdin = None
@@ -87,116 +87,116 @@ for _name in ('stdin', 'stdout', 'stderr'):
break
else:
_is_tty = True
-
-
+
+
_use_simple_prompt = ('IPY_TEST_SIMPLE_PROMPT' in os.environ) or (not _is_tty)
-
+
class TerminalInteractiveShell(InteractiveShell):
space_for_menu = Integer(6, help='Number of line at the bottom of the screen '
'to reserve for the completion menu'
).tag(config=True)
-
+
def _space_for_menu_changed(self, old, new):
self._update_layout()
-
+
pt_cli = None
debugger_history = None
_pt_app = None
-
+
simple_prompt = Bool(_use_simple_prompt,
help="""Use `raw_input` for the REPL, without completion and prompt colors.
-
+
Useful when controlling IPython as a subprocess, and piping STDIN/OUT/ERR. Known usage are:
IPython own testing machinery, and emacs inferior-shell integration through elpy.
-
+
This mode default to `True` if the `IPY_TEST_SIMPLE_PROMPT`
environment variable is set, or the current terminal is not a tty.
-
+
"""
).tag(config=True)
-
+
@property
def debugger_cls(self):
return Pdb if self.simple_prompt else TerminalPdb
-
+
confirm_exit = Bool(True,
help="""
Set to confirm when you try to exit IPython with an EOF (Control-D
in Unix, Control-Z/Enter in Windows). By typing 'exit' or 'quit',
you can force a direct exit without any confirmation.""",
).tag(config=True)
-
+
editing_mode = Unicode('emacs',
help="Shortcut style to use at the prompt. 'vi' or 'emacs'.",
).tag(config=True)
-
+
mouse_support = Bool(False,
help="Enable mouse support in the prompt"
).tag(config=True)
-
+
# We don't load the list of styles for the help string, because loading
# Pygments plugins takes time and can cause unexpected errors.
highlighting_style = Union([Unicode('legacy'), Type(klass=Style)],
help="""The name or class of a Pygments style to use for syntax
highlighting. To see available styles, run `pygmentize -L styles`."""
).tag(config=True)
-
+
@observe('highlighting_style')
@observe('colors')
def _highlighting_style_changed(self, change):
self.refresh_style()
-
+
def refresh_style(self):
self._style = self._make_style_from_name_or_cls(self.highlighting_style)
-
-
+
+
highlighting_style_overrides = Dict(
help="Override highlighting format for specific tokens"
).tag(config=True)
-
+
true_color = Bool(False,
help=("Use 24bit colors instead of 256 colors in prompt highlighting. "
"If your terminal supports true color, the following command "
"should print 'TRUECOLOR' in orange: "
"printf \"\\x1b[38;2;255;100;0mTRUECOLOR\\x1b[0m\\n\"")
).tag(config=True)
-
+
editor = Unicode(get_default_editor(),
help="Set the editor used by IPython (default to $EDITOR/vi/notepad)."
).tag(config=True)
-
+
prompts_class = Type(Prompts, help='Class used to generate Prompt token for prompt_toolkit').tag(config=True)
-
+
prompts = Instance(Prompts)
-
+
@default('prompts')
def _prompts_default(self):
return self.prompts_class(self)
-
+
@observe('prompts')
def _(self, change):
self._update_layout()
-
+
@default('displayhook_class')
def _displayhook_class_default(self):
return RichPromptDisplayHook
-
+
term_title = Bool(True,
help="Automatically set the terminal title"
).tag(config=True)
-
+
display_completions = Enum(('column', 'multicolumn','readlinelike'),
help= ( "Options for displaying tab completions, 'column', 'multicolumn', and "
"'readlinelike'. These options are for `prompt_toolkit`, see "
"`prompt_toolkit` documentation for more information."
),
default_value='multicolumn').tag(config=True)
-
+
highlight_matching_brackets = Bool(True,
help="Highlight matching brackets.",
).tag(config=True)
-
+
extra_open_editor_shortcuts = Bool(False,
help="Enable vi (v) or Emacs (C-X C-E) shortcuts to open an external editor. "
"This is in addition to the F2 binding, which is always enabled."
@@ -210,14 +210,14 @@ class TerminalInteractiveShell(InteractiveShell):
set_term_title('IPython: ' + abbrev_cwd())
else:
toggle_set_term_title(False)
-
+
def init_display_formatter(self):
super(TerminalInteractiveShell, self).init_display_formatter()
# terminal only supports plain text
self.display_formatter.active_types = ['text/plain']
# disable `_ipython_display_`
self.display_formatter.ipython_display_formatter.enabled = False
-
+
def init_prompt_toolkit_cli(self):
if self.simple_prompt:
# Fall back to plain non-interactive output for tests.
@@ -233,13 +233,13 @@ class TerminalInteractiveShell(InteractiveShell):
return isp.source_reset()
self.prompt_for_code = prompt
return
-
+
# Set up keyboard shortcuts
kbmanager = KeyBindingManager.for_prompt(
enable_open_in_editor=self.extra_open_editor_shortcuts,
)
register_ipython_shortcuts(kbmanager.registry, self)
-
+
# Pre-populate history from IPython's history database
history = InMemoryHistory()
last_cell = u""
@@ -250,12 +250,12 @@ class TerminalInteractiveShell(InteractiveShell):
if cell and (cell != last_cell):
history.append(cell)
last_cell = cell
-
+
self._style = self._make_style_from_name_or_cls(self.highlighting_style)
self.style = DynamicStyle(lambda: self._style)
-
+
editing_mode = getattr(EditingMode, self.editing_mode.upper())
-
+
def patch_stdout(**kwargs):
return self.pt_cli.patch_stdout_context(**kwargs)
@@ -274,13 +274,13 @@ class TerminalInteractiveShell(InteractiveShell):
self.pt_cli = CommandLineInterface(
self._pt_app, eventloop=self._eventloop,
output=create_output(true_color=self.true_color))
-
+
def _make_style_from_name_or_cls(self, name_or_cls):
"""
Small wrapper that make an IPython compatible style from a style name
-
+
We need that to add style for prompt ... etc.
- """
+ """
style_overrides = {}
if name_or_cls == 'legacy':
legacy = self.colors.lower()
@@ -336,9 +336,9 @@ class TerminalInteractiveShell(InteractiveShell):
style_overrides.update(self.highlighting_style_overrides)
style = PygmentsStyle.from_defaults(pygments_style_cls=style_cls,
style_dict=style_overrides)
-
+
return style
-
+
def _layout_options(self):
"""
Return the current layout option for the current Terminal InteractiveShell
@@ -350,7 +350,7 @@ class TerminalInteractiveShell(InteractiveShell):
'get_continuation_tokens':self.prompts.continuation_prompt_tokens,
'multiline':True,
'display_completions_in_columns': (self.display_completions == 'multicolumn'),
-
+
# Highlight matching brackets, but only when this setting is
# enabled, and only when the DEFAULT_BUFFER has the focus.
'extra_input_processors': [ConditionalProcessor(
@@ -358,20 +358,20 @@ class TerminalInteractiveShell(InteractiveShell):
filter=HasFocus(DEFAULT_BUFFER) & ~IsDone() &
Condition(lambda cli: self.highlight_matching_brackets))],
}
-
+
def _update_layout(self):
- """
+ """
Ask for a re computation of the application layout, if for example ,
some configuration options have changed.
"""
if self._pt_app:
self._pt_app.layout = create_prompt_layout(**self._layout_options())
-
+
def prompt_for_code(self):
document = self.pt_cli.run(
pre_run=self.pre_prompt, reset_current_buffer=True)
return document.text
-
+
def enable_win_unicode_console(self):
if sys.version_info >= (3, 6):
# Since PEP 528, Python uses the unicode APIs for the Windows
@@ -379,36 +379,36 @@ class TerminalInteractiveShell(InteractiveShell):
return
import win_unicode_console
-
+
if PY3:
win_unicode_console.enable()
- else:
+ else:
# https://github.com/ipython/ipython/issues/9768
from win_unicode_console.streams import (TextStreamWrapper,
stdout_text_transcoded, stderr_text_transcoded)
-
+
class LenientStrStreamWrapper(TextStreamWrapper):
def write(self, s):
if isinstance(s, bytes):
s = s.decode(self.encoding, 'replace')
-
+
self.base.write(s)
-
+
stdout_text_str = LenientStrStreamWrapper(stdout_text_transcoded)
stderr_text_str = LenientStrStreamWrapper(stderr_text_transcoded)
-
+
win_unicode_console.enable(stdout=stdout_text_str,
stderr=stderr_text_str)
-
+
def init_io(self):
if sys.platform not in {'win32', 'cli'}:
return
-
+
self.enable_win_unicode_console()
-
+
import colorama
colorama.init()
-
+
# For some reason we make these wrappers around stdout/stderr.
# For now, we need to reset them so all output gets coloured.
# https://github.com/ipython/ipython/issues/8669
@@ -418,37 +418,37 @@ class TerminalInteractiveShell(InteractiveShell):
warnings.simplefilter('ignore', DeprecationWarning)
io.stdout = io.IOStream(sys.stdout)
io.stderr = io.IOStream(sys.stderr)
-
+
def init_magics(self):
super(TerminalInteractiveShell, self).init_magics()
self.register_magics(TerminalMagics)
-
- def init_alias(self):
- # The parent class defines aliases that can be safely used with any
- # frontend.
- super(TerminalInteractiveShell, self).init_alias()
-
- # Now define aliases that only make sense on the terminal, because they
- # need direct access to the console in a way that we can't emulate in
- # GUI or web frontend
- if os.name == 'posix':
+
+ def init_alias(self):
+ # The parent class defines aliases that can be safely used with any
+ # frontend.
+ super(TerminalInteractiveShell, self).init_alias()
+
+ # Now define aliases that only make sense on the terminal, because they
+ # need direct access to the console in a way that we can't emulate in
+ # GUI or web frontend
+ if os.name == 'posix':
for cmd in ['clear', 'more', 'less', 'man']:
self.alias_manager.soft_define_alias(cmd, cmd)
-
-
+
+
def __init__(self, *args, **kwargs):
super(TerminalInteractiveShell, self).__init__(*args, **kwargs)
self.init_prompt_toolkit_cli()
self.init_term_title()
self.keep_running = True
-
+
self.debugger_history = InMemoryHistory()
-
+
def ask_exit(self):
self.keep_running = False
-
+
rl_next_input = None
-
+
def pre_prompt(self):
if self.rl_next_input:
# We can't set the buffer here, because it will be reset just after
@@ -464,34 +464,34 @@ class TerminalInteractiveShell(InteractiveShell):
# directly here.
set_doc()
self.rl_next_input = None
-
+
def interact(self, display_banner=DISPLAY_BANNER_DEPRECATED):
-
+
if display_banner is not DISPLAY_BANNER_DEPRECATED:
warn('interact `display_banner` argument is deprecated since IPython 5.0. Call `show_banner()` if needed.', DeprecationWarning, stacklevel=2)
-
+
self.keep_running = True
while self.keep_running:
print(self.separate_in, end='')
-
- try:
+
+ try:
code = self.prompt_for_code()
except EOFError:
if (not self.confirm_exit) \
or self.ask_yes_no('Do you really want to exit ([y]/n)?','y','n'):
self.ask_exit()
-
- else:
+
+ else:
if code:
self.run_cell(code, store_history=True)
-
+
def mainloop(self, display_banner=DISPLAY_BANNER_DEPRECATED):
# An extra layer of protection in case someone mashing Ctrl-C breaks
# out of our internal code.
if display_banner is not DISPLAY_BANNER_DEPRECATED:
warn('mainloop `display_banner` argument is deprecated since IPython 5.0. Call `show_banner()` if needed.', DeprecationWarning, stacklevel=2)
while True:
- try:
+ try:
self.interact()
break
except KeyboardInterrupt as e:
@@ -503,12 +503,12 @@ class TerminalInteractiveShell(InteractiveShell):
# https://github.com/ipython/ipython/pull/9867
if hasattr(self, '_eventloop'):
self._eventloop.stop()
-
+
_inputhook = None
def inputhook(self, context):
if self._inputhook is not None:
self._inputhook(context)
-
+
active_eventloop = None
def enable_gui(self, gui=None):
if gui:
@@ -516,24 +516,24 @@ class TerminalInteractiveShell(InteractiveShell):
get_inputhook_name_and_func(gui)
else:
self.active_eventloop = self._inputhook = None
-
+
# Run !system commands directly, not through pipes, so terminal programs
# work correctly.
system = InteractiveShell.system_raw
-
+
def auto_rewrite_input(self, cmd):
"""Overridden from the parent class to use fancy rewriting prompt"""
if not self.show_rewritten_input:
return
-
+
tokens = self.prompts.rewrite_prompt_tokens()
if self.pt_cli:
self.pt_cli.print_tokens(tokens)
print(cmd)
- else:
+ else:
prompt = ''.join(s for t, s in tokens)
print(prompt, cmd, sep='')
-
+
_prompts_before = None
def switch_doctest_mode(self, mode):
"""Switch prompts to classic for %doctest_mode"""
@@ -544,9 +544,9 @@ class TerminalInteractiveShell(InteractiveShell):
self.prompts = self._prompts_before
self._prompts_before = None
self._update_layout()
-
-
+
+
InteractiveShellABC.register(TerminalInteractiveShell)
-
+
if __name__ == '__main__':
TerminalInteractiveShell.instance().interact()
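
The hunks above are whitespace-only (this commit restores authorship annotations), but they pass through the core prompt loop: interact() repeatedly calls prompt_for_code() and run_cell(), and exit is gated by confirm_exit / ask_exit(). A minimal sketch of driving that loop from Python, assuming only the traits visible above (confirm_exit, highlight_matching_brackets) and the instance()/interact() API:

    from traitlets.config import Config
    from IPython.terminal.interactiveshell import TerminalInteractiveShell

    cfg = Config()
    cfg.TerminalInteractiveShell.confirm_exit = False            # skip the EOF confirmation
    cfg.TerminalInteractiveShell.highlight_matching_brackets = True

    shell = TerminalInteractiveShell.instance(config=cfg)
    shell.interact()   # loops over prompt_for_code()/run_cell() until ask_exit()
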
diff --git a/contrib/python/ipython/py2/IPython/terminal/ipapp.py b/contrib/python/ipython/py2/IPython/terminal/ipapp.py
index f8136ef0b6..6b25aaa3e3 100755
--- a/contrib/python/ipython/py2/IPython/terminal/ipapp.py
+++ b/contrib/python/ipython/py2/IPython/terminal/ipapp.py
@@ -1,190 +1,190 @@
-#!/usr/bin/env python
-# encoding: utf-8
-"""
-The :class:`~IPython.core.application.Application` object for the command
-line :command:`ipython` program.
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import absolute_import
-from __future__ import print_function
-
-import logging
-import os
-import sys
+#!/usr/bin/env python
+# encoding: utf-8
+"""
+The :class:`~IPython.core.application.Application` object for the command
+line :command:`ipython` program.
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import logging
+import os
+import sys
import warnings
-
-from traitlets.config.loader import Config
-from traitlets.config.application import boolean_flag, catch_config_error, Application
-from IPython.core import release
-from IPython.core import usage
-from IPython.core.completer import IPCompleter
-from IPython.core.crashhandler import CrashHandler
-from IPython.core.formatters import PlainTextFormatter
-from IPython.core.history import HistoryManager
-from IPython.core.application import (
- ProfileDir, BaseIPythonApplication, base_flags, base_aliases
-)
-from IPython.core.magics import ScriptMagics
-from IPython.core.shellapp import (
- InteractiveShellApp, shell_flags, shell_aliases
-)
-from IPython.extensions.storemagic import StoreMagics
+
+from traitlets.config.loader import Config
+from traitlets.config.application import boolean_flag, catch_config_error, Application
+from IPython.core import release
+from IPython.core import usage
+from IPython.core.completer import IPCompleter
+from IPython.core.crashhandler import CrashHandler
+from IPython.core.formatters import PlainTextFormatter
+from IPython.core.history import HistoryManager
+from IPython.core.application import (
+ ProfileDir, BaseIPythonApplication, base_flags, base_aliases
+)
+from IPython.core.magics import ScriptMagics
+from IPython.core.shellapp import (
+ InteractiveShellApp, shell_flags, shell_aliases
+)
+from IPython.extensions.storemagic import StoreMagics
from .interactiveshell import TerminalInteractiveShell
-from IPython.paths import get_ipython_dir
-from traitlets import (
+from IPython.paths import get_ipython_dir
+from traitlets import (
Bool, List, Dict, default, observe, Type
-)
-
-#-----------------------------------------------------------------------------
-# Globals, utilities and helpers
-#-----------------------------------------------------------------------------
-
-_examples = """
-ipython --matplotlib # enable matplotlib integration
-ipython --matplotlib=qt # enable matplotlib integration with qt4 backend
-
-ipython --log-level=DEBUG # set logging to DEBUG
-ipython --profile=foo # start with profile foo
-
-ipython profile create foo # create profile foo w/ default config files
-ipython help profile # show the help for the profile subcmd
-
-ipython locate # print the path to the IPython directory
-ipython locate profile foo # print the path to the directory for profile `foo`
-"""
-
-#-----------------------------------------------------------------------------
-# Crash handler for this application
-#-----------------------------------------------------------------------------
-
-class IPAppCrashHandler(CrashHandler):
- """sys.excepthook for IPython itself, leaves a detailed report on disk."""
-
- def __init__(self, app):
- contact_name = release.author
- contact_email = release.author_email
- bug_tracker = 'https://github.com/ipython/ipython/issues'
- super(IPAppCrashHandler,self).__init__(
- app, contact_name, contact_email, bug_tracker
- )
-
- def make_report(self,traceback):
- """Return a string containing a crash report."""
-
- sec_sep = self.section_sep
- # Start with parent report
- report = [super(IPAppCrashHandler, self).make_report(traceback)]
- # Add interactive-specific info we may have
- rpt_add = report.append
- try:
- rpt_add(sec_sep+"History of session input:")
- for line in self.app.shell.user_ns['_ih']:
- rpt_add(line)
- rpt_add('\n*** Last line of input (may not be in above history):\n')
- rpt_add(self.app.shell._last_input_line+'\n')
- except:
- pass
-
- return ''.join(report)
-
-#-----------------------------------------------------------------------------
-# Aliases and Flags
-#-----------------------------------------------------------------------------
-flags = dict(base_flags)
-flags.update(shell_flags)
-frontend_flags = {}
-addflag = lambda *args: frontend_flags.update(boolean_flag(*args))
-addflag('autoedit-syntax', 'TerminalInteractiveShell.autoedit_syntax',
- 'Turn on auto editing of files with syntax errors.',
- 'Turn off auto editing of files with syntax errors.'
-)
+)
+
+#-----------------------------------------------------------------------------
+# Globals, utilities and helpers
+#-----------------------------------------------------------------------------
+
+_examples = """
+ipython --matplotlib # enable matplotlib integration
+ipython --matplotlib=qt # enable matplotlib integration with qt4 backend
+
+ipython --log-level=DEBUG # set logging to DEBUG
+ipython --profile=foo # start with profile foo
+
+ipython profile create foo # create profile foo w/ default config files
+ipython help profile # show the help for the profile subcmd
+
+ipython locate # print the path to the IPython directory
+ipython locate profile foo # print the path to the directory for profile `foo`
+"""
+
+#-----------------------------------------------------------------------------
+# Crash handler for this application
+#-----------------------------------------------------------------------------
+
+class IPAppCrashHandler(CrashHandler):
+ """sys.excepthook for IPython itself, leaves a detailed report on disk."""
+
+ def __init__(self, app):
+ contact_name = release.author
+ contact_email = release.author_email
+ bug_tracker = 'https://github.com/ipython/ipython/issues'
+ super(IPAppCrashHandler,self).__init__(
+ app, contact_name, contact_email, bug_tracker
+ )
+
+ def make_report(self,traceback):
+ """Return a string containing a crash report."""
+
+ sec_sep = self.section_sep
+ # Start with parent report
+ report = [super(IPAppCrashHandler, self).make_report(traceback)]
+ # Add interactive-specific info we may have
+ rpt_add = report.append
+ try:
+ rpt_add(sec_sep+"History of session input:")
+ for line in self.app.shell.user_ns['_ih']:
+ rpt_add(line)
+ rpt_add('\n*** Last line of input (may not be in above history):\n')
+ rpt_add(self.app.shell._last_input_line+'\n')
+ except:
+ pass
+
+ return ''.join(report)
+
+#-----------------------------------------------------------------------------
+# Aliases and Flags
+#-----------------------------------------------------------------------------
+flags = dict(base_flags)
+flags.update(shell_flags)
+frontend_flags = {}
+addflag = lambda *args: frontend_flags.update(boolean_flag(*args))
+addflag('autoedit-syntax', 'TerminalInteractiveShell.autoedit_syntax',
+ 'Turn on auto editing of files with syntax errors.',
+ 'Turn off auto editing of files with syntax errors.'
+)
addflag('simple-prompt', 'TerminalInteractiveShell.simple_prompt',
"Force simple minimal prompt using `raw_input`",
"Use a rich interactive prompt with prompt_toolkit",
)
-addflag('banner', 'TerminalIPythonApp.display_banner',
- "Display a banner upon starting IPython.",
- "Don't display a banner upon starting IPython."
-)
-addflag('confirm-exit', 'TerminalInteractiveShell.confirm_exit',
- """Set to confirm when you try to exit IPython with an EOF (Control-D
- in Unix, Control-Z/Enter in Windows). By typing 'exit' or 'quit',
- you can force a direct exit without any confirmation.""",
- "Don't prompt the user when exiting."
-)
-addflag('term-title', 'TerminalInteractiveShell.term_title',
- "Enable auto setting the terminal title.",
- "Disable auto setting the terminal title."
-)
-classic_config = Config()
-classic_config.InteractiveShell.cache_size = 0
-classic_config.PlainTextFormatter.pprint = False
+addflag('banner', 'TerminalIPythonApp.display_banner',
+ "Display a banner upon starting IPython.",
+ "Don't display a banner upon starting IPython."
+)
+addflag('confirm-exit', 'TerminalInteractiveShell.confirm_exit',
+ """Set to confirm when you try to exit IPython with an EOF (Control-D
+ in Unix, Control-Z/Enter in Windows). By typing 'exit' or 'quit',
+ you can force a direct exit without any confirmation.""",
+ "Don't prompt the user when exiting."
+)
+addflag('term-title', 'TerminalInteractiveShell.term_title',
+ "Enable auto setting the terminal title.",
+ "Disable auto setting the terminal title."
+)
+classic_config = Config()
+classic_config.InteractiveShell.cache_size = 0
+classic_config.PlainTextFormatter.pprint = False
classic_config.TerminalInteractiveShell.prompts_class='IPython.terminal.prompts.ClassicPrompts'
-classic_config.InteractiveShell.separate_in = ''
-classic_config.InteractiveShell.separate_out = ''
-classic_config.InteractiveShell.separate_out2 = ''
-classic_config.InteractiveShell.colors = 'NoColor'
-classic_config.InteractiveShell.xmode = 'Plain'
-
-frontend_flags['classic']=(
- classic_config,
- "Gives IPython a similar feel to the classic Python prompt."
-)
-# # log doesn't make so much sense this way anymore
-# paa('--log','-l',
-# action='store_true', dest='InteractiveShell.logstart',
-# help="Start logging to the default log file (./ipython_log.py).")
-#
-# # quick is harder to implement
-frontend_flags['quick']=(
- {'TerminalIPythonApp' : {'quick' : True}},
- "Enable quick startup with no config files."
-)
-
-frontend_flags['i'] = (
- {'TerminalIPythonApp' : {'force_interact' : True}},
- """If running code from the command line, become interactive afterwards.
- It is often useful to follow this with `--` to treat remaining flags as
- script arguments.
- """
-)
-flags.update(frontend_flags)
-
-aliases = dict(base_aliases)
-aliases.update(shell_aliases)
-
-#-----------------------------------------------------------------------------
-# Main classes and functions
-#-----------------------------------------------------------------------------
-
-
-class LocateIPythonApp(BaseIPythonApplication):
- description = """print the path to the IPython dir"""
- subcommands = Dict(dict(
- profile=('IPython.core.profileapp.ProfileLocate',
- "print the path to an IPython profile directory",
- ),
- ))
- def start(self):
- if self.subapp is not None:
- return self.subapp.start()
- else:
- print(self.ipython_dir)
-
-
-class TerminalIPythonApp(BaseIPythonApplication, InteractiveShellApp):
- name = u'ipython'
- description = usage.cl_usage
- crash_handler_class = IPAppCrashHandler
- examples = _examples
-
- flags = Dict(flags)
- aliases = Dict(aliases)
- classes = List()
+classic_config.InteractiveShell.separate_in = ''
+classic_config.InteractiveShell.separate_out = ''
+classic_config.InteractiveShell.separate_out2 = ''
+classic_config.InteractiveShell.colors = 'NoColor'
+classic_config.InteractiveShell.xmode = 'Plain'
+
+frontend_flags['classic']=(
+ classic_config,
+ "Gives IPython a similar feel to the classic Python prompt."
+)
+# # log doesn't make so much sense this way anymore
+# paa('--log','-l',
+# action='store_true', dest='InteractiveShell.logstart',
+# help="Start logging to the default log file (./ipython_log.py).")
+#
+# # quick is harder to implement
+frontend_flags['quick']=(
+ {'TerminalIPythonApp' : {'quick' : True}},
+ "Enable quick startup with no config files."
+)
+
+frontend_flags['i'] = (
+ {'TerminalIPythonApp' : {'force_interact' : True}},
+ """If running code from the command line, become interactive afterwards.
+ It is often useful to follow this with `--` to treat remaining flags as
+ script arguments.
+ """
+)
+flags.update(frontend_flags)
+
+aliases = dict(base_aliases)
+aliases.update(shell_aliases)
+
+#-----------------------------------------------------------------------------
+# Main classes and functions
+#-----------------------------------------------------------------------------
+
+
+class LocateIPythonApp(BaseIPythonApplication):
+ description = """print the path to the IPython dir"""
+ subcommands = Dict(dict(
+ profile=('IPython.core.profileapp.ProfileLocate',
+ "print the path to an IPython profile directory",
+ ),
+ ))
+ def start(self):
+ if self.subapp is not None:
+ return self.subapp.start()
+ else:
+ print(self.ipython_dir)
+
+
+class TerminalIPythonApp(BaseIPythonApplication, InteractiveShellApp):
+ name = u'ipython'
+ description = usage.cl_usage
+ crash_handler_class = IPAppCrashHandler
+ examples = _examples
+
+ flags = Dict(flags)
+ aliases = Dict(aliases)
+ classes = List()
interactive_shell_class = Type(
klass=object, # use default_value otherwise which only allow subclasses.
@@ -193,185 +193,185 @@ class TerminalIPythonApp(BaseIPythonApplication, InteractiveShellApp):
).tag(config=True)
@default('classes')
- def _classes_default(self):
- """This has to be in a method, for TerminalIPythonApp to be available."""
- return [
- InteractiveShellApp, # ShellApp comes before TerminalApp, because
- self.__class__, # it will also affect subclasses (e.g. QtConsole)
- TerminalInteractiveShell,
- HistoryManager,
- ProfileDir,
- PlainTextFormatter,
- IPCompleter,
- ScriptMagics,
- StoreMagics,
- ]
-
- deprecated_subcommands = dict(
- qtconsole=('qtconsole.qtconsoleapp.JupyterQtConsoleApp',
- """DEPRECATED, Will be removed in IPython 6.0 : Launch the Jupyter Qt Console."""
- ),
- notebook=('notebook.notebookapp.NotebookApp',
- """DEPRECATED, Will be removed in IPython 6.0 : Launch the Jupyter HTML Notebook Server."""
- ),
- console=('jupyter_console.app.ZMQTerminalIPythonApp',
- """DEPRECATED, Will be removed in IPython 6.0 : Launch the Jupyter terminal-based Console."""
- ),
- nbconvert=('nbconvert.nbconvertapp.NbConvertApp',
- "DEPRECATED, Will be removed in IPython 6.0 : Convert notebooks to/from other formats."
- ),
- trust=('nbformat.sign.TrustNotebookApp',
- "DEPRECATED, Will be removed in IPython 6.0 : Sign notebooks to trust their potentially unsafe contents at load."
- ),
- kernelspec=('jupyter_client.kernelspecapp.KernelSpecApp',
- "DEPRECATED, Will be removed in IPython 6.0 : Manage Jupyter kernel specifications."
- ),
- )
- subcommands = dict(
- profile = ("IPython.core.profileapp.ProfileApp",
- "Create and manage IPython profiles."
- ),
- kernel = ("ipykernel.kernelapp.IPKernelApp",
- "Start a kernel without an attached frontend."
- ),
- locate=('IPython.terminal.ipapp.LocateIPythonApp',
- LocateIPythonApp.description
- ),
- history=('IPython.core.historyapp.HistoryApp',
- "Manage the IPython history database."
- ),
- )
- deprecated_subcommands['install-nbextension'] = (
- "notebook.nbextensions.InstallNBExtensionApp",
- "DEPRECATED, Will be removed in IPython 6.0 : Install Jupyter notebook extension files"
- )
- subcommands.update(deprecated_subcommands)
-
- # *do* autocreate requested profile, but don't create the config file.
- auto_create=Bool(True)
- # configurables
+ def _classes_default(self):
+ """This has to be in a method, for TerminalIPythonApp to be available."""
+ return [
+ InteractiveShellApp, # ShellApp comes before TerminalApp, because
+ self.__class__, # it will also affect subclasses (e.g. QtConsole)
+ TerminalInteractiveShell,
+ HistoryManager,
+ ProfileDir,
+ PlainTextFormatter,
+ IPCompleter,
+ ScriptMagics,
+ StoreMagics,
+ ]
+
+ deprecated_subcommands = dict(
+ qtconsole=('qtconsole.qtconsoleapp.JupyterQtConsoleApp',
+ """DEPRECATED, Will be removed in IPython 6.0 : Launch the Jupyter Qt Console."""
+ ),
+ notebook=('notebook.notebookapp.NotebookApp',
+ """DEPRECATED, Will be removed in IPython 6.0 : Launch the Jupyter HTML Notebook Server."""
+ ),
+ console=('jupyter_console.app.ZMQTerminalIPythonApp',
+ """DEPRECATED, Will be removed in IPython 6.0 : Launch the Jupyter terminal-based Console."""
+ ),
+ nbconvert=('nbconvert.nbconvertapp.NbConvertApp',
+ "DEPRECATED, Will be removed in IPython 6.0 : Convert notebooks to/from other formats."
+ ),
+ trust=('nbformat.sign.TrustNotebookApp',
+ "DEPRECATED, Will be removed in IPython 6.0 : Sign notebooks to trust their potentially unsafe contents at load."
+ ),
+ kernelspec=('jupyter_client.kernelspecapp.KernelSpecApp',
+ "DEPRECATED, Will be removed in IPython 6.0 : Manage Jupyter kernel specifications."
+ ),
+ )
+ subcommands = dict(
+ profile = ("IPython.core.profileapp.ProfileApp",
+ "Create and manage IPython profiles."
+ ),
+ kernel = ("ipykernel.kernelapp.IPKernelApp",
+ "Start a kernel without an attached frontend."
+ ),
+ locate=('IPython.terminal.ipapp.LocateIPythonApp',
+ LocateIPythonApp.description
+ ),
+ history=('IPython.core.historyapp.HistoryApp',
+ "Manage the IPython history database."
+ ),
+ )
+ deprecated_subcommands['install-nbextension'] = (
+ "notebook.nbextensions.InstallNBExtensionApp",
+ "DEPRECATED, Will be removed in IPython 6.0 : Install Jupyter notebook extension files"
+ )
+ subcommands.update(deprecated_subcommands)
+
+ # *do* autocreate requested profile, but don't create the config file.
+ auto_create=Bool(True)
+ # configurables
quick = Bool(False,
- help="""Start IPython quickly by skipping the loading of config files."""
+ help="""Start IPython quickly by skipping the loading of config files."""
).tag(config=True)
@observe('quick')
def _quick_changed(self, change):
if change['new']:
- self.load_config_file = lambda *a, **kw: None
-
+ self.load_config_file = lambda *a, **kw: None
+
display_banner = Bool(True,
- help="Whether to display a banner upon starting IPython."
+ help="Whether to display a banner upon starting IPython."
).tag(config=True)
-
-    # if there is code or files to run from the cmd line, don't interact
- # unless the --i flag (App.force_interact) is true.
+
+    # if there is code or files to run from the cmd line, don't interact
+ # unless the --i flag (App.force_interact) is true.
force_interact = Bool(False,
- help="""If a command or file is given via the command-line,
- e.g. 'ipython foo.py', start an interactive shell after executing the
- file or command."""
+ help="""If a command or file is given via the command-line,
+ e.g. 'ipython foo.py', start an interactive shell after executing the
+ file or command."""
).tag(config=True)
@observe('force_interact')
def _force_interact_changed(self, change):
if change['new']:
- self.interact = True
-
+ self.interact = True
+
@observe('file_to_run', 'code_to_run', 'module_to_run')
def _file_to_run_changed(self, change):
new = change['new']
- if new:
- self.something_to_run = True
- if new and not self.force_interact:
- self.interact = False
-
- # internal, not-configurable
- something_to_run=Bool(False)
-
- def parse_command_line(self, argv=None):
- """override to allow old '-pylab' flag with deprecation warning"""
-
- argv = sys.argv[1:] if argv is None else argv
-
- if '-pylab' in argv:
- # deprecated `-pylab` given,
- # warn and transform into current syntax
- argv = argv[:] # copy, don't clobber
- idx = argv.index('-pylab')
+ if new:
+ self.something_to_run = True
+ if new and not self.force_interact:
+ self.interact = False
+
+ # internal, not-configurable
+ something_to_run=Bool(False)
+
+ def parse_command_line(self, argv=None):
+ """override to allow old '-pylab' flag with deprecation warning"""
+
+ argv = sys.argv[1:] if argv is None else argv
+
+ if '-pylab' in argv:
+ # deprecated `-pylab` given,
+ # warn and transform into current syntax
+ argv = argv[:] # copy, don't clobber
+ idx = argv.index('-pylab')
warnings.warn("`-pylab` flag has been deprecated.\n"
- " Use `--matplotlib <backend>` and import pylab manually.")
- argv[idx] = '--pylab'
-
- return super(TerminalIPythonApp, self).parse_command_line(argv)
-
- @catch_config_error
- def initialize(self, argv=None):
- """Do actions after construct, but before starting the app."""
- super(TerminalIPythonApp, self).initialize(argv)
- if self.subapp is not None:
- # don't bother initializing further, starting subapp
- return
- # print self.extra_args
- if self.extra_args and not self.something_to_run:
- self.file_to_run = self.extra_args[0]
- self.init_path()
- # create the shell
- self.init_shell()
- # and draw the banner
- self.init_banner()
- # Now a variety of things that happen after the banner is printed.
- self.init_gui_pylab()
- self.init_extensions()
- self.init_code()
-
- def init_shell(self):
- """initialize the InteractiveShell instance"""
- # Create an InteractiveShell instance.
- # shell.display_banner should always be False for the terminal
- # based app, because we call shell.show_banner() by hand below
- # so the banner shows *before* all extension loading stuff.
+ " Use `--matplotlib <backend>` and import pylab manually.")
+ argv[idx] = '--pylab'
+
+ return super(TerminalIPythonApp, self).parse_command_line(argv)
+
+ @catch_config_error
+ def initialize(self, argv=None):
+ """Do actions after construct, but before starting the app."""
+ super(TerminalIPythonApp, self).initialize(argv)
+ if self.subapp is not None:
+ # don't bother initializing further, starting subapp
+ return
+ # print self.extra_args
+ if self.extra_args and not self.something_to_run:
+ self.file_to_run = self.extra_args[0]
+ self.init_path()
+ # create the shell
+ self.init_shell()
+ # and draw the banner
+ self.init_banner()
+ # Now a variety of things that happen after the banner is printed.
+ self.init_gui_pylab()
+ self.init_extensions()
+ self.init_code()
+
+ def init_shell(self):
+ """initialize the InteractiveShell instance"""
+ # Create an InteractiveShell instance.
+ # shell.display_banner should always be False for the terminal
+ # based app, because we call shell.show_banner() by hand below
+ # so the banner shows *before* all extension loading stuff.
self.shell = self.interactive_shell_class.instance(parent=self,
profile_dir=self.profile_dir,
- ipython_dir=self.ipython_dir, user_ns=self.user_ns)
- self.shell.configurables.append(self)
-
- def init_banner(self):
- """optionally display the banner"""
- if self.display_banner and self.interact:
- self.shell.show_banner()
- # Make sure there is a space below the banner.
- if self.log_level <= logging.INFO: print()
-
- def _pylab_changed(self, name, old, new):
- """Replace --pylab='inline' with --pylab='auto'"""
- if new == 'inline':
+ ipython_dir=self.ipython_dir, user_ns=self.user_ns)
+ self.shell.configurables.append(self)
+
+ def init_banner(self):
+ """optionally display the banner"""
+ if self.display_banner and self.interact:
+ self.shell.show_banner()
+ # Make sure there is a space below the banner.
+ if self.log_level <= logging.INFO: print()
+
+ def _pylab_changed(self, name, old, new):
+ """Replace --pylab='inline' with --pylab='auto'"""
+ if new == 'inline':
warnings.warn("'inline' not available as pylab backend, "
- "using 'auto' instead.")
- self.pylab = 'auto'
-
- def start(self):
- if self.subapp is not None:
- return self.subapp.start()
-        # perform any pre-exec steps:
- if self.interact:
- self.log.debug("Starting IPython's mainloop...")
- self.shell.mainloop()
- else:
- self.log.debug("IPython not interactive...")
-
-def load_default_config(ipython_dir=None):
- """Load the default config file from the default ipython_dir.
-
- This is useful for embedded shells.
- """
- if ipython_dir is None:
- ipython_dir = get_ipython_dir()
-
- profile_dir = os.path.join(ipython_dir, 'profile_default')
+ "using 'auto' instead.")
+ self.pylab = 'auto'
+
+ def start(self):
+ if self.subapp is not None:
+ return self.subapp.start()
+        # perform any pre-exec steps:
+ if self.interact:
+ self.log.debug("Starting IPython's mainloop...")
+ self.shell.mainloop()
+ else:
+ self.log.debug("IPython not interactive...")
+
+def load_default_config(ipython_dir=None):
+ """Load the default config file from the default ipython_dir.
+
+ This is useful for embedded shells.
+ """
+ if ipython_dir is None:
+ ipython_dir = get_ipython_dir()
+
+ profile_dir = os.path.join(ipython_dir, 'profile_default')
app = TerminalIPythonApp()
app.config_file_paths.append(profile_dir)
app.load_config_file()
return app.config
-
-launch_new_instance = TerminalIPythonApp.launch_instance
-
-
-if __name__ == '__main__':
- launch_new_instance()
+
+launch_new_instance = TerminalIPythonApp.launch_instance
+
+
+if __name__ == '__main__':
+ launch_new_instance()
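
For reference, launch_new_instance above is what the `ipython` entry point calls; a short sketch of the programmatic equivalent of `ipython --classic --no-banner`, using only names defined in this file (TerminalIPythonApp and the flags assembled from frontend_flags):

    from IPython.terminal.ipapp import TerminalIPythonApp

    app = TerminalIPythonApp.instance()
    app.initialize(['--classic', '--no-banner'])   # flags built above via boolean_flag/frontend_flags
    app.start()                                    # enters shell.mainloop() when interactive
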
diff --git a/contrib/python/ipython/py2/IPython/testing/__init__.py b/contrib/python/ipython/py2/IPython/testing/__init__.py
index 6011b39f77..165f503169 100644
--- a/contrib/python/ipython/py2/IPython/testing/__init__.py
+++ b/contrib/python/ipython/py2/IPython/testing/__init__.py
@@ -1,38 +1,38 @@
-"""Testing support (tools to test IPython itself).
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2009-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Functions
-#-----------------------------------------------------------------------------
-
-# User-level entry point for testing
-def test(**kwargs):
- """Run the entire IPython test suite.
-
- Any of the options for run_iptestall() may be passed as keyword arguments.
-
- For example::
-
- IPython.test(testgroups=['lib', 'config', 'utils'], fast=2)
-
- will run those three sections of the test suite, using two processes.
- """
-
- # Do the import internally, so that this function doesn't increase total
- # import time
- from .iptestcontroller import run_iptestall, default_options
- options = default_options()
- for name, val in kwargs.items():
- setattr(options, name, val)
- run_iptestall(options)
-
-# So nose doesn't try to run this as a test itself and we end up with an
-# infinite test loop
-test.__test__ = False
+"""Testing support (tools to test IPython itself).
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2009-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Functions
+#-----------------------------------------------------------------------------
+
+# User-level entry point for testing
+def test(**kwargs):
+ """Run the entire IPython test suite.
+
+ Any of the options for run_iptestall() may be passed as keyword arguments.
+
+ For example::
+
+ IPython.test(testgroups=['lib', 'config', 'utils'], fast=2)
+
+ will run those three sections of the test suite, using two processes.
+ """
+
+ # Do the import internally, so that this function doesn't increase total
+ # import time
+ from .iptestcontroller import run_iptestall, default_options
+ options = default_options()
+ for name, val in kwargs.items():
+ setattr(options, name, val)
+ run_iptestall(options)
+
+# So nose doesn't try to run this as a test itself and we end up with an
+# infinite test loop
+test.__test__ = False
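
test() above copies its keyword arguments onto the object returned by default_options() before handing it to run_iptestall(); a sketch of the equivalent long form, using only the names imported inside test() and the keywords shown in its docstring:

    from IPython.testing.iptestcontroller import run_iptestall, default_options

    options = default_options()
    options.testgroups = ['utils']   # same effect as IPython.test(testgroups=['utils'])
    options.fast = 1                 # number of test processes, per the docstring above
    run_iptestall(options)
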
diff --git a/contrib/python/ipython/py2/IPython/testing/__main__.py b/contrib/python/ipython/py2/IPython/testing/__main__.py
index 179ec6f699..4b0bb8ba9c 100644
--- a/contrib/python/ipython/py2/IPython/testing/__main__.py
+++ b/contrib/python/ipython/py2/IPython/testing/__main__.py
@@ -1,3 +1,3 @@
-if __name__ == '__main__':
- from IPython.testing import iptestcontroller
- iptestcontroller.main()
+if __name__ == '__main__':
+ from IPython.testing import iptestcontroller
+ iptestcontroller.main()
diff --git a/contrib/python/ipython/py2/IPython/testing/decorators.py b/contrib/python/ipython/py2/IPython/testing/decorators.py
index 2fe72f6cae..c9807ce70e 100644
--- a/contrib/python/ipython/py2/IPython/testing/decorators.py
+++ b/contrib/python/ipython/py2/IPython/testing/decorators.py
@@ -1,384 +1,384 @@
-# -*- coding: utf-8 -*-
-"""Decorators for labeling test objects.
-
-Decorators that merely return a modified version of the original function
-object are straightforward. Decorators that return a new function object need
-to use nose.tools.make_decorator(original_function)(decorator) in returning the
-decorator, in order to preserve metadata such as function name, setup and
-teardown functions and so on - see nose.tools for more information.
-
-This module provides a set of useful decorators meant to be ready to use in
-your own tests. See the bottom of the file for the ready-made ones, and if you
-find yourself writing a new one that may be of generic use, add it here.
-
-Included decorators:
-
-
-Lightweight testing that remains unittest-compatible.
-
-- An @as_unittest decorator can be used to tag any normal parameter-less
- function as a unittest TestCase. Then, both nose and normal unittest will
- recognize it as such. This will make it easier to migrate away from Nose if
- we ever need/want to while maintaining very lightweight tests.
-
-NOTE: This file contains IPython-specific decorators. Using the machinery in
-IPython.external.decorators, we import either numpy.testing.decorators if numpy is
-available, OR use equivalent code in IPython.external._decorators, which
-we've copied verbatim from numpy.
-
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import sys
-import os
-import tempfile
-import unittest
-import warnings
-
-from decorator import decorator
-
-# Expose the unittest-driven decorators
-from .ipunittest import ipdoctest, ipdocstring
-
-# Grab the numpy-specific decorators which we keep in a file that we
-# occasionally update from upstream: decorators.py is a copy of
-# numpy.testing.decorators, we expose all of it here.
-from IPython.external.decorators import *
-
-# For onlyif_cmd_exists decorator
+# -*- coding: utf-8 -*-
+"""Decorators for labeling test objects.
+
+Decorators that merely return a modified version of the original function
+object are straightforward. Decorators that return a new function object need
+to use nose.tools.make_decorator(original_function)(decorator) in returning the
+decorator, in order to preserve metadata such as function name, setup and
+teardown functions and so on - see nose.tools for more information.
+
+This module provides a set of useful decorators meant to be ready to use in
+your own tests. See the bottom of the file for the ready-made ones, and if you
+find yourself writing a new one that may be of generic use, add it here.
+
+Included decorators:
+
+
+Lightweight testing that remains unittest-compatible.
+
+- An @as_unittest decorator can be used to tag any normal parameter-less
+ function as a unittest TestCase. Then, both nose and normal unittest will
+ recognize it as such. This will make it easier to migrate away from Nose if
+ we ever need/want to while maintaining very lightweight tests.
+
+NOTE: This file contains IPython-specific decorators. Using the machinery in
+IPython.external.decorators, we import either numpy.testing.decorators if numpy is
+available, OR use equivalent code in IPython.external._decorators, which
+we've copied verbatim from numpy.
+
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import sys
+import os
+import tempfile
+import unittest
+import warnings
+
+from decorator import decorator
+
+# Expose the unittest-driven decorators
+from .ipunittest import ipdoctest, ipdocstring
+
+# Grab the numpy-specific decorators which we keep in a file that we
+# occasionally update from upstream: decorators.py is a copy of
+# numpy.testing.decorators, we expose all of it here.
+from IPython.external.decorators import *
+
+# For onlyif_cmd_exists decorator
from IPython.utils.py3compat import string_types, which, PY2, PY3, PYPY
-
-#-----------------------------------------------------------------------------
-# Classes and functions
-#-----------------------------------------------------------------------------
-
-# Simple example of the basic idea
-def as_unittest(func):
- """Decorator to make a simple function into a normal test via unittest."""
- class Tester(unittest.TestCase):
- def test(self):
- func()
-
- Tester.__name__ = func.__name__
-
- return Tester
-
-# Utility functions
-
+
+#-----------------------------------------------------------------------------
+# Classes and functions
+#-----------------------------------------------------------------------------
+
+# Simple example of the basic idea
+def as_unittest(func):
+ """Decorator to make a simple function into a normal test via unittest."""
+ class Tester(unittest.TestCase):
+ def test(self):
+ func()
+
+ Tester.__name__ = func.__name__
+
+ return Tester
+
+# Utility functions
+
def apply_wrapper(wrapper, func):
- """Apply a wrapper to a function for decoration.
-
- This mixes Michele Simionato's decorator tool with nose's make_decorator,
- to apply a wrapper in a decorator so that all nose attributes, as well as
- function signature and other properties, survive the decoration cleanly.
- This will ensure that wrapped functions can still be well introspected via
- IPython, for example.
- """
+ """Apply a wrapper to a function for decoration.
+
+ This mixes Michele Simionato's decorator tool with nose's make_decorator,
+ to apply a wrapper in a decorator so that all nose attributes, as well as
+ function signature and other properties, survive the decoration cleanly.
+ This will ensure that wrapped functions can still be well introspected via
+ IPython, for example.
+ """
warnings.warn("The function `apply_wrapper` is deprecated since IPython 4.0",
DeprecationWarning, stacklevel=2)
- import nose.tools
-
- return decorator(wrapper,nose.tools.make_decorator(func)(wrapper))
-
-
+ import nose.tools
+
+ return decorator(wrapper,nose.tools.make_decorator(func)(wrapper))
+
+
def make_label_dec(label, ds=None):
- """Factory function to create a decorator that applies one or more labels.
-
- Parameters
- ----------
- label : string or sequence
- One or more labels that will be applied by the decorator to the functions
- it decorates. Labels are attributes of the decorated function with their
- value set to True.
-
- ds : string
- An optional docstring for the resulting decorator. If not given, a
- default docstring is auto-generated.
-
- Returns
- -------
- A decorator.
-
- Examples
- --------
-
- A simple labeling decorator:
-
- >>> slow = make_label_dec('slow')
- >>> slow.__doc__
- "Labels a test as 'slow'."
-
- And one that uses multiple labels and a custom docstring:
-
- >>> rare = make_label_dec(['slow','hard'],
- ... "Mix labels 'slow' and 'hard' for rare tests.")
- >>> rare.__doc__
- "Mix labels 'slow' and 'hard' for rare tests."
-
- Now, let's test using this one:
- >>> @rare
- ... def f(): pass
- ...
- >>>
- >>> f.slow
- True
- >>> f.hard
- True
- """
-
+ """Factory function to create a decorator that applies one or more labels.
+
+ Parameters
+ ----------
+ label : string or sequence
+ One or more labels that will be applied by the decorator to the functions
+ it decorates. Labels are attributes of the decorated function with their
+ value set to True.
+
+ ds : string
+ An optional docstring for the resulting decorator. If not given, a
+ default docstring is auto-generated.
+
+ Returns
+ -------
+ A decorator.
+
+ Examples
+ --------
+
+ A simple labeling decorator:
+
+ >>> slow = make_label_dec('slow')
+ >>> slow.__doc__
+ "Labels a test as 'slow'."
+
+ And one that uses multiple labels and a custom docstring:
+
+ >>> rare = make_label_dec(['slow','hard'],
+ ... "Mix labels 'slow' and 'hard' for rare tests.")
+ >>> rare.__doc__
+ "Mix labels 'slow' and 'hard' for rare tests."
+
+ Now, let's test using this one:
+ >>> @rare
+ ... def f(): pass
+ ...
+ >>>
+ >>> f.slow
+ True
+ >>> f.hard
+ True
+ """
+
warnings.warn("The function `make_label_dec` is deprecated since IPython 4.0",
DeprecationWarning, stacklevel=2)
- if isinstance(label, string_types):
- labels = [label]
- else:
- labels = label
-
- # Validate that the given label(s) are OK for use in setattr() by doing a
- # dry run on a dummy function.
- tmp = lambda : None
- for label in labels:
- setattr(tmp,label,True)
-
- # This is the actual decorator we'll return
- def decor(f):
- for label in labels:
- setattr(f,label,True)
- return f
-
- # Apply the user's docstring, or autogenerate a basic one
- if ds is None:
- ds = "Labels a test as %r." % label
- decor.__doc__ = ds
-
- return decor
-
-
-# Inspired by numpy's skipif, but uses the full apply_wrapper utility to
-# preserve function metadata better and allows the skip condition to be a
-# callable.
-def skipif(skip_condition, msg=None):
- ''' Make function raise SkipTest exception if skip_condition is true
-
- Parameters
- ----------
-
- skip_condition : bool or callable
- Flag to determine whether to skip test. If the condition is a
- callable, it is used at runtime to dynamically make the decision. This
- is useful for tests that may require costly imports, to delay the cost
- until the test suite is actually executed.
- msg : string
- Message to give on raising a SkipTest exception.
-
- Returns
- -------
- decorator : function
- Decorator, which, when applied to a function, causes SkipTest
- to be raised when the skip_condition was True, and the function
- to be called normally otherwise.
-
- Notes
- -----
- You will see from the code that we had to further decorate the
- decorator with the nose.tools.make_decorator function in order to
- transmit function name, and various other metadata.
- '''
-
- def skip_decorator(f):
- # Local import to avoid a hard nose dependency and only incur the
- # import time overhead at actual test-time.
- import nose
-
- # Allow for both boolean or callable skip conditions.
- if callable(skip_condition):
- skip_val = skip_condition
- else:
- skip_val = lambda : skip_condition
-
- def get_msg(func,msg=None):
- """Skip message with information about function being skipped."""
- if msg is None: out = 'Test skipped due to test condition.'
- else: out = msg
- return "Skipping test: %s. %s" % (func.__name__,out)
-
- # We need to define *two* skippers because Python doesn't allow both
- # return with value and yield inside the same function.
- def skipper_func(*args, **kwargs):
- """Skipper for normal test functions."""
- if skip_val():
- raise nose.SkipTest(get_msg(f,msg))
- else:
- return f(*args, **kwargs)
-
- def skipper_gen(*args, **kwargs):
- """Skipper for test generators."""
- if skip_val():
- raise nose.SkipTest(get_msg(f,msg))
- else:
- for x in f(*args, **kwargs):
- yield x
-
- # Choose the right skipper to use when building the actual generator.
- if nose.util.isgenerator(f):
- skipper = skipper_gen
- else:
- skipper = skipper_func
-
- return nose.tools.make_decorator(f)(skipper)
-
- return skip_decorator
-
-# A version with the condition set to true, common case just to attach a message
-# to a skip decorator
-def skip(msg=None):
- """Decorator factory - mark a test function for skipping from test suite.
-
- Parameters
- ----------
- msg : string
- Optional message to be added.
-
- Returns
- -------
- decorator : function
- Decorator, which, when applied to a function, causes SkipTest
- to be raised, with the optional message added.
- """
-
- return skipif(True,msg)
-
-
-def onlyif(condition, msg):
- """The reverse from skipif, see skipif for details."""
-
- if callable(condition):
- skip_condition = lambda : not condition()
- else:
- skip_condition = lambda : not condition
-
- return skipif(skip_condition, msg)
-
-#-----------------------------------------------------------------------------
-# Utility functions for decorators
-def module_not_available(module):
- """Can module be imported? Returns true if module does NOT import.
-
- This is used to make a decorator to skip tests that require module to be
- available, but delay the 'import numpy' to test execution time.
- """
- try:
- mod = __import__(module)
- mod_not_avail = False
- except ImportError:
- mod_not_avail = True
-
- return mod_not_avail
-
-
-def decorated_dummy(dec, name):
- """Return a dummy function decorated with dec, with the given name.
-
- Examples
- --------
- import IPython.testing.decorators as dec
- setup = dec.decorated_dummy(dec.skip_if_no_x11, __name__)
- """
+ if isinstance(label, string_types):
+ labels = [label]
+ else:
+ labels = label
+
+ # Validate that the given label(s) are OK for use in setattr() by doing a
+ # dry run on a dummy function.
+ tmp = lambda : None
+ for label in labels:
+ setattr(tmp,label,True)
+
+ # This is the actual decorator we'll return
+ def decor(f):
+ for label in labels:
+ setattr(f,label,True)
+ return f
+
+ # Apply the user's docstring, or autogenerate a basic one
+ if ds is None:
+ ds = "Labels a test as %r." % label
+ decor.__doc__ = ds
+
+ return decor
+
+
+# Inspired by numpy's skipif, but uses the full apply_wrapper utility to
+# preserve function metadata better and allows the skip condition to be a
+# callable.
+def skipif(skip_condition, msg=None):
+ ''' Make function raise SkipTest exception if skip_condition is true
+
+ Parameters
+ ----------
+
+ skip_condition : bool or callable
+ Flag to determine whether to skip test. If the condition is a
+ callable, it is used at runtime to dynamically make the decision. This
+ is useful for tests that may require costly imports, to delay the cost
+ until the test suite is actually executed.
+ msg : string
+ Message to give on raising a SkipTest exception.
+
+ Returns
+ -------
+ decorator : function
+ Decorator, which, when applied to a function, causes SkipTest
+ to be raised when the skip_condition was True, and the function
+ to be called normally otherwise.
+
+ Notes
+ -----
+ You will see from the code that we had to further decorate the
+ decorator with the nose.tools.make_decorator function in order to
+ transmit function name, and various other metadata.
+ '''
+
+ def skip_decorator(f):
+ # Local import to avoid a hard nose dependency and only incur the
+ # import time overhead at actual test-time.
+ import nose
+
+ # Allow for both boolean or callable skip conditions.
+ if callable(skip_condition):
+ skip_val = skip_condition
+ else:
+ skip_val = lambda : skip_condition
+
+ def get_msg(func,msg=None):
+ """Skip message with information about function being skipped."""
+ if msg is None: out = 'Test skipped due to test condition.'
+ else: out = msg
+ return "Skipping test: %s. %s" % (func.__name__,out)
+
+ # We need to define *two* skippers because Python doesn't allow both
+ # return with value and yield inside the same function.
+ def skipper_func(*args, **kwargs):
+ """Skipper for normal test functions."""
+ if skip_val():
+ raise nose.SkipTest(get_msg(f,msg))
+ else:
+ return f(*args, **kwargs)
+
+ def skipper_gen(*args, **kwargs):
+ """Skipper for test generators."""
+ if skip_val():
+ raise nose.SkipTest(get_msg(f,msg))
+ else:
+ for x in f(*args, **kwargs):
+ yield x
+
+ # Choose the right skipper to use when building the actual generator.
+ if nose.util.isgenerator(f):
+ skipper = skipper_gen
+ else:
+ skipper = skipper_func
+
+ return nose.tools.make_decorator(f)(skipper)
+
+ return skip_decorator
+
+# A version with the condition set to true, common case just to attach a message
+# to a skip decorator
+def skip(msg=None):
+ """Decorator factory - mark a test function for skipping from test suite.
+
+ Parameters
+ ----------
+ msg : string
+ Optional message to be added.
+
+ Returns
+ -------
+ decorator : function
+ Decorator, which, when applied to a function, causes SkipTest
+ to be raised, with the optional message added.
+ """
+
+ return skipif(True,msg)
+
+
+def onlyif(condition, msg):
+ """The reverse from skipif, see skipif for details."""
+
+ if callable(condition):
+ skip_condition = lambda : not condition()
+ else:
+ skip_condition = lambda : not condition
+
+ return skipif(skip_condition, msg)
+
+#-----------------------------------------------------------------------------
+# Utility functions for decorators
+def module_not_available(module):
+ """Can module be imported? Returns true if module does NOT import.
+
+ This is used to make a decorator to skip tests that require module to be
+ available, but delay the 'import numpy' to test execution time.
+ """
+ try:
+ mod = __import__(module)
+ mod_not_avail = False
+ except ImportError:
+ mod_not_avail = True
+
+ return mod_not_avail
+
+
+def decorated_dummy(dec, name):
+ """Return a dummy function decorated with dec, with the given name.
+
+ Examples
+ --------
+ import IPython.testing.decorators as dec
+ setup = dec.decorated_dummy(dec.skip_if_no_x11, __name__)
+ """
warnings.warn("The function `decorated_dummy` is deprecated since IPython 4.0",
DeprecationWarning, stacklevel=2)
- dummy = lambda: None
- dummy.__name__ = name
- return dec(dummy)
-
-#-----------------------------------------------------------------------------
-# Decorators for public use
-
-# Decorators to skip certain tests on specific platforms.
-skip_win32 = skipif(sys.platform == 'win32',
- "This test does not run under Windows")
-skip_linux = skipif(sys.platform.startswith('linux'),
- "This test does not run under Linux")
-skip_osx = skipif(sys.platform == 'darwin',"This test does not run under OS X")
-
-
-# Decorators to skip tests if not on specific platforms.
-skip_if_not_win32 = skipif(sys.platform != 'win32',
- "This test only runs under Windows")
-skip_if_not_linux = skipif(not sys.platform.startswith('linux'),
- "This test only runs under Linux")
-skip_if_not_osx = skipif(sys.platform != 'darwin',
- "This test only runs under OSX")
-
-
-_x11_skip_cond = (sys.platform not in ('darwin', 'win32') and
- os.environ.get('DISPLAY', '') == '')
-_x11_skip_msg = "Skipped under *nix when X11/XOrg not available"
-
-skip_if_no_x11 = skipif(_x11_skip_cond, _x11_skip_msg)
-
-# not a decorator itself, returns a dummy function to be used as setup
-def skip_file_no_x11(name):
+ dummy = lambda: None
+ dummy.__name__ = name
+ return dec(dummy)
+
+#-----------------------------------------------------------------------------
+# Decorators for public use
+
+# Decorators to skip certain tests on specific platforms.
+skip_win32 = skipif(sys.platform == 'win32',
+ "This test does not run under Windows")
+skip_linux = skipif(sys.platform.startswith('linux'),
+ "This test does not run under Linux")
+skip_osx = skipif(sys.platform == 'darwin',"This test does not run under OS X")
+
+
+# Decorators to skip tests if not on specific platforms.
+skip_if_not_win32 = skipif(sys.platform != 'win32',
+ "This test only runs under Windows")
+skip_if_not_linux = skipif(not sys.platform.startswith('linux'),
+ "This test only runs under Linux")
+skip_if_not_osx = skipif(sys.platform != 'darwin',
+ "This test only runs under OSX")
+
+
+_x11_skip_cond = (sys.platform not in ('darwin', 'win32') and
+ os.environ.get('DISPLAY', '') == '')
+_x11_skip_msg = "Skipped under *nix when X11/XOrg not available"
+
+skip_if_no_x11 = skipif(_x11_skip_cond, _x11_skip_msg)
+
+# not a decorator itself, returns a dummy function to be used as setup
+def skip_file_no_x11(name):
warnings.warn("The function `skip_file_no_x11` is deprecated since IPython 4.0",
DeprecationWarning, stacklevel=2)
- return decorated_dummy(skip_if_no_x11, name) if _x11_skip_cond else None
-
-# Other skip decorators
-
-# generic skip without module
-skip_without = lambda mod: skipif(module_not_available(mod), "This test requires %s" % mod)
-
-skipif_not_numpy = skip_without('numpy')
-
-skipif_not_matplotlib = skip_without('matplotlib')
-
-skipif_not_sympy = skip_without('sympy')
-
-skip_known_failure = knownfailureif(True,'This test is known to fail')
-
-known_failure_py3 = knownfailureif(sys.version_info[0] >= 3,
- 'This test is known to fail on Python 3.')
-
+ return decorated_dummy(skip_if_no_x11, name) if _x11_skip_cond else None
+
+# Other skip decorators
+
+# generic skip without module
+skip_without = lambda mod: skipif(module_not_available(mod), "This test requires %s" % mod)
+
+skipif_not_numpy = skip_without('numpy')
+
+skipif_not_matplotlib = skip_without('matplotlib')
+
+skipif_not_sympy = skip_without('sympy')
+
+skip_known_failure = knownfailureif(True,'This test is known to fail')
+
+known_failure_py3 = knownfailureif(sys.version_info[0] >= 3,
+ 'This test is known to fail on Python 3.')
+
cpython2_only = skipif(PY3 or PYPY, "This test only runs on CPython 2.")
-py2_only = skipif(PY3, "This test only runs on Python 2.")
-py3_only = skipif(PY2, "This test only runs on Python 3.")
-
-# A null 'decorator', useful to make more readable code that needs to pick
-# between different decorators based on OS or other conditions
-null_deco = lambda f: f
-
-# Some tests only run where we can use unicode paths. Note that we can't just
-# check os.path.supports_unicode_filenames, which is always False on Linux.
-try:
- f = tempfile.NamedTemporaryFile(prefix=u"tmp€")
-except UnicodeEncodeError:
- unicode_paths = False
-else:
- unicode_paths = True
- f.close()
-
-onlyif_unicode_paths = onlyif(unicode_paths, ("This test is only applicable "
- "where we can use unicode in filenames."))
-
-
-def onlyif_cmds_exist(*commands):
- """
- Decorator to skip test when at least one of `commands` is not found.
- """
- for cmd in commands:
- if not which(cmd):
- return skip("This test runs only if command '{0}' "
- "is installed".format(cmd))
- return null_deco
-
-def onlyif_any_cmd_exists(*commands):
- """
- Decorator to skip test unless at least one of `commands` is found.
- """
+py2_only = skipif(PY3, "This test only runs on Python 2.")
+py3_only = skipif(PY2, "This test only runs on Python 3.")
+
+# A null 'decorator', useful to make more readable code that needs to pick
+# between different decorators based on OS or other conditions
+null_deco = lambda f: f
+
+# Some tests only run where we can use unicode paths. Note that we can't just
+# check os.path.supports_unicode_filenames, which is always False on Linux.
+try:
+ f = tempfile.NamedTemporaryFile(prefix=u"tmp€")
+except UnicodeEncodeError:
+ unicode_paths = False
+else:
+ unicode_paths = True
+ f.close()
+
+onlyif_unicode_paths = onlyif(unicode_paths, ("This test is only applicable "
+ "where we can use unicode in filenames."))
+
+
+def onlyif_cmds_exist(*commands):
+ """
+ Decorator to skip test when at least one of `commands` is not found.
+ """
+ for cmd in commands:
+ if not which(cmd):
+ return skip("This test runs only if command '{0}' "
+ "is installed".format(cmd))
+ return null_deco
+
+def onlyif_any_cmd_exists(*commands):
+ """
+ Decorator to skip test unless at least one of `commands` is found.
+ """
warnings.warn("The function `onlyif_any_cmd_exists` is deprecated since IPython 4.0",
DeprecationWarning, stacklevel=2)
- for cmd in commands:
- if which(cmd):
- return null_deco
- return skip("This test runs only if one of the commands {0} "
- "is installed".format(commands))
+ for cmd in commands:
+ if which(cmd):
+ return null_deco
+ return skip("This test runs only if one of the commands {0} "
+ "is installed".format(commands))
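
The decorators above are meant to be stacked directly onto test functions; a short sketch of typical use, assuming only names defined in this file (skip_win32, skip_without, onlyif_cmds_exist); the test bodies are invented for illustration:

    import sys
    from IPython.testing import decorators as dec

    @dec.skip_win32
    def test_posix_only():
        assert sys.platform != 'win32'

    @dec.skip_without('numpy')          # skipped when numpy cannot be imported
    def test_needs_numpy():
        import numpy
        assert numpy.arange(3).sum() == 3

    @dec.onlyif_cmds_exist('git')       # body only runs when `git` is on PATH
    def test_uses_git():
        pass
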
diff --git a/contrib/python/ipython/py2/IPython/testing/globalipapp.py b/contrib/python/ipython/py2/IPython/testing/globalipapp.py
index a40702cc67..3983393112 100644
--- a/contrib/python/ipython/py2/IPython/testing/globalipapp.py
+++ b/contrib/python/ipython/py2/IPython/testing/globalipapp.py
@@ -1,138 +1,138 @@
-"""Global IPython app to support test running.
-
-We must start our own ipython object and heavily muck with it so that all the
-modifications IPython makes to system behavior don't send the doctest machinery
-into a fit. This code should be considered a gross hack, but it gets the job
-done.
-"""
-from __future__ import absolute_import
-from __future__ import print_function
-
+"""Global IPython app to support test running.
+
+We must start our own ipython object and heavily muck with it so that all the
+modifications IPython makes to system behavior don't send the doctest machinery
+into a fit. This code should be considered a gross hack, but it gets the job
+done.
+"""
+from __future__ import absolute_import
+from __future__ import print_function
+
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
-
-import sys
+
+import sys
import warnings
-
-from . import tools
-
-from IPython.core import page
-from IPython.utils import io
-from IPython.utils import py3compat
-from IPython.utils.py3compat import builtin_mod
-from IPython.terminal.interactiveshell import TerminalInteractiveShell
-
-
-class StreamProxy(io.IOStream):
- """Proxy for sys.stdout/err. This will request the stream *at call time*
- allowing for nose's Capture plugin's redirection of sys.stdout/err.
-
- Parameters
- ----------
- name : str
- The name of the stream. This will be requested anew at every call
- """
-
- def __init__(self, name):
+
+from . import tools
+
+from IPython.core import page
+from IPython.utils import io
+from IPython.utils import py3compat
+from IPython.utils.py3compat import builtin_mod
+from IPython.terminal.interactiveshell import TerminalInteractiveShell
+
+
+class StreamProxy(io.IOStream):
+ """Proxy for sys.stdout/err. This will request the stream *at call time*
+ allowing for nose's Capture plugin's redirection of sys.stdout/err.
+
+ Parameters
+ ----------
+ name : str
+ The name of the stream. This will be requested anew at every call
+ """
+
+ def __init__(self, name):
warnings.warn("StreamProxy is deprecated and unused as of IPython 5", DeprecationWarning,
stacklevel=2,
)
- self.name=name
-
- @property
- def stream(self):
- return getattr(sys, self.name)
-
- def flush(self):
- self.stream.flush()
-
-
-def get_ipython():
- # This will get replaced by the real thing once we start IPython below
- return start_ipython()
-
-
-# A couple of methods to override those in the running IPython to interact
-# better with doctest (doctest captures on raw stdout, so we need to direct
-# various types of output there otherwise it will miss them).
-
-def xsys(self, cmd):
- """Replace the default system call with a capturing one for doctest.
- """
- # We use getoutput, but we need to strip it because pexpect captures
- # the trailing newline differently from commands.getoutput
- print(self.getoutput(cmd, split=False, depth=1).rstrip(), end='', file=sys.stdout)
- sys.stdout.flush()
-
-
-def _showtraceback(self, etype, evalue, stb):
- """Print the traceback purely on stdout for doctest to capture it.
- """
- print(self.InteractiveTB.stb2text(stb), file=sys.stdout)
-
-
-def start_ipython():
- """Start a global IPython shell, which we need for IPython-specific syntax.
- """
- global get_ipython
-
- # This function should only ever run once!
- if hasattr(start_ipython, 'already_called'):
- return
- start_ipython.already_called = True
-
- # Store certain global objects that IPython modifies
- _displayhook = sys.displayhook
- _excepthook = sys.excepthook
- _main = sys.modules.get('__main__')
-
- # Create custom argv and namespaces for our IPython to be test-friendly
- config = tools.default_config()
+ self.name=name
+
+ @property
+ def stream(self):
+ return getattr(sys, self.name)
+
+ def flush(self):
+ self.stream.flush()
+
+
+def get_ipython():
+ # This will get replaced by the real thing once we start IPython below
+ return start_ipython()
+
+
+# A couple of methods to override those in the running IPython to interact
+# better with doctest (doctest captures on raw stdout, so we need to direct
+# various types of output there otherwise it will miss them).
+
+def xsys(self, cmd):
+ """Replace the default system call with a capturing one for doctest.
+ """
+ # We use getoutput, but we need to strip it because pexpect captures
+ # the trailing newline differently from commands.getoutput
+ print(self.getoutput(cmd, split=False, depth=1).rstrip(), end='', file=sys.stdout)
+ sys.stdout.flush()
+
+
+def _showtraceback(self, etype, evalue, stb):
+ """Print the traceback purely on stdout for doctest to capture it.
+ """
+ print(self.InteractiveTB.stb2text(stb), file=sys.stdout)
+
+
+def start_ipython():
+ """Start a global IPython shell, which we need for IPython-specific syntax.
+ """
+ global get_ipython
+
+ # This function should only ever run once!
+ if hasattr(start_ipython, 'already_called'):
+ return
+ start_ipython.already_called = True
+
+ # Store certain global objects that IPython modifies
+ _displayhook = sys.displayhook
+ _excepthook = sys.excepthook
+ _main = sys.modules.get('__main__')
+
+ # Create custom argv and namespaces for our IPython to be test-friendly
+ config = tools.default_config()
config.TerminalInteractiveShell.simple_prompt = True
-
- # Create and initialize our test-friendly IPython instance.
- shell = TerminalInteractiveShell.instance(config=config,
- )
-
- # A few more tweaks needed for playing nicely with doctests...
-
- # remove history file
- shell.tempfiles.append(config.HistoryManager.hist_file)
-
- # These traps are normally only active for interactive use, set them
- # permanently since we'll be mocking interactive sessions.
- shell.builtin_trap.activate()
-
- # Modify the IPython system call with one that uses getoutput, so that we
- # can capture subcommands and print them to Python's stdout, otherwise the
- # doctest machinery would miss them.
- shell.system = py3compat.MethodType(xsys, shell)
-
- shell._showtraceback = py3compat.MethodType(_showtraceback, shell)
-
- # IPython is ready, now clean up some global state...
-
- # Deactivate the various python system hooks added by ipython for
- # interactive convenience so we don't confuse the doctest system
- sys.modules['__main__'] = _main
- sys.displayhook = _displayhook
- sys.excepthook = _excepthook
-
- # So that ipython magics and aliases can be doctested (they work by making
- # a call into a global _ip object). Also make the top-level get_ipython
- # now return this without recursively calling here again.
- _ip = shell
- get_ipython = _ip.get_ipython
- builtin_mod._ip = _ip
- builtin_mod.get_ipython = get_ipython
-
- # Override paging, so we don't require user interaction during the tests.
- def nopage(strng, start=0, screen_lines=0, pager_cmd=None):
+
+ # Create and initialize our test-friendly IPython instance.
+ shell = TerminalInteractiveShell.instance(config=config,
+ )
+
+ # A few more tweaks needed for playing nicely with doctests...
+
+ # remove history file
+ shell.tempfiles.append(config.HistoryManager.hist_file)
+
+ # These traps are normally only active for interactive use, set them
+ # permanently since we'll be mocking interactive sessions.
+ shell.builtin_trap.activate()
+
+ # Modify the IPython system call with one that uses getoutput, so that we
+ # can capture subcommands and print them to Python's stdout, otherwise the
+ # doctest machinery would miss them.
+ shell.system = py3compat.MethodType(xsys, shell)
+
+ shell._showtraceback = py3compat.MethodType(_showtraceback, shell)
+
+ # IPython is ready, now clean up some global state...
+
+ # Deactivate the various python system hooks added by ipython for
+ # interactive convenience so we don't confuse the doctest system
+ sys.modules['__main__'] = _main
+ sys.displayhook = _displayhook
+ sys.excepthook = _excepthook
+
+ # So that ipython magics and aliases can be doctested (they work by making
+ # a call into a global _ip object). Also make the top-level get_ipython
+ # now return this without recursively calling here again.
+ _ip = shell
+ get_ipython = _ip.get_ipython
+ builtin_mod._ip = _ip
+ builtin_mod.get_ipython = get_ipython
+
+ # Override paging, so we don't require user interaction during the tests.
+ def nopage(strng, start=0, screen_lines=0, pager_cmd=None):
if isinstance(strng, dict):
strng = strng.get('text/plain', '')
- print(strng)
-
- page.orig_page = page.pager_page
- page.pager_page = nopage
-
- return _ip
+ print(strng)
+
+ page.orig_page = page.pager_page
+ page.pager_page = nopage
+
+ return _ip
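
For context, tests and doctests drive the global shell configured above roughly as follows. A minimal sketch, assuming the nose-based test environment is available; the cell contents and the shell command are illustrative:

    from IPython.testing.globalipapp import start_ipython

    ip = start_ipython()         # first call builds and returns the global shell;
                                 # later calls return None, so keep the reference
    ip.run_cell('x = 40 + 2')    # execute code in the test-friendly shell
    assert ip.user_ns['x'] == 42
    ip.system('echo hello')      # routed through xsys(), so doctest can capture it

Note that start_ipython() also rebinds the builtin get_ipython() and _ip, which is what lets magics and aliases be exercised from plain doctests.
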
diff --git a/contrib/python/ipython/py2/IPython/testing/iptest.py b/contrib/python/ipython/py2/IPython/testing/iptest.py
index 9a5a3d6f58..4018264125 100644
--- a/contrib/python/ipython/py2/IPython/testing/iptest.py
+++ b/contrib/python/ipython/py2/IPython/testing/iptest.py
@@ -1,55 +1,55 @@
-# -*- coding: utf-8 -*-
-"""IPython Test Suite Runner.
-
-This module provides a main entry point to a user script to test IPython
-itself from the command line. There are two ways of running this script:
-
-1. With the syntax `iptest all`. This runs our entire test suite by
- calling this script (with different arguments) recursively. This
- causes modules and package to be tested in different processes, using nose
- or trial where appropriate.
-2. With the regular nose syntax, like `iptest -vvs IPython`. In this form
- the script simply calls nose, but with special command line flags and
- plugins loaded.
-
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import print_function
-
-import glob
-from io import BytesIO
-import os
-import os.path as path
-import sys
-from threading import Thread, Lock, Event
-import warnings
-
-import nose.plugins.builtin
-from nose.plugins.xunit import Xunit
-from nose import SkipTest
-from nose.core import TestProgram
-from nose.plugins import Plugin
-from nose.util import safe_str
-
-from IPython import version_info
-from IPython.utils.py3compat import bytes_to_str
-from IPython.utils.importstring import import_item
-from IPython.testing.plugin.ipdoctest import IPythonDoctest
-from IPython.external.decorators import KnownFailure, knownfailureif
-
-pjoin = path.join
-
-
-# Enable printing all warnings raise by IPython's modules
+# -*- coding: utf-8 -*-
+"""IPython Test Suite Runner.
+
+This module provides a main entry point to a user script to test IPython
+itself from the command line. There are two ways of running this script:
+
+1. With the syntax `iptest all`. This runs our entire test suite by
+ calling this script (with different arguments) recursively. This
+ causes modules and package to be tested in different processes, using nose
+ or trial where appropriate.
+2. With the regular nose syntax, like `iptest -vvs IPython`. In this form
+ the script simply calls nose, but with special command line flags and
+ plugins loaded.
+
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import print_function
+
+import glob
+from io import BytesIO
+import os
+import os.path as path
+import sys
+from threading import Thread, Lock, Event
+import warnings
+
+import nose.plugins.builtin
+from nose.plugins.xunit import Xunit
+from nose import SkipTest
+from nose.core import TestProgram
+from nose.plugins import Plugin
+from nose.util import safe_str
+
+from IPython import version_info
+from IPython.utils.py3compat import bytes_to_str
+from IPython.utils.importstring import import_item
+from IPython.testing.plugin.ipdoctest import IPythonDoctest
+from IPython.external.decorators import KnownFailure, knownfailureif
+
+pjoin = path.join
+
+
+# Enable printing all warnings raise by IPython's modules
warnings.filterwarnings('ignore', message='.*Matplotlib is building the font cache.*', category=UserWarning, module='.*')
if sys.version_info > (3,0):
warnings.filterwarnings('error', message='.*', category=ResourceWarning, module='.*')
warnings.filterwarnings('error', message=".*{'config': True}.*", category=DeprecationWarning, module='IPy.*')
-warnings.filterwarnings('default', message='.*', category=Warning, module='IPy.*')
-
+warnings.filterwarnings('default', message='.*', category=Warning, module='IPy.*')
+
warnings.filterwarnings('error', message='.*apply_wrapper.*', category=DeprecationWarning, module='.*')
warnings.filterwarnings('error', message='.*make_label_dec', category=DeprecationWarning, module='.*')
warnings.filterwarnings('error', message='.*decorated_dummy.*', category=DeprecationWarning, module='.*')
@@ -60,384 +60,384 @@ warnings.filterwarnings('error', message='.*disable_gui.*', category=Deprecation
warnings.filterwarnings('error', message='.*ExceptionColors global is deprecated.*', category=DeprecationWarning, module='.*')
-if version_info < (6,):
- # nose.tools renames all things from `camelCase` to `snake_case` which raise an
- # warning with the runner they also import from standard import library. (as of Dec 2015)
- # Ignore, let's revisit that in a couple of years for IPython 6.
- warnings.filterwarnings('ignore', message='.*Please use assertEqual instead', category=Warning, module='IPython.*')
-
-
-# ------------------------------------------------------------------------------
-# Monkeypatch Xunit to count known failures as skipped.
-# ------------------------------------------------------------------------------
-def monkeypatch_xunit():
- try:
- knownfailureif(True)(lambda: None)()
- except Exception as e:
- KnownFailureTest = type(e)
-
- def addError(self, test, err, capt=None):
- if issubclass(err[0], KnownFailureTest):
- err = (SkipTest,) + err[1:]
- return self.orig_addError(test, err, capt)
-
- Xunit.orig_addError = Xunit.addError
- Xunit.addError = addError
-
-#-----------------------------------------------------------------------------
-# Check which dependencies are installed and greater than minimum version.
-#-----------------------------------------------------------------------------
-def extract_version(mod):
- return mod.__version__
-
-def test_for(item, min_version=None, callback=extract_version):
- """Test to see if item is importable, and optionally check against a minimum
- version.
-
- If min_version is given, the default behavior is to check against the
- `__version__` attribute of the item, but specifying `callback` allows you to
- extract the value you are interested in. e.g::
-
- In [1]: import sys
-
- In [2]: from IPython.testing.iptest import test_for
-
- In [3]: test_for('sys', (2,6), callback=lambda sys: sys.version_info)
- Out[3]: True
-
- """
- try:
- check = import_item(item)
- except (ImportError, RuntimeError):
- # GTK reports Runtime error if it can't be initialized even if it's
- # importable.
- return False
- else:
- if min_version:
- if callback:
- # extra processing step to get version to compare
- check = callback(check)
-
- return check >= min_version
- else:
- return True
-
-# Global dict where we can store information on what we have and what we don't
-# have available at test run time
-have = {'matplotlib': test_for('matplotlib'),
- 'pygments': test_for('pygments'),
- 'sqlite3': test_for('sqlite3')}
-
-#-----------------------------------------------------------------------------
-# Test suite definitions
-#-----------------------------------------------------------------------------
-
-test_group_names = ['core',
- 'extensions', 'lib', 'terminal', 'testing', 'utils',
- ]
-
-class TestSection(object):
- def __init__(self, name, includes):
- self.name = name
- self.includes = includes
- self.excludes = []
- self.dependencies = []
- self.enabled = True
-
- def exclude(self, module):
- if not module.startswith('IPython'):
- module = self.includes[0] + "." + module
- self.excludes.append(module.replace('.', os.sep))
-
- def requires(self, *packages):
- self.dependencies.extend(packages)
-
- @property
- def will_run(self):
- return self.enabled and all(have[p] for p in self.dependencies)
-
-# Name -> (include, exclude, dependencies_met)
-test_sections = {n:TestSection(n, ['IPython.%s' % n]) for n in test_group_names}
-
-
-# Exclusions and dependencies
-# ---------------------------
-
-# core:
-sec = test_sections['core']
-if not have['sqlite3']:
- sec.exclude('tests.test_history')
- sec.exclude('history')
-if not have['matplotlib']:
- sec.exclude('pylabtools'),
- sec.exclude('tests.test_pylabtools')
-
-# lib:
-sec = test_sections['lib']
-sec.exclude('kernel')
-if not have['pygments']:
- sec.exclude('tests.test_lexers')
-# We do this unconditionally, so that the test suite doesn't import
-# gtk, changing the default encoding and masking some unicode bugs.
-sec.exclude('inputhookgtk')
-# We also do this unconditionally, because wx can interfere with Unix signals.
-# There are currently no tests for it anyway.
-sec.exclude('inputhookwx')
-# Testing inputhook will need a lot of thought, to figure out
-# how to have tests that don't lock up with the gui event
-# loops in the picture
-sec.exclude('inputhook')
-
-# testing:
-sec = test_sections['testing']
-# These have to be skipped on win32 because they use echo, rm, cd, etc.
-# See ticket https://github.com/ipython/ipython/issues/87
-if sys.platform == 'win32':
- sec.exclude('plugin.test_exampleip')
- sec.exclude('plugin.dtexample')
-
-# don't run jupyter_console tests found via shim
-test_sections['terminal'].exclude('console')
-
-# extensions:
-sec = test_sections['extensions']
-# This is deprecated in favour of rpy2
-sec.exclude('rmagic')
-# autoreload does some strange stuff, so move it to its own test section
-sec.exclude('autoreload')
-sec.exclude('tests.test_autoreload')
-test_sections['autoreload'] = TestSection('autoreload',
- ['IPython.extensions.autoreload', 'IPython.extensions.tests.test_autoreload'])
-test_group_names.append('autoreload')
-
-
-#-----------------------------------------------------------------------------
-# Functions and classes
-#-----------------------------------------------------------------------------
-
-def check_exclusions_exist():
- from IPython.paths import get_ipython_package_dir
+if version_info < (6,):
+ # nose.tools renames all things from `camelCase` to `snake_case` which raise an
+ # warning with the runner they also import from standard import library. (as of Dec 2015)
+ # Ignore, let's revisit that in a couple of years for IPython 6.
+ warnings.filterwarnings('ignore', message='.*Please use assertEqual instead', category=Warning, module='IPython.*')
+
+
+# ------------------------------------------------------------------------------
+# Monkeypatch Xunit to count known failures as skipped.
+# ------------------------------------------------------------------------------
+def monkeypatch_xunit():
+ try:
+ knownfailureif(True)(lambda: None)()
+ except Exception as e:
+ KnownFailureTest = type(e)
+
+ def addError(self, test, err, capt=None):
+ if issubclass(err[0], KnownFailureTest):
+ err = (SkipTest,) + err[1:]
+ return self.orig_addError(test, err, capt)
+
+ Xunit.orig_addError = Xunit.addError
+ Xunit.addError = addError
+
+#-----------------------------------------------------------------------------
+# Check which dependencies are installed and greater than minimum version.
+#-----------------------------------------------------------------------------
+def extract_version(mod):
+ return mod.__version__
+
+def test_for(item, min_version=None, callback=extract_version):
+ """Test to see if item is importable, and optionally check against a minimum
+ version.
+
+ If min_version is given, the default behavior is to check against the
+ `__version__` attribute of the item, but specifying `callback` allows you to
+ extract the value you are interested in. e.g::
+
+ In [1]: import sys
+
+ In [2]: from IPython.testing.iptest import test_for
+
+ In [3]: test_for('sys', (2,6), callback=lambda sys: sys.version_info)
+ Out[3]: True
+
+ """
+ try:
+ check = import_item(item)
+ except (ImportError, RuntimeError):
+ # GTK reports Runtime error if it can't be initialized even if it's
+ # importable.
+ return False
+ else:
+ if min_version:
+ if callback:
+ # extra processing step to get version to compare
+ check = callback(check)
+
+ return check >= min_version
+ else:
+ return True
+
+# Global dict where we can store information on what we have and what we don't
+# have available at test run time
+have = {'matplotlib': test_for('matplotlib'),
+ 'pygments': test_for('pygments'),
+ 'sqlite3': test_for('sqlite3')}
+
+#-----------------------------------------------------------------------------
+# Test suite definitions
+#-----------------------------------------------------------------------------
+
+test_group_names = ['core',
+ 'extensions', 'lib', 'terminal', 'testing', 'utils',
+ ]
+
+class TestSection(object):
+ def __init__(self, name, includes):
+ self.name = name
+ self.includes = includes
+ self.excludes = []
+ self.dependencies = []
+ self.enabled = True
+
+ def exclude(self, module):
+ if not module.startswith('IPython'):
+ module = self.includes[0] + "." + module
+ self.excludes.append(module.replace('.', os.sep))
+
+ def requires(self, *packages):
+ self.dependencies.extend(packages)
+
+ @property
+ def will_run(self):
+ return self.enabled and all(have[p] for p in self.dependencies)
+
+# Name -> (include, exclude, dependencies_met)
+test_sections = {n:TestSection(n, ['IPython.%s' % n]) for n in test_group_names}
+
+
+# Exclusions and dependencies
+# ---------------------------
+
+# core:
+sec = test_sections['core']
+if not have['sqlite3']:
+ sec.exclude('tests.test_history')
+ sec.exclude('history')
+if not have['matplotlib']:
+ sec.exclude('pylabtools'),
+ sec.exclude('tests.test_pylabtools')
+
+# lib:
+sec = test_sections['lib']
+sec.exclude('kernel')
+if not have['pygments']:
+ sec.exclude('tests.test_lexers')
+# We do this unconditionally, so that the test suite doesn't import
+# gtk, changing the default encoding and masking some unicode bugs.
+sec.exclude('inputhookgtk')
+# We also do this unconditionally, because wx can interfere with Unix signals.
+# There are currently no tests for it anyway.
+sec.exclude('inputhookwx')
+# Testing inputhook will need a lot of thought, to figure out
+# how to have tests that don't lock up with the gui event
+# loops in the picture
+sec.exclude('inputhook')
+
+# testing:
+sec = test_sections['testing']
+# These have to be skipped on win32 because they use echo, rm, cd, etc.
+# See ticket https://github.com/ipython/ipython/issues/87
+if sys.platform == 'win32':
+ sec.exclude('plugin.test_exampleip')
+ sec.exclude('plugin.dtexample')
+
+# don't run jupyter_console tests found via shim
+test_sections['terminal'].exclude('console')
+
+# extensions:
+sec = test_sections['extensions']
+# This is deprecated in favour of rpy2
+sec.exclude('rmagic')
+# autoreload does some strange stuff, so move it to its own test section
+sec.exclude('autoreload')
+sec.exclude('tests.test_autoreload')
+test_sections['autoreload'] = TestSection('autoreload',
+ ['IPython.extensions.autoreload', 'IPython.extensions.tests.test_autoreload'])
+test_group_names.append('autoreload')
+
+
+#-----------------------------------------------------------------------------
+# Functions and classes
+#-----------------------------------------------------------------------------
+
+def check_exclusions_exist():
+ from IPython.paths import get_ipython_package_dir
from warnings import warn
- parent = os.path.dirname(get_ipython_package_dir())
- for sec in test_sections:
- for pattern in sec.exclusions:
- fullpath = pjoin(parent, pattern)
- if not os.path.exists(fullpath) and not glob.glob(fullpath + '.*'):
- warn("Excluding nonexistent file: %r" % pattern)
-
-
-class ExclusionPlugin(Plugin):
- """A nose plugin to effect our exclusions of files and directories.
- """
- name = 'exclusions'
- score = 3000 # Should come before any other plugins
-
- def __init__(self, exclude_patterns=None):
- """
- Parameters
- ----------
-
- exclude_patterns : sequence of strings, optional
- Filenames containing these patterns (as raw strings, not as regular
- expressions) are excluded from the tests.
- """
- self.exclude_patterns = exclude_patterns or []
- super(ExclusionPlugin, self).__init__()
-
- def options(self, parser, env=os.environ):
- Plugin.options(self, parser, env)
-
- def configure(self, options, config):
- Plugin.configure(self, options, config)
- # Override nose trying to disable plugin.
- self.enabled = True
-
- def wantFile(self, filename):
- """Return whether the given filename should be scanned for tests.
- """
- if any(pat in filename for pat in self.exclude_patterns):
- return False
- return None
-
- def wantDirectory(self, directory):
- """Return whether the given directory should be scanned for tests.
- """
- if any(pat in directory for pat in self.exclude_patterns):
- return False
- return None
-
-
-class StreamCapturer(Thread):
- daemon = True # Don't hang if main thread crashes
- started = False
- def __init__(self, echo=False):
- super(StreamCapturer, self).__init__()
- self.echo = echo
- self.streams = []
- self.buffer = BytesIO()
- self.readfd, self.writefd = os.pipe()
- self.buffer_lock = Lock()
- self.stop = Event()
-
- def run(self):
- self.started = True
-
- while not self.stop.is_set():
- chunk = os.read(self.readfd, 1024)
-
- with self.buffer_lock:
- self.buffer.write(chunk)
- if self.echo:
- sys.stdout.write(bytes_to_str(chunk))
-
- os.close(self.readfd)
- os.close(self.writefd)
-
- def reset_buffer(self):
- with self.buffer_lock:
- self.buffer.truncate(0)
- self.buffer.seek(0)
-
- def get_buffer(self):
- with self.buffer_lock:
- return self.buffer.getvalue()
-
- def ensure_started(self):
- if not self.started:
- self.start()
-
- def halt(self):
- """Safely stop the thread."""
- if not self.started:
- return
-
- self.stop.set()
- os.write(self.writefd, b'\0') # Ensure we're not locked in a read()
- self.join()
-
-class SubprocessStreamCapturePlugin(Plugin):
- name='subprocstreams'
- def __init__(self):
- Plugin.__init__(self)
- self.stream_capturer = StreamCapturer()
- self.destination = os.environ.get('IPTEST_SUBPROC_STREAMS', 'capture')
- # This is ugly, but distant parts of the test machinery need to be able
- # to redirect streams, so we make the object globally accessible.
- nose.iptest_stdstreams_fileno = self.get_write_fileno
-
- def get_write_fileno(self):
- if self.destination == 'capture':
- self.stream_capturer.ensure_started()
- return self.stream_capturer.writefd
- elif self.destination == 'discard':
- return os.open(os.devnull, os.O_WRONLY)
- else:
- return sys.__stdout__.fileno()
-
- def configure(self, options, config):
- Plugin.configure(self, options, config)
- # Override nose trying to disable plugin.
- if self.destination == 'capture':
- self.enabled = True
-
- def startTest(self, test):
- # Reset log capture
- self.stream_capturer.reset_buffer()
-
- def formatFailure(self, test, err):
- # Show output
- ec, ev, tb = err
- captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace')
- if captured.strip():
- ev = safe_str(ev)
- out = [ev, '>> begin captured subprocess output <<',
- captured,
- '>> end captured subprocess output <<']
- return ec, '\n'.join(out), tb
-
- return err
-
- formatError = formatFailure
-
- def finalize(self, result):
- self.stream_capturer.halt()
-
-
-def run_iptest():
- """Run the IPython test suite using nose.
-
- This function is called when this script is **not** called with the form
- `iptest all`. It simply calls nose with appropriate command line flags
- and accepts all of the standard nose arguments.
- """
- # Apply our monkeypatch to Xunit
- if '--with-xunit' in sys.argv and not hasattr(Xunit, 'orig_addError'):
- monkeypatch_xunit()
-
- arg1 = sys.argv[1]
- if arg1 in test_sections:
- section = test_sections[arg1]
- sys.argv[1:2] = section.includes
- elif arg1.startswith('IPython.') and arg1[8:] in test_sections:
- section = test_sections[arg1[8:]]
- sys.argv[1:2] = section.includes
- else:
- section = TestSection(arg1, includes=[arg1])
-
-
- argv = sys.argv + [ '--detailed-errors', # extra info in tracebacks
- # We add --exe because of setuptools' imbecility (it
- # blindly does chmod +x on ALL files). Nose does the
- # right thing and it tries to avoid executables,
- # setuptools unfortunately forces our hand here. This
- # has been discussed on the distutils list and the
- # setuptools devs refuse to fix this problem!
- '--exe',
- ]
- if '-a' not in argv and '-A' not in argv:
- argv = argv + ['-a', '!crash']
-
- if nose.__version__ >= '0.11':
- # I don't fully understand why we need this one, but depending on what
- # directory the test suite is run from, if we don't give it, 0 tests
- # get run. Specifically, if the test suite is run from the source dir
- # with an argument (like 'iptest.py IPython.core', 0 tests are run,
- # even if the same call done in this directory works fine). It appears
- # that if the requested package is in the current dir, nose bails early
- # by default. Since it's otherwise harmless, leave it in by default
- # for nose >= 0.11, though unfortunately nose 0.10 doesn't support it.
- argv.append('--traverse-namespace')
-
- plugins = [ ExclusionPlugin(section.excludes), KnownFailure(),
- SubprocessStreamCapturePlugin() ]
-
- # we still have some vestigial doctests in core
- if (section.name.startswith(('core', 'IPython.core'))):
- plugins.append(IPythonDoctest())
- argv.extend([
- '--with-ipdoctest',
- '--ipdoctest-tests',
- '--ipdoctest-extension=txt',
- ])
-
-
- # Use working directory set by parent process (see iptestcontroller)
- if 'IPTEST_WORKING_DIR' in os.environ:
- os.chdir(os.environ['IPTEST_WORKING_DIR'])
-
- # We need a global ipython running in this process, but the special
- # in-process group spawns its own IPython kernels, so for *that* group we
- # must avoid also opening the global one (otherwise there's a conflict of
- # singletons). Ultimately the solution to this problem is to refactor our
- # assumptions about what needs to be a singleton and what doesn't (app
- # objects should, individual shells shouldn't). But for now, this
- # workaround allows the test suite for the inprocess module to complete.
- if 'kernel.inprocess' not in section.name:
- from IPython.testing import globalipapp
- globalipapp.start_ipython()
-
- # Now nose can run
- TestProgram(argv=argv, addplugins=plugins)
-
-if __name__ == '__main__':
- run_iptest()
+ parent = os.path.dirname(get_ipython_package_dir())
+ for sec in test_sections:
+ for pattern in sec.exclusions:
+ fullpath = pjoin(parent, pattern)
+ if not os.path.exists(fullpath) and not glob.glob(fullpath + '.*'):
+ warn("Excluding nonexistent file: %r" % pattern)
+
+
+class ExclusionPlugin(Plugin):
+ """A nose plugin to effect our exclusions of files and directories.
+ """
+ name = 'exclusions'
+ score = 3000 # Should come before any other plugins
+
+ def __init__(self, exclude_patterns=None):
+ """
+ Parameters
+ ----------
+
+ exclude_patterns : sequence of strings, optional
+ Filenames containing these patterns (as raw strings, not as regular
+ expressions) are excluded from the tests.
+ """
+ self.exclude_patterns = exclude_patterns or []
+ super(ExclusionPlugin, self).__init__()
+
+ def options(self, parser, env=os.environ):
+ Plugin.options(self, parser, env)
+
+ def configure(self, options, config):
+ Plugin.configure(self, options, config)
+ # Override nose trying to disable plugin.
+ self.enabled = True
+
+ def wantFile(self, filename):
+ """Return whether the given filename should be scanned for tests.
+ """
+ if any(pat in filename for pat in self.exclude_patterns):
+ return False
+ return None
+
+ def wantDirectory(self, directory):
+ """Return whether the given directory should be scanned for tests.
+ """
+ if any(pat in directory for pat in self.exclude_patterns):
+ return False
+ return None
+
+
+class StreamCapturer(Thread):
+ daemon = True # Don't hang if main thread crashes
+ started = False
+ def __init__(self, echo=False):
+ super(StreamCapturer, self).__init__()
+ self.echo = echo
+ self.streams = []
+ self.buffer = BytesIO()
+ self.readfd, self.writefd = os.pipe()
+ self.buffer_lock = Lock()
+ self.stop = Event()
+
+ def run(self):
+ self.started = True
+
+ while not self.stop.is_set():
+ chunk = os.read(self.readfd, 1024)
+
+ with self.buffer_lock:
+ self.buffer.write(chunk)
+ if self.echo:
+ sys.stdout.write(bytes_to_str(chunk))
+
+ os.close(self.readfd)
+ os.close(self.writefd)
+
+ def reset_buffer(self):
+ with self.buffer_lock:
+ self.buffer.truncate(0)
+ self.buffer.seek(0)
+
+ def get_buffer(self):
+ with self.buffer_lock:
+ return self.buffer.getvalue()
+
+ def ensure_started(self):
+ if not self.started:
+ self.start()
+
+ def halt(self):
+ """Safely stop the thread."""
+ if not self.started:
+ return
+
+ self.stop.set()
+ os.write(self.writefd, b'\0') # Ensure we're not locked in a read()
+ self.join()
+
+class SubprocessStreamCapturePlugin(Plugin):
+ name='subprocstreams'
+ def __init__(self):
+ Plugin.__init__(self)
+ self.stream_capturer = StreamCapturer()
+ self.destination = os.environ.get('IPTEST_SUBPROC_STREAMS', 'capture')
+ # This is ugly, but distant parts of the test machinery need to be able
+ # to redirect streams, so we make the object globally accessible.
+ nose.iptest_stdstreams_fileno = self.get_write_fileno
+
+ def get_write_fileno(self):
+ if self.destination == 'capture':
+ self.stream_capturer.ensure_started()
+ return self.stream_capturer.writefd
+ elif self.destination == 'discard':
+ return os.open(os.devnull, os.O_WRONLY)
+ else:
+ return sys.__stdout__.fileno()
+
+ def configure(self, options, config):
+ Plugin.configure(self, options, config)
+ # Override nose trying to disable plugin.
+ if self.destination == 'capture':
+ self.enabled = True
+
+ def startTest(self, test):
+ # Reset log capture
+ self.stream_capturer.reset_buffer()
+
+ def formatFailure(self, test, err):
+ # Show output
+ ec, ev, tb = err
+ captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace')
+ if captured.strip():
+ ev = safe_str(ev)
+ out = [ev, '>> begin captured subprocess output <<',
+ captured,
+ '>> end captured subprocess output <<']
+ return ec, '\n'.join(out), tb
+
+ return err
+
+ formatError = formatFailure
+
+ def finalize(self, result):
+ self.stream_capturer.halt()
+
+
+def run_iptest():
+ """Run the IPython test suite using nose.
+
+ This function is called when this script is **not** called with the form
+ `iptest all`. It simply calls nose with appropriate command line flags
+ and accepts all of the standard nose arguments.
+ """
+ # Apply our monkeypatch to Xunit
+ if '--with-xunit' in sys.argv and not hasattr(Xunit, 'orig_addError'):
+ monkeypatch_xunit()
+
+ arg1 = sys.argv[1]
+ if arg1 in test_sections:
+ section = test_sections[arg1]
+ sys.argv[1:2] = section.includes
+ elif arg1.startswith('IPython.') and arg1[8:] in test_sections:
+ section = test_sections[arg1[8:]]
+ sys.argv[1:2] = section.includes
+ else:
+ section = TestSection(arg1, includes=[arg1])
+
+
+ argv = sys.argv + [ '--detailed-errors', # extra info in tracebacks
+ # We add --exe because of setuptools' imbecility (it
+ # blindly does chmod +x on ALL files). Nose does the
+ # right thing and it tries to avoid executables,
+ # setuptools unfortunately forces our hand here. This
+ # has been discussed on the distutils list and the
+ # setuptools devs refuse to fix this problem!
+ '--exe',
+ ]
+ if '-a' not in argv and '-A' not in argv:
+ argv = argv + ['-a', '!crash']
+
+ if nose.__version__ >= '0.11':
+ # I don't fully understand why we need this one, but depending on what
+ # directory the test suite is run from, if we don't give it, 0 tests
+ # get run. Specifically, if the test suite is run from the source dir
+ # with an argument (like 'iptest.py IPython.core', 0 tests are run,
+ # even if the same call done in this directory works fine). It appears
+ # that if the requested package is in the current dir, nose bails early
+ # by default. Since it's otherwise harmless, leave it in by default
+ # for nose >= 0.11, though unfortunately nose 0.10 doesn't support it.
+ argv.append('--traverse-namespace')
+
+ plugins = [ ExclusionPlugin(section.excludes), KnownFailure(),
+ SubprocessStreamCapturePlugin() ]
+
+ # we still have some vestigial doctests in core
+ if (section.name.startswith(('core', 'IPython.core'))):
+ plugins.append(IPythonDoctest())
+ argv.extend([
+ '--with-ipdoctest',
+ '--ipdoctest-tests',
+ '--ipdoctest-extension=txt',
+ ])
+
+
+ # Use working directory set by parent process (see iptestcontroller)
+ if 'IPTEST_WORKING_DIR' in os.environ:
+ os.chdir(os.environ['IPTEST_WORKING_DIR'])
+
+ # We need a global ipython running in this process, but the special
+ # in-process group spawns its own IPython kernels, so for *that* group we
+ # must avoid also opening the global one (otherwise there's a conflict of
+ # singletons). Ultimately the solution to this problem is to refactor our
+ # assumptions about what needs to be a singleton and what doesn't (app
+ # objects should, individual shells shouldn't). But for now, this
+ # workaround allows the test suite for the inprocess module to complete.
+ if 'kernel.inprocess' not in section.name:
+ from IPython.testing import globalipapp
+ globalipapp.start_ipython()
+
+ # Now nose can run
+ TestProgram(argv=argv, addplugins=plugins)
+
+if __name__ == '__main__':
+ run_iptest()
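
For context, the dependency and exclusion machinery restored above composes as follows. A minimal sketch (the 'demo' section, the numpy probe and the excluded module name are illustrative; importing IPython.testing.iptest assumes nose is installed):

    from IPython.testing.iptest import TestSection, test_for, have

    have['numpy'] = test_for('numpy')        # availability probe, same pattern as the `have` dict above
    demo = TestSection('demo', ['IPython.lib'])
    demo.requires('numpy')                   # will_run is False unless have['numpy'] is truthy
    demo.exclude('tests.test_latextools')    # stored as a path fragment under includes[0]
    print(demo.will_run, demo.excludes)      # e.g. True ['IPython/lib/tests/test_latextools']

run_iptest() then maps a section name taken from sys.argv onto these includes and excludes and hands the excludes to nose via ExclusionPlugin.
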
diff --git a/contrib/python/ipython/py2/IPython/testing/iptestcontroller.py b/contrib/python/ipython/py2/IPython/testing/iptestcontroller.py
index 6d8834f193..95aa06e4a4 100644
--- a/contrib/python/ipython/py2/IPython/testing/iptestcontroller.py
+++ b/contrib/python/ipython/py2/IPython/testing/iptestcontroller.py
@@ -1,532 +1,532 @@
-# -*- coding: utf-8 -*-
-"""IPython Test Process Controller
-
-This module runs one or more subprocesses which will actually run the IPython
-test suite.
-
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import print_function
-
-import argparse
-import json
-import multiprocessing.pool
-import os
-import stat
-import re
-import requests
-import shutil
-import signal
-import sys
-import subprocess
-import time
-
-from .iptest import (
- have, test_group_names as py_test_group_names, test_sections, StreamCapturer,
- test_for,
-)
-from IPython.utils.path import compress_user
-from IPython.utils.py3compat import bytes_to_str
-from IPython.utils.sysinfo import get_sys_info
-from IPython.utils.tempdir import TemporaryDirectory
-from IPython.utils.text import strip_ansi
-
-try:
- # Python >= 3.3
- from subprocess import TimeoutExpired
- def popen_wait(p, timeout):
- return p.wait(timeout)
-except ImportError:
- class TimeoutExpired(Exception):
- pass
- def popen_wait(p, timeout):
- """backport of Popen.wait from Python 3"""
- for i in range(int(10 * timeout)):
- if p.poll() is not None:
- return
- time.sleep(0.1)
- if p.poll() is None:
- raise TimeoutExpired
-
-NOTEBOOK_SHUTDOWN_TIMEOUT = 10
-
-class TestController(object):
- """Run tests in a subprocess
- """
- #: str, IPython test suite to be executed.
- section = None
- #: list, command line arguments to be executed
- cmd = None
- #: dict, extra environment variables to set for the subprocess
- env = None
- #: list, TemporaryDirectory instances to clear up when the process finishes
- dirs = None
- #: subprocess.Popen instance
- process = None
- #: str, process stdout+stderr
- stdout = None
-
- def __init__(self):
- self.cmd = []
- self.env = {}
- self.dirs = []
-
- def setup(self):
- """Create temporary directories etc.
-
- This is only called when we know the test group will be run. Things
- created here may be cleaned up by self.cleanup().
- """
- pass
-
- def launch(self, buffer_output=False, capture_output=False):
- # print('*** ENV:', self.env) # dbg
- # print('*** CMD:', self.cmd) # dbg
- env = os.environ.copy()
- env.update(self.env)
- if buffer_output:
- capture_output = True
- self.stdout_capturer = c = StreamCapturer(echo=not buffer_output)
- c.start()
- stdout = c.writefd if capture_output else None
- stderr = subprocess.STDOUT if capture_output else None
- self.process = subprocess.Popen(self.cmd, stdout=stdout,
- stderr=stderr, env=env)
-
- def wait(self):
- self.process.wait()
- self.stdout_capturer.halt()
- self.stdout = self.stdout_capturer.get_buffer()
- return self.process.returncode
-
- def print_extra_info(self):
- """Print extra information about this test run.
-
- If we're running in parallel and showing the concise view, this is only
- called if the test group fails. Otherwise, it's called before the test
- group is started.
-
- The base implementation does nothing, but it can be overridden by
- subclasses.
- """
- return
-
- def cleanup_process(self):
- """Cleanup on exit by killing any leftover processes."""
- subp = self.process
- if subp is None or (subp.poll() is not None):
- return # Process doesn't exist, or is already dead.
-
- try:
- print('Cleaning up stale PID: %d' % subp.pid)
- subp.kill()
- except: # (OSError, WindowsError) ?
- # This is just a best effort, if we fail or the process was
- # really gone, ignore it.
- pass
- else:
- for i in range(10):
- if subp.poll() is None:
- time.sleep(0.1)
- else:
- break
-
- if subp.poll() is None:
- # The process did not die...
- print('... failed. Manual cleanup may be required.')
-
- def cleanup(self):
- "Kill process if it's still alive, and clean up temporary directories"
- self.cleanup_process()
- for td in self.dirs:
- td.cleanup()
-
- __del__ = cleanup
-
-
-class PyTestController(TestController):
- """Run Python tests using IPython.testing.iptest"""
- #: str, Python command to execute in subprocess
- pycmd = None
-
- def __init__(self, section, options):
- """Create new test runner."""
- TestController.__init__(self)
- self.section = section
- # pycmd is put into cmd[2] in PyTestController.launch()
- self.cmd = [sys.executable, '-c', None, section]
- self.pycmd = "from IPython.testing.iptest import run_iptest; run_iptest()"
- self.options = options
-
- def setup(self):
- ipydir = TemporaryDirectory()
- self.dirs.append(ipydir)
- self.env['IPYTHONDIR'] = ipydir.name
- self.workingdir = workingdir = TemporaryDirectory()
- self.dirs.append(workingdir)
- self.env['IPTEST_WORKING_DIR'] = workingdir.name
- # This means we won't get odd effects from our own matplotlib config
- self.env['MPLCONFIGDIR'] = workingdir.name
- # For security reasons (http://bugs.python.org/issue16202), use
- # a temporary directory to which other users have no access.
- self.env['TMPDIR'] = workingdir.name
-
- # Add a non-accessible directory to PATH (see gh-7053)
- noaccess = os.path.join(self.workingdir.name, "_no_access_")
- self.noaccess = noaccess
- os.mkdir(noaccess, 0)
-
- PATH = os.environ.get('PATH', '')
- if PATH:
- PATH = noaccess + os.pathsep + PATH
- else:
- PATH = noaccess
- self.env['PATH'] = PATH
-
- # From options:
- if self.options.xunit:
- self.add_xunit()
- if self.options.coverage:
- self.add_coverage()
- self.env['IPTEST_SUBPROC_STREAMS'] = self.options.subproc_streams
- self.cmd.extend(self.options.extra_args)
-
- def cleanup(self):
- """
- Make the non-accessible directory created in setup() accessible
- again, otherwise deleting the workingdir will fail.
- """
- os.chmod(self.noaccess, stat.S_IRWXU)
- TestController.cleanup(self)
-
- @property
- def will_run(self):
- try:
- return test_sections[self.section].will_run
- except KeyError:
- return True
-
- def add_xunit(self):
- xunit_file = os.path.abspath(self.section + '.xunit.xml')
- self.cmd.extend(['--with-xunit', '--xunit-file', xunit_file])
-
- def add_coverage(self):
- try:
- sources = test_sections[self.section].includes
- except KeyError:
- sources = ['IPython']
-
- coverage_rc = ("[run]\n"
- "data_file = {data_file}\n"
- "source =\n"
- " {source}\n"
- ).format(data_file=os.path.abspath('.coverage.'+self.section),
- source="\n ".join(sources))
- config_file = os.path.join(self.workingdir.name, '.coveragerc')
- with open(config_file, 'w') as f:
- f.write(coverage_rc)
-
- self.env['COVERAGE_PROCESS_START'] = config_file
- self.pycmd = "import coverage; coverage.process_startup(); " + self.pycmd
-
- def launch(self, buffer_output=False):
- self.cmd[2] = self.pycmd
- super(PyTestController, self).launch(buffer_output=buffer_output)
-
-
-def prepare_controllers(options):
- """Returns two lists of TestController instances, those to run, and those
- not to run."""
- testgroups = options.testgroups
- if not testgroups:
- testgroups = py_test_group_names
-
- controllers = [PyTestController(name, options) for name in testgroups]
-
- to_run = [c for c in controllers if c.will_run]
- not_run = [c for c in controllers if not c.will_run]
- return to_run, not_run
-
-def do_run(controller, buffer_output=True):
- """Setup and run a test controller.
-
- If buffer_output is True, no output is displayed, to avoid it appearing
- interleaved. In this case, the caller is responsible for displaying test
- output on failure.
-
- Returns
- -------
- controller : TestController
- The same controller as passed in, as a convenience for using map() type
- APIs.
- exitcode : int
- The exit code of the test subprocess. Non-zero indicates failure.
- """
- try:
- try:
- controller.setup()
- if not buffer_output:
- controller.print_extra_info()
- controller.launch(buffer_output=buffer_output)
- except Exception:
- import traceback
- traceback.print_exc()
- return controller, 1 # signal failure
-
- exitcode = controller.wait()
- return controller, exitcode
-
- except KeyboardInterrupt:
- return controller, -signal.SIGINT
- finally:
- controller.cleanup()
-
-def report():
- """Return a string with a summary report of test-related variables."""
- inf = get_sys_info()
- out = []
- def _add(name, value):
- out.append((name, value))
-
- _add('IPython version', inf['ipython_version'])
- _add('IPython commit', "{} ({})".format(inf['commit_hash'], inf['commit_source']))
- _add('IPython package', compress_user(inf['ipython_path']))
- _add('Python version', inf['sys_version'].replace('\n',''))
- _add('sys.executable', compress_user(inf['sys_executable']))
- _add('Platform', inf['platform'])
-
- width = max(len(n) for (n,v) in out)
- out = ["{:<{width}}: {}\n".format(n, v, width=width) for (n,v) in out]
-
- avail = []
- not_avail = []
-
- for k, is_avail in have.items():
- if is_avail:
- avail.append(k)
- else:
- not_avail.append(k)
-
- if avail:
- out.append('\nTools and libraries available at test time:\n')
- avail.sort()
- out.append(' ' + ' '.join(avail)+'\n')
-
- if not_avail:
- out.append('\nTools and libraries NOT available at test time:\n')
- not_avail.sort()
- out.append(' ' + ' '.join(not_avail)+'\n')
-
- return ''.join(out)
-
-def run_iptestall(options):
- """Run the entire IPython test suite by calling nose and trial.
-
- This function constructs :class:`IPTester` instances for all IPython
- modules and package and then runs each of them. This causes the modules
- and packages of IPython to be tested each in their own subprocess using
- nose.
-
- Parameters
- ----------
-
- All parameters are passed as attributes of the options object.
-
- testgroups : list of str
- Run only these sections of the test suite. If empty, run all the available
- sections.
-
- fast : int or None
- Run the test suite in parallel, using n simultaneous processes. If None
- is passed, one process is used per CPU core. Default 1 (i.e. sequential)
-
- inc_slow : bool
- Include slow tests. By default, these tests aren't run.
-
- url : unicode
- Address:port to use when running the JS tests.
-
- xunit : bool
- Produce Xunit XML output. This is written to multiple foo.xunit.xml files.
-
- coverage : bool or str
- Measure code coverage from tests. True will store the raw coverage data,
- or pass 'html' or 'xml' to get reports.
-
- extra_args : list
- Extra arguments to pass to the test subprocesses, e.g. '-v'
- """
- to_run, not_run = prepare_controllers(options)
-
- def justify(ltext, rtext, width=70, fill='-'):
- ltext += ' '
- rtext = (' ' + rtext).rjust(width - len(ltext), fill)
- return ltext + rtext
-
- # Run all test runners, tracking execution time
- failed = []
- t_start = time.time()
-
- print()
- if options.fast == 1:
- # This actually means sequential, i.e. with 1 job
- for controller in to_run:
- print('Test group:', controller.section)
- sys.stdout.flush() # Show in correct order when output is piped
- controller, res = do_run(controller, buffer_output=False)
- if res:
- failed.append(controller)
- if res == -signal.SIGINT:
- print("Interrupted")
- break
- print()
-
- else:
- # Run tests concurrently
- try:
- pool = multiprocessing.pool.ThreadPool(options.fast)
- for (controller, res) in pool.imap_unordered(do_run, to_run):
- res_string = 'OK' if res == 0 else 'FAILED'
- print(justify('Test group: ' + controller.section, res_string))
- if res:
- controller.print_extra_info()
- print(bytes_to_str(controller.stdout))
- failed.append(controller)
- if res == -signal.SIGINT:
- print("Interrupted")
- break
- except KeyboardInterrupt:
- return
-
- for controller in not_run:
- print(justify('Test group: ' + controller.section, 'NOT RUN'))
-
- t_end = time.time()
- t_tests = t_end - t_start
- nrunners = len(to_run)
- nfail = len(failed)
- # summarize results
- print('_'*70)
- print('Test suite completed for system with the following information:')
- print(report())
- took = "Took %.3fs." % t_tests
- print('Status: ', end='')
- if not failed:
- print('OK (%d test groups).' % nrunners, took)
- else:
- # If anything went wrong, point out what command to rerun manually to
- # see the actual errors and individual summary
- failed_sections = [c.section for c in failed]
- print('ERROR - {} out of {} test groups failed ({}).'.format(nfail,
- nrunners, ', '.join(failed_sections)), took)
- print()
- print('You may wish to rerun these, with:')
- print(' iptest', *failed_sections)
- print()
-
- if options.coverage:
- from coverage import coverage, CoverageException
- cov = coverage(data_file='.coverage')
- cov.combine()
- cov.save()
-
- # Coverage HTML report
- if options.coverage == 'html':
- html_dir = 'ipy_htmlcov'
- shutil.rmtree(html_dir, ignore_errors=True)
- print("Writing HTML coverage report to %s/ ... " % html_dir, end="")
- sys.stdout.flush()
-
- # Custom HTML reporter to clean up module names.
- from coverage.html import HtmlReporter
- class CustomHtmlReporter(HtmlReporter):
- def find_code_units(self, morfs):
- super(CustomHtmlReporter, self).find_code_units(morfs)
- for cu in self.code_units:
- nameparts = cu.name.split(os.sep)
- if 'IPython' not in nameparts:
- continue
- ix = nameparts.index('IPython')
- cu.name = '.'.join(nameparts[ix:])
-
- # Reimplement the html_report method with our custom reporter
- cov.get_data()
- cov.config.from_args(omit='*{0}tests{0}*'.format(os.sep), html_dir=html_dir,
- html_title='IPython test coverage',
- )
- reporter = CustomHtmlReporter(cov, cov.config)
- reporter.report(None)
- print('done.')
-
- # Coverage XML report
- elif options.coverage == 'xml':
- try:
- cov.xml_report(outfile='ipy_coverage.xml')
- except CoverageException as e:
- print('Generating coverage report failed. Are you running javascript tests only?')
- import traceback
- traceback.print_exc()
-
- if failed:
- # Ensure that our exit code indicates failure
- sys.exit(1)
-
-argparser = argparse.ArgumentParser(description='Run IPython test suite')
-argparser.add_argument('testgroups', nargs='*',
- help='Run specified groups of tests. If omitted, run '
- 'all tests.')
-argparser.add_argument('--all', action='store_true',
- help='Include slow tests not run by default.')
-argparser.add_argument('--url', help="URL to use for the JS tests.")
-argparser.add_argument('-j', '--fast', nargs='?', const=None, default=1, type=int,
- help='Run test sections in parallel. This starts as many '
- 'processes as you have cores, or you can specify a number.')
-argparser.add_argument('--xunit', action='store_true',
- help='Produce Xunit XML results')
-argparser.add_argument('--coverage', nargs='?', const=True, default=False,
- help="Measure test coverage. Specify 'html' or "
- "'xml' to get reports.")
-argparser.add_argument('--subproc-streams', default='capture',
- help="What to do with stdout/stderr from subprocesses. "
- "'capture' (default), 'show' and 'discard' are the options.")
-
-def default_options():
- """Get an argparse Namespace object with the default arguments, to pass to
- :func:`run_iptestall`.
- """
- options = argparser.parse_args([])
- options.extra_args = []
- return options
-
-def main():
- # iptest doesn't work correctly if the working directory is the
- # root of the IPython source tree. Tell the user to avoid
- # frustration.
- if os.path.exists(os.path.join(os.getcwd(),
- 'IPython', 'testing', '__main__.py')):
- print("Don't run iptest from the IPython source directory",
- file=sys.stderr)
- sys.exit(1)
- # Arguments after -- should be passed through to nose. Argparse treats
- # everything after -- as regular positional arguments, so we separate them
- # first.
- try:
- ix = sys.argv.index('--')
- except ValueError:
- to_parse = sys.argv[1:]
- extra_args = []
- else:
- to_parse = sys.argv[1:ix]
- extra_args = sys.argv[ix+1:]
-
- options = argparser.parse_args(to_parse)
- options.extra_args = extra_args
-
- run_iptestall(options)
-
-
-if __name__ == '__main__':
- main()
+# -*- coding: utf-8 -*-
+"""IPython Test Process Controller
+
+This module runs one or more subprocesses which will actually run the IPython
+test suite.
+
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import print_function
+
+import argparse
+import json
+import multiprocessing.pool
+import os
+import stat
+import re
+import requests
+import shutil
+import signal
+import sys
+import subprocess
+import time
+
+from .iptest import (
+ have, test_group_names as py_test_group_names, test_sections, StreamCapturer,
+ test_for,
+)
+from IPython.utils.path import compress_user
+from IPython.utils.py3compat import bytes_to_str
+from IPython.utils.sysinfo import get_sys_info
+from IPython.utils.tempdir import TemporaryDirectory
+from IPython.utils.text import strip_ansi
+
+try:
+ # Python >= 3.3
+ from subprocess import TimeoutExpired
+ def popen_wait(p, timeout):
+ return p.wait(timeout)
+except ImportError:
+ class TimeoutExpired(Exception):
+ pass
+ def popen_wait(p, timeout):
+ """backport of Popen.wait from Python 3"""
+ for i in range(int(10 * timeout)):
+ if p.poll() is not None:
+ return
+ time.sleep(0.1)
+ if p.poll() is None:
+ raise TimeoutExpired
+
+NOTEBOOK_SHUTDOWN_TIMEOUT = 10
+
+class TestController(object):
+ """Run tests in a subprocess
+ """
+ #: str, IPython test suite to be executed.
+ section = None
+ #: list, command line arguments to be executed
+ cmd = None
+ #: dict, extra environment variables to set for the subprocess
+ env = None
+ #: list, TemporaryDirectory instances to clear up when the process finishes
+ dirs = None
+ #: subprocess.Popen instance
+ process = None
+ #: str, process stdout+stderr
+ stdout = None
+
+ def __init__(self):
+ self.cmd = []
+ self.env = {}
+ self.dirs = []
+
+ def setup(self):
+ """Create temporary directories etc.
+
+ This is only called when we know the test group will be run. Things
+ created here may be cleaned up by self.cleanup().
+ """
+ pass
+
+ def launch(self, buffer_output=False, capture_output=False):
+ # print('*** ENV:', self.env) # dbg
+ # print('*** CMD:', self.cmd) # dbg
+ env = os.environ.copy()
+ env.update(self.env)
+ if buffer_output:
+ capture_output = True
+ self.stdout_capturer = c = StreamCapturer(echo=not buffer_output)
+ c.start()
+ stdout = c.writefd if capture_output else None
+ stderr = subprocess.STDOUT if capture_output else None
+ self.process = subprocess.Popen(self.cmd, stdout=stdout,
+ stderr=stderr, env=env)
+
+ def wait(self):
+ self.process.wait()
+ self.stdout_capturer.halt()
+ self.stdout = self.stdout_capturer.get_buffer()
+ return self.process.returncode
+
+ def print_extra_info(self):
+ """Print extra information about this test run.
+
+ If we're running in parallel and showing the concise view, this is only
+ called if the test group fails. Otherwise, it's called before the test
+ group is started.
+
+ The base implementation does nothing, but it can be overridden by
+ subclasses.
+ """
+ return
+
+ def cleanup_process(self):
+ """Cleanup on exit by killing any leftover processes."""
+ subp = self.process
+ if subp is None or (subp.poll() is not None):
+ return # Process doesn't exist, or is already dead.
+
+ try:
+ print('Cleaning up stale PID: %d' % subp.pid)
+ subp.kill()
+ except: # (OSError, WindowsError) ?
+ # This is just a best effort, if we fail or the process was
+ # really gone, ignore it.
+ pass
+ else:
+ for i in range(10):
+ if subp.poll() is None:
+ time.sleep(0.1)
+ else:
+ break
+
+ if subp.poll() is None:
+ # The process did not die...
+ print('... failed. Manual cleanup may be required.')
+
+ def cleanup(self):
+ "Kill process if it's still alive, and clean up temporary directories"
+ self.cleanup_process()
+ for td in self.dirs:
+ td.cleanup()
+
+ __del__ = cleanup
+
+
+class PyTestController(TestController):
+ """Run Python tests using IPython.testing.iptest"""
+ #: str, Python command to execute in subprocess
+ pycmd = None
+
+ def __init__(self, section, options):
+ """Create new test runner."""
+ TestController.__init__(self)
+ self.section = section
+ # pycmd is put into cmd[2] in PyTestController.launch()
+ self.cmd = [sys.executable, '-c', None, section]
+ self.pycmd = "from IPython.testing.iptest import run_iptest; run_iptest()"
+ self.options = options
+
+ def setup(self):
+ ipydir = TemporaryDirectory()
+ self.dirs.append(ipydir)
+ self.env['IPYTHONDIR'] = ipydir.name
+ self.workingdir = workingdir = TemporaryDirectory()
+ self.dirs.append(workingdir)
+ self.env['IPTEST_WORKING_DIR'] = workingdir.name
+ # This means we won't get odd effects from our own matplotlib config
+ self.env['MPLCONFIGDIR'] = workingdir.name
+ # For security reasons (http://bugs.python.org/issue16202), use
+ # a temporary directory to which other users have no access.
+ self.env['TMPDIR'] = workingdir.name
+
+ # Add a non-accessible directory to PATH (see gh-7053)
+ noaccess = os.path.join(self.workingdir.name, "_no_access_")
+ self.noaccess = noaccess
+ os.mkdir(noaccess, 0)
+
+ PATH = os.environ.get('PATH', '')
+ if PATH:
+ PATH = noaccess + os.pathsep + PATH
+ else:
+ PATH = noaccess
+ self.env['PATH'] = PATH
+
+ # From options:
+ if self.options.xunit:
+ self.add_xunit()
+ if self.options.coverage:
+ self.add_coverage()
+ self.env['IPTEST_SUBPROC_STREAMS'] = self.options.subproc_streams
+ self.cmd.extend(self.options.extra_args)
+
+ def cleanup(self):
+ """
+ Make the non-accessible directory created in setup() accessible
+ again, otherwise deleting the workingdir will fail.
+ """
+ os.chmod(self.noaccess, stat.S_IRWXU)
+ TestController.cleanup(self)
+
+ @property
+ def will_run(self):
+ try:
+ return test_sections[self.section].will_run
+ except KeyError:
+ return True
+
+ def add_xunit(self):
+ xunit_file = os.path.abspath(self.section + '.xunit.xml')
+ self.cmd.extend(['--with-xunit', '--xunit-file', xunit_file])
+
+ def add_coverage(self):
+ try:
+ sources = test_sections[self.section].includes
+ except KeyError:
+ sources = ['IPython']
+
+ coverage_rc = ("[run]\n"
+ "data_file = {data_file}\n"
+ "source =\n"
+ " {source}\n"
+ ).format(data_file=os.path.abspath('.coverage.'+self.section),
+ source="\n ".join(sources))
+ config_file = os.path.join(self.workingdir.name, '.coveragerc')
+ with open(config_file, 'w') as f:
+ f.write(coverage_rc)
+
+ self.env['COVERAGE_PROCESS_START'] = config_file
+ self.pycmd = "import coverage; coverage.process_startup(); " + self.pycmd
+
+ def launch(self, buffer_output=False):
+ self.cmd[2] = self.pycmd
+ super(PyTestController, self).launch(buffer_output=buffer_output)
+
+
+def prepare_controllers(options):
+ """Returns two lists of TestController instances, those to run, and those
+ not to run."""
+ testgroups = options.testgroups
+ if not testgroups:
+ testgroups = py_test_group_names
+
+ controllers = [PyTestController(name, options) for name in testgroups]
+
+ to_run = [c for c in controllers if c.will_run]
+ not_run = [c for c in controllers if not c.will_run]
+ return to_run, not_run
+
+def do_run(controller, buffer_output=True):
+ """Setup and run a test controller.
+
+ If buffer_output is True, no output is displayed, to avoid it appearing
+ interleaved. In this case, the caller is responsible for displaying test
+ output on failure.
+
+ Returns
+ -------
+ controller : TestController
+ The same controller as passed in, as a convenience for using map() type
+ APIs.
+ exitcode : int
+ The exit code of the test subprocess. Non-zero indicates failure.
+ """
+ try:
+ try:
+ controller.setup()
+ if not buffer_output:
+ controller.print_extra_info()
+ controller.launch(buffer_output=buffer_output)
+ except Exception:
+ import traceback
+ traceback.print_exc()
+ return controller, 1 # signal failure
+
+ exitcode = controller.wait()
+ return controller, exitcode
+
+ except KeyboardInterrupt:
+ return controller, -signal.SIGINT
+ finally:
+ controller.cleanup()
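+
+# Illustrative map-style use of do_run() (comments only; the parallel branch
+# of run_iptestall() below does essentially this):
+#
+#   pool = multiprocessing.pool.ThreadPool(2)
+#   for controller, exitcode in pool.imap_unordered(do_run, to_run):
+#       print(controller.section, 'FAILED' if exitcode else 'OK')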
+
+def report():
+ """Return a string with a summary report of test-related variables."""
+ inf = get_sys_info()
+ out = []
+ def _add(name, value):
+ out.append((name, value))
+
+ _add('IPython version', inf['ipython_version'])
+ _add('IPython commit', "{} ({})".format(inf['commit_hash'], inf['commit_source']))
+ _add('IPython package', compress_user(inf['ipython_path']))
+ _add('Python version', inf['sys_version'].replace('\n',''))
+ _add('sys.executable', compress_user(inf['sys_executable']))
+ _add('Platform', inf['platform'])
+
+ width = max(len(n) for (n,v) in out)
+ out = ["{:<{width}}: {}\n".format(n, v, width=width) for (n,v) in out]
+
+ avail = []
+ not_avail = []
+
+ for k, is_avail in have.items():
+ if is_avail:
+ avail.append(k)
+ else:
+ not_avail.append(k)
+
+ if avail:
+ out.append('\nTools and libraries available at test time:\n')
+ avail.sort()
+ out.append(' ' + ' '.join(avail)+'\n')
+
+ if not_avail:
+ out.append('\nTools and libraries NOT available at test time:\n')
+ not_avail.sort()
+ out.append(' ' + ' '.join(not_avail)+'\n')
+
+ return ''.join(out)
+
+def run_iptestall(options):
+ """Run the entire IPython test suite by calling nose and trial.
+
+ This function constructs :class:`IPTester` instances for all IPython
+ modules and package and then runs each of them. This causes the modules
+ and packages of IPython to be tested each in their own subprocess using
+ nose.
+
+ Parameters
+ ----------
+
+ All parameters are passed as attributes of the options object.
+
+ testgroups : list of str
+ Run only these sections of the test suite. If empty, run all the available
+ sections.
+
+ fast : int or None
+ Run the test suite in parallel, using n simultaneous processes. If None
+ is passed, one process is used per CPU core. Default 1 (i.e. sequential)
+
+ inc_slow : bool
+ Include slow tests. By default, these tests aren't run.
+
+ url : unicode
+ Address:port to use when running the JS tests.
+
+ xunit : bool
+ Produce Xunit XML output. This is written to multiple foo.xunit.xml files.
+
+ coverage : bool or str
+ Measure code coverage from tests. True will store the raw coverage data,
+ or pass 'html' or 'xml' to get reports.
+
+ extra_args : list
+ Extra arguments to pass to the test subprocesses, e.g. '-v'
+ """
+ to_run, not_run = prepare_controllers(options)
+
+ def justify(ltext, rtext, width=70, fill='-'):
+ ltext += ' '
+ rtext = (' ' + rtext).rjust(width - len(ltext), fill)
+ return ltext + rtext
+
+ # Run all test runners, tracking execution time
+ failed = []
+ t_start = time.time()
+
+ print()
+ if options.fast == 1:
+ # This actually means sequential, i.e. with 1 job
+ for controller in to_run:
+ print('Test group:', controller.section)
+ sys.stdout.flush() # Show in correct order when output is piped
+ controller, res = do_run(controller, buffer_output=False)
+ if res:
+ failed.append(controller)
+ if res == -signal.SIGINT:
+ print("Interrupted")
+ break
+ print()
+
+ else:
+ # Run tests concurrently
+ try:
+ pool = multiprocessing.pool.ThreadPool(options.fast)
+ for (controller, res) in pool.imap_unordered(do_run, to_run):
+ res_string = 'OK' if res == 0 else 'FAILED'
+ print(justify('Test group: ' + controller.section, res_string))
+ if res:
+ controller.print_extra_info()
+ print(bytes_to_str(controller.stdout))
+ failed.append(controller)
+ if res == -signal.SIGINT:
+ print("Interrupted")
+ break
+ except KeyboardInterrupt:
+ return
+
+ for controller in not_run:
+ print(justify('Test group: ' + controller.section, 'NOT RUN'))
+
+ t_end = time.time()
+ t_tests = t_end - t_start
+ nrunners = len(to_run)
+ nfail = len(failed)
+ # summarize results
+ print('_'*70)
+ print('Test suite completed for system with the following information:')
+ print(report())
+ took = "Took %.3fs." % t_tests
+ print('Status: ', end='')
+ if not failed:
+ print('OK (%d test groups).' % nrunners, took)
+ else:
+ # If anything went wrong, point out what command to rerun manually to
+ # see the actual errors and individual summary
+ failed_sections = [c.section for c in failed]
+ print('ERROR - {} out of {} test groups failed ({}).'.format(nfail,
+ nrunners, ', '.join(failed_sections)), took)
+ print()
+ print('You may wish to rerun these, with:')
+ print(' iptest', *failed_sections)
+ print()
+
+ if options.coverage:
+ from coverage import coverage, CoverageException
+ cov = coverage(data_file='.coverage')
+ cov.combine()
+ cov.save()
+
+ # Coverage HTML report
+ if options.coverage == 'html':
+ html_dir = 'ipy_htmlcov'
+ shutil.rmtree(html_dir, ignore_errors=True)
+ print("Writing HTML coverage report to %s/ ... " % html_dir, end="")
+ sys.stdout.flush()
+
+ # Custom HTML reporter to clean up module names.
+ from coverage.html import HtmlReporter
+ class CustomHtmlReporter(HtmlReporter):
+ def find_code_units(self, morfs):
+ super(CustomHtmlReporter, self).find_code_units(morfs)
+ for cu in self.code_units:
+ nameparts = cu.name.split(os.sep)
+ if 'IPython' not in nameparts:
+ continue
+ ix = nameparts.index('IPython')
+ cu.name = '.'.join(nameparts[ix:])
+
+ # Reimplement the html_report method with our custom reporter
+ cov.get_data()
+ cov.config.from_args(omit='*{0}tests{0}*'.format(os.sep), html_dir=html_dir,
+ html_title='IPython test coverage',
+ )
+ reporter = CustomHtmlReporter(cov, cov.config)
+ reporter.report(None)
+ print('done.')
+
+ # Coverage XML report
+ elif options.coverage == 'xml':
+ try:
+ cov.xml_report(outfile='ipy_coverage.xml')
+ except CoverageException as e:
+ print('Generating coverage report failed. Are you running javascript tests only?')
+ import traceback
+ traceback.print_exc()
+
+ if failed:
+ # Ensure that our exit code indicates failure
+ sys.exit(1)
+
+argparser = argparse.ArgumentParser(description='Run IPython test suite')
+argparser.add_argument('testgroups', nargs='*',
+ help='Run specified groups of tests. If omitted, run '
+ 'all tests.')
+argparser.add_argument('--all', action='store_true',
+ help='Include slow tests not run by default.')
+argparser.add_argument('--url', help="URL to use for the JS tests.")
+argparser.add_argument('-j', '--fast', nargs='?', const=None, default=1, type=int,
+ help='Run test sections in parallel. This starts as many '
+ 'processes as you have cores, or you can specify a number.')
+argparser.add_argument('--xunit', action='store_true',
+ help='Produce Xunit XML results')
+argparser.add_argument('--coverage', nargs='?', const=True, default=False,
+ help="Measure test coverage. Specify 'html' or "
+ "'xml' to get reports.")
+argparser.add_argument('--subproc-streams', default='capture',
+ help="What to do with stdout/stderr from subprocesses. "
+ "'capture' (default), 'show' and 'discard' are the options.")
+
+def default_options():
+ """Get an argparse Namespace object with the default arguments, to pass to
+ :func:`run_iptestall`.
+ """
+ options = argparser.parse_args([])
+ options.extra_args = []
+ return options
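+
+# Illustrative programmatic invocation (comments only, not part of the
+# original module); 'core' is a placeholder test group name:
+#
+#   options = default_options()
+#   options.testgroups = ['core']
+#   options.fast = 2              # run two groups concurrently
+#   run_iptestall(options)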
+
+def main():
+ # iptest doesn't work correctly if the working directory is the
+ # root of the IPython source tree. Tell the user and exit, to avoid
+ # frustration.
+ if os.path.exists(os.path.join(os.getcwd(),
+ 'IPython', 'testing', '__main__.py')):
+ print("Don't run iptest from the IPython source directory",
+ file=sys.stderr)
+ sys.exit(1)
+ # Arguments after -- should be passed through to nose. Argparse treats
+ # everything after -- as regular positional arguments, so we separate them
+ # first.
+ try:
+ ix = sys.argv.index('--')
+ except ValueError:
+ to_parse = sys.argv[1:]
+ extra_args = []
+ else:
+ to_parse = sys.argv[1:ix]
+ extra_args = sys.argv[ix+1:]
+
+ options = argparser.parse_args(to_parse)
+ options.extra_args = extra_args
+
+ run_iptestall(options)
+
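+# Illustrative command line (comments only, group name is a placeholder):
+# arguments after '--' are not parsed here but passed straight to the nose
+# subprocess, e.g.
+#
+#   iptest core -- -vv
+#
+# runs the 'core' group with options.extra_args == ['-vv'].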
+
+if __name__ == '__main__':
+ main()
diff --git a/contrib/python/ipython/py2/IPython/testing/ipunittest.py b/contrib/python/ipython/py2/IPython/testing/ipunittest.py
index 04ea8320fb..da059816e2 100644
--- a/contrib/python/ipython/py2/IPython/testing/ipunittest.py
+++ b/contrib/python/ipython/py2/IPython/testing/ipunittest.py
@@ -1,178 +1,178 @@
-"""Experimental code for cleaner support of IPython syntax with unittest.
-
-In IPython up until 0.10, we've used very hacked up nose machinery for running
-tests with IPython special syntax, and this has proved to be extremely slow.
-This module provides decorators to try a different approach, stemming from a
-conversation Brian and I (FP) had about this problem Sept/09.
-
-The goal is to be able to easily write simple functions that can be seen by
-unittest as tests, and ultimately for these to support doctests with full
-IPython syntax. Nose already offers this based on naming conventions and our
-hackish plugins, but we are seeking to move away from nose dependencies if
-possible.
-
-This module follows a different approach, based on decorators.
-
-- A decorator called @ipdoctest can mark any function as having a docstring
- that should be viewed as a doctest, but after syntax conversion.
-
-Authors
--------
-
-- Fernando Perez <Fernando.Perez@berkeley.edu>
-"""
-
-from __future__ import absolute_import
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2009-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-# Stdlib
-import re
-import unittest
-from doctest import DocTestFinder, DocTestRunner, TestResults
-
-#-----------------------------------------------------------------------------
-# Classes and functions
-#-----------------------------------------------------------------------------
-
-def count_failures(runner):
- """Count number of failures in a doctest runner.
-
- Code modeled after the summarize() method in doctest.
- """
- return [TestResults(f, t) for f, t in runner._name2ft.values() if f > 0 ]
-
-
-class IPython2PythonConverter(object):
- """Convert IPython 'syntax' to valid Python.
-
- Eventually this code may grow to be the full IPython syntax conversion
- implementation, but for now it only does prompt conversion."""
-
- def __init__(self):
- self.rps1 = re.compile(r'In\ \[\d+\]: ')
- self.rps2 = re.compile(r'\ \ \ \.\.\.+: ')
- self.rout = re.compile(r'Out\[\d+\]: \s*?\n?')
- self.pyps1 = '>>> '
- self.pyps2 = '... '
- self.rpyps1 = re.compile(r'(\s*%s)(.*)$' % self.pyps1)
- self.rpyps2 = re.compile(r'(\s*%s)(.*)$' % self.pyps2)
-
- def __call__(self, ds):
- """Convert IPython prompts to python ones in a string."""
- from . import globalipapp
-
- pyps1 = '>>> '
- pyps2 = '... '
- pyout = ''
-
- dnew = ds
- dnew = self.rps1.sub(pyps1, dnew)
- dnew = self.rps2.sub(pyps2, dnew)
- dnew = self.rout.sub(pyout, dnew)
- ip = globalipapp.get_ipython()
-
- # Convert input IPython source into valid Python.
- out = []
- newline = out.append
- for line in dnew.splitlines():
-
- mps1 = self.rpyps1.match(line)
- if mps1 is not None:
- prompt, text = mps1.groups()
- newline(prompt+ip.prefilter(text, False))
- continue
-
- mps2 = self.rpyps2.match(line)
- if mps2 is not None:
- prompt, text = mps2.groups()
- newline(prompt+ip.prefilter(text, True))
- continue
-
- newline(line)
- newline('') # ensure a closing newline, needed by doctest
- #print "PYSRC:", '\n'.join(out) # dbg
- return '\n'.join(out)
-
- #return dnew
-
-
-class Doc2UnitTester(object):
- """Class whose instances act as a decorator for docstring testing.
-
- In practice we're only likely to need one instance ever, made below (though
- no attempt is made at turning it into a singleton, there is no need for
- that).
- """
- def __init__(self, verbose=False):
- """New decorator.
-
- Parameters
- ----------
-
- verbose : boolean, optional (False)
- Passed to the doctest finder and runner to control verbosity.
- """
- self.verbose = verbose
- # We can reuse the same finder for all instances
- self.finder = DocTestFinder(verbose=verbose, recurse=False)
-
- def __call__(self, func):
- """Use as a decorator: doctest a function's docstring as a unittest.
-
- This version runs normal doctests, but the idea is to make it later run
- ipython syntax instead."""
-
- # Capture the enclosing instance with a different name, so the new
- # class below can see it without confusion regarding its own 'self'
- # that will point to the test instance at runtime
- d2u = self
-
- # Rewrite the function's docstring to have python syntax
- if func.__doc__ is not None:
- func.__doc__ = ip2py(func.__doc__)
-
- # Now, create a tester object that is a real unittest instance, so
- # normal unittest machinery (or Nose, or Trial) can find it.
- class Tester(unittest.TestCase):
- def test(self):
- # Make a new runner per function to be tested
- runner = DocTestRunner(verbose=d2u.verbose)
+"""Experimental code for cleaner support of IPython syntax with unittest.
+
+In IPython up until 0.10, we've used very hacked up nose machinery for running
+tests with IPython special syntax, and this has proved to be extremely slow.
+This module provides decorators to try a different approach, stemming from a
+conversation Brian and I (FP) had about this problem Sept/09.
+
+The goal is to be able to easily write simple functions that can be seen by
+unittest as tests, and ultimately for these to support doctests with full
+IPython syntax. Nose already offers this based on naming conventions and our
+hackish plugins, but we are seeking to move away from nose dependencies if
+possible.
+
+This module follows a different approach, based on decorators.
+
+- A decorator called @ipdoctest can mark any function as having a docstring
+ that should be viewed as a doctest, but after syntax conversion.
+
+Authors
+-------
+
+- Fernando Perez <Fernando.Perez@berkeley.edu>
+"""
+
+from __future__ import absolute_import
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2009-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+# Stdlib
+import re
+import unittest
+from doctest import DocTestFinder, DocTestRunner, TestResults
+
+#-----------------------------------------------------------------------------
+# Classes and functions
+#-----------------------------------------------------------------------------
+
+def count_failures(runner):
+ """Count number of failures in a doctest runner.
+
+ Code modeled after the summarize() method in doctest.
+ """
+ return [TestResults(f, t) for f, t in runner._name2ft.values() if f > 0 ]
+
+
+class IPython2PythonConverter(object):
+ """Convert IPython 'syntax' to valid Python.
+
+ Eventually this code may grow to be the full IPython syntax conversion
+ implementation, but for now it only does prompt conversion."""
+
+ def __init__(self):
+ self.rps1 = re.compile(r'In\ \[\d+\]: ')
+ self.rps2 = re.compile(r'\ \ \ \.\.\.+: ')
+ self.rout = re.compile(r'Out\[\d+\]: \s*?\n?')
+ self.pyps1 = '>>> '
+ self.pyps2 = '... '
+ self.rpyps1 = re.compile(r'(\s*%s)(.*)$' % self.pyps1)
+ self.rpyps2 = re.compile(r'(\s*%s)(.*)$' % self.pyps2)
+
+ def __call__(self, ds):
+ """Convert IPython prompts to python ones in a string."""
+ from . import globalipapp
+
+ pyps1 = '>>> '
+ pyps2 = '... '
+ pyout = ''
+
+ dnew = ds
+ dnew = self.rps1.sub(pyps1, dnew)
+ dnew = self.rps2.sub(pyps2, dnew)
+ dnew = self.rout.sub(pyout, dnew)
+ ip = globalipapp.get_ipython()
+
+ # Convert input IPython source into valid Python.
+ out = []
+ newline = out.append
+ for line in dnew.splitlines():
+
+ mps1 = self.rpyps1.match(line)
+ if mps1 is not None:
+ prompt, text = mps1.groups()
+ newline(prompt+ip.prefilter(text, False))
+ continue
+
+ mps2 = self.rpyps2.match(line)
+ if mps2 is not None:
+ prompt, text = mps2.groups()
+ newline(prompt+ip.prefilter(text, True))
+ continue
+
+ newline(line)
+ newline('') # ensure a closing newline, needed by doctest
+ #print "PYSRC:", '\n'.join(out) # dbg
+ return '\n'.join(out)
+
+ #return dnew
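+
+ # Illustrative conversion (comments only, not part of the original class):
+ # for trivial input the converter roughly rewrites
+ #
+ #   "In [1]: 2+3\nOut[1]: 5\n"   into   ">>> 2+3\n5\n"
+ #
+ # i.e. IPython prompts become doctest prompts and Out[] markers are dropped.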
+
+
+class Doc2UnitTester(object):
+ """Class whose instances act as a decorator for docstring testing.
+
+ In practice we're only likely to need one instance ever, made below (though
+ no attempt is made at turning it into a singleton, there is no need for
+ that).
+ """
+ def __init__(self, verbose=False):
+ """New decorator.
+
+ Parameters
+ ----------
+
+ verbose : boolean, optional (False)
+ Passed to the doctest finder and runner to control verbosity.
+ """
+ self.verbose = verbose
+ # We can reuse the same finder for all instances
+ self.finder = DocTestFinder(verbose=verbose, recurse=False)
+
+ def __call__(self, func):
+ """Use as a decorator: doctest a function's docstring as a unittest.
+
+ This version runs normal doctests, but the idea is to make it later run
+ ipython syntax instead."""
+
+ # Capture the enclosing instance with a different name, so the new
+ # class below can see it without confusion regarding its own 'self'
+ # that will point to the test instance at runtime
+ d2u = self
+
+ # Rewrite the function's docstring to have python syntax
+ if func.__doc__ is not None:
+ func.__doc__ = ip2py(func.__doc__)
+
+ # Now, create a tester object that is a real unittest instance, so
+ # normal unittest machinery (or Nose, or Trial) can find it.
+ class Tester(unittest.TestCase):
+ def test(self):
+ # Make a new runner per function to be tested
+ runner = DocTestRunner(verbose=d2u.verbose)
for the_test in d2u.finder.find(func, func.__name__):
runner.run(the_test)
- failed = count_failures(runner)
- if failed:
- # Since we only looked at a single function's docstring,
- # failed should contain at most one item. More than that
- # is a case we can't handle and should error out on
- if len(failed) > 1:
- err = "Invalid number of test results:" % failed
- raise ValueError(err)
- # Report a normal failure.
- self.fail('failed doctests: %s' % str(failed[0]))
-
- # Rename it so test reports have the original signature.
- Tester.__name__ = func.__name__
- return Tester
-
-
-def ipdocstring(func):
- """Change the function docstring via ip2py.
- """
- if func.__doc__ is not None:
- func.__doc__ = ip2py(func.__doc__)
- return func
-
-
-# Make an instance of the classes for public use
-ipdoctest = Doc2UnitTester()
-ip2py = IPython2PythonConverter()
+ failed = count_failures(runner)
+ if failed:
+ # Since we only looked at a single function's docstring,
+ # failed should contain at most one item. More than that
+ # is a case we can't handle and should error out on
+ if len(failed) > 1:
+ err = "Invalid number of test results:" % failed
+ raise ValueError(err)
+ # Report a normal failure.
+ self.fail('failed doctests: %s' % str(failed[0]))
+
+ # Rename it so test reports have the original signature.
+ Tester.__name__ = func.__name__
+ return Tester
+
+
+def ipdocstring(func):
+ """Change the function docstring via ip2py.
+ """
+ if func.__doc__ is not None:
+ func.__doc__ = ip2py(func.__doc__)
+ return func
+
+
+# Make an instance of the classes for public use
+ipdoctest = Doc2UnitTester()
+ip2py = IPython2PythonConverter()
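+
+# Illustrative usage sketch (comments only, not part of the original module):
+# decorating a function turns its IPython-style docstring into a
+# unittest-discoverable doctest case.
+#
+#   @ipdoctest
+#   def test_addition():
+#       """
+#       In [1]: 2+2
+#       Out[1]: 4
+#       """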
diff --git a/contrib/python/ipython/py2/IPython/testing/plugin/Makefile b/contrib/python/ipython/py2/IPython/testing/plugin/Makefile
index d57d198f15..6f999a38fd 100644
--- a/contrib/python/ipython/py2/IPython/testing/plugin/Makefile
+++ b/contrib/python/ipython/py2/IPython/testing/plugin/Makefile
@@ -1,74 +1,74 @@
-# Set this prefix to where you want to install the plugin
-PREFIX=/usr/local
-
-NOSE0=nosetests -vs --with-doctest --doctest-tests --detailed-errors
-NOSE=nosetests -vvs --with-ipdoctest --doctest-tests --doctest-extension=txt \
---detailed-errors
-
-SRC=ipdoctest.py setup.py ../decorators.py
-
-# Default target for clean 'make'
-default: interactiveshell
-
-# The actual plugin installation
-plugin: IPython_doctest_plugin.egg-info
-
-# Simple targets that test one thing
-simple: plugin simple.py
- $(NOSE) simple.py
-
-dtest: plugin dtexample.py
- $(NOSE) dtexample.py
-
-rtest: plugin test_refs.py
- $(NOSE) test_refs.py
-
-test: plugin dtexample.py
- $(NOSE) dtexample.py test*.py test*.txt
-
-deb: plugin dtexample.py
- $(NOSE) test_combo.txt
-
-# IPython tests
-deco:
- $(NOSE0) IPython.testing.decorators
-
-magic: plugin
- $(NOSE) IPython.core.magic
-
-excolors: plugin
- $(NOSE) IPython.core.excolors
-
-interactiveshell: plugin
- $(NOSE) IPython.core.interactiveshell
-
-strd: plugin
- $(NOSE) IPython.core.strdispatch
-
-engine: plugin
- $(NOSE) IPython.kernel
-
-tf: plugin
- $(NOSE) IPython.config.traitlets
-
-# All of ipython itself
-ipython: plugin
- $(NOSE) IPython
-
-
-# Combined targets
-sr: rtest strd
-
-base: dtest rtest test strd deco
-
-quick: base interactiveshell ipipe
-
-all: base ipython
-
-# Main plugin and cleanup
-IPython_doctest_plugin.egg-info: $(SRC)
- python setup.py install --prefix=$(PREFIX)
- touch $@
-
-clean:
- rm -rf IPython_doctest_plugin.egg-info *~ *pyc build/ dist/
+# Set this prefix to where you want to install the plugin
+PREFIX=/usr/local
+
+NOSE0=nosetests -vs --with-doctest --doctest-tests --detailed-errors
+NOSE=nosetests -vvs --with-ipdoctest --doctest-tests --doctest-extension=txt \
+--detailed-errors
+
+SRC=ipdoctest.py setup.py ../decorators.py
+
+# Default target for clean 'make'
+default: interactiveshell
+
+# The actual plugin installation
+plugin: IPython_doctest_plugin.egg-info
+
+# Simple targets that test one thing
+simple: plugin simple.py
+ $(NOSE) simple.py
+
+dtest: plugin dtexample.py
+ $(NOSE) dtexample.py
+
+rtest: plugin test_refs.py
+ $(NOSE) test_refs.py
+
+test: plugin dtexample.py
+ $(NOSE) dtexample.py test*.py test*.txt
+
+deb: plugin dtexample.py
+ $(NOSE) test_combo.txt
+
+# IPython tests
+deco:
+ $(NOSE0) IPython.testing.decorators
+
+magic: plugin
+ $(NOSE) IPython.core.magic
+
+excolors: plugin
+ $(NOSE) IPython.core.excolors
+
+interactiveshell: plugin
+ $(NOSE) IPython.core.interactiveshell
+
+strd: plugin
+ $(NOSE) IPython.core.strdispatch
+
+engine: plugin
+ $(NOSE) IPython.kernel
+
+tf: plugin
+ $(NOSE) IPython.config.traitlets
+
+# All of ipython itself
+ipython: plugin
+ $(NOSE) IPython
+
+
+# Combined targets
+sr: rtest strd
+
+base: dtest rtest test strd deco
+
+quick: base interactiveshell ipipe
+
+all: base ipython
+
+# Main plugin and cleanup
+IPython_doctest_plugin.egg-info: $(SRC)
+ python setup.py install --prefix=$(PREFIX)
+ touch $@
+
+clean:
+ rm -rf IPython_doctest_plugin.egg-info *~ *pyc build/ dist/
diff --git a/contrib/python/ipython/py2/IPython/testing/plugin/README.txt b/contrib/python/ipython/py2/IPython/testing/plugin/README.txt
index e08380d9de..6b34f9e5e1 100644
--- a/contrib/python/ipython/py2/IPython/testing/plugin/README.txt
+++ b/contrib/python/ipython/py2/IPython/testing/plugin/README.txt
@@ -1,39 +1,39 @@
-=======================================================
- Nose plugin with IPython and extension module support
-=======================================================
-
-This directory provides the key functionality for test support that IPython
-needs as a nose plugin, which can be installed for use in projects other than
-IPython.
-
-The presence of a Makefile here is mostly for development and debugging
-purposes as it only provides a few shorthand commands. You can manually
-install the plugin by using standard Python procedures (``setup.py install``
-with appropriate arguments).
-
-To install the plugin using the Makefile, edit its first line to reflect where
-you'd like the installation. If you want it system-wide, you may want to edit
-the install line in the plugin target to use sudo and no prefix::
-
- sudo python setup.py install
-
-instead of the code using `--prefix` that's in there.
-
-Once you've set the prefix, simply build/install the plugin with::
-
- make
-
-and run the tests with::
-
- make test
-
-You should see output similar to::
-
- maqroll[plugin]> make test
- nosetests -s --with-ipdoctest --doctest-tests dtexample.py
- ..
- ----------------------------------------------------------------------
- Ran 2 tests in 0.016s
-
- OK
-
+=======================================================
+ Nose plugin with IPython and extension module support
+=======================================================
+
+This directory provides the key functionality for test support that IPython
+needs as a nose plugin, which can be installed for use in projects other than
+IPython.
+
+The presence of a Makefile here is mostly for development and debugging
+purposes as it only provides a few shorthand commands. You can manually
+install the plugin by using standard Python procedures (``setup.py install``
+with appropriate arguments).
+
+To install the plugin using the Makefile, edit its first line to reflect where
+you'd like the installation. If you want it system-wide, you may want to edit
+the install line in the plugin target to use sudo and no prefix::
+
+ sudo python setup.py install
+
+instead of the code using `--prefix` that's in there.
+
+Once you've set the prefix, simply build/install the plugin with::
+
+ make
+
+and run the tests with::
+
+ make test
+
+You should see output similar to::
+
+ maqroll[plugin]> make test
+ nosetests -s --with-ipdoctest --doctest-tests dtexample.py
+ ..
+ ----------------------------------------------------------------------
+ Ran 2 tests in 0.016s
+
+ OK
+
diff --git a/contrib/python/ipython/py2/IPython/testing/plugin/dtexample.py b/contrib/python/ipython/py2/IPython/testing/plugin/dtexample.py
index 081bf35571..5e02629bf7 100644
--- a/contrib/python/ipython/py2/IPython/testing/plugin/dtexample.py
+++ b/contrib/python/ipython/py2/IPython/testing/plugin/dtexample.py
@@ -1,158 +1,158 @@
-"""Simple example using doctests.
-
-This file just contains doctests both using plain python and IPython prompts.
-All tests should be loaded by nose.
-"""
-from __future__ import print_function
-
-def pyfunc():
- """Some pure python tests...
-
- >>> pyfunc()
- 'pyfunc'
-
- >>> import os
-
- >>> 2+3
- 5
-
- >>> for i in range(3):
- ... print(i, end=' ')
- ... print(i+1, end=' ')
- ...
- 0 1 1 2 2 3
- """
- return 'pyfunc'
-
-def ipfunc():
- """Some ipython tests...
-
- In [1]: import os
-
- In [3]: 2+3
- Out[3]: 5
-
- In [26]: for i in range(3):
- ....: print(i, end=' ')
- ....: print(i+1, end=' ')
- ....:
- 0 1 1 2 2 3
-
-
- Examples that access the operating system work:
-
- In [1]: !echo hello
- hello
-
- In [2]: !echo hello > /tmp/foo_iptest
-
- In [3]: !cat /tmp/foo_iptest
- hello
-
- In [4]: rm -f /tmp/foo_iptest
-
- It's OK to use '_' for the last result, but do NOT try to use IPython's
- numbered history of _NN outputs, since those won't exist under the
- doctest environment:
-
- In [7]: 'hi'
- Out[7]: 'hi'
-
- In [8]: print(repr(_))
- 'hi'
-
- In [7]: 3+4
- Out[7]: 7
-
- In [8]: _+3
- Out[8]: 10
-
- In [9]: ipfunc()
- Out[9]: 'ipfunc'
- """
- return 'ipfunc'
-
-
-def ranfunc():
- """A function with some random output.
-
- Normal examples are verified as usual:
- >>> 1+3
- 4
-
- But if you put '# random' in the output, it is ignored:
- >>> 1+3
- junk goes here... # random
-
- >>> 1+2
- again, anything goes #random
- if multiline, the random mark is only needed once.
-
- >>> 1+2
- You can also put the random marker at the end:
- # random
-
- >>> 1+2
- # random
- .. or at the beginning.
-
- More correct input is properly verified:
- >>> ranfunc()
- 'ranfunc'
- """
- return 'ranfunc'
-
-
-def random_all():
- """A function where we ignore the output of ALL examples.
-
- Examples:
-
- # all-random
-
- This mark tells the testing machinery that all subsequent examples should
- be treated as random (ignoring their output). They are still executed,
- so if they raise an error, it will be detected as such, but their
- output is completely ignored.
-
- >>> 1+3
- junk goes here...
-
- >>> 1+3
- klasdfj;
-
- >>> 1+2
- again, anything goes
- blah...
- """
- pass
-
-def iprand():
- """Some ipython tests with random output.
-
- In [7]: 3+4
- Out[7]: 7
-
- In [8]: print('hello')
- world # random
-
- In [9]: iprand()
- Out[9]: 'iprand'
- """
- return 'iprand'
-
-def iprand_all():
- """Some ipython tests with fully random output.
-
- # all-random
-
- In [7]: 1
- Out[7]: 99
-
- In [8]: print('hello')
- world
-
- In [9]: iprand_all()
- Out[9]: 'junk'
- """
- return 'iprand_all'
+"""Simple example using doctests.
+
+This file just contains doctests both using plain python and IPython prompts.
+All tests should be loaded by nose.
+"""
+from __future__ import print_function
+
+def pyfunc():
+ """Some pure python tests...
+
+ >>> pyfunc()
+ 'pyfunc'
+
+ >>> import os
+
+ >>> 2+3
+ 5
+
+ >>> for i in range(3):
+ ... print(i, end=' ')
+ ... print(i+1, end=' ')
+ ...
+ 0 1 1 2 2 3
+ """
+ return 'pyfunc'
+
+def ipfunc():
+ """Some ipython tests...
+
+ In [1]: import os
+
+ In [3]: 2+3
+ Out[3]: 5
+
+ In [26]: for i in range(3):
+ ....: print(i, end=' ')
+ ....: print(i+1, end=' ')
+ ....:
+ 0 1 1 2 2 3
+
+
+ Examples that access the operating system work:
+
+ In [1]: !echo hello
+ hello
+
+ In [2]: !echo hello > /tmp/foo_iptest
+
+ In [3]: !cat /tmp/foo_iptest
+ hello
+
+ In [4]: rm -f /tmp/foo_iptest
+
+ It's OK to use '_' for the last result, but do NOT try to use IPython's
+ numbered history of _NN outputs, since those won't exist under the
+ doctest environment:
+
+ In [7]: 'hi'
+ Out[7]: 'hi'
+
+ In [8]: print(repr(_))
+ 'hi'
+
+ In [7]: 3+4
+ Out[7]: 7
+
+ In [8]: _+3
+ Out[8]: 10
+
+ In [9]: ipfunc()
+ Out[9]: 'ipfunc'
+ """
+ return 'ipfunc'
+
+
+def ranfunc():
+ """A function with some random output.
+
+ Normal examples are verified as usual:
+ >>> 1+3
+ 4
+
+ But if you put '# random' in the output, it is ignored:
+ >>> 1+3
+ junk goes here... # random
+
+ >>> 1+2
+ again, anything goes #random
+ if multiline, the random mark is only needed once.
+
+ >>> 1+2
+ You can also put the random marker at the end:
+ # random
+
+ >>> 1+2
+ # random
+ .. or at the beginning.
+
+ More correct input is properly verified:
+ >>> ranfunc()
+ 'ranfunc'
+ """
+ return 'ranfunc'
+
+
+def random_all():
+ """A function where we ignore the output of ALL examples.
+
+ Examples:
+
+ # all-random
+
+ This mark tells the testing machinery that all subsequent examples should
+ be treated as random (ignoring their output). They are still executed,
+ so if they raise an error, it will be detected as such, but their
+ output is completely ignored.
+
+ >>> 1+3
+ junk goes here...
+
+ >>> 1+3
+ klasdfj;
+
+ >>> 1+2
+ again, anything goes
+ blah...
+ """
+ pass
+
+def iprand():
+ """Some ipython tests with random output.
+
+ In [7]: 3+4
+ Out[7]: 7
+
+ In [8]: print('hello')
+ world # random
+
+ In [9]: iprand()
+ Out[9]: 'iprand'
+ """
+ return 'iprand'
+
+def iprand_all():
+ """Some ipython tests with fully random output.
+
+ # all-random
+
+ In [7]: 1
+ Out[7]: 99
+
+ In [8]: print('hello')
+ world
+
+ In [9]: iprand_all()
+ Out[9]: 'junk'
+ """
+ return 'iprand_all'
diff --git a/contrib/python/ipython/py2/IPython/testing/plugin/ipdoctest.py b/contrib/python/ipython/py2/IPython/testing/plugin/ipdoctest.py
index 64e7e536f8..bc750e0efd 100644
--- a/contrib/python/ipython/py2/IPython/testing/plugin/ipdoctest.py
+++ b/contrib/python/ipython/py2/IPython/testing/plugin/ipdoctest.py
@@ -1,769 +1,769 @@
-"""Nose Plugin that supports IPython doctests.
-
-Limitations:
-
-- When generating examples for use as doctests, make sure that you have
- pretty-printing OFF. This can be done either by setting the
- ``PlainTextFormatter.pprint`` option in your configuration file to False, or
- by interactively disabling it with %Pprint. This is required so that IPython
- output matches that of normal Python, which is used by doctest for internal
- execution.
-
-- Do not rely on specific prompt numbers for results (such as using
- '_34==True', for example). For IPython tests run via an external process the
- prompt numbers may be different, and IPython tests run as normal python code
- won't even have these special _NN variables set at all.
-"""
-
-#-----------------------------------------------------------------------------
-# Module imports
-
-# From the standard library
-import doctest
-import inspect
-import logging
-import os
-import re
-import sys
-
+"""Nose Plugin that supports IPython doctests.
+
+Limitations:
+
+- When generating examples for use as doctests, make sure that you have
+ pretty-printing OFF. This can be done either by setting the
+ ``PlainTextFormatter.pprint`` option in your configuration file to False, or
+ by interactively disabling it with %Pprint. This is required so that IPython
+ output matches that of normal Python, which is used by doctest for internal
+ execution.
+
+- Do not rely on specific prompt numbers for results (such as using
+ '_34==True', for example). For IPython tests run via an external process the
+ prompt numbers may be different, and IPython tests run as normal python code
+ won't even have these special _NN variables set at all.
+"""
+
+#-----------------------------------------------------------------------------
+# Module imports
+
+# From the standard library
+import doctest
+import inspect
+import logging
+import os
+import re
+import sys
+
from testpath import modified_env
-from inspect import getmodule
-
-# We are overriding the default doctest runner, so we need to import a few
-# things from doctest directly
-from doctest import (REPORTING_FLAGS, REPORT_ONLY_FIRST_FAILURE,
- _unittest_reportflags, DocTestRunner,
- _extract_future_flags, pdb, _OutputRedirectingPdb,
- _exception_traceback,
- linecache)
-
-# Third-party modules
-
-from nose.plugins import doctests, Plugin
+from inspect import getmodule
+
+# We are overriding the default doctest runner, so we need to import a few
+# things from doctest directly
+from doctest import (REPORTING_FLAGS, REPORT_ONLY_FIRST_FAILURE,
+ _unittest_reportflags, DocTestRunner,
+ _extract_future_flags, pdb, _OutputRedirectingPdb,
+ _exception_traceback,
+ linecache)
+
+# Third-party modules
+
+from nose.plugins import doctests, Plugin
from nose.util import anyp, tolist
-
-# Our own imports
-from IPython.utils.py3compat import builtin_mod, PY3, getcwd
-
-if PY3:
- from io import StringIO
-else:
- from StringIO import StringIO
-
-#-----------------------------------------------------------------------------
-# Module globals and other constants
-#-----------------------------------------------------------------------------
-
-log = logging.getLogger(__name__)
-
-
-#-----------------------------------------------------------------------------
-# Classes and functions
-#-----------------------------------------------------------------------------
-
-def is_extension_module(filename):
- """Return whether the given filename is an extension module.
-
- This simply checks that the extension is either .so or .pyd.
- """
- return os.path.splitext(filename)[1].lower() in ('.so','.pyd')
-
-
-class DocTestSkip(object):
- """Object wrapper for doctests to be skipped."""
-
- ds_skip = """Doctest to skip.
- >>> 1 #doctest: +SKIP
- """
-
- def __init__(self,obj):
- self.obj = obj
-
- def __getattribute__(self,key):
- if key == '__doc__':
- return DocTestSkip.ds_skip
- else:
- return getattr(object.__getattribute__(self,'obj'),key)
-
-# Modified version of the one in the stdlib, that fixes a python bug (doctests
-# not found in extension modules, http://bugs.python.org/issue3158)
-class DocTestFinder(doctest.DocTestFinder):
-
- def _from_module(self, module, object):
- """
- Return true if the given object is defined in the given
- module.
- """
- if module is None:
- return True
- elif inspect.isfunction(object):
- return module.__dict__ is object.__globals__
- elif inspect.isbuiltin(object):
- return module.__name__ == object.__module__
- elif inspect.isclass(object):
- return module.__name__ == object.__module__
- elif inspect.ismethod(object):
- # This one may be a bug in cython that fails to correctly set the
- # __module__ attribute of methods, but since the same error is easy
- # to make by extension code writers, having this safety in place
- # isn't such a bad idea
- return module.__name__ == object.__self__.__class__.__module__
- elif inspect.getmodule(object) is not None:
- return module is inspect.getmodule(object)
- elif hasattr(object, '__module__'):
- return module.__name__ == object.__module__
- elif isinstance(object, property):
- return True # [XX] no way to be sure.
- elif inspect.ismethoddescriptor(object):
- # Unbound PyQt signals reach this point in Python 3.4b3, and we want
- # to avoid throwing an error. See also http://bugs.python.org/issue3158
- return False
- else:
- raise ValueError("object must be a class or function, got %r" % object)
-
- def _find(self, tests, obj, name, module, source_lines, globs, seen):
- """
- Find tests for the given object and any contained objects, and
- add them to `tests`.
- """
- print('_find for:', obj, name, module) # dbg
- if hasattr(obj,"skip_doctest"):
- #print 'SKIPPING DOCTEST FOR:',obj # dbg
- obj = DocTestSkip(obj)
-
- doctest.DocTestFinder._find(self,tests, obj, name, module,
- source_lines, globs, seen)
-
- # Below we re-run pieces of the above method with manual modifications,
- # because the original code is buggy and fails to correctly identify
- # doctests in extension modules.
-
- # Local shorthands
+
+# Our own imports
+from IPython.utils.py3compat import builtin_mod, PY3, getcwd
+
+if PY3:
+ from io import StringIO
+else:
+ from StringIO import StringIO
+
+#-----------------------------------------------------------------------------
+# Module globals and other constants
+#-----------------------------------------------------------------------------
+
+log = logging.getLogger(__name__)
+
+
+#-----------------------------------------------------------------------------
+# Classes and functions
+#-----------------------------------------------------------------------------
+
+def is_extension_module(filename):
+ """Return whether the given filename is an extension module.
+
+ This simply checks that the extension is either .so or .pyd.
+ """
+ return os.path.splitext(filename)[1].lower() in ('.so','.pyd')
+
+
+class DocTestSkip(object):
+ """Object wrapper for doctests to be skipped."""
+
+ ds_skip = """Doctest to skip.
+ >>> 1 #doctest: +SKIP
+ """
+
+ def __init__(self,obj):
+ self.obj = obj
+
+ def __getattribute__(self,key):
+ if key == '__doc__':
+ return DocTestSkip.ds_skip
+ else:
+ return getattr(object.__getattribute__(self,'obj'),key)
+
+# Modified version of the one in the stdlib, that fixes a python bug (doctests
+# not found in extension modules, http://bugs.python.org/issue3158)
+class DocTestFinder(doctest.DocTestFinder):
+
+ def _from_module(self, module, object):
+ """
+ Return true if the given object is defined in the given
+ module.
+ """
+ if module is None:
+ return True
+ elif inspect.isfunction(object):
+ return module.__dict__ is object.__globals__
+ elif inspect.isbuiltin(object):
+ return module.__name__ == object.__module__
+ elif inspect.isclass(object):
+ return module.__name__ == object.__module__
+ elif inspect.ismethod(object):
+ # This one may be a bug in cython that fails to correctly set the
+ # __module__ attribute of methods, but since the same error is easy
+ # to make by extension code writers, having this safety in place
+ # isn't such a bad idea
+ return module.__name__ == object.__self__.__class__.__module__
+ elif inspect.getmodule(object) is not None:
+ return module is inspect.getmodule(object)
+ elif hasattr(object, '__module__'):
+ return module.__name__ == object.__module__
+ elif isinstance(object, property):
+ return True # [XX] no way to be sure.
+ elif inspect.ismethoddescriptor(object):
+ # Unbound PyQt signals reach this point in Python 3.4b3, and we want
+ # to avoid throwing an error. See also http://bugs.python.org/issue3158
+ return False
+ else:
+ raise ValueError("object must be a class or function, got %r" % object)
+
+ def _find(self, tests, obj, name, module, source_lines, globs, seen):
+ """
+ Find tests for the given object and any contained objects, and
+ add them to `tests`.
+ """
+ print('_find for:', obj, name, module) # dbg
+ if hasattr(obj,"skip_doctest"):
+ #print 'SKIPPING DOCTEST FOR:',obj # dbg
+ obj = DocTestSkip(obj)
+
+ doctest.DocTestFinder._find(self,tests, obj, name, module,
+ source_lines, globs, seen)
+
+ # Below we re-run pieces of the above method with manual modifications,
+ # because the original code is buggy and fails to correctly identify
+ # doctests in extension modules.
+
+ # Local shorthands
from inspect import isroutine, isclass
-
- # Look for tests in a module's contained objects.
- if inspect.ismodule(obj) and self._recurse:
- for valname, val in obj.__dict__.items():
- valname1 = '%s.%s' % (name, valname)
- if ( (isroutine(val) or isclass(val))
- and self._from_module(module, val) ):
-
- self._find(tests, val, valname1, module, source_lines,
- globs, seen)
-
- # Look for tests in a class's contained objects.
- if inspect.isclass(obj) and self._recurse:
- #print 'RECURSE into class:',obj # dbg
- for valname, val in obj.__dict__.items():
- # Special handling for staticmethod/classmethod.
- if isinstance(val, staticmethod):
- val = getattr(obj, valname)
- if isinstance(val, classmethod):
- val = getattr(obj, valname).__func__
-
- # Recurse to methods, properties, and nested classes.
- if ((inspect.isfunction(val) or inspect.isclass(val) or
- inspect.ismethod(val) or
- isinstance(val, property)) and
- self._from_module(module, val)):
- valname = '%s.%s' % (name, valname)
- self._find(tests, val, valname, module, source_lines,
- globs, seen)
-
-
-class IPDoctestOutputChecker(doctest.OutputChecker):
- """Second-chance checker with support for random tests.
-
- If the default comparison doesn't pass, this checker looks in the expected
- output string for flags that tell us to ignore the output.
- """
-
- random_re = re.compile(r'#\s*random\s+')
-
- def check_output(self, want, got, optionflags):
- """Check output, accepting special markers embedded in the output.
-
- If the output didn't pass the default validation but the special string
- '#random' is included, we accept it."""
-
- # Let the original tester verify first, in case people have valid tests
- # that happen to have a comment saying '#random' embedded in.
- ret = doctest.OutputChecker.check_output(self, want, got,
- optionflags)
- if not ret and self.random_re.search(want):
- #print >> sys.stderr, 'RANDOM OK:',want # dbg
- return True
-
- return ret
-
-
-class DocTestCase(doctests.DocTestCase):
- """Proxy for DocTestCase: provides an address() method that
- returns the correct address for the doctest case. Otherwise
- acts as a proxy to the test case. To provide hints for address(),
- an obj may also be passed -- this will be used as the test object
- for purposes of determining the test address, if it is provided.
- """
-
- # Note: this method was taken from numpy's nosetester module.
-
- # Subclass nose.plugins.doctests.DocTestCase to work around a bug in
- # its constructor that blocks non-default arguments from being passed
- # down into doctest.DocTestCase
-
- def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
- checker=None, obj=None, result_var='_'):
- self._result_var = result_var
- doctests.DocTestCase.__init__(self, test,
- optionflags=optionflags,
- setUp=setUp, tearDown=tearDown,
- checker=checker)
- # Now we must actually copy the original constructor from the stdlib
- # doctest class, because we can't call it directly and a bug in nose
- # means it never gets passed the right arguments.
-
- self._dt_optionflags = optionflags
- self._dt_checker = checker
- self._dt_test = test
- self._dt_test_globs_ori = test.globs
- self._dt_setUp = setUp
- self._dt_tearDown = tearDown
-
- # XXX - store this runner once in the object!
- runner = IPDocTestRunner(optionflags=optionflags,
- checker=checker, verbose=False)
- self._dt_runner = runner
-
-
- # Each doctest should remember the directory it was loaded from, so
- # things like %run work without too many contortions
- self._ori_dir = os.path.dirname(test.filename)
-
- # Modified runTest from the default stdlib
- def runTest(self):
- test = self._dt_test
- runner = self._dt_runner
-
- old = sys.stdout
- new = StringIO()
- optionflags = self._dt_optionflags
-
- if not (optionflags & REPORTING_FLAGS):
- # The option flags don't include any reporting flags,
- # so add the default reporting flags
- optionflags |= _unittest_reportflags
-
- try:
- # Save our current directory and switch out to the one where the
- # test was originally created, in case another doctest did a
- # directory change. We'll restore this in the finally clause.
- curdir = getcwd()
- #print 'runTest in dir:', self._ori_dir # dbg
- os.chdir(self._ori_dir)
-
- runner.DIVIDER = "-"*70
- failures, tries = runner.run(test,out=new.write,
- clear_globs=False)
- finally:
- sys.stdout = old
- os.chdir(curdir)
-
- if failures:
- raise self.failureException(self.format_failure(new.getvalue()))
-
- def setUp(self):
- """Modified test setup that syncs with ipython namespace"""
- #print "setUp test", self._dt_test.examples # dbg
- if isinstance(self._dt_test.examples[0], IPExample):
- # for IPython examples *only*, we swap the globals with the ipython
- # namespace, after updating it with the globals (which doctest
- # fills with the necessary info from the module being tested).
- self.user_ns_orig = {}
- self.user_ns_orig.update(_ip.user_ns)
- _ip.user_ns.update(self._dt_test.globs)
- # We must remove the _ key in the namespace, so that Python's
- # doctest code sets it naturally
- _ip.user_ns.pop('_', None)
- _ip.user_ns['__builtins__'] = builtin_mod
- self._dt_test.globs = _ip.user_ns
-
- super(DocTestCase, self).setUp()
-
- def tearDown(self):
-
- # Undo the test.globs reassignment we made, so that the parent class
- # teardown doesn't destroy the ipython namespace
- if isinstance(self._dt_test.examples[0], IPExample):
- self._dt_test.globs = self._dt_test_globs_ori
- _ip.user_ns.clear()
- _ip.user_ns.update(self.user_ns_orig)
-
- # XXX - fperez: I am not sure if this is truly a bug in nose 0.11, but
- # it does look like one to me: its tearDown method tries to run
- #
- # delattr(builtin_mod, self._result_var)
- #
- # without checking that the attribute really is there; it implicitly
- # assumes it should have been set via displayhook. But if the
- # displayhook was never called, this doesn't necessarily happen. I
- # haven't been able to find a little self-contained example outside of
- # ipython that would show the problem so I can report it to the nose
- # team, but it does happen a lot in our code.
- #
- # So here, we just protect as narrowly as possible by trapping an
- # attribute error whose message would be the name of self._result_var,
- # and letting any other error propagate.
- try:
- super(DocTestCase, self).tearDown()
- except AttributeError as exc:
- if exc.args[0] != self._result_var:
- raise
-
-
-# A simple subclassing of the original with a different class name, so we can
-# distinguish and treat differently IPython examples from pure python ones.
-class IPExample(doctest.Example): pass
-
-
-class IPExternalExample(doctest.Example):
- """Doctest examples to be run in an external process."""
-
- def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
- options=None):
- # Parent constructor
- doctest.Example.__init__(self,source,want,exc_msg,lineno,indent,options)
-
- # An EXTRA newline is needed to prevent pexpect hangs
- self.source += '\n'
-
-
-class IPDocTestParser(doctest.DocTestParser):
- """
- A class used to parse strings containing doctest examples.
-
- Note: This is a version modified to properly recognize IPython input and
- convert any IPython examples into valid Python ones.
- """
- # This regular expression is used to find doctest examples in a
- # string. It defines three groups: `source` is the source code
- # (including leading indentation and prompts); `indent` is the
- # indentation of the first (PS1) line of the source code; and
- # `want` is the expected output (including leading indentation).
-
- # Classic Python prompts or default IPython ones
- _PS1_PY = r'>>>'
- _PS2_PY = r'\.\.\.'
-
- _PS1_IP = r'In\ \[\d+\]:'
- _PS2_IP = r'\ \ \ \.\.\.+:'
-
- _RE_TPL = r'''
- # Source consists of a PS1 line followed by zero or more PS2 lines.
- (?P<source>
- (?:^(?P<indent> [ ]*) (?P<ps1> %s) .*) # PS1 line
- (?:\n [ ]* (?P<ps2> %s) .*)*) # PS2 lines
- \n? # a newline
- # Want consists of any non-blank lines that do not start with PS1.
- (?P<want> (?:(?![ ]*$) # Not a blank line
- (?![ ]*%s) # Not a line starting with PS1
- (?![ ]*%s) # Not a line starting with PS2
- .*$\n? # But any other line
- )*)
- '''
-
- _EXAMPLE_RE_PY = re.compile( _RE_TPL % (_PS1_PY,_PS2_PY,_PS1_PY,_PS2_PY),
- re.MULTILINE | re.VERBOSE)
-
- _EXAMPLE_RE_IP = re.compile( _RE_TPL % (_PS1_IP,_PS2_IP,_PS1_IP,_PS2_IP),
- re.MULTILINE | re.VERBOSE)
-
- # Mark a test as being fully random. In this case, we simply append the
- # random marker ('#random') to each individual example's output. This way
- # we don't need to modify any other code.
- _RANDOM_TEST = re.compile(r'#\s*all-random\s+')
-
- # Mark tests to be executed in an external process - currently unsupported.
- _EXTERNAL_IP = re.compile(r'#\s*ipdoctest:\s*EXTERNAL')
-
- def ip2py(self,source):
- """Convert input IPython source into valid Python."""
- block = _ip.input_transformer_manager.transform_cell(source)
- if len(block.splitlines()) == 1:
- return _ip.prefilter(block)
- else:
- return block
-
- def parse(self, string, name='<string>'):
- """
- Divide the given string into examples and intervening text,
- and return them as a list of alternating Examples and strings.
- Line numbers for the Examples are 0-based. The optional
- argument `name` is a name identifying this string, and is only
- used for error messages.
- """
-
- #print 'Parse string:\n',string # dbg
-
- string = string.expandtabs()
- # If all lines begin with the same indentation, then strip it.
- min_indent = self._min_indent(string)
- if min_indent > 0:
- string = '\n'.join([l[min_indent:] for l in string.split('\n')])
-
- output = []
- charno, lineno = 0, 0
-
- # We make 'all random' tests by adding the '# random' mark to every
- # block of output in the test.
- if self._RANDOM_TEST.search(string):
- random_marker = '\n# random'
- else:
- random_marker = ''
-
- # Whether to convert the input from ipython to python syntax
- ip2py = False
- # Find all doctest examples in the string. First, try them as Python
- # examples, then as IPython ones
- terms = list(self._EXAMPLE_RE_PY.finditer(string))
- if terms:
- # Normal Python example
- #print '-'*70 # dbg
- #print 'PyExample, Source:\n',string # dbg
- #print '-'*70 # dbg
- Example = doctest.Example
- else:
- # It's an ipython example. Note that IPExamples are run
- # in-process, so their syntax must be turned into valid python.
- # IPExternalExamples are run out-of-process (via pexpect) so they
- # don't need any filtering (a real ipython will be executing them).
- terms = list(self._EXAMPLE_RE_IP.finditer(string))
- if self._EXTERNAL_IP.search(string):
- #print '-'*70 # dbg
- #print 'IPExternalExample, Source:\n',string # dbg
- #print '-'*70 # dbg
- Example = IPExternalExample
- else:
- #print '-'*70 # dbg
- #print 'IPExample, Source:\n',string # dbg
- #print '-'*70 # dbg
- Example = IPExample
- ip2py = True
-
- for m in terms:
- # Add the pre-example text to `output`.
- output.append(string[charno:m.start()])
- # Update lineno (lines before this example)
- lineno += string.count('\n', charno, m.start())
- # Extract info from the regexp match.
- (source, options, want, exc_msg) = \
- self._parse_example(m, name, lineno,ip2py)
-
- # Append the random-output marker (it defaults to empty in most
- # cases, it's only non-empty for 'all-random' tests):
- want += random_marker
-
- if Example is IPExternalExample:
- options[doctest.NORMALIZE_WHITESPACE] = True
- want += '\n'
-
- # Create an Example, and add it to the list.
- if not self._IS_BLANK_OR_COMMENT(source):
- output.append(Example(source, want, exc_msg,
- lineno=lineno,
- indent=min_indent+len(m.group('indent')),
- options=options))
- # Update lineno (lines inside this example)
- lineno += string.count('\n', m.start(), m.end())
- # Update charno.
- charno = m.end()
- # Add any remaining post-example text to `output`.
- output.append(string[charno:])
- return output
-
- def _parse_example(self, m, name, lineno,ip2py=False):
- """
- Given a regular expression match from `_EXAMPLE_RE` (`m`),
- return a pair `(source, want)`, where `source` is the matched
- example's source code (with prompts and indentation stripped);
- and `want` is the example's expected output (with indentation
- stripped).
-
- `name` is the string's name, and `lineno` is the line number
- where the example starts; both are used for error messages.
-
- Optional:
- `ip2py`: if true, filter the input via IPython to convert the syntax
- into valid python.
- """
-
- # Get the example's indentation level.
- indent = len(m.group('indent'))
-
- # Divide source into lines; check that they're properly
- # indented; and then strip their indentation & prompts.
- source_lines = m.group('source').split('\n')
-
- # We're using variable-length input prompts
- ps1 = m.group('ps1')
- ps2 = m.group('ps2')
- ps1_len = len(ps1)
-
- self._check_prompt_blank(source_lines, indent, name, lineno,ps1_len)
- if ps2:
- self._check_prefix(source_lines[1:], ' '*indent + ps2, name, lineno)
-
- source = '\n'.join([sl[indent+ps1_len+1:] for sl in source_lines])
-
- if ip2py:
- # Convert source input from IPython into valid Python syntax
- source = self.ip2py(source)
-
- # Divide want into lines; check that it's properly indented; and
- # then strip the indentation. Spaces before the last newline should
- # be preserved, so plain rstrip() isn't good enough.
- want = m.group('want')
- want_lines = want.split('\n')
- if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
- del want_lines[-1] # forget final newline & spaces after it
- self._check_prefix(want_lines, ' '*indent, name,
- lineno + len(source_lines))
-
- # Remove ipython output prompt that might be present in the first line
- want_lines[0] = re.sub(r'Out\[\d+\]: \s*?\n?','',want_lines[0])
-
- want = '\n'.join([wl[indent:] for wl in want_lines])
-
- # If `want` contains a traceback message, then extract it.
- m = self._EXCEPTION_RE.match(want)
- if m:
- exc_msg = m.group('msg')
- else:
- exc_msg = None
-
- # Extract options from the source.
- options = self._find_options(source, name, lineno)
-
- return source, options, want, exc_msg
-
- def _check_prompt_blank(self, lines, indent, name, lineno, ps1_len):
- """
- Given the lines of a source string (including prompts and
- leading indentation), check to make sure that every prompt is
- followed by a space character. If any line is not followed by
- a space character, then raise ValueError.
-
- Note: IPython-modified version which takes the input prompt length as a
- parameter, so that prompts of variable length can be dealt with.
- """
- space_idx = indent+ps1_len
- min_len = space_idx+1
- for i, line in enumerate(lines):
- if len(line) >= min_len and line[space_idx] != ' ':
- raise ValueError('line %r of the docstring for %s '
- 'lacks blank after %s: %r' %
- (lineno+i+1, name,
- line[indent:space_idx], line))
-
-
-SKIP = doctest.register_optionflag('SKIP')
-
-
-class IPDocTestRunner(doctest.DocTestRunner,object):
- """Test runner that synchronizes the IPython namespace with test globals.
- """
-
- def run(self, test, compileflags=None, out=None, clear_globs=True):
-
- # Hack: ipython needs access to the execution context of the example,
- # so that it can propagate user variables loaded by %run into
- # test.globs. We put them here into our modified %run as a function
- # attribute. Our new %run will then only make the namespace update
-        # when called (rather than unconditionally updating test.globs here
- # for all examples, most of which won't be calling %run anyway).
- #_ip._ipdoctest_test_globs = test.globs
- #_ip._ipdoctest_test_filename = test.filename
-
- test.globs.update(_ip.user_ns)
-
+
+ # Look for tests in a module's contained objects.
+ if inspect.ismodule(obj) and self._recurse:
+ for valname, val in obj.__dict__.items():
+ valname1 = '%s.%s' % (name, valname)
+ if ( (isroutine(val) or isclass(val))
+ and self._from_module(module, val) ):
+
+ self._find(tests, val, valname1, module, source_lines,
+ globs, seen)
+
+ # Look for tests in a class's contained objects.
+ if inspect.isclass(obj) and self._recurse:
+ #print 'RECURSE into class:',obj # dbg
+ for valname, val in obj.__dict__.items():
+ # Special handling for staticmethod/classmethod.
+ if isinstance(val, staticmethod):
+ val = getattr(obj, valname)
+ if isinstance(val, classmethod):
+ val = getattr(obj, valname).__func__
+
+ # Recurse to methods, properties, and nested classes.
+ if ((inspect.isfunction(val) or inspect.isclass(val) or
+ inspect.ismethod(val) or
+ isinstance(val, property)) and
+ self._from_module(module, val)):
+ valname = '%s.%s' % (name, valname)
+ self._find(tests, val, valname, module, source_lines,
+ globs, seen)
+
+
+class IPDoctestOutputChecker(doctest.OutputChecker):
+ """Second-chance checker with support for random tests.
+
+ If the default comparison doesn't pass, this checker looks in the expected
+ output string for flags that tell us to ignore the output.
+ """
+
+ random_re = re.compile(r'#\s*random\s+')
+
+ def check_output(self, want, got, optionflags):
+ """Check output, accepting special markers embedded in the output.
+
+ If the output didn't pass the default validation but the special string
+ '#random' is included, we accept it."""
+
+ # Let the original tester verify first, in case people have valid tests
+        # that happen to have a comment saying '#random' embedded in them.
+ ret = doctest.OutputChecker.check_output(self, want, got,
+ optionflags)
+ if not ret and self.random_re.search(want):
+ #print >> sys.stderr, 'RANDOM OK:',want # dbg
+ return True
+
+ return ret
+
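
The '#random' escape hatch handled above can be exercised directly, without going through a full test run. A minimal sketch, assuming IPython and its nose-based testing plugin are importable::

    import doctest
    from IPython.testing.plugin.ipdoctest import IPDoctestOutputChecker

    checker = IPDoctestOutputChecker()
    flags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS

    # An exact match still passes the usual doctest comparison.
    assert checker.check_output('4\n', '4\n', flags)

    # With the '# random' marker in the expected output, any output is accepted.
    assert checker.check_output('0.1234  # random\n', '0.9999\n', flags)
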
+
+class DocTestCase(doctests.DocTestCase):
+ """Proxy for DocTestCase: provides an address() method that
+ returns the correct address for the doctest case. Otherwise
+ acts as a proxy to the test case. To provide hints for address(),
+ an obj may also be passed -- this will be used as the test object
+ for purposes of determining the test address, if it is provided.
+ """
+
+ # Note: this method was taken from numpy's nosetester module.
+
+ # Subclass nose.plugins.doctests.DocTestCase to work around a bug in
+ # its constructor that blocks non-default arguments from being passed
+ # down into doctest.DocTestCase
+
+ def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
+ checker=None, obj=None, result_var='_'):
+ self._result_var = result_var
+ doctests.DocTestCase.__init__(self, test,
+ optionflags=optionflags,
+ setUp=setUp, tearDown=tearDown,
+ checker=checker)
+ # Now we must actually copy the original constructor from the stdlib
+ # doctest class, because we can't call it directly and a bug in nose
+ # means it never gets passed the right arguments.
+
+ self._dt_optionflags = optionflags
+ self._dt_checker = checker
+ self._dt_test = test
+ self._dt_test_globs_ori = test.globs
+ self._dt_setUp = setUp
+ self._dt_tearDown = tearDown
+
+ # XXX - store this runner once in the object!
+ runner = IPDocTestRunner(optionflags=optionflags,
+ checker=checker, verbose=False)
+ self._dt_runner = runner
+
+
+ # Each doctest should remember the directory it was loaded from, so
+ # things like %run work without too many contortions
+ self._ori_dir = os.path.dirname(test.filename)
+
+ # Modified runTest from the default stdlib
+ def runTest(self):
+ test = self._dt_test
+ runner = self._dt_runner
+
+ old = sys.stdout
+ new = StringIO()
+ optionflags = self._dt_optionflags
+
+ if not (optionflags & REPORTING_FLAGS):
+ # The option flags don't include any reporting flags,
+ # so add the default reporting flags
+ optionflags |= _unittest_reportflags
+
+ try:
+ # Save our current directory and switch out to the one where the
+ # test was originally created, in case another doctest did a
+ # directory change. We'll restore this in the finally clause.
+ curdir = getcwd()
+ #print 'runTest in dir:', self._ori_dir # dbg
+ os.chdir(self._ori_dir)
+
+ runner.DIVIDER = "-"*70
+ failures, tries = runner.run(test,out=new.write,
+ clear_globs=False)
+ finally:
+ sys.stdout = old
+ os.chdir(curdir)
+
+ if failures:
+ raise self.failureException(self.format_failure(new.getvalue()))
+
+ def setUp(self):
+ """Modified test setup that syncs with ipython namespace"""
+ #print "setUp test", self._dt_test.examples # dbg
+ if isinstance(self._dt_test.examples[0], IPExample):
+ # for IPython examples *only*, we swap the globals with the ipython
+ # namespace, after updating it with the globals (which doctest
+ # fills with the necessary info from the module being tested).
+ self.user_ns_orig = {}
+ self.user_ns_orig.update(_ip.user_ns)
+ _ip.user_ns.update(self._dt_test.globs)
+ # We must remove the _ key in the namespace, so that Python's
+ # doctest code sets it naturally
+ _ip.user_ns.pop('_', None)
+ _ip.user_ns['__builtins__'] = builtin_mod
+ self._dt_test.globs = _ip.user_ns
+
+ super(DocTestCase, self).setUp()
+
+ def tearDown(self):
+
+ # Undo the test.globs reassignment we made, so that the parent class
+ # teardown doesn't destroy the ipython namespace
+ if isinstance(self._dt_test.examples[0], IPExample):
+ self._dt_test.globs = self._dt_test_globs_ori
+ _ip.user_ns.clear()
+ _ip.user_ns.update(self.user_ns_orig)
+
+ # XXX - fperez: I am not sure if this is truly a bug in nose 0.11, but
+ # it does look like one to me: its tearDown method tries to run
+ #
+ # delattr(builtin_mod, self._result_var)
+ #
+ # without checking that the attribute really is there; it implicitly
+ # assumes it should have been set via displayhook. But if the
+ # displayhook was never called, this doesn't necessarily happen. I
+ # haven't been able to find a little self-contained example outside of
+ # ipython that would show the problem so I can report it to the nose
+ # team, but it does happen a lot in our code.
+ #
+ # So here, we just protect as narrowly as possible by trapping an
+ # attribute error whose message would be the name of self._result_var,
+ # and letting any other error propagate.
+ try:
+ super(DocTestCase, self).tearDown()
+ except AttributeError as exc:
+ if exc.args[0] != self._result_var:
+ raise
+
+
+# A simple subclass of the original with a different class name, so we can
+# distinguish IPython examples from pure Python ones and treat them differently.
+class IPExample(doctest.Example): pass
+
+
+class IPExternalExample(doctest.Example):
+ """Doctest examples to be run in an external process."""
+
+ def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
+ options=None):
+ # Parent constructor
+ doctest.Example.__init__(self,source,want,exc_msg,lineno,indent,options)
+
+ # An EXTRA newline is needed to prevent pexpect hangs
+ self.source += '\n'
+
+
+class IPDocTestParser(doctest.DocTestParser):
+ """
+ A class used to parse strings containing doctest examples.
+
+ Note: This is a version modified to properly recognize IPython input and
+ convert any IPython examples into valid Python ones.
+ """
+ # This regular expression is used to find doctest examples in a
+ # string. It defines three groups: `source` is the source code
+ # (including leading indentation and prompts); `indent` is the
+ # indentation of the first (PS1) line of the source code; and
+ # `want` is the expected output (including leading indentation).
+
+ # Classic Python prompts or default IPython ones
+ _PS1_PY = r'>>>'
+ _PS2_PY = r'\.\.\.'
+
+ _PS1_IP = r'In\ \[\d+\]:'
+ _PS2_IP = r'\ \ \ \.\.\.+:'
+
+ _RE_TPL = r'''
+ # Source consists of a PS1 line followed by zero or more PS2 lines.
+ (?P<source>
+ (?:^(?P<indent> [ ]*) (?P<ps1> %s) .*) # PS1 line
+ (?:\n [ ]* (?P<ps2> %s) .*)*) # PS2 lines
+ \n? # a newline
+ # Want consists of any non-blank lines that do not start with PS1.
+ (?P<want> (?:(?![ ]*$) # Not a blank line
+ (?![ ]*%s) # Not a line starting with PS1
+ (?![ ]*%s) # Not a line starting with PS2
+ .*$\n? # But any other line
+ )*)
+ '''
+
+ _EXAMPLE_RE_PY = re.compile( _RE_TPL % (_PS1_PY,_PS2_PY,_PS1_PY,_PS2_PY),
+ re.MULTILINE | re.VERBOSE)
+
+ _EXAMPLE_RE_IP = re.compile( _RE_TPL % (_PS1_IP,_PS2_IP,_PS1_IP,_PS2_IP),
+ re.MULTILINE | re.VERBOSE)
+
+ # Mark a test as being fully random. In this case, we simply append the
+ # random marker ('#random') to each individual example's output. This way
+ # we don't need to modify any other code.
+ _RANDOM_TEST = re.compile(r'#\s*all-random\s+')
+
+ # Mark tests to be executed in an external process - currently unsupported.
+ _EXTERNAL_IP = re.compile(r'#\s*ipdoctest:\s*EXTERNAL')
+
+ def ip2py(self,source):
+ """Convert input IPython source into valid Python."""
+ block = _ip.input_transformer_manager.transform_cell(source)
+ if len(block.splitlines()) == 1:
+ return _ip.prefilter(block)
+ else:
+ return block
+
+ def parse(self, string, name='<string>'):
+ """
+ Divide the given string into examples and intervening text,
+ and return them as a list of alternating Examples and strings.
+ Line numbers for the Examples are 0-based. The optional
+ argument `name` is a name identifying this string, and is only
+ used for error messages.
+ """
+
+ #print 'Parse string:\n',string # dbg
+
+ string = string.expandtabs()
+ # If all lines begin with the same indentation, then strip it.
+ min_indent = self._min_indent(string)
+ if min_indent > 0:
+ string = '\n'.join([l[min_indent:] for l in string.split('\n')])
+
+ output = []
+ charno, lineno = 0, 0
+
+ # We make 'all random' tests by adding the '# random' mark to every
+ # block of output in the test.
+ if self._RANDOM_TEST.search(string):
+ random_marker = '\n# random'
+ else:
+ random_marker = ''
+
+ # Whether to convert the input from ipython to python syntax
+ ip2py = False
+ # Find all doctest examples in the string. First, try them as Python
+ # examples, then as IPython ones
+ terms = list(self._EXAMPLE_RE_PY.finditer(string))
+ if terms:
+ # Normal Python example
+ #print '-'*70 # dbg
+ #print 'PyExample, Source:\n',string # dbg
+ #print '-'*70 # dbg
+ Example = doctest.Example
+ else:
+ # It's an ipython example. Note that IPExamples are run
+ # in-process, so their syntax must be turned into valid python.
+ # IPExternalExamples are run out-of-process (via pexpect) so they
+ # don't need any filtering (a real ipython will be executing them).
+ terms = list(self._EXAMPLE_RE_IP.finditer(string))
+ if self._EXTERNAL_IP.search(string):
+ #print '-'*70 # dbg
+ #print 'IPExternalExample, Source:\n',string # dbg
+ #print '-'*70 # dbg
+ Example = IPExternalExample
+ else:
+ #print '-'*70 # dbg
+ #print 'IPExample, Source:\n',string # dbg
+ #print '-'*70 # dbg
+ Example = IPExample
+ ip2py = True
+
+ for m in terms:
+ # Add the pre-example text to `output`.
+ output.append(string[charno:m.start()])
+ # Update lineno (lines before this example)
+ lineno += string.count('\n', charno, m.start())
+ # Extract info from the regexp match.
+ (source, options, want, exc_msg) = \
+ self._parse_example(m, name, lineno,ip2py)
+
+ # Append the random-output marker (it defaults to empty in most
+ # cases, it's only non-empty for 'all-random' tests):
+ want += random_marker
+
+ if Example is IPExternalExample:
+ options[doctest.NORMALIZE_WHITESPACE] = True
+ want += '\n'
+
+ # Create an Example, and add it to the list.
+ if not self._IS_BLANK_OR_COMMENT(source):
+ output.append(Example(source, want, exc_msg,
+ lineno=lineno,
+ indent=min_indent+len(m.group('indent')),
+ options=options))
+ # Update lineno (lines inside this example)
+ lineno += string.count('\n', m.start(), m.end())
+ # Update charno.
+ charno = m.end()
+ # Add any remaining post-example text to `output`.
+ output.append(string[charno:])
+ return output
+
+ def _parse_example(self, m, name, lineno,ip2py=False):
+ """
+ Given a regular expression match from `_EXAMPLE_RE` (`m`),
+ return a pair `(source, want)`, where `source` is the matched
+ example's source code (with prompts and indentation stripped);
+ and `want` is the example's expected output (with indentation
+ stripped).
+
+ `name` is the string's name, and `lineno` is the line number
+ where the example starts; both are used for error messages.
+
+ Optional:
+ `ip2py`: if true, filter the input via IPython to convert the syntax
+ into valid python.
+ """
+
+ # Get the example's indentation level.
+ indent = len(m.group('indent'))
+
+ # Divide source into lines; check that they're properly
+ # indented; and then strip their indentation & prompts.
+ source_lines = m.group('source').split('\n')
+
+ # We're using variable-length input prompts
+ ps1 = m.group('ps1')
+ ps2 = m.group('ps2')
+ ps1_len = len(ps1)
+
+ self._check_prompt_blank(source_lines, indent, name, lineno,ps1_len)
+ if ps2:
+ self._check_prefix(source_lines[1:], ' '*indent + ps2, name, lineno)
+
+ source = '\n'.join([sl[indent+ps1_len+1:] for sl in source_lines])
+
+ if ip2py:
+ # Convert source input from IPython into valid Python syntax
+ source = self.ip2py(source)
+
+ # Divide want into lines; check that it's properly indented; and
+ # then strip the indentation. Spaces before the last newline should
+ # be preserved, so plain rstrip() isn't good enough.
+ want = m.group('want')
+ want_lines = want.split('\n')
+ if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
+ del want_lines[-1] # forget final newline & spaces after it
+ self._check_prefix(want_lines, ' '*indent, name,
+ lineno + len(source_lines))
+
+ # Remove ipython output prompt that might be present in the first line
+ want_lines[0] = re.sub(r'Out\[\d+\]: \s*?\n?','',want_lines[0])
+
+ want = '\n'.join([wl[indent:] for wl in want_lines])
+
+ # If `want` contains a traceback message, then extract it.
+ m = self._EXCEPTION_RE.match(want)
+ if m:
+ exc_msg = m.group('msg')
+ else:
+ exc_msg = None
+
+ # Extract options from the source.
+ options = self._find_options(source, name, lineno)
+
+ return source, options, want, exc_msg
+
+ def _check_prompt_blank(self, lines, indent, name, lineno, ps1_len):
+ """
+ Given the lines of a source string (including prompts and
+ leading indentation), check to make sure that every prompt is
+ followed by a space character. If any line is not followed by
+ a space character, then raise ValueError.
+
+ Note: IPython-modified version which takes the input prompt length as a
+ parameter, so that prompts of variable length can be dealt with.
+ """
+ space_idx = indent+ps1_len
+ min_len = space_idx+1
+ for i, line in enumerate(lines):
+ if len(line) >= min_len and line[space_idx] != ' ':
+ raise ValueError('line %r of the docstring for %s '
+ 'lacks blank after %s: %r' %
+ (lineno+i+1, name,
+ line[indent:space_idx], line))
+
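
The prompt check above is easy to trip when writing examples by hand: every IPython prompt must be followed by a space. A small sketch of the failure mode; the error is raised while parsing, before any IPython-to-Python conversion, so no running shell is needed::

    from IPython.testing.plugin.ipdoctest import IPDocTestParser

    parser = IPDocTestParser()
    try:
        # Note the missing space after the prompt's colon.
        parser.parse("In [1]:x = 1\n", name='bad-example')
    except ValueError as err:
        print(err)   # ... lacks blank after In [1]: ...
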
+
+SKIP = doctest.register_optionflag('SKIP')
+
+
+class IPDocTestRunner(doctest.DocTestRunner,object):
+ """Test runner that synchronizes the IPython namespace with test globals.
+ """
+
+ def run(self, test, compileflags=None, out=None, clear_globs=True):
+
+ # Hack: ipython needs access to the execution context of the example,
+ # so that it can propagate user variables loaded by %run into
+ # test.globs. We put them here into our modified %run as a function
+ # attribute. Our new %run will then only make the namespace update
+        # when called (rather than unconditionally updating test.globs here
+ # for all examples, most of which won't be calling %run anyway).
+ #_ip._ipdoctest_test_globs = test.globs
+ #_ip._ipdoctest_test_filename = test.filename
+
+ test.globs.update(_ip.user_ns)
+
# Override terminal size to standardise traceback format
with modified_env({'COLUMNS': '80', 'LINES': '24'}):
return super(IPDocTestRunner,self).run(test,
compileflags,out,clear_globs)
-
-
-class DocFileCase(doctest.DocFileCase):
- """Overrides to provide filename
- """
- def address(self):
- return (self._dt_test.filename, None, None)
-
-
-class ExtensionDoctest(doctests.Doctest):
- """Nose Plugin that supports doctests in extension modules.
- """
- name = 'extdoctest' # call nosetests with --with-extdoctest
- enabled = True
-
- def options(self, parser, env=os.environ):
- Plugin.options(self, parser, env)
- parser.add_option('--doctest-tests', action='store_true',
- dest='doctest_tests',
- default=env.get('NOSE_DOCTEST_TESTS',True),
- help="Also look for doctests in test modules. "
- "Note that classes, methods and functions should "
- "have either doctests or non-doctest tests, "
- "not both. [NOSE_DOCTEST_TESTS]")
- parser.add_option('--doctest-extension', action="append",
- dest="doctestExtension",
- help="Also look for doctests in files with "
- "this extension [NOSE_DOCTEST_EXTENSION]")
- # Set the default as a list, if given in env; otherwise
- # an additional value set on the command line will cause
- # an error.
- env_setting = env.get('NOSE_DOCTEST_EXTENSION')
- if env_setting is not None:
- parser.set_defaults(doctestExtension=tolist(env_setting))
-
-
- def configure(self, options, config):
- Plugin.configure(self, options, config)
- # Pull standard doctest plugin out of config; we will do doctesting
- config.plugins.plugins = [p for p in config.plugins.plugins
- if p.name != 'doctest']
- self.doctest_tests = options.doctest_tests
- self.extension = tolist(options.doctestExtension)
-
- self.parser = doctest.DocTestParser()
- self.finder = DocTestFinder()
- self.checker = IPDoctestOutputChecker()
- self.globs = None
- self.extraglobs = None
-
-
- def loadTestsFromExtensionModule(self,filename):
- bpath,mod = os.path.split(filename)
- modname = os.path.splitext(mod)[0]
- try:
- sys.path.append(bpath)
- module = __import__(modname)
- tests = list(self.loadTestsFromModule(module))
- finally:
- sys.path.pop()
- return tests
-
- # NOTE: the method below is almost a copy of the original one in nose, with
- # a few modifications to control output checking.
-
- def loadTestsFromModule(self, module):
- #print '*** ipdoctest - lTM',module # dbg
-
- if not self.matches(module.__name__):
- log.debug("Doctest doesn't want module %s", module)
- return
-
- tests = self.finder.find(module,globs=self.globs,
- extraglobs=self.extraglobs)
- if not tests:
- return
-
- # always use whitespace and ellipsis options
- optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
-
- tests.sort()
- module_file = module.__file__
- if module_file[-4:] in ('.pyc', '.pyo'):
- module_file = module_file[:-1]
- for test in tests:
- if not test.examples:
- continue
- if not test.filename:
- test.filename = module_file
-
- yield DocTestCase(test,
- optionflags=optionflags,
- checker=self.checker)
-
-
- def loadTestsFromFile(self, filename):
- #print "ipdoctest - from file", filename # dbg
- if is_extension_module(filename):
- for t in self.loadTestsFromExtensionModule(filename):
- yield t
- else:
- if self.extension and anyp(filename.endswith, self.extension):
- name = os.path.basename(filename)
- dh = open(filename)
- try:
- doc = dh.read()
- finally:
- dh.close()
- test = self.parser.get_doctest(
- doc, globs={'__file__': filename}, name=name,
- filename=filename, lineno=0)
- if test.examples:
- #print 'FileCase:',test.examples # dbg
- yield DocFileCase(test)
- else:
- yield False # no tests to load
-
-
-class IPythonDoctest(ExtensionDoctest):
- """Nose Plugin that supports doctests in extension modules.
- """
- name = 'ipdoctest' # call nosetests with --with-ipdoctest
- enabled = True
-
- def makeTest(self, obj, parent):
- """Look for doctests in the given object, which will be a
- function, method or class.
- """
- #print 'Plugin analyzing:', obj, parent # dbg
- # always use whitespace and ellipsis options
- optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
-
- doctests = self.finder.find(obj, module=getmodule(parent))
- if doctests:
- for test in doctests:
- if len(test.examples) == 0:
- continue
-
- yield DocTestCase(test, obj=obj,
- optionflags=optionflags,
- checker=self.checker)
-
- def options(self, parser, env=os.environ):
- #print "Options for nose plugin:", self.name # dbg
- Plugin.options(self, parser, env)
- parser.add_option('--ipdoctest-tests', action='store_true',
- dest='ipdoctest_tests',
- default=env.get('NOSE_IPDOCTEST_TESTS',True),
- help="Also look for doctests in test modules. "
- "Note that classes, methods and functions should "
- "have either doctests or non-doctest tests, "
- "not both. [NOSE_IPDOCTEST_TESTS]")
- parser.add_option('--ipdoctest-extension', action="append",
- dest="ipdoctest_extension",
- help="Also look for doctests in files with "
- "this extension [NOSE_IPDOCTEST_EXTENSION]")
- # Set the default as a list, if given in env; otherwise
- # an additional value set on the command line will cause
- # an error.
- env_setting = env.get('NOSE_IPDOCTEST_EXTENSION')
- if env_setting is not None:
- parser.set_defaults(ipdoctest_extension=tolist(env_setting))
-
- def configure(self, options, config):
- #print "Configuring nose plugin:", self.name # dbg
- Plugin.configure(self, options, config)
- # Pull standard doctest plugin out of config; we will do doctesting
- config.plugins.plugins = [p for p in config.plugins.plugins
- if p.name != 'doctest']
- self.doctest_tests = options.ipdoctest_tests
- self.extension = tolist(options.ipdoctest_extension)
-
- self.parser = IPDocTestParser()
- self.finder = DocTestFinder(parser=self.parser)
- self.checker = IPDoctestOutputChecker()
- self.globs = None
- self.extraglobs = None
+
+
+class DocFileCase(doctest.DocFileCase):
+ """Overrides to provide filename
+ """
+ def address(self):
+ return (self._dt_test.filename, None, None)
+
+
+class ExtensionDoctest(doctests.Doctest):
+ """Nose Plugin that supports doctests in extension modules.
+ """
+ name = 'extdoctest' # call nosetests with --with-extdoctest
+ enabled = True
+
+ def options(self, parser, env=os.environ):
+ Plugin.options(self, parser, env)
+ parser.add_option('--doctest-tests', action='store_true',
+ dest='doctest_tests',
+ default=env.get('NOSE_DOCTEST_TESTS',True),
+ help="Also look for doctests in test modules. "
+ "Note that classes, methods and functions should "
+ "have either doctests or non-doctest tests, "
+ "not both. [NOSE_DOCTEST_TESTS]")
+ parser.add_option('--doctest-extension', action="append",
+ dest="doctestExtension",
+ help="Also look for doctests in files with "
+ "this extension [NOSE_DOCTEST_EXTENSION]")
+ # Set the default as a list, if given in env; otherwise
+ # an additional value set on the command line will cause
+ # an error.
+ env_setting = env.get('NOSE_DOCTEST_EXTENSION')
+ if env_setting is not None:
+ parser.set_defaults(doctestExtension=tolist(env_setting))
+
+
+ def configure(self, options, config):
+ Plugin.configure(self, options, config)
+ # Pull standard doctest plugin out of config; we will do doctesting
+ config.plugins.plugins = [p for p in config.plugins.plugins
+ if p.name != 'doctest']
+ self.doctest_tests = options.doctest_tests
+ self.extension = tolist(options.doctestExtension)
+
+ self.parser = doctest.DocTestParser()
+ self.finder = DocTestFinder()
+ self.checker = IPDoctestOutputChecker()
+ self.globs = None
+ self.extraglobs = None
+
+
+ def loadTestsFromExtensionModule(self,filename):
+ bpath,mod = os.path.split(filename)
+ modname = os.path.splitext(mod)[0]
+ try:
+ sys.path.append(bpath)
+ module = __import__(modname)
+ tests = list(self.loadTestsFromModule(module))
+ finally:
+ sys.path.pop()
+ return tests
+
+ # NOTE: the method below is almost a copy of the original one in nose, with
+ # a few modifications to control output checking.
+
+ def loadTestsFromModule(self, module):
+ #print '*** ipdoctest - lTM',module # dbg
+
+ if not self.matches(module.__name__):
+ log.debug("Doctest doesn't want module %s", module)
+ return
+
+ tests = self.finder.find(module,globs=self.globs,
+ extraglobs=self.extraglobs)
+ if not tests:
+ return
+
+ # always use whitespace and ellipsis options
+ optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
+
+ tests.sort()
+ module_file = module.__file__
+ if module_file[-4:] in ('.pyc', '.pyo'):
+ module_file = module_file[:-1]
+ for test in tests:
+ if not test.examples:
+ continue
+ if not test.filename:
+ test.filename = module_file
+
+ yield DocTestCase(test,
+ optionflags=optionflags,
+ checker=self.checker)
+
+
+ def loadTestsFromFile(self, filename):
+ #print "ipdoctest - from file", filename # dbg
+ if is_extension_module(filename):
+ for t in self.loadTestsFromExtensionModule(filename):
+ yield t
+ else:
+ if self.extension and anyp(filename.endswith, self.extension):
+ name = os.path.basename(filename)
+ dh = open(filename)
+ try:
+ doc = dh.read()
+ finally:
+ dh.close()
+ test = self.parser.get_doctest(
+ doc, globs={'__file__': filename}, name=name,
+ filename=filename, lineno=0)
+ if test.examples:
+ #print 'FileCase:',test.examples # dbg
+ yield DocFileCase(test)
+ else:
+ yield False # no tests to load
+
+
+class IPythonDoctest(ExtensionDoctest):
+ """Nose Plugin that supports doctests in extension modules.
+ """
+ name = 'ipdoctest' # call nosetests with --with-ipdoctest
+ enabled = True
+
+ def makeTest(self, obj, parent):
+ """Look for doctests in the given object, which will be a
+ function, method or class.
+ """
+ #print 'Plugin analyzing:', obj, parent # dbg
+ # always use whitespace and ellipsis options
+ optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
+
+ doctests = self.finder.find(obj, module=getmodule(parent))
+ if doctests:
+ for test in doctests:
+ if len(test.examples) == 0:
+ continue
+
+ yield DocTestCase(test, obj=obj,
+ optionflags=optionflags,
+ checker=self.checker)
+
+ def options(self, parser, env=os.environ):
+ #print "Options for nose plugin:", self.name # dbg
+ Plugin.options(self, parser, env)
+ parser.add_option('--ipdoctest-tests', action='store_true',
+ dest='ipdoctest_tests',
+ default=env.get('NOSE_IPDOCTEST_TESTS',True),
+ help="Also look for doctests in test modules. "
+ "Note that classes, methods and functions should "
+ "have either doctests or non-doctest tests, "
+ "not both. [NOSE_IPDOCTEST_TESTS]")
+ parser.add_option('--ipdoctest-extension', action="append",
+ dest="ipdoctest_extension",
+ help="Also look for doctests in files with "
+ "this extension [NOSE_IPDOCTEST_EXTENSION]")
+ # Set the default as a list, if given in env; otherwise
+ # an additional value set on the command line will cause
+ # an error.
+ env_setting = env.get('NOSE_IPDOCTEST_EXTENSION')
+ if env_setting is not None:
+ parser.set_defaults(ipdoctest_extension=tolist(env_setting))
+
+ def configure(self, options, config):
+ #print "Configuring nose plugin:", self.name # dbg
+ Plugin.configure(self, options, config)
+ # Pull standard doctest plugin out of config; we will do doctesting
+ config.plugins.plugins = [p for p in config.plugins.plugins
+ if p.name != 'doctest']
+ self.doctest_tests = options.ipdoctest_tests
+ self.extension = tolist(options.ipdoctest_extension)
+
+ self.parser = IPDocTestParser()
+ self.finder = DocTestFinder(parser=self.parser)
+ self.checker = IPDoctestOutputChecker()
+ self.globs = None
+ self.extraglobs = None
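
Before moving on to the runner script, the prompt-matching template defined on IPDocTestParser can be seen in isolation. A self-contained sketch with the pattern copied from the class attributes above (the sample text is illustrative)::

    import re

    PS1_IP = r'In\ \[\d+\]:'
    PS2_IP = r'\ \ \ \.\.\.+:'
    RE_TPL = r'''
        (?P<source>
            (?:^(?P<indent> [ ]*) (?P<ps1> %s) .*)
            (?:\n [ ]* (?P<ps2> %s) .*)*)
        \n?
        (?P<want> (?:(?![ ]*$)
                     (?![ ]*%s)
                     (?![ ]*%s)
                     .*$\n?
                  )*)
    '''
    EXAMPLE_RE_IP = re.compile(RE_TPL % (PS1_IP, PS2_IP, PS1_IP, PS2_IP),
                               re.MULTILINE | re.VERBOSE)

    m = EXAMPLE_RE_IP.search("In [1]: 2 ** 12\nOut[1]: 4096\n")
    print(repr(m.group('source')))   # 'In [1]: 2 ** 12'
    print(repr(m.group('want')))     # 'Out[1]: 4096\n'
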
diff --git a/contrib/python/ipython/py2/IPython/testing/plugin/iptest.py b/contrib/python/ipython/py2/IPython/testing/plugin/iptest.py
index 25b4634f2f..a75cab993f 100755
--- a/contrib/python/ipython/py2/IPython/testing/plugin/iptest.py
+++ b/contrib/python/ipython/py2/IPython/testing/plugin/iptest.py
@@ -1,19 +1,19 @@
-#!/usr/bin/env python
-"""Nose-based test runner.
-"""
-from __future__ import print_function
-
-from nose.core import main
-from nose.plugins.builtin import plugins
-from nose.plugins.doctests import Doctest
-
-from . import ipdoctest
-from .ipdoctest import IPDocTestRunner
-
-if __name__ == '__main__':
- print('WARNING: this code is incomplete!')
- print()
-
- pp = [x() for x in plugins] # activate all builtin plugins first
- main(testRunner=IPDocTestRunner(),
- plugins=pp+[ipdoctest.IPythonDoctest(),Doctest()])
+#!/usr/bin/env python
+"""Nose-based test runner.
+"""
+from __future__ import print_function
+
+from nose.core import main
+from nose.plugins.builtin import plugins
+from nose.plugins.doctests import Doctest
+
+from . import ipdoctest
+from .ipdoctest import IPDocTestRunner
+
+if __name__ == '__main__':
+ print('WARNING: this code is incomplete!')
+ print()
+
+ pp = [x() for x in plugins] # activate all builtin plugins first
+ main(testRunner=IPDocTestRunner(),
+ plugins=pp+[ipdoctest.IPythonDoctest(),Doctest()])
diff --git a/contrib/python/ipython/py2/IPython/testing/plugin/setup.py b/contrib/python/ipython/py2/IPython/testing/plugin/setup.py
index 785704337b..a3281d30c8 100755
--- a/contrib/python/ipython/py2/IPython/testing/plugin/setup.py
+++ b/contrib/python/ipython/py2/IPython/testing/plugin/setup.py
@@ -1,18 +1,18 @@
-#!/usr/bin/env python
-"""A Nose plugin to support IPython doctests.
-"""
-
-from setuptools import setup
-
-setup(name='IPython doctest plugin',
- version='0.1',
- author='The IPython Team',
- description = 'Nose plugin to load IPython-extended doctests',
- license = 'LGPL',
- py_modules = ['ipdoctest'],
- entry_points = {
- 'nose.plugins.0.10': ['ipdoctest = ipdoctest:IPythonDoctest',
- 'extdoctest = ipdoctest:ExtensionDoctest',
- ],
- },
- )
+#!/usr/bin/env python
+"""A Nose plugin to support IPython doctests.
+"""
+
+from setuptools import setup
+
+setup(name='IPython doctest plugin',
+ version='0.1',
+ author='The IPython Team',
+ description = 'Nose plugin to load IPython-extended doctests',
+ license = 'LGPL',
+ py_modules = ['ipdoctest'],
+ entry_points = {
+ 'nose.plugins.0.10': ['ipdoctest = ipdoctest:IPythonDoctest',
+ 'extdoctest = ipdoctest:ExtensionDoctest',
+ ],
+ },
+ )
diff --git a/contrib/python/ipython/py2/IPython/testing/plugin/show_refs.py b/contrib/python/ipython/py2/IPython/testing/plugin/show_refs.py
index 4c517da949..ef7dd157ae 100644
--- a/contrib/python/ipython/py2/IPython/testing/plugin/show_refs.py
+++ b/contrib/python/ipython/py2/IPython/testing/plugin/show_refs.py
@@ -1,20 +1,20 @@
-"""Simple script to show reference holding behavior.
-
-This is used by a companion test case.
-"""
-from __future__ import print_function
-
-import gc
-
-class C(object):
- def __del__(self):
- pass
- #print 'deleting object...' # dbg
-
-if __name__ == '__main__':
- c = C()
-
- c_refs = gc.get_referrers(c)
- ref_ids = list(map(id,c_refs))
-
- print('c referrers:',list(map(type,c_refs)))
+"""Simple script to show reference holding behavior.
+
+This is used by a companion test case.
+"""
+from __future__ import print_function
+
+import gc
+
+class C(object):
+ def __del__(self):
+ pass
+ #print 'deleting object...' # dbg
+
+if __name__ == '__main__':
+ c = C()
+
+ c_refs = gc.get_referrers(c)
+ ref_ids = list(map(id,c_refs))
+
+ print('c referrers:',list(map(type,c_refs)))
diff --git a/contrib/python/ipython/py2/IPython/testing/plugin/simple.py b/contrib/python/ipython/py2/IPython/testing/plugin/simple.py
index bcc43f55e8..a7d33d9a16 100644
--- a/contrib/python/ipython/py2/IPython/testing/plugin/simple.py
+++ b/contrib/python/ipython/py2/IPython/testing/plugin/simple.py
@@ -1,34 +1,34 @@
-"""Simple example using doctests.
-
-This file just contains doctests using both plain Python and IPython prompts.
-All tests should be loaded by nose.
-"""
-from __future__ import print_function
-
-def pyfunc():
- """Some pure python tests...
-
- >>> pyfunc()
- 'pyfunc'
-
- >>> import os
-
- >>> 2+3
- 5
-
- >>> for i in range(3):
- ... print(i, end=' ')
- ... print(i+1, end=' ')
- ...
- 0 1 1 2 2 3
- """
- return 'pyfunc'
-
-
-def ipyfunc2():
- """Some pure python tests...
-
- >>> 1+1
- 2
- """
- return 'pyfunc2'
+"""Simple example using doctests.
+
+This file just contains doctests using both plain Python and IPython prompts.
+All tests should be loaded by nose.
+"""
+from __future__ import print_function
+
+def pyfunc():
+ """Some pure python tests...
+
+ >>> pyfunc()
+ 'pyfunc'
+
+ >>> import os
+
+ >>> 2+3
+ 5
+
+ >>> for i in range(3):
+ ... print(i, end=' ')
+ ... print(i+1, end=' ')
+ ...
+ 0 1 1 2 2 3
+ """
+ return 'pyfunc'
+
+
+def ipyfunc2():
+ """Some pure python tests...
+
+ >>> 1+1
+ 2
+ """
+ return 'pyfunc2'
diff --git a/contrib/python/ipython/py2/IPython/testing/plugin/simplevars.py b/contrib/python/ipython/py2/IPython/testing/plugin/simplevars.py
index ee4039a59a..5134c6e928 100644
--- a/contrib/python/ipython/py2/IPython/testing/plugin/simplevars.py
+++ b/contrib/python/ipython/py2/IPython/testing/plugin/simplevars.py
@@ -1,3 +1,3 @@
-from __future__ import print_function
-x = 1
-print('x is:',x)
+from __future__ import print_function
+x = 1
+print('x is:',x)
diff --git a/contrib/python/ipython/py2/IPython/testing/plugin/test_combo.txt b/contrib/python/ipython/py2/IPython/testing/plugin/test_combo.txt
index 0de694fe7e..6c8759f3e7 100644
--- a/contrib/python/ipython/py2/IPython/testing/plugin/test_combo.txt
+++ b/contrib/python/ipython/py2/IPython/testing/plugin/test_combo.txt
@@ -1,36 +1,36 @@
-=======================
- Combo testing example
-=======================
-
-This is a simple example that mixes ipython doctests::
-
- In [1]: import code
-
- In [2]: 2**12
- Out[2]: 4096
-
-with command-line example information that does *not* get executed::
-
- $ mpirun -n 4 ipengine --controller-port=10000 --controller-ip=host0
-
-and with literal examples of Python source code::
-
- controller = dict(host='myhost',
- engine_port=None, # default is 10105
- control_port=None,
- )
-
-    # keys are hostnames, values are the number of engines on that host
- engines = dict(node1=2,
- node2=2,
- node3=2,
- node3=2,
- )
-
- # Force failure to detect that this test is being run.
- 1/0
-
-These source code examples are executed but no output is compared at all. An
-error or failure is reported only if an exception is raised.
-
-NOTE: the execution of pure python blocks is not yet working!
+=======================
+ Combo testing example
+=======================
+
+This is a simple example that mixes ipython doctests::
+
+ In [1]: import code
+
+ In [2]: 2**12
+ Out[2]: 4096
+
+with command-line example information that does *not* get executed::
+
+ $ mpirun -n 4 ipengine --controller-port=10000 --controller-ip=host0
+
+and with literal examples of Python source code::
+
+ controller = dict(host='myhost',
+ engine_port=None, # default is 10105
+ control_port=None,
+ )
+
+    # keys are hostnames, values are the number of engines on that host
+ engines = dict(node1=2,
+ node2=2,
+ node3=2,
+ node3=2,
+ )
+
+ # Force failure to detect that this test is being run.
+ 1/0
+
+These source code examples are executed but no output is compared at all. An
+error or failure is reported only if an exception is raised.
+
+NOTE: the execution of pure python blocks is not yet working!
diff --git a/contrib/python/ipython/py2/IPython/testing/plugin/test_example.txt b/contrib/python/ipython/py2/IPython/testing/plugin/test_example.txt
index f6258b0615..f8b681eb4f 100644
--- a/contrib/python/ipython/py2/IPython/testing/plugin/test_example.txt
+++ b/contrib/python/ipython/py2/IPython/testing/plugin/test_example.txt
@@ -1,24 +1,24 @@
-=====================================
- Tests in example form - pure python
-=====================================
-
-This file contains doctest examples embedded as code blocks, using normal
-Python prompts. See the accompanying file for similar examples using IPython
-prompts (you can't mix both types within one file). The following will be run
-as a test::
-
- >>> 1+1
- 2
- >>> print ("hello")
- hello
-
-More than one example works::
-
- >>> s="Hello World"
-
- >>> s.upper()
- 'HELLO WORLD'
-
-but you should note that the *entire* test file is considered to be a single
-test. Individual code blocks that fail are printed separately as ``example
-failures``, but the whole file is still counted and reported as one test.
+=====================================
+ Tests in example form - pure python
+=====================================
+
+This file contains doctest examples embedded as code blocks, using normal
+Python prompts. See the accompanying file for similar examples using IPython
+prompts (you can't mix both types within one file). The following will be run
+as a test::
+
+ >>> 1+1
+ 2
+ >>> print ("hello")
+ hello
+
+More than one example works::
+
+ >>> s="Hello World"
+
+ >>> s.upper()
+ 'HELLO WORLD'
+
+but you should note that the *entire* test file is considered to be a single
+test. Individual code blocks that fail are printed separately as ``example
+failures``, but the whole file is still counted and reported as one test.
diff --git a/contrib/python/ipython/py2/IPython/testing/plugin/test_exampleip.txt b/contrib/python/ipython/py2/IPython/testing/plugin/test_exampleip.txt
index cbc00cc976..8afcbfdf7d 100644
--- a/contrib/python/ipython/py2/IPython/testing/plugin/test_exampleip.txt
+++ b/contrib/python/ipython/py2/IPython/testing/plugin/test_exampleip.txt
@@ -1,30 +1,30 @@
-=================================
- Tests in example form - IPython
-=================================
-
-You can write text files with examples that use IPython prompts (as long as you
-use the nose ipython doctest plugin), but you can not mix and match prompt
-styles in a single file. That is, you either use all ``>>>`` prompts or all
-IPython-style prompts. Your test suite *can* have both types, you just need to
-put each type of example in a separate file. Using IPython prompts, you can paste
-directly from your session::
-
- In [5]: s="Hello World"
-
- In [6]: s.upper()
- Out[6]: 'HELLO WORLD'
-
-Another example::
-
- In [8]: 1+3
- Out[8]: 4
-
-Just like in IPython docstrings, you can use all IPython syntax and features::
-
- In [9]: !echo "hello"
- hello
-
- In [10]: a='hi'
-
- In [11]: !echo $a
- hi
+=================================
+ Tests in example form - IPython
+=================================
+
+You can write text files with examples that use IPython prompts (as long as you
+use the nose ipython doctest plugin), but you can not mix and match prompt
+styles in a single file. That is, you either use all ``>>>`` prompts or all
+IPython-style prompts. Your test suite *can* have both types, you just need to
+put each type of example in a separate file. Using IPython prompts, you can paste
+directly from your session::
+
+ In [5]: s="Hello World"
+
+ In [6]: s.upper()
+ Out[6]: 'HELLO WORLD'
+
+Another example::
+
+ In [8]: 1+3
+ Out[8]: 4
+
+Just like in IPython docstrings, you can use all IPython syntax and features::
+
+ In [9]: !echo "hello"
+ hello
+
+ In [10]: a='hi'
+
+ In [11]: !echo $a
+ hi
diff --git a/contrib/python/ipython/py2/IPython/testing/plugin/test_ipdoctest.py b/contrib/python/ipython/py2/IPython/testing/plugin/test_ipdoctest.py
index 05dc387d9b..a7add7da79 100644
--- a/contrib/python/ipython/py2/IPython/testing/plugin/test_ipdoctest.py
+++ b/contrib/python/ipython/py2/IPython/testing/plugin/test_ipdoctest.py
@@ -1,80 +1,80 @@
-"""Tests for the ipdoctest machinery itself.
-
-Note: in a file named test_X, functions whose only test is their docstring (as
-a doctest) and which have no test functionality of their own, should be called
-'doctest_foo' instead of 'test_foo', otherwise they get double-counted (the
-empty function call is counted as a test, which just inflates test numbers
-artificially).
-"""
-from IPython.utils.py3compat import doctest_refactor_print
-
-@doctest_refactor_print
-def doctest_simple():
- """ipdoctest must handle simple inputs
-
- In [1]: 1
- Out[1]: 1
-
- In [2]: print 1
- 1
- """
-
-@doctest_refactor_print
-def doctest_multiline1():
- """The ipdoctest machinery must handle multiline examples gracefully.
-
- In [2]: for i in range(4):
- ...: print i
- ...:
- 0
- 1
- 2
- 3
- """
-
-@doctest_refactor_print
-def doctest_multiline2():
- """Multiline examples that define functions and print output.
-
- In [7]: def f(x):
- ...: return x+1
- ...:
-
- In [8]: f(1)
- Out[8]: 2
-
- In [9]: def g(x):
- ...: print 'x is:',x
- ...:
-
- In [10]: g(1)
- x is: 1
-
- In [11]: g('hello')
- x is: hello
- """
-
-
-def doctest_multiline3():
- """Multiline examples with blank lines.
-
- In [12]: def h(x):
- ....: if x>1:
- ....: return x**2
- ....: # To leave a blank line in the input, you must mark it
- ....: # with a comment character:
- ....: #
- ....: # otherwise the doctest parser gets confused.
- ....: else:
- ....: return -1
- ....:
-
- In [13]: h(5)
- Out[13]: 25
-
- In [14]: h(1)
- Out[14]: -1
-
- In [15]: h(0)
- Out[15]: -1
- """
+"""Tests for the ipdoctest machinery itself.
+
+Note: in a file named test_X, functions whose only test is their docstring (as
+a doctest) and which have no test functionality of their own, should be called
+'doctest_foo' instead of 'test_foo', otherwise they get double-counted (the
+empty function call is counted as a test, which just inflates test numbers
+artificially).
+"""
+from IPython.utils.py3compat import doctest_refactor_print
+
+@doctest_refactor_print
+def doctest_simple():
+ """ipdoctest must handle simple inputs
+
+ In [1]: 1
+ Out[1]: 1
+
+ In [2]: print 1
+ 1
+ """
+
+@doctest_refactor_print
+def doctest_multiline1():
+ """The ipdoctest machinery must handle multiline examples gracefully.
+
+ In [2]: for i in range(4):
+ ...: print i
+ ...:
+ 0
+ 1
+ 2
+ 3
+ """
+
+@doctest_refactor_print
+def doctest_multiline2():
+ """Multiline examples that define functions and print output.
+
+ In [7]: def f(x):
+ ...: return x+1
+ ...:
+
+ In [8]: f(1)
+ Out[8]: 2
+
+ In [9]: def g(x):
+ ...: print 'x is:',x
+ ...:
+
+ In [10]: g(1)
+ x is: 1
+
+ In [11]: g('hello')
+ x is: hello
+ """
+
+
+def doctest_multiline3():
+ """Multiline examples with blank lines.
+
+ In [12]: def h(x):
+ ....: if x>1:
+ ....: return x**2
+ ....: # To leave a blank line in the input, you must mark it
+ ....: # with a comment character:
+ ....: #
+ ....: # otherwise the doctest parser gets confused.
+ ....: else:
+ ....: return -1
+ ....:
+
+ In [13]: h(5)
+ Out[13]: 25
+
+ In [14]: h(1)
+ Out[14]: -1
+
+ In [15]: h(0)
+ Out[15]: -1
+ """
diff --git a/contrib/python/ipython/py2/IPython/testing/plugin/test_refs.py b/contrib/python/ipython/py2/IPython/testing/plugin/test_refs.py
index 8a2a78b75d..50d0857134 100644
--- a/contrib/python/ipython/py2/IPython/testing/plugin/test_refs.py
+++ b/contrib/python/ipython/py2/IPython/testing/plugin/test_refs.py
@@ -1,46 +1,46 @@
-"""Some simple tests for the plugin while running scripts.
-"""
-# Module imports
-# Std lib
-import inspect
-
-# Our own
-
-#-----------------------------------------------------------------------------
-# Testing functions
-
-def test_trivial():
- """A trivial passing test."""
- pass
-
-def doctest_run():
- """Test running a trivial script.
-
- In [13]: run simplevars.py
- x is: 1
- """
-
-def doctest_runvars():
-    """Test that variables defined in scripts get loaded correctly via %run.
-
- In [13]: run simplevars.py
- x is: 1
-
- In [14]: x
- Out[14]: 1
- """
-
-def doctest_ivars():
- """Test that variables defined interactively are picked up.
- In [5]: zz=1
-
- In [6]: zz
- Out[6]: 1
- """
-
-def doctest_refs():
- """DocTest reference holding issues when running scripts.
-
- In [32]: run show_refs.py
- c referrers: [<... 'dict'>]
- """
+"""Some simple tests for the plugin while running scripts.
+"""
+# Module imports
+# Std lib
+import inspect
+
+# Our own
+
+#-----------------------------------------------------------------------------
+# Testing functions
+
+def test_trivial():
+ """A trivial passing test."""
+ pass
+
+def doctest_run():
+ """Test running a trivial script.
+
+ In [13]: run simplevars.py
+ x is: 1
+ """
+
+def doctest_runvars():
+    """Test that variables defined in scripts get loaded correctly via %run.
+
+ In [13]: run simplevars.py
+ x is: 1
+
+ In [14]: x
+ Out[14]: 1
+ """
+
+def doctest_ivars():
+ """Test that variables defined interactively are picked up.
+ In [5]: zz=1
+
+ In [6]: zz
+ Out[6]: 1
+ """
+
+def doctest_refs():
+ """DocTest reference holding issues when running scripts.
+
+ In [32]: run show_refs.py
+ c referrers: [<... 'dict'>]
+ """
diff --git a/contrib/python/ipython/py2/IPython/testing/skipdoctest.py b/contrib/python/ipython/py2/IPython/testing/skipdoctest.py
index 8357f609b0..564ca54027 100644
--- a/contrib/python/ipython/py2/IPython/testing/skipdoctest.py
+++ b/contrib/python/ipython/py2/IPython/testing/skipdoctest.py
@@ -1,41 +1,41 @@
-"""Decorators marks that a doctest should be skipped, for both python 2 and 3.
-
-The IPython.testing.decorators module triggers various extra imports, including
-numpy and sympy if they're present. Since this decorator is used in core parts
-of IPython, it's in a separate module so that running IPython doesn't trigger
-those imports."""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2009-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import sys
-
-#-----------------------------------------------------------------------------
-# Decorators
-#-----------------------------------------------------------------------------
-
-def skip_doctest(f):
- """Decorator - mark a function or method for skipping its doctest.
-
- This decorator allows you to mark a function whose docstring you wish to
- omit from testing, while preserving the docstring for introspection, help,
- etc."""
- f.skip_doctest = True
- return f
-
-
-def skip_doctest_py3(f):
- """Decorator - skip the doctest under Python 3."""
- f.skip_doctest = (sys.version_info[0] >= 3)
- return f
+"""Decorators marks that a doctest should be skipped, for both python 2 and 3.
+
+The IPython.testing.decorators module triggers various extra imports, including
+numpy and sympy if they're present. Since this decorator is used in core parts
+of IPython, it's in a separate module so that running IPython doesn't trigger
+those imports."""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2009-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import sys
+
+#-----------------------------------------------------------------------------
+# Decorators
+#-----------------------------------------------------------------------------
+
+def skip_doctest(f):
+ """Decorator - mark a function or method for skipping its doctest.
+
+ This decorator allows you to mark a function whose docstring you wish to
+ omit from testing, while preserving the docstring for introspection, help,
+ etc."""
+ f.skip_doctest = True
+ return f
+
+
+def skip_doctest_py3(f):
+ """Decorator - skip the doctest under Python 3."""
+ f.skip_doctest = (sys.version_info[0] >= 3)
+ return f
def skip_doctest_py2(f):
"""Decorator - skip the doctest under Python 3."""
diff --git a/contrib/python/ipython/py2/IPython/testing/tools.py b/contrib/python/ipython/py2/IPython/testing/tools.py
index 98f796fe3c..23bf6a68cb 100644
--- a/contrib/python/ipython/py2/IPython/testing/tools.py
+++ b/contrib/python/ipython/py2/IPython/testing/tools.py
@@ -1,296 +1,296 @@
-"""Generic testing tools.
-
-Authors
--------
-- Fernando Perez <Fernando.Perez@berkeley.edu>
-"""
-
-from __future__ import absolute_import
-
+"""Generic testing tools.
+
+Authors
+-------
+- Fernando Perez <Fernando.Perez@berkeley.edu>
+"""
+
+from __future__ import absolute_import
+
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
-
-import os
-import re
-import sys
-import tempfile
-
-from contextlib import contextmanager
-from io import StringIO
-from subprocess import Popen, PIPE
+
+import os
+import re
+import sys
+import tempfile
+
+from contextlib import contextmanager
+from io import StringIO
+from subprocess import Popen, PIPE
try:
from unittest.mock import patch
except ImportError:
# Python 2 compatibility
from mock import patch
-
-try:
- # These tools are used by parts of the runtime, so we make the nose
- # dependency optional at this point. Nose is a hard dependency to run the
- # test suite, but NOT to use ipython itself.
- import nose.tools as nt
- has_nose = True
-except ImportError:
- has_nose = False
-
-from traitlets.config.loader import Config
-from IPython.utils.process import get_output_error_code
-from IPython.utils.text import list_strings
-from IPython.utils.io import temp_pyfile, Tee
-from IPython.utils import py3compat
-from IPython.utils.encoding import DEFAULT_ENCODING
-
-from . import decorators as dec
-from . import skipdoctest
-
-
-# The docstring for full_path doctests differently on win32 (different path
-# separator) so just skip the doctest there. The example remains informative.
-doctest_deco = skipdoctest.skip_doctest if sys.platform == 'win32' else dec.null_deco
-
-@doctest_deco
-def full_path(startPath,files):
- """Make full paths for all the listed files, based on startPath.
-
- Only the base part of startPath is kept, since this routine is typically
- used with a script's ``__file__`` variable as startPath. The base of startPath
- is then prepended to all the listed files, forming the output list.
-
- Parameters
- ----------
- startPath : string
- Initial path to use as the base for the results. This path is split
- using os.path.split() and only its first component is kept.
-
- files : string or list
- One or more files.
-
- Examples
- --------
-
- >>> full_path('/foo/bar.py',['a.txt','b.txt'])
- ['/foo/a.txt', '/foo/b.txt']
-
- >>> full_path('/foo',['a.txt','b.txt'])
- ['/a.txt', '/b.txt']
-
- If a single file is given, the output is still a list::
-
- >>> full_path('/foo','a.txt')
- ['/a.txt']
- """
-
- files = list_strings(files)
- base = os.path.split(startPath)[0]
- return [ os.path.join(base,f) for f in files ]
-
-
-def parse_test_output(txt):
- """Parse the output of a test run and return errors, failures.
-
- Parameters
- ----------
- txt : str
- Text output of a test run, assumed to contain a line of one of the
- following forms::
-
- 'FAILED (errors=1)'
- 'FAILED (failures=1)'
- 'FAILED (errors=1, failures=1)'
-
- Returns
- -------
- nerr, nfail
- number of errors and failures.
- """
-
- err_m = re.search(r'^FAILED \(errors=(\d+)\)', txt, re.MULTILINE)
- if err_m:
- nerr = int(err_m.group(1))
- nfail = 0
- return nerr, nfail
-
- fail_m = re.search(r'^FAILED \(failures=(\d+)\)', txt, re.MULTILINE)
- if fail_m:
- nerr = 0
- nfail = int(fail_m.group(1))
- return nerr, nfail
-
- both_m = re.search(r'^FAILED \(errors=(\d+), failures=(\d+)\)', txt,
- re.MULTILINE)
- if both_m:
- nerr = int(both_m.group(1))
- nfail = int(both_m.group(2))
- return nerr, nfail
-
- # If the input didn't match any of these forms, assume no error/failures
- return 0, 0
-
-
-# So nose doesn't think this is a test
-parse_test_output.__test__ = False
-
-
-def default_argv():
- """Return a valid default argv for creating testing instances of ipython"""
-
- return ['--quick', # so no config file is loaded
- # Other defaults to minimize side effects on stdout
- '--colors=NoColor', '--no-term-title','--no-banner',
- '--autocall=0']
-
-
-def default_config():
- """Return a config object with good defaults for testing."""
- config = Config()
- config.TerminalInteractiveShell.colors = 'NoColor'
-    config.TerminalInteractiveShell.term_title = False
- config.TerminalInteractiveShell.autocall = 0
- f = tempfile.NamedTemporaryFile(suffix=u'test_hist.sqlite', delete=False)
- config.HistoryManager.hist_file = f.name
- f.close()
- config.HistoryManager.db_cache_size = 10000
- return config
-
-
-def get_ipython_cmd(as_string=False):
- """
- Return appropriate IPython command line name. By default, this will return
- a list that can be used with subprocess.Popen, for example, but passing
- `as_string=True` allows for returning the IPython command as a string.
-
- Parameters
- ----------
- as_string: bool
-        Flag to allow returning the command as a string.
- """
- ipython_cmd = [sys.executable, "-m", "IPython"]
-
- if as_string:
- ipython_cmd = " ".join(ipython_cmd)
-
- return ipython_cmd
-
-def ipexec(fname, options=None, commands=()):
- """Utility to call 'ipython filename'.
-
- Starts IPython with a minimal and safe configuration to make startup as fast
- as possible.
-
- Note that this starts IPython in a subprocess!
-
- Parameters
- ----------
- fname : str
- Name of file to be executed (should have .py or .ipy extension).
-
- options : optional, list
- Extra command-line flags to be passed to IPython.
-
- commands : optional, list
- Commands to send in on stdin
-
- Returns
- -------
- (stdout, stderr) of ipython subprocess.
- """
- if options is None: options = []
-
+
+try:
+ # These tools are used by parts of the runtime, so we make the nose
+ # dependency optional at this point. Nose is a hard dependency to run the
+ # test suite, but NOT to use ipython itself.
+ import nose.tools as nt
+ has_nose = True
+except ImportError:
+ has_nose = False
+
+from traitlets.config.loader import Config
+from IPython.utils.process import get_output_error_code
+from IPython.utils.text import list_strings
+from IPython.utils.io import temp_pyfile, Tee
+from IPython.utils import py3compat
+from IPython.utils.encoding import DEFAULT_ENCODING
+
+from . import decorators as dec
+from . import skipdoctest
+
+
+# The docstring for full_path doctests differently on win32 (different path
+# separator) so just skip the doctest there. The example remains informative.
+doctest_deco = skipdoctest.skip_doctest if sys.platform == 'win32' else dec.null_deco
+
+@doctest_deco
+def full_path(startPath,files):
+ """Make full paths for all the listed files, based on startPath.
+
+ Only the base part of startPath is kept, since this routine is typically
+ used with a script's ``__file__`` variable as startPath. The base of startPath
+ is then prepended to all the listed files, forming the output list.
+
+ Parameters
+ ----------
+ startPath : string
+ Initial path to use as the base for the results. This path is split
+ using os.path.split() and only its first component is kept.
+
+ files : string or list
+ One or more files.
+
+ Examples
+ --------
+
+ >>> full_path('/foo/bar.py',['a.txt','b.txt'])
+ ['/foo/a.txt', '/foo/b.txt']
+
+ >>> full_path('/foo',['a.txt','b.txt'])
+ ['/a.txt', '/b.txt']
+
+ If a single file is given, the output is still a list::
+
+ >>> full_path('/foo','a.txt')
+ ['/a.txt']
+ """
+
+ files = list_strings(files)
+ base = os.path.split(startPath)[0]
+ return [ os.path.join(base,f) for f in files ]
+
+
+def parse_test_output(txt):
+ """Parse the output of a test run and return errors, failures.
+
+ Parameters
+ ----------
+ txt : str
+ Text output of a test run, assumed to contain a line of one of the
+ following forms::
+
+ 'FAILED (errors=1)'
+ 'FAILED (failures=1)'
+ 'FAILED (errors=1, failures=1)'
+
+ Returns
+ -------
+ nerr, nfail
+ number of errors and failures.
+ """
+
+ err_m = re.search(r'^FAILED \(errors=(\d+)\)', txt, re.MULTILINE)
+ if err_m:
+ nerr = int(err_m.group(1))
+ nfail = 0
+ return nerr, nfail
+
+ fail_m = re.search(r'^FAILED \(failures=(\d+)\)', txt, re.MULTILINE)
+ if fail_m:
+ nerr = 0
+ nfail = int(fail_m.group(1))
+ return nerr, nfail
+
+ both_m = re.search(r'^FAILED \(errors=(\d+), failures=(\d+)\)', txt,
+ re.MULTILINE)
+ if both_m:
+ nerr = int(both_m.group(1))
+ nfail = int(both_m.group(2))
+ return nerr, nfail
+
+ # If the input didn't match any of these forms, assume no error/failures
+ return 0, 0
+
+
+# So nose doesn't think this is a test
+parse_test_output.__test__ = False
+
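# Editor's illustrative sketch (not part of the patch): parse_test_output()
# applied to a typical unittest/nose summary; it returns (nerr, nfail).
summary = "Ran 10 tests in 0.012s\n\nFAILED (errors=1, failures=2)\n"
assert parse_test_output(summary) == (1, 2)
assert parse_test_output("OK\n") == (0, 0)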
+
+def default_argv():
+ """Return a valid default argv for creating testing instances of ipython"""
+
+ return ['--quick', # so no config file is loaded
+ # Other defaults to minimize side effects on stdout
+ '--colors=NoColor', '--no-term-title','--no-banner',
+ '--autocall=0']
+
+
+def default_config():
+ """Return a config object with good defaults for testing."""
+ config = Config()
+ config.TerminalInteractiveShell.colors = 'NoColor'
+    config.TerminalInteractiveShell.term_title = False
+ config.TerminalInteractiveShell.autocall = 0
+ f = tempfile.NamedTemporaryFile(suffix=u'test_hist.sqlite', delete=False)
+ config.HistoryManager.hist_file = f.name
+ f.close()
+ config.HistoryManager.db_cache_size = 10000
+ return config
+
+
+def get_ipython_cmd(as_string=False):
+ """
+ Return appropriate IPython command line name. By default, this will return
+ a list that can be used with subprocess.Popen, for example, but passing
+ `as_string=True` allows for returning the IPython command as a string.
+
+ Parameters
+ ----------
+    as_string : bool
+        If True, return the command as a single string instead of a list.
+ """
+ ipython_cmd = [sys.executable, "-m", "IPython"]
+
+ if as_string:
+ ipython_cmd = " ".join(ipython_cmd)
+
+ return ipython_cmd
+
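# Editor's illustrative sketch (not part of the patch): the two forms returned
# by get_ipython_cmd(); the list form is what subprocess.Popen expects.
cmd_list = get_ipython_cmd()               # e.g. [sys.executable, '-m', 'IPython']
cmd_str = get_ipython_cmd(as_string=True)  # e.g. '/usr/bin/python -m IPython'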
+def ipexec(fname, options=None, commands=()):
+ """Utility to call 'ipython filename'.
+
+ Starts IPython with a minimal and safe configuration to make startup as fast
+ as possible.
+
+ Note that this starts IPython in a subprocess!
+
+ Parameters
+ ----------
+ fname : str
+ Name of file to be executed (should have .py or .ipy extension).
+
+ options : optional, list
+ Extra command-line flags to be passed to IPython.
+
+ commands : optional, list
+ Commands to send in on stdin
+
+ Returns
+ -------
+ (stdout, stderr) of ipython subprocess.
+ """
+ if options is None: options = []
+
cmdargs = default_argv() + options
-
- test_dir = os.path.dirname(__file__)
-
- ipython_cmd = get_ipython_cmd()
- # Absolute path for filename
- full_fname = os.path.join(test_dir, fname)
- full_cmd = ipython_cmd + cmdargs + [full_fname]
- env = os.environ.copy()
- # FIXME: ignore all warnings in ipexec while we have shims
- # should we keep suppressing warnings here, even after removing shims?
- env['PYTHONWARNINGS'] = 'ignore'
- # env.pop('PYTHONWARNINGS', None) # Avoid extraneous warnings appearing on stderr
- for k, v in env.items():
- # Debug a bizarre failure we've seen on Windows:
- # TypeError: environment can only contain strings
- if not isinstance(v, str):
- print(k, v)
- p = Popen(full_cmd, stdout=PIPE, stderr=PIPE, stdin=PIPE, env=env)
- out, err = p.communicate(input=py3compat.str_to_bytes('\n'.join(commands)) or None)
- out, err = py3compat.bytes_to_str(out), py3compat.bytes_to_str(err)
- # `import readline` causes 'ESC[?1034h' to be output sometimes,
- # so strip that out before doing comparisons
- if out:
- out = re.sub(r'\x1b\[[^h]+h', '', out)
- return out, err
-
-
-def ipexec_validate(fname, expected_out, expected_err='',
- options=None, commands=()):
- """Utility to call 'ipython filename' and validate output/error.
-
- This function raises an AssertionError if the validation fails.
-
- Note that this starts IPython in a subprocess!
-
- Parameters
- ----------
- fname : str
- Name of the file to be executed (should have .py or .ipy extension).
-
- expected_out : str
- Expected stdout of the process.
-
- expected_err : optional, str
- Expected stderr of the process.
-
- options : optional, list
- Extra command-line flags to be passed to IPython.
-
- Returns
- -------
- None
- """
-
- import nose.tools as nt
-
- out, err = ipexec(fname, options, commands)
- #print 'OUT', out # dbg
- #print 'ERR', err # dbg
-    # If there are any errors, we must check those before stdout, as they may be
- # more informative than simply having an empty stdout.
- if err:
- if expected_err:
- nt.assert_equal("\n".join(err.strip().splitlines()), "\n".join(expected_err.strip().splitlines()))
- else:
- raise ValueError('Running file %r produced error: %r' %
- (fname, err))
- # If no errors or output on stderr was expected, match stdout
- nt.assert_equal("\n".join(out.strip().splitlines()), "\n".join(expected_out.strip().splitlines()))
-
-
-class TempFileMixin(object):
- """Utility class to create temporary Python/IPython files.
-
- Meant as a mixin class for test cases."""
-
- def mktmp(self, src, ext='.py'):
- """Make a valid python temp file."""
- fname, f = temp_pyfile(src, ext)
- self.tmpfile = f
- self.fname = fname
-
- def tearDown(self):
- if hasattr(self, 'tmpfile'):
- # If the tmpfile wasn't made because of skipped tests, like in
-            # win32, there's nothing to clean up.
- self.tmpfile.close()
- try:
- os.unlink(self.fname)
- except:
- # On Windows, even though we close the file, we still can't
- # delete it. I have no clue why
- pass
-
+
+ test_dir = os.path.dirname(__file__)
+
+ ipython_cmd = get_ipython_cmd()
+ # Absolute path for filename
+ full_fname = os.path.join(test_dir, fname)
+ full_cmd = ipython_cmd + cmdargs + [full_fname]
+ env = os.environ.copy()
+ # FIXME: ignore all warnings in ipexec while we have shims
+ # should we keep suppressing warnings here, even after removing shims?
+ env['PYTHONWARNINGS'] = 'ignore'
+ # env.pop('PYTHONWARNINGS', None) # Avoid extraneous warnings appearing on stderr
+ for k, v in env.items():
+ # Debug a bizarre failure we've seen on Windows:
+ # TypeError: environment can only contain strings
+ if not isinstance(v, str):
+ print(k, v)
+ p = Popen(full_cmd, stdout=PIPE, stderr=PIPE, stdin=PIPE, env=env)
+ out, err = p.communicate(input=py3compat.str_to_bytes('\n'.join(commands)) or None)
+ out, err = py3compat.bytes_to_str(out), py3compat.bytes_to_str(err)
+ # `import readline` causes 'ESC[?1034h' to be output sometimes,
+ # so strip that out before doing comparisons
+ if out:
+ out = re.sub(r'\x1b\[[^h]+h', '', out)
+ return out, err
+
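# Editor's illustrative sketch (not part of the patch): running a hypothetical
# script 'simple.py' (assumed to live next to this module) and sending one
# line of input on stdin.
out, err = ipexec('simple.py', commands=['print(2 + 2)'])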
+
+def ipexec_validate(fname, expected_out, expected_err='',
+ options=None, commands=()):
+ """Utility to call 'ipython filename' and validate output/error.
+
+ This function raises an AssertionError if the validation fails.
+
+ Note that this starts IPython in a subprocess!
+
+ Parameters
+ ----------
+ fname : str
+ Name of the file to be executed (should have .py or .ipy extension).
+
+ expected_out : str
+ Expected stdout of the process.
+
+ expected_err : optional, str
+ Expected stderr of the process.
+
+ options : optional, list
+ Extra command-line flags to be passed to IPython.
+
+ Returns
+ -------
+ None
+ """
+
+ import nose.tools as nt
+
+ out, err = ipexec(fname, options, commands)
+ #print 'OUT', out # dbg
+ #print 'ERR', err # dbg
+    # If there are any errors, we must check those before stdout, as they may be
+ # more informative than simply having an empty stdout.
+ if err:
+ if expected_err:
+ nt.assert_equal("\n".join(err.strip().splitlines()), "\n".join(expected_err.strip().splitlines()))
+ else:
+ raise ValueError('Running file %r produced error: %r' %
+ (fname, err))
+ # If no errors or output on stderr was expected, match stdout
+ nt.assert_equal("\n".join(out.strip().splitlines()), "\n".join(expected_out.strip().splitlines()))
+
+
+class TempFileMixin(object):
+ """Utility class to create temporary Python/IPython files.
+
+ Meant as a mixin class for test cases."""
+
+ def mktmp(self, src, ext='.py'):
+ """Make a valid python temp file."""
+ fname, f = temp_pyfile(src, ext)
+ self.tmpfile = f
+ self.fname = fname
+
+ def tearDown(self):
+ if hasattr(self, 'tmpfile'):
+ # If the tmpfile wasn't made because of skipped tests, like in
+            # win32, there's nothing to clean up.
+ self.tmpfile.close()
+ try:
+ os.unlink(self.fname)
+ except:
+ # On Windows, even though we close the file, we still can't
+ # delete it. I have no clue why
+ pass
+
def __enter__(self):
return self
@@ -298,149 +298,149 @@ class TempFileMixin(object):
self.tearDown()
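# Editor's illustrative sketch (not part of the patch): a hypothetical test case
# using the mixin; listing TempFileMixin first keeps its tearDown() ahead of
# unittest.TestCase's no-op tearDown() in the MRO.
import unittest

class ExampleTempFileTest(TempFileMixin, unittest.TestCase):
    def test_file_exists(self):
        self.mktmp("x = 1\n")
        # self.fname now points at a real temporary .py file
        self.assertTrue(os.path.isfile(self.fname))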
-pair_fail_msg = ("Testing {0}\n\n"
- "In:\n"
- " {1!r}\n"
- "Expected:\n"
- " {2!r}\n"
- "Got:\n"
- " {3!r}\n")
-def check_pairs(func, pairs):
- """Utility function for the common case of checking a function with a
- sequence of input/output pairs.
-
- Parameters
- ----------
- func : callable
- The function to be tested. Should accept a single argument.
- pairs : iterable
- A list of (input, expected_output) tuples.
-
- Returns
- -------
- None. Raises an AssertionError if any output does not match the expected
- value.
- """
- name = getattr(func, "func_name", getattr(func, "__name__", "<unknown>"))
- for inp, expected in pairs:
- out = func(inp)
- assert out == expected, pair_fail_msg.format(name, inp, expected, out)
-
-
-if py3compat.PY3:
- MyStringIO = StringIO
-else:
- # In Python 2, stdout/stderr can have either bytes or unicode written to them,
- # so we need a class that can handle both.
- class MyStringIO(StringIO):
- def write(self, s):
- s = py3compat.cast_unicode(s, encoding=DEFAULT_ENCODING)
- super(MyStringIO, self).write(s)
-
-_re_type = type(re.compile(r''))
-
-notprinted_msg = """Did not find {0!r} in printed output (on {1}):
--------
-{2!s}
--------
-"""
-
-class AssertPrints(object):
- """Context manager for testing that code prints certain text.
-
- Examples
- --------
- >>> with AssertPrints("abc", suppress=False):
- ... print("abcd")
- ... print("def")
- ...
- abcd
- def
- """
- def __init__(self, s, channel='stdout', suppress=True):
- self.s = s
- if isinstance(self.s, (py3compat.string_types, _re_type)):
- self.s = [self.s]
- self.channel = channel
- self.suppress = suppress
-
- def __enter__(self):
- self.orig_stream = getattr(sys, self.channel)
- self.buffer = MyStringIO()
- self.tee = Tee(self.buffer, channel=self.channel)
- setattr(sys, self.channel, self.buffer if self.suppress else self.tee)
-
- def __exit__(self, etype, value, traceback):
- try:
- if value is not None:
- # If an error was raised, don't check anything else
- return False
- self.tee.flush()
- setattr(sys, self.channel, self.orig_stream)
- printed = self.buffer.getvalue()
- for s in self.s:
- if isinstance(s, _re_type):
- assert s.search(printed), notprinted_msg.format(s.pattern, self.channel, printed)
- else:
- assert s in printed, notprinted_msg.format(s, self.channel, printed)
- return False
- finally:
- self.tee.close()
-
-printed_msg = """Found {0!r} in printed output (on {1}):
--------
-{2!s}
--------
-"""
-
-class AssertNotPrints(AssertPrints):
- """Context manager for checking that certain output *isn't* produced.
-
- Counterpart of AssertPrints"""
- def __exit__(self, etype, value, traceback):
- try:
- if value is not None:
- # If an error was raised, don't check anything else
- self.tee.close()
- return False
- self.tee.flush()
- setattr(sys, self.channel, self.orig_stream)
- printed = self.buffer.getvalue()
- for s in self.s:
- if isinstance(s, _re_type):
- assert not s.search(printed),printed_msg.format(
- s.pattern, self.channel, printed)
- else:
- assert s not in printed, printed_msg.format(
- s, self.channel, printed)
- return False
- finally:
- self.tee.close()
-
-@contextmanager
-def mute_warn():
- from IPython.utils import warn
- save_warn = warn.warn
- warn.warn = lambda *a, **kw: None
- try:
- yield
- finally:
- warn.warn = save_warn
-
-@contextmanager
-def make_tempfile(name):
- """ Create an empty, named, temporary file for the duration of the context.
- """
- f = open(name, 'w')
- f.close()
- try:
- yield
- finally:
- os.unlink(name)
-
+pair_fail_msg = ("Testing {0}\n\n"
+ "In:\n"
+ " {1!r}\n"
+ "Expected:\n"
+ " {2!r}\n"
+ "Got:\n"
+ " {3!r}\n")
+def check_pairs(func, pairs):
+ """Utility function for the common case of checking a function with a
+ sequence of input/output pairs.
+
+ Parameters
+ ----------
+ func : callable
+ The function to be tested. Should accept a single argument.
+ pairs : iterable
+ A list of (input, expected_output) tuples.
+
+ Returns
+ -------
+ None. Raises an AssertionError if any output does not match the expected
+ value.
+ """
+ name = getattr(func, "func_name", getattr(func, "__name__", "<unknown>"))
+ for inp, expected in pairs:
+ out = func(inp)
+ assert out == expected, pair_fail_msg.format(name, inp, expected, out)
+
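# Editor's illustrative sketch (not part of the patch): check_pairs() with a
# simple one-argument callable and a few (input, expected_output) pairs.
check_pairs(str.upper, [('abc', 'ABC'), ('x', 'X'), ('', '')])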
+
+if py3compat.PY3:
+ MyStringIO = StringIO
+else:
+ # In Python 2, stdout/stderr can have either bytes or unicode written to them,
+ # so we need a class that can handle both.
+ class MyStringIO(StringIO):
+ def write(self, s):
+ s = py3compat.cast_unicode(s, encoding=DEFAULT_ENCODING)
+ super(MyStringIO, self).write(s)
+
+_re_type = type(re.compile(r''))
+
+notprinted_msg = """Did not find {0!r} in printed output (on {1}):
+-------
+{2!s}
+-------
+"""
+
+class AssertPrints(object):
+ """Context manager for testing that code prints certain text.
+
+ Examples
+ --------
+ >>> with AssertPrints("abc", suppress=False):
+ ... print("abcd")
+ ... print("def")
+ ...
+ abcd
+ def
+ """
+ def __init__(self, s, channel='stdout', suppress=True):
+ self.s = s
+ if isinstance(self.s, (py3compat.string_types, _re_type)):
+ self.s = [self.s]
+ self.channel = channel
+ self.suppress = suppress
+
+ def __enter__(self):
+ self.orig_stream = getattr(sys, self.channel)
+ self.buffer = MyStringIO()
+ self.tee = Tee(self.buffer, channel=self.channel)
+ setattr(sys, self.channel, self.buffer if self.suppress else self.tee)
+
+ def __exit__(self, etype, value, traceback):
+ try:
+ if value is not None:
+ # If an error was raised, don't check anything else
+ return False
+ self.tee.flush()
+ setattr(sys, self.channel, self.orig_stream)
+ printed = self.buffer.getvalue()
+ for s in self.s:
+ if isinstance(s, _re_type):
+ assert s.search(printed), notprinted_msg.format(s.pattern, self.channel, printed)
+ else:
+ assert s in printed, notprinted_msg.format(s, self.channel, printed)
+ return False
+ finally:
+ self.tee.close()
+
+printed_msg = """Found {0!r} in printed output (on {1}):
+-------
+{2!s}
+-------
+"""
+
+class AssertNotPrints(AssertPrints):
+ """Context manager for checking that certain output *isn't* produced.
+
+ Counterpart of AssertPrints"""
+ def __exit__(self, etype, value, traceback):
+ try:
+ if value is not None:
+ # If an error was raised, don't check anything else
+ self.tee.close()
+ return False
+ self.tee.flush()
+ setattr(sys, self.channel, self.orig_stream)
+ printed = self.buffer.getvalue()
+ for s in self.s:
+ if isinstance(s, _re_type):
+ assert not s.search(printed),printed_msg.format(
+ s.pattern, self.channel, printed)
+ else:
+ assert s not in printed, printed_msg.format(
+ s, self.channel, printed)
+ return False
+ finally:
+ self.tee.close()
+
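# Editor's illustrative sketch (not part of the patch): both context managers
# also accept compiled regular expressions, which are matched with .search().
with AssertPrints(re.compile(r'\d+ items')):
    print("3 items processed")

with AssertNotPrints("secret"):
    print("nothing sensitive here")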
+@contextmanager
+def mute_warn():
+ from IPython.utils import warn
+ save_warn = warn.warn
+ warn.warn = lambda *a, **kw: None
+ try:
+ yield
+ finally:
+ warn.warn = save_warn
+
+@contextmanager
+def make_tempfile(name):
+ """ Create an empty, named, temporary file for the duration of the context.
+ """
+ f = open(name, 'w')
+ f.close()
+ try:
+ yield
+ finally:
+ os.unlink(name)
+
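# Editor's illustrative sketch (not part of the patch): make_tempfile() keeps
# the named file alive only for the duration of the with block ('scratch.txt'
# is an arbitrary example name).
with make_tempfile('scratch.txt'):
    assert os.path.isfile('scratch.txt')
assert not os.path.exists('scratch.txt')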
def fake_input(inputs):
"""Temporarily replace the input() function to return the given values
-
+
Use as a context manager:
with fake_input(['result1', 'result2']):
@@ -460,24 +460,24 @@ def fake_input(inputs):
'input' if py3compat.PY3 else 'raw_input')
return patch(input_name, mock_input)
-def help_output_test(subcommand=''):
- """test that `ipython [subcommand] -h` works"""
- cmd = get_ipython_cmd() + [subcommand, '-h']
- out, err, rc = get_output_error_code(cmd)
- nt.assert_equal(rc, 0, err)
- nt.assert_not_in("Traceback", err)
- nt.assert_in("Options", out)
- nt.assert_in("--help-all", out)
- return out, err
-
-
-def help_all_output_test(subcommand=''):
- """test that `ipython [subcommand] --help-all` works"""
- cmd = get_ipython_cmd() + [subcommand, '--help-all']
- out, err, rc = get_output_error_code(cmd)
- nt.assert_equal(rc, 0, err)
- nt.assert_not_in("Traceback", err)
- nt.assert_in("Options", out)
+def help_output_test(subcommand=''):
+ """test that `ipython [subcommand] -h` works"""
+ cmd = get_ipython_cmd() + [subcommand, '-h']
+ out, err, rc = get_output_error_code(cmd)
+ nt.assert_equal(rc, 0, err)
+ nt.assert_not_in("Traceback", err)
+ nt.assert_in("Options", out)
+ nt.assert_in("--help-all", out)
+ return out, err
+
+
+def help_all_output_test(subcommand=''):
+ """test that `ipython [subcommand] --help-all` works"""
+ cmd = get_ipython_cmd() + [subcommand, '--help-all']
+ out, err, rc = get_output_error_code(cmd)
+ nt.assert_equal(rc, 0, err)
+ nt.assert_not_in("Traceback", err)
+ nt.assert_in("Options", out)
nt.assert_in("Class", out)
- return out, err
-
+ return out, err
+
diff --git a/contrib/python/ipython/py2/IPython/utils/PyColorize.py b/contrib/python/ipython/py2/IPython/utils/PyColorize.py
index 13c03c3398..124eb2d4e3 100644
--- a/contrib/python/ipython/py2/IPython/utils/PyColorize.py
+++ b/contrib/python/ipython/py2/IPython/utils/PyColorize.py
@@ -1,127 +1,127 @@
-# -*- coding: utf-8 -*-
-"""
-Class and program to colorize python source code for ANSI terminals.
-
-Based on an HTML code highlighter by Jurgen Hermann found at:
-http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52298
-
-Modifications by Fernando Perez (fperez@colorado.edu).
-
-Information on the original HTML highlighter follows:
-
-MoinMoin - Python Source Parser
-
-Title: Colorize Python source using the built-in tokenizer
-
-Submitter: Jurgen Hermann
-Last Updated:2001/04/06
-
-Version no:1.2
-
-Description:
-
-This code is part of MoinMoin (http://moin.sourceforge.net/) and converts
-Python source code to HTML markup, rendering comments, keywords,
-operators, numeric and string literals in different colors.
-
-It shows how to use the built-in keyword, token and tokenize modules to
-scan Python source code and re-emit it with no changes to its original
-formatting (which is the hard part).
-"""
-from __future__ import print_function
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-__all__ = ['ANSICodeColors','Parser']
-
-_scheme_default = 'Linux'
-
-
-# Imports
-import keyword
-import os
-import sys
-import token
-import tokenize
-
-try:
- generate_tokens = tokenize.generate_tokens
-except AttributeError:
- # Python 3. Note that we use the undocumented _tokenize because it expects
- # strings, not bytes. See also Python issue #9969.
- generate_tokens = tokenize._tokenize
-
-from IPython.utils.coloransi import TermColors, InputTermColors ,ColorScheme, ColorSchemeTable
-from IPython.utils.py3compat import PY3
-
+# -*- coding: utf-8 -*-
+"""
+Class and program to colorize python source code for ANSI terminals.
+
+Based on an HTML code highlighter by Jurgen Hermann found at:
+http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52298
+
+Modifications by Fernando Perez (fperez@colorado.edu).
+
+Information on the original HTML highlighter follows:
+
+MoinMoin - Python Source Parser
+
+Title: Colorize Python source using the built-in tokenizer
+
+Submitter: Jurgen Hermann
+Last Updated:2001/04/06
+
+Version no:1.2
+
+Description:
+
+This code is part of MoinMoin (http://moin.sourceforge.net/) and converts
+Python source code to HTML markup, rendering comments, keywords,
+operators, numeric and string literals in different colors.
+
+It shows how to use the built-in keyword, token and tokenize modules to
+scan Python source code and re-emit it with no changes to its original
+formatting (which is the hard part).
+"""
+from __future__ import print_function
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+__all__ = ['ANSICodeColors','Parser']
+
+_scheme_default = 'Linux'
+
+
+# Imports
+import keyword
+import os
+import sys
+import token
+import tokenize
+
+try:
+ generate_tokens = tokenize.generate_tokens
+except AttributeError:
+ # Python 3. Note that we use the undocumented _tokenize because it expects
+ # strings, not bytes. See also Python issue #9969.
+ generate_tokens = tokenize._tokenize
+
+from IPython.utils.coloransi import TermColors, InputTermColors ,ColorScheme, ColorSchemeTable
+from IPython.utils.py3compat import PY3
+
from .colorable import Colorable
-if PY3:
- from io import StringIO
-else:
- from StringIO import StringIO
-
-#############################################################################
-### Python Source Parser (does Highlighting)
-#############################################################################
-
-_KEYWORD = token.NT_OFFSET + 1
-_TEXT = token.NT_OFFSET + 2
-
-#****************************************************************************
-# Builtin color schemes
-
-Colors = TermColors # just a shorthand
-
-# Build a few color schemes
-NoColor = ColorScheme(
- 'NoColor',{
- 'header' : Colors.NoColor,
- token.NUMBER : Colors.NoColor,
- token.OP : Colors.NoColor,
- token.STRING : Colors.NoColor,
- tokenize.COMMENT : Colors.NoColor,
- token.NAME : Colors.NoColor,
- token.ERRORTOKEN : Colors.NoColor,
-
- _KEYWORD : Colors.NoColor,
- _TEXT : Colors.NoColor,
-
- 'in_prompt' : InputTermColors.NoColor, # Input prompt
- 'in_number' : InputTermColors.NoColor, # Input prompt number
- 'in_prompt2' : InputTermColors.NoColor, # Continuation prompt
- 'in_normal' : InputTermColors.NoColor, # color off (usu. Colors.Normal)
-
- 'out_prompt' : Colors.NoColor, # Output prompt
- 'out_number' : Colors.NoColor, # Output prompt number
-
- 'normal' : Colors.NoColor # color off (usu. Colors.Normal)
- } )
-
-LinuxColors = ColorScheme(
- 'Linux',{
- 'header' : Colors.LightRed,
- token.NUMBER : Colors.LightCyan,
- token.OP : Colors.Yellow,
- token.STRING : Colors.LightBlue,
- tokenize.COMMENT : Colors.LightRed,
- token.NAME : Colors.Normal,
- token.ERRORTOKEN : Colors.Red,
-
- _KEYWORD : Colors.LightGreen,
- _TEXT : Colors.Yellow,
-
- 'in_prompt' : InputTermColors.Green,
- 'in_number' : InputTermColors.LightGreen,
- 'in_prompt2' : InputTermColors.Green,
- 'in_normal' : InputTermColors.Normal, # color off (usu. Colors.Normal)
-
- 'out_prompt' : Colors.Red,
- 'out_number' : Colors.LightRed,
-
- 'normal' : Colors.Normal # color off (usu. Colors.Normal)
- } )
-
+if PY3:
+ from io import StringIO
+else:
+ from StringIO import StringIO
+
+#############################################################################
+### Python Source Parser (does Highlighting)
+#############################################################################
+
+_KEYWORD = token.NT_OFFSET + 1
+_TEXT = token.NT_OFFSET + 2
+
+#****************************************************************************
+# Builtin color schemes
+
+Colors = TermColors # just a shorthand
+
+# Build a few color schemes
+NoColor = ColorScheme(
+ 'NoColor',{
+ 'header' : Colors.NoColor,
+ token.NUMBER : Colors.NoColor,
+ token.OP : Colors.NoColor,
+ token.STRING : Colors.NoColor,
+ tokenize.COMMENT : Colors.NoColor,
+ token.NAME : Colors.NoColor,
+ token.ERRORTOKEN : Colors.NoColor,
+
+ _KEYWORD : Colors.NoColor,
+ _TEXT : Colors.NoColor,
+
+ 'in_prompt' : InputTermColors.NoColor, # Input prompt
+ 'in_number' : InputTermColors.NoColor, # Input prompt number
+ 'in_prompt2' : InputTermColors.NoColor, # Continuation prompt
+ 'in_normal' : InputTermColors.NoColor, # color off (usu. Colors.Normal)
+
+ 'out_prompt' : Colors.NoColor, # Output prompt
+ 'out_number' : Colors.NoColor, # Output prompt number
+
+ 'normal' : Colors.NoColor # color off (usu. Colors.Normal)
+ } )
+
+LinuxColors = ColorScheme(
+ 'Linux',{
+ 'header' : Colors.LightRed,
+ token.NUMBER : Colors.LightCyan,
+ token.OP : Colors.Yellow,
+ token.STRING : Colors.LightBlue,
+ tokenize.COMMENT : Colors.LightRed,
+ token.NAME : Colors.Normal,
+ token.ERRORTOKEN : Colors.Red,
+
+ _KEYWORD : Colors.LightGreen,
+ _TEXT : Colors.Yellow,
+
+ 'in_prompt' : InputTermColors.Green,
+ 'in_number' : InputTermColors.LightGreen,
+ 'in_prompt2' : InputTermColors.Green,
+ 'in_normal' : InputTermColors.Normal, # color off (usu. Colors.Normal)
+
+ 'out_prompt' : Colors.Red,
+ 'out_number' : Colors.LightRed,
+
+ 'normal' : Colors.Normal # color off (usu. Colors.Normal)
+ } )
+
NeutralColors = ColorScheme(
'Neutral',{
'header' : Colors.Red,
@@ -156,227 +156,227 @@ NeutralColors = ColorScheme(
if os.name == 'nt':
NeutralColors = LinuxColors.copy(name='Neutral')
-LightBGColors = ColorScheme(
- 'LightBG',{
- 'header' : Colors.Red,
- token.NUMBER : Colors.Cyan,
- token.OP : Colors.Blue,
- token.STRING : Colors.Blue,
- tokenize.COMMENT : Colors.Red,
- token.NAME : Colors.Normal,
- token.ERRORTOKEN : Colors.Red,
-
-
- _KEYWORD : Colors.Green,
- _TEXT : Colors.Blue,
-
- 'in_prompt' : InputTermColors.Blue,
- 'in_number' : InputTermColors.LightBlue,
- 'in_prompt2' : InputTermColors.Blue,
- 'in_normal' : InputTermColors.Normal, # color off (usu. Colors.Normal)
-
- 'out_prompt' : Colors.Red,
- 'out_number' : Colors.LightRed,
-
- 'normal' : Colors.Normal # color off (usu. Colors.Normal)
- } )
-
-# Build table of color schemes (needed by the parser)
+LightBGColors = ColorScheme(
+ 'LightBG',{
+ 'header' : Colors.Red,
+ token.NUMBER : Colors.Cyan,
+ token.OP : Colors.Blue,
+ token.STRING : Colors.Blue,
+ tokenize.COMMENT : Colors.Red,
+ token.NAME : Colors.Normal,
+ token.ERRORTOKEN : Colors.Red,
+
+
+ _KEYWORD : Colors.Green,
+ _TEXT : Colors.Blue,
+
+ 'in_prompt' : InputTermColors.Blue,
+ 'in_number' : InputTermColors.LightBlue,
+ 'in_prompt2' : InputTermColors.Blue,
+ 'in_normal' : InputTermColors.Normal, # color off (usu. Colors.Normal)
+
+ 'out_prompt' : Colors.Red,
+ 'out_number' : Colors.LightRed,
+
+ 'normal' : Colors.Normal # color off (usu. Colors.Normal)
+ } )
+
+# Build table of color schemes (needed by the parser)
ANSICodeColors = ColorSchemeTable([NoColor,LinuxColors,LightBGColors, NeutralColors],
- _scheme_default)
-
+ _scheme_default)
+
class Parser(Colorable):
- """ Format colored Python source.
- """
-
+ """ Format colored Python source.
+ """
+
def __init__(self, color_table=None, out = sys.stdout, parent=None, style=None):
- """ Create a parser with a specified color table and output channel.
-
- Call format() to process code.
- """
+ """ Create a parser with a specified color table and output channel.
+
+ Call format() to process code.
+ """
super(Parser, self).__init__(parent=parent)
- self.color_table = color_table and color_table or ANSICodeColors
- self.out = out
-
- def format(self, raw, out = None, scheme = ''):
- return self.format2(raw, out, scheme)[0]
-
- def format2(self, raw, out = None, scheme = ''):
- """ Parse and send the colored source.
-
- If out and scheme are not specified, the defaults (given to
- constructor) are used.
-
- out should be a file-type object. Optionally, out can be given as the
- string 'str' and the parser will automatically return the output in a
- string."""
-
- string_output = 0
- if out == 'str' or self.out == 'str' or \
- isinstance(self.out,StringIO):
- # XXX - I don't really like this state handling logic, but at this
- # point I don't want to make major changes, so adding the
- # isinstance() check is the simplest I can do to ensure correct
- # behavior.
- out_old = self.out
- self.out = StringIO()
- string_output = 1
- elif out is not None:
- self.out = out
-
- # Fast return of the unmodified input for NoColor scheme
- if scheme == 'NoColor':
- error = False
- self.out.write(raw)
- if string_output:
- return raw,error
- else:
- return None,error
-
- # local shorthands
- colors = self.color_table[scheme].colors
- self.colors = colors # put in object so __call__ sees it
-
- # Remove trailing whitespace and normalize tabs
- self.raw = raw.expandtabs().rstrip()
-
- # store line offsets in self.lines
- self.lines = [0, 0]
- pos = 0
- raw_find = self.raw.find
- lines_append = self.lines.append
- while 1:
- pos = raw_find('\n', pos) + 1
- if not pos: break
- lines_append(pos)
- lines_append(len(self.raw))
-
- # parse the source and write it
- self.pos = 0
- text = StringIO(self.raw)
-
- error = False
- try:
- for atoken in generate_tokens(text.readline):
- self(*atoken)
- except tokenize.TokenError as ex:
- msg = ex.args[0]
- line = ex.args[1][0]
- self.out.write("%s\n\n*** ERROR: %s%s%s\n" %
- (colors[token.ERRORTOKEN],
- msg, self.raw[self.lines[line]:],
- colors.normal)
- )
- error = True
- self.out.write(colors.normal+'\n')
- if string_output:
- output = self.out.getvalue()
- self.out = out_old
- return (output, error)
- return (None, error)
-
- def __call__(self, toktype, toktext, start_pos, end_pos, line):
- """ Token handler, with syntax highlighting."""
- (srow,scol) = start_pos
- (erow,ecol) = end_pos
- colors = self.colors
- owrite = self.out.write
-
- # line separator, so this works across platforms
- linesep = os.linesep
-
- # calculate new positions
- oldpos = self.pos
- newpos = self.lines[srow] + scol
- self.pos = newpos + len(toktext)
-
- # send the original whitespace, if needed
- if newpos > oldpos:
- owrite(self.raw[oldpos:newpos])
-
- # skip indenting tokens
- if toktype in [token.INDENT, token.DEDENT]:
- self.pos = newpos
- return
-
- # map token type to a color group
- if token.LPAR <= toktype <= token.OP:
- toktype = token.OP
- elif toktype == token.NAME and keyword.iskeyword(toktext):
- toktype = _KEYWORD
- color = colors.get(toktype, colors[_TEXT])
-
- #print '<%s>' % toktext, # dbg
-
- # Triple quoted strings must be handled carefully so that backtracking
- # in pagers works correctly. We need color terminators on _each_ line.
- if linesep in toktext:
- toktext = toktext.replace(linesep, '%s%s%s' %
- (colors.normal,linesep,color))
-
- # send text
- owrite('%s%s%s' % (color,toktext,colors.normal))
-
-def main(argv=None):
- """Run as a command-line script: colorize a python file or stdin using ANSI
- color escapes and print to stdout.
-
- Inputs:
-
- - argv(None): a list of strings like sys.argv[1:] giving the command-line
- arguments. If None, use sys.argv[1:].
- """
-
- usage_msg = """%prog [options] [filename]
-
-Colorize a python file or stdin using ANSI color escapes and print to stdout.
-If no filename is given, or if filename is -, read standard input."""
-
- import optparse
- parser = optparse.OptionParser(usage=usage_msg)
- newopt = parser.add_option
- newopt('-s','--scheme',metavar='NAME',dest='scheme_name',action='store',
- choices=['Linux','LightBG','NoColor'],default=_scheme_default,
- help="give the color scheme to use. Currently only 'Linux'\
- (default) and 'LightBG' and 'NoColor' are implemented (give without\
- quotes)")
-
- opts,args = parser.parse_args(argv)
-
- if len(args) > 1:
- parser.error("you must give at most one filename.")
-
- if len(args) == 0:
- fname = '-' # no filename given; setup to read from stdin
- else:
- fname = args[0]
-
- if fname == '-':
- stream = sys.stdin
- else:
- try:
- stream = open(fname)
- except IOError as msg:
- print(msg, file=sys.stderr)
- sys.exit(1)
-
- parser = Parser()
-
- # we need nested try blocks because pre-2.5 python doesn't support unified
- # try-except-finally
- try:
- try:
- # write colorized version to stdout
- parser.format(stream.read(),scheme=opts.scheme_name)
- except IOError as msg:
- # if user reads through a pager and quits, don't print traceback
- if msg.args != (32,'Broken pipe'):
- raise
- finally:
- if stream is not sys.stdin:
- stream.close() # in case a non-handled exception happened above
-
-if __name__ == "__main__":
- main()
+ self.color_table = color_table and color_table or ANSICodeColors
+ self.out = out
+
+ def format(self, raw, out = None, scheme = ''):
+ return self.format2(raw, out, scheme)[0]
+
+ def format2(self, raw, out = None, scheme = ''):
+ """ Parse and send the colored source.
+
+ If out and scheme are not specified, the defaults (given to
+ constructor) are used.
+
+ out should be a file-type object. Optionally, out can be given as the
+ string 'str' and the parser will automatically return the output in a
+ string."""
+
+ string_output = 0
+ if out == 'str' or self.out == 'str' or \
+ isinstance(self.out,StringIO):
+ # XXX - I don't really like this state handling logic, but at this
+ # point I don't want to make major changes, so adding the
+ # isinstance() check is the simplest I can do to ensure correct
+ # behavior.
+ out_old = self.out
+ self.out = StringIO()
+ string_output = 1
+ elif out is not None:
+ self.out = out
+
+ # Fast return of the unmodified input for NoColor scheme
+ if scheme == 'NoColor':
+ error = False
+ self.out.write(raw)
+ if string_output:
+ return raw,error
+ else:
+ return None,error
+
+ # local shorthands
+ colors = self.color_table[scheme].colors
+ self.colors = colors # put in object so __call__ sees it
+
+ # Remove trailing whitespace and normalize tabs
+ self.raw = raw.expandtabs().rstrip()
+
+ # store line offsets in self.lines
+ self.lines = [0, 0]
+ pos = 0
+ raw_find = self.raw.find
+ lines_append = self.lines.append
+ while 1:
+ pos = raw_find('\n', pos) + 1
+ if not pos: break
+ lines_append(pos)
+ lines_append(len(self.raw))
+
+ # parse the source and write it
+ self.pos = 0
+ text = StringIO(self.raw)
+
+ error = False
+ try:
+ for atoken in generate_tokens(text.readline):
+ self(*atoken)
+ except tokenize.TokenError as ex:
+ msg = ex.args[0]
+ line = ex.args[1][0]
+ self.out.write("%s\n\n*** ERROR: %s%s%s\n" %
+ (colors[token.ERRORTOKEN],
+ msg, self.raw[self.lines[line]:],
+ colors.normal)
+ )
+ error = True
+ self.out.write(colors.normal+'\n')
+ if string_output:
+ output = self.out.getvalue()
+ self.out = out_old
+ return (output, error)
+ return (None, error)
+
+ def __call__(self, toktype, toktext, start_pos, end_pos, line):
+ """ Token handler, with syntax highlighting."""
+ (srow,scol) = start_pos
+ (erow,ecol) = end_pos
+ colors = self.colors
+ owrite = self.out.write
+
+ # line separator, so this works across platforms
+ linesep = os.linesep
+
+ # calculate new positions
+ oldpos = self.pos
+ newpos = self.lines[srow] + scol
+ self.pos = newpos + len(toktext)
+
+ # send the original whitespace, if needed
+ if newpos > oldpos:
+ owrite(self.raw[oldpos:newpos])
+
+ # skip indenting tokens
+ if toktype in [token.INDENT, token.DEDENT]:
+ self.pos = newpos
+ return
+
+ # map token type to a color group
+ if token.LPAR <= toktype <= token.OP:
+ toktype = token.OP
+ elif toktype == token.NAME and keyword.iskeyword(toktext):
+ toktype = _KEYWORD
+ color = colors.get(toktype, colors[_TEXT])
+
+ #print '<%s>' % toktext, # dbg
+
+ # Triple quoted strings must be handled carefully so that backtracking
+ # in pagers works correctly. We need color terminators on _each_ line.
+ if linesep in toktext:
+ toktext = toktext.replace(linesep, '%s%s%s' %
+ (colors.normal,linesep,color))
+
+ # send text
+ owrite('%s%s%s' % (color,toktext,colors.normal))
+
+def main(argv=None):
+ """Run as a command-line script: colorize a python file or stdin using ANSI
+ color escapes and print to stdout.
+
+ Inputs:
+
+ - argv(None): a list of strings like sys.argv[1:] giving the command-line
+ arguments. If None, use sys.argv[1:].
+ """
+
+ usage_msg = """%prog [options] [filename]
+
+Colorize a python file or stdin using ANSI color escapes and print to stdout.
+If no filename is given, or if filename is -, read standard input."""
+
+ import optparse
+ parser = optparse.OptionParser(usage=usage_msg)
+ newopt = parser.add_option
+ newopt('-s','--scheme',metavar='NAME',dest='scheme_name',action='store',
+ choices=['Linux','LightBG','NoColor'],default=_scheme_default,
+ help="give the color scheme to use. Currently only 'Linux'\
+ (default) and 'LightBG' and 'NoColor' are implemented (give without\
+ quotes)")
+
+ opts,args = parser.parse_args(argv)
+
+ if len(args) > 1:
+ parser.error("you must give at most one filename.")
+
+ if len(args) == 0:
+ fname = '-' # no filename given; setup to read from stdin
+ else:
+ fname = args[0]
+
+ if fname == '-':
+ stream = sys.stdin
+ else:
+ try:
+ stream = open(fname)
+ except IOError as msg:
+ print(msg, file=sys.stderr)
+ sys.exit(1)
+
+ parser = Parser()
+
+ # we need nested try blocks because pre-2.5 python doesn't support unified
+ # try-except-finally
+ try:
+ try:
+ # write colorized version to stdout
+ parser.format(stream.read(),scheme=opts.scheme_name)
+ except IOError as msg:
+ # if user reads through a pager and quits, don't print traceback
+ if msg.args != (32,'Broken pipe'):
+ raise
+ finally:
+ if stream is not sys.stdin:
+ stream.close() # in case a non-handled exception happened above
+
+if __name__ == "__main__":
+ main()
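# Editor's illustrative sketch (not part of the patch): colorizing a snippet
# in memory by asking format() to return a string instead of writing to a stream.
src = "def add(a, b):\n    return a + b  # sum\n"
colored = Parser().format(src, out='str', scheme='Linux')
print(colored)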
diff --git a/contrib/python/ipython/py2/IPython/utils/_process_cli.py b/contrib/python/ipython/py2/IPython/utils/_process_cli.py
index a65decf3b6..a7b7b90b68 100644
--- a/contrib/python/ipython/py2/IPython/utils/_process_cli.py
+++ b/contrib/python/ipython/py2/IPython/utils/_process_cli.py
@@ -1,78 +1,78 @@
-"""cli-specific implementation of process utilities.
-
-cli - Common Language Infrastructure for IronPython. Code
- can run on any operating system. Check os.name for os-
- specific settings.
-
-This file is only meant to be imported by process.py, not by end-users.
-
-This file is largely untested. To become a full drop-in process
-interface for IronPython will probably require you to help fill
-in the details.
-"""
-
-# Import cli libraries:
-import clr
-import System
-
-# Import Python libraries:
-import os
-
-# Import IPython libraries:
-from IPython.utils import py3compat
-from ._process_common import arg_split
-
-def _find_cmd(cmd):
- """Find the full path to a command using which."""
- paths = System.Environment.GetEnvironmentVariable("PATH").Split(os.pathsep)
- for path in paths:
- filename = os.path.join(path, cmd)
- if System.IO.File.Exists(filename):
- return py3compat.bytes_to_str(filename)
- raise OSError("command %r not found" % cmd)
-
-def system(cmd):
- """
- system(cmd) should work in a cli environment on Mac OSX, Linux,
- and Windows
- """
- psi = System.Diagnostics.ProcessStartInfo(cmd)
- psi.RedirectStandardOutput = True
- psi.RedirectStandardError = True
- psi.WindowStyle = System.Diagnostics.ProcessWindowStyle.Normal
- psi.UseShellExecute = False
- # Start up process:
- reg = System.Diagnostics.Process.Start(psi)
-
-def getoutput(cmd):
- """
- getoutput(cmd) should work in a cli environment on Mac OSX, Linux,
- and Windows
- """
- psi = System.Diagnostics.ProcessStartInfo(cmd)
- psi.RedirectStandardOutput = True
- psi.RedirectStandardError = True
- psi.WindowStyle = System.Diagnostics.ProcessWindowStyle.Normal
- psi.UseShellExecute = False
- # Start up process:
- reg = System.Diagnostics.Process.Start(psi)
- myOutput = reg.StandardOutput
- output = myOutput.ReadToEnd()
- myError = reg.StandardError
- error = myError.ReadToEnd()
- return output
-
-def check_pid(pid):
- """
- Check if a process with the given PID (pid) exists
- """
- try:
- System.Diagnostics.Process.GetProcessById(pid)
- # process with given pid is running
- return True
- except System.InvalidOperationException:
- # process wasn't started by this object (but is running)
- return True
- except System.ArgumentException:
- # process with given pid isn't running
- return False
+"""cli-specific implementation of process utilities.
+
+cli - Common Language Infrastructure for IronPython. Code
+ can run on any operating system. Check os.name for os-
+ specific settings.
+
+This file is only meant to be imported by process.py, not by end-users.
+
+This file is largely untested. To become a full drop-in process
+interface for IronPython will probably require you to help fill
+in the details.
+"""
+
+# Import cli libraries:
+import clr
+import System
+
+# Import Python libraries:
+import os
+
+# Import IPython libraries:
+from IPython.utils import py3compat
+from ._process_common import arg_split
+
+def _find_cmd(cmd):
+ """Find the full path to a command using which."""
+ paths = System.Environment.GetEnvironmentVariable("PATH").Split(os.pathsep)
+ for path in paths:
+ filename = os.path.join(path, cmd)
+ if System.IO.File.Exists(filename):
+ return py3compat.bytes_to_str(filename)
+ raise OSError("command %r not found" % cmd)
+
+def system(cmd):
+ """
+ system(cmd) should work in a cli environment on Mac OSX, Linux,
+ and Windows
+ """
+ psi = System.Diagnostics.ProcessStartInfo(cmd)
+ psi.RedirectStandardOutput = True
+ psi.RedirectStandardError = True
+ psi.WindowStyle = System.Diagnostics.ProcessWindowStyle.Normal
+ psi.UseShellExecute = False
+ # Start up process:
+ reg = System.Diagnostics.Process.Start(psi)
+
+def getoutput(cmd):
+ """
+ getoutput(cmd) should work in a cli environment on Mac OSX, Linux,
+ and Windows
+ """
+ psi = System.Diagnostics.ProcessStartInfo(cmd)
+ psi.RedirectStandardOutput = True
+ psi.RedirectStandardError = True
+ psi.WindowStyle = System.Diagnostics.ProcessWindowStyle.Normal
+ psi.UseShellExecute = False
+ # Start up process:
+ reg = System.Diagnostics.Process.Start(psi)
+ myOutput = reg.StandardOutput
+ output = myOutput.ReadToEnd()
+ myError = reg.StandardError
+ error = myError.ReadToEnd()
+ return output
+
+def check_pid(pid):
+ """
+ Check if a process with the given PID (pid) exists
+ """
+ try:
+ System.Diagnostics.Process.GetProcessById(pid)
+ # process with given pid is running
+ return True
+ except System.InvalidOperationException:
+ # process wasn't started by this object (but is running)
+ return True
+ except System.ArgumentException:
+ # process with given pid isn't running
+ return False
diff --git a/contrib/python/ipython/py2/IPython/utils/_process_common.py b/contrib/python/ipython/py2/IPython/utils/_process_common.py
index 6851e41869..9ede30d3f8 100644
--- a/contrib/python/ipython/py2/IPython/utils/_process_common.py
+++ b/contrib/python/ipython/py2/IPython/utils/_process_common.py
@@ -1,75 +1,75 @@
-"""Common utilities for the various process_* implementations.
-
-This file is only meant to be imported by the platform-specific implementations
-of subprocess utilities, and it contains tools that are common to all of them.
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2010-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-import subprocess
-import shlex
-import sys
+"""Common utilities for the various process_* implementations.
+
+This file is only meant to be imported by the platform-specific implementations
+of subprocess utilities, and it contains tools that are common to all of them.
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2010-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+import subprocess
+import shlex
+import sys
import os
-
-from IPython.utils import py3compat
-
-#-----------------------------------------------------------------------------
-# Function definitions
-#-----------------------------------------------------------------------------
-
-def read_no_interrupt(p):
- """Read from a pipe ignoring EINTR errors.
-
- This is necessary because when reading from pipes with GUI event loops
- running in the background, often interrupts are raised that stop the
- command from completing."""
- import errno
-
- try:
- return p.read()
- except IOError as err:
- if err.errno != errno.EINTR:
- raise
-
-
-def process_handler(cmd, callback, stderr=subprocess.PIPE):
- """Open a command in a shell subprocess and execute a callback.
-
- This function provides common scaffolding for creating subprocess.Popen()
- calls. It creates a Popen object and then calls the callback with it.
-
- Parameters
- ----------
- cmd : str or list
- A command to be executed by the system, using :class:`subprocess.Popen`.
- If a string is passed, it will be run in the system shell. If a list is
- passed, it will be used directly as arguments.
-
- callback : callable
- A one-argument function that will be called with the Popen object.
-
- stderr : file descriptor number, optional
- By default this is set to ``subprocess.PIPE``, but you can also pass the
- value ``subprocess.STDOUT`` to force the subprocess' stderr to go into
- the same file descriptor as its stdout. This is useful to read stdout
- and stderr combined in the order they are generated.
-
- Returns
- -------
- The return value of the provided callback is returned.
- """
- sys.stdout.flush()
- sys.stderr.flush()
- # On win32, close_fds can't be true when using pipes for stdin/out/err
- close_fds = sys.platform != 'win32'
+
+from IPython.utils import py3compat
+
+#-----------------------------------------------------------------------------
+# Function definitions
+#-----------------------------------------------------------------------------
+
+def read_no_interrupt(p):
+ """Read from a pipe ignoring EINTR errors.
+
+ This is necessary because when reading from pipes with GUI event loops
+ running in the background, often interrupts are raised that stop the
+ command from completing."""
+ import errno
+
+ try:
+ return p.read()
+ except IOError as err:
+ if err.errno != errno.EINTR:
+ raise
+
+
+def process_handler(cmd, callback, stderr=subprocess.PIPE):
+ """Open a command in a shell subprocess and execute a callback.
+
+ This function provides common scaffolding for creating subprocess.Popen()
+ calls. It creates a Popen object and then calls the callback with it.
+
+ Parameters
+ ----------
+ cmd : str or list
+ A command to be executed by the system, using :class:`subprocess.Popen`.
+ If a string is passed, it will be run in the system shell. If a list is
+ passed, it will be used directly as arguments.
+
+ callback : callable
+ A one-argument function that will be called with the Popen object.
+
+ stderr : file descriptor number, optional
+ By default this is set to ``subprocess.PIPE``, but you can also pass the
+ value ``subprocess.STDOUT`` to force the subprocess' stderr to go into
+ the same file descriptor as its stdout. This is useful to read stdout
+ and stderr combined in the order they are generated.
+
+ Returns
+ -------
+ The return value of the provided callback is returned.
+ """
+ sys.stdout.flush()
+ sys.stderr.flush()
+ # On win32, close_fds can't be true when using pipes for stdin/out/err
+ close_fds = sys.platform != 'win32'
# Determine if cmd should be run with system shell.
shell = isinstance(cmd, py3compat.string_types)
# On POSIX systems run shell commands with user-preferred shell.
@@ -78,146 +78,146 @@ def process_handler(cmd, callback, stderr=subprocess.PIPE):
executable = os.environ['SHELL']
p = subprocess.Popen(cmd, shell=shell,
executable=executable,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=stderr,
- close_fds=close_fds)
-
- try:
- out = callback(p)
- except KeyboardInterrupt:
- print('^C')
- sys.stdout.flush()
- sys.stderr.flush()
- out = None
- finally:
- # Make really sure that we don't leave processes behind, in case the
- # call above raises an exception
- # We start by assuming the subprocess finished (to avoid NameErrors
- # later depending on the path taken)
- if p.returncode is None:
- try:
- p.terminate()
- p.poll()
- except OSError:
- pass
- # One last try on our way out
- if p.returncode is None:
- try:
- p.kill()
- except OSError:
- pass
-
- return out
-
-
-def getoutput(cmd):
- """Run a command and return its stdout/stderr as a string.
-
- Parameters
- ----------
- cmd : str or list
- A command to be executed in the system shell.
-
- Returns
- -------
- output : str
- A string containing the combination of stdout and stderr from the
- subprocess, in whatever order the subprocess originally wrote to its
- file descriptors (so the order of the information in this string is the
- correct order as would be seen if running the command in a terminal).
- """
- out = process_handler(cmd, lambda p: p.communicate()[0], subprocess.STDOUT)
- if out is None:
- return ''
- return py3compat.bytes_to_str(out)
-
-
-def getoutputerror(cmd):
- """Return (standard output, standard error) of executing cmd in a shell.
-
- Accepts the same arguments as os.system().
-
- Parameters
- ----------
- cmd : str or list
- A command to be executed in the system shell.
-
- Returns
- -------
- stdout : str
- stderr : str
- """
- return get_output_error_code(cmd)[:2]
-
-def get_output_error_code(cmd):
- """Return (standard output, standard error, return code) of executing cmd
- in a shell.
-
- Accepts the same arguments as os.system().
-
- Parameters
- ----------
- cmd : str or list
- A command to be executed in the system shell.
-
- Returns
- -------
- stdout : str
- stderr : str
- returncode: int
- """
-
- out_err, p = process_handler(cmd, lambda p: (p.communicate(), p))
- if out_err is None:
- return '', '', p.returncode
- out, err = out_err
- return py3compat.bytes_to_str(out), py3compat.bytes_to_str(err), p.returncode
-
-def arg_split(s, posix=False, strict=True):
- """Split a command line's arguments in a shell-like manner.
-
- This is a modified version of the standard library's shlex.split()
- function, but with a default of posix=False for splitting, so that quotes
- in inputs are respected.
-
- if strict=False, then any errors shlex.split would raise will result in the
- unparsed remainder being the last element of the list, rather than raising.
- This is because we sometimes use arg_split to parse things other than
- command-line args.
- """
-
- # Unfortunately, python's shlex module is buggy with unicode input:
- # http://bugs.python.org/issue1170
- # At least encoding the input when it's unicode seems to help, but there
- # may be more problems lurking. Apparently this is fixed in python3.
- is_unicode = False
- if (not py3compat.PY3) and isinstance(s, unicode):
- is_unicode = True
- s = s.encode('utf-8')
- lex = shlex.shlex(s, posix=posix)
- lex.whitespace_split = True
- # Extract tokens, ensuring that things like leaving open quotes
- # does not cause this to raise. This is important, because we
- # sometimes pass Python source through this (e.g. %timeit f(" ")),
- # and it shouldn't raise an exception.
- # It may be a bad idea to parse things that are not command-line args
- # through this function, but we do, so let's be safe about it.
- lex.commenters='' #fix for GH-1269
- tokens = []
- while True:
- try:
- tokens.append(next(lex))
- except StopIteration:
- break
- except ValueError:
- if strict:
- raise
- # couldn't parse, get remaining blob as last token
- tokens.append(lex.token)
- break
-
- if is_unicode:
- # Convert the tokens back to unicode.
- tokens = [x.decode('utf-8') for x in tokens]
- return tokens
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=stderr,
+ close_fds=close_fds)
+
+ try:
+ out = callback(p)
+ except KeyboardInterrupt:
+ print('^C')
+ sys.stdout.flush()
+ sys.stderr.flush()
+ out = None
+ finally:
+ # Make really sure that we don't leave processes behind, in case the
+ # call above raises an exception
+ # We start by assuming the subprocess finished (to avoid NameErrors
+ # later depending on the path taken)
+ if p.returncode is None:
+ try:
+ p.terminate()
+ p.poll()
+ except OSError:
+ pass
+ # One last try on our way out
+ if p.returncode is None:
+ try:
+ p.kill()
+ except OSError:
+ pass
+
+ return out
+
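# Editor's illustrative sketch (not part of the patch): process_handler() with
# a callback that collects combined stdout/stderr, much like getoutput() below.
combined = process_handler('echo hello', lambda p: p.communicate()[0],
                           stderr=subprocess.STDOUT)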
+
+def getoutput(cmd):
+ """Run a command and return its stdout/stderr as a string.
+
+ Parameters
+ ----------
+ cmd : str or list
+ A command to be executed in the system shell.
+
+ Returns
+ -------
+ output : str
+ A string containing the combination of stdout and stderr from the
+ subprocess, in whatever order the subprocess originally wrote to its
+ file descriptors (so the order of the information in this string is the
+ correct order as would be seen if running the command in a terminal).
+ """
+ out = process_handler(cmd, lambda p: p.communicate()[0], subprocess.STDOUT)
+ if out is None:
+ return ''
+ return py3compat.bytes_to_str(out)
+
+
+def getoutputerror(cmd):
+ """Return (standard output, standard error) of executing cmd in a shell.
+
+ Accepts the same arguments as os.system().
+
+ Parameters
+ ----------
+ cmd : str or list
+ A command to be executed in the system shell.
+
+ Returns
+ -------
+ stdout : str
+ stderr : str
+ """
+ return get_output_error_code(cmd)[:2]
+
+def get_output_error_code(cmd):
+ """Return (standard output, standard error, return code) of executing cmd
+ in a shell.
+
+ Accepts the same arguments as os.system().
+
+ Parameters
+ ----------
+ cmd : str or list
+ A command to be executed in the system shell.
+
+ Returns
+ -------
+ stdout : str
+ stderr : str
+ returncode: int
+ """
+
+ out_err, p = process_handler(cmd, lambda p: (p.communicate(), p))
+ if out_err is None:
+ return '', '', p.returncode
+ out, err = out_err
+ return py3compat.bytes_to_str(out), py3compat.bytes_to_str(err), p.returncode
+
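# Editor's illustrative sketch (not part of the patch): the three-tuple returned
# by get_output_error_code() for a command that exits with a non-zero status.
out, err, rc = get_output_error_code([sys.executable, '-c', 'import sys; sys.exit(3)'])
assert rc == 3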
+def arg_split(s, posix=False, strict=True):
+ """Split a command line's arguments in a shell-like manner.
+
+ This is a modified version of the standard library's shlex.split()
+ function, but with a default of posix=False for splitting, so that quotes
+ in inputs are respected.
+
+ if strict=False, then any errors shlex.split would raise will result in the
+ unparsed remainder being the last element of the list, rather than raising.
+ This is because we sometimes use arg_split to parse things other than
+ command-line args.
+ """
+
+ # Unfortunately, python's shlex module is buggy with unicode input:
+ # http://bugs.python.org/issue1170
+ # At least encoding the input when it's unicode seems to help, but there
+ # may be more problems lurking. Apparently this is fixed in python3.
+ is_unicode = False
+ if (not py3compat.PY3) and isinstance(s, unicode):
+ is_unicode = True
+ s = s.encode('utf-8')
+ lex = shlex.shlex(s, posix=posix)
+ lex.whitespace_split = True
+ # Extract tokens, ensuring that things like leaving open quotes
+ # does not cause this to raise. This is important, because we
+ # sometimes pass Python source through this (e.g. %timeit f(" ")),
+ # and it shouldn't raise an exception.
+ # It may be a bad idea to parse things that are not command-line args
+ # through this function, but we do, so let's be safe about it.
+ lex.commenters='' #fix for GH-1269
+ tokens = []
+ while True:
+ try:
+ tokens.append(next(lex))
+ except StopIteration:
+ break
+ except ValueError:
+ if strict:
+ raise
+ # couldn't parse, get remaining blob as last token
+ tokens.append(lex.token)
+ break
+
+ if is_unicode:
+ # Convert the tokens back to unicode.
+ tokens = [x.decode('utf-8') for x in tokens]
+ return tokens
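# Editor's illustrative sketch (not part of the patch): the default posix=False
# preserves quote characters, unlike shlex.split().
assert arg_split('ls "my dir"') == ['ls', '"my dir"']
assert arg_split('ls "my dir"', posix=True) == ['ls', 'my dir']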
diff --git a/contrib/python/ipython/py2/IPython/utils/_process_posix.py b/contrib/python/ipython/py2/IPython/utils/_process_posix.py
index 059e80c991..ac3a9a0507 100644
--- a/contrib/python/ipython/py2/IPython/utils/_process_posix.py
+++ b/contrib/python/ipython/py2/IPython/utils/_process_posix.py
@@ -1,225 +1,225 @@
-"""Posix-specific implementation of process utilities.
-
-This file is only meant to be imported by process.py, not by end-users.
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2010-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-from __future__ import print_function
-
-# Stdlib
-import errno
-import os
-import subprocess as sp
-import sys
-
-import pexpect
-
-# Our own
-from ._process_common import getoutput, arg_split
-from IPython.utils import py3compat
-from IPython.utils.encoding import DEFAULT_ENCODING
-
-#-----------------------------------------------------------------------------
-# Function definitions
-#-----------------------------------------------------------------------------
-
-def _find_cmd(cmd):
- """Find the full path to a command using which."""
-
- path = sp.Popen(['/usr/bin/env', 'which', cmd],
- stdout=sp.PIPE, stderr=sp.PIPE).communicate()[0]
- return py3compat.bytes_to_str(path)
-
-
-class ProcessHandler(object):
- """Execute subprocesses under the control of pexpect.
- """
- # Timeout in seconds to wait on each reading of the subprocess' output.
- # This should not be set too low, to avoid CPU overuse on our side,
- # since we read in a loop whose period is controlled by this timeout.
- read_timeout = 0.05
-
- # Timeout to give a process if we receive SIGINT, between sending the
- # SIGINT to the process and forcefully terminating it.
- terminate_timeout = 0.2
-
- # File object where stdout and stderr of the subprocess will be written
- logfile = None
-
- # Shell to call for subprocesses to execute
- _sh = None
-
- @property
- def sh(self):
- if self._sh is None:
- self._sh = pexpect.which('sh')
- if self._sh is None:
- raise OSError('"sh" shell not found')
-
- return self._sh
-
- def __init__(self, logfile=None, read_timeout=None, terminate_timeout=None):
- """Arguments are used for pexpect calls."""
- self.read_timeout = (ProcessHandler.read_timeout if read_timeout is
- None else read_timeout)
- self.terminate_timeout = (ProcessHandler.terminate_timeout if
- terminate_timeout is None else
- terminate_timeout)
- self.logfile = sys.stdout if logfile is None else logfile
-
- def getoutput(self, cmd):
- """Run a command and return its stdout/stderr as a string.
-
- Parameters
- ----------
- cmd : str
- A command to be executed in the system shell.
-
- Returns
- -------
- output : str
- A string containing the combination of stdout and stderr from the
- subprocess, in whatever order the subprocess originally wrote to its
- file descriptors (so the order of the information in this string is the
- correct order as would be seen if running the command in a terminal).
- """
- try:
- return pexpect.run(self.sh, args=['-c', cmd]).replace('\r\n', '\n')
- except KeyboardInterrupt:
- print('^C', file=sys.stderr, end='')
-
- def getoutput_pexpect(self, cmd):
- """Run a command and return its stdout/stderr as a string.
-
- Parameters
- ----------
- cmd : str
- A command to be executed in the system shell.
-
- Returns
- -------
- output : str
- A string containing the combination of stdout and stderr from the
- subprocess, in whatever order the subprocess originally wrote to its
- file descriptors (so the order of the information in this string is the
- correct order as would be seen if running the command in a terminal).
- """
- try:
- return pexpect.run(self.sh, args=['-c', cmd]).replace('\r\n', '\n')
- except KeyboardInterrupt:
- print('^C', file=sys.stderr, end='')
-
- def system(self, cmd):
- """Execute a command in a subshell.
-
- Parameters
- ----------
- cmd : str
- A command to be executed in the system shell.
-
- Returns
- -------
- int : child's exitstatus
- """
- # Get likely encoding for the output.
- enc = DEFAULT_ENCODING
-
- # Patterns to match on the output, for pexpect. We read input and
- # allow either a short timeout or EOF
- patterns = [pexpect.TIMEOUT, pexpect.EOF]
- # the index of the EOF pattern in the list.
- # even though we know it's 1, this call means we don't have to worry if
- # we change the above list, and forget to change this value:
- EOF_index = patterns.index(pexpect.EOF)
- # The size of the output stored so far in the process output buffer.
- # Since pexpect only appends to this buffer, each time we print we
- # record how far we've printed, so that next time we only print *new*
- # content from the buffer.
- out_size = 0
- try:
- # Since we're not really searching the buffer for text patterns, we
- # can set pexpect's search window to be tiny and it won't matter.
- # We only search for the 'patterns' timeout or EOF, which aren't in
- # the text itself.
- #child = pexpect.spawn(pcmd, searchwindowsize=1)
- if hasattr(pexpect, 'spawnb'):
- child = pexpect.spawnb(self.sh, args=['-c', cmd]) # Pexpect-U
- else:
- child = pexpect.spawn(self.sh, args=['-c', cmd]) # Vanilla Pexpect
- flush = sys.stdout.flush
- while True:
- # res is the index of the pattern that caused the match, so we
- # know whether we've finished (if we matched EOF) or not
- res_idx = child.expect_list(patterns, self.read_timeout)
- print(child.before[out_size:].decode(enc, 'replace'), end='')
- flush()
- if res_idx==EOF_index:
- break
- # Update the pointer to what we've already printed
- out_size = len(child.before)
- except KeyboardInterrupt:
- # We need to send ^C to the process. The ascii code for '^C' is 3
- # (the character is known as ETX for 'End of Text', see
- # curses.ascii.ETX).
- child.sendline(chr(3))
- # Read and print any more output the program might produce on its
- # way out.
- try:
- out_size = len(child.before)
- child.expect_list(patterns, self.terminate_timeout)
- print(child.before[out_size:].decode(enc, 'replace'), end='')
- sys.stdout.flush()
- except KeyboardInterrupt:
- # Impatient users tend to type it multiple times
- pass
- finally:
- # Ensure the subprocess really is terminated
- child.terminate(force=True)
- # add isalive check, to ensure exitstatus is set:
- child.isalive()
-
- # We follow the subprocess pattern, returning either the exit status
- # as a positive number, or the terminating signal as a negative
- # number.
- # on Linux, sh returns 128+n for signals terminating child processes
- # on BSD (OS X), the signal code is set instead
- if child.exitstatus is None:
- # on WIFSIGNALED, pexpect sets signalstatus, leaving exitstatus=None
- if child.signalstatus is None:
- # this condition may never occur,
- # but let's be certain we always return an integer.
- return 0
- return -child.signalstatus
- if child.exitstatus > 128:
- return -(child.exitstatus - 128)
- return child.exitstatus
-
-
-# Make system() with a functional interface for outside use. Note that we use
-# getoutput() from the _common utils, which is built on top of popen(). Using
-# pexpect to get subprocess output produces difficult to parse output, since
-# programs think they are talking to a tty and produce highly formatted output
-# (ls is a good example) that makes them hard to parse.
-system = ProcessHandler().system
-
-def check_pid(pid):
- try:
- os.kill(pid, 0)
- except OSError as err:
- if err.errno == errno.ESRCH:
- return False
- elif err.errno == errno.EPERM:
- # Don't have permission to signal the process - probably means it exists
- return True
- raise
- else:
- return True
+"""Posix-specific implementation of process utilities.
+
+This file is only meant to be imported by process.py, not by end-users.
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2010-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+from __future__ import print_function
+
+# Stdlib
+import errno
+import os
+import subprocess as sp
+import sys
+
+import pexpect
+
+# Our own
+from ._process_common import getoutput, arg_split
+from IPython.utils import py3compat
+from IPython.utils.encoding import DEFAULT_ENCODING
+
+#-----------------------------------------------------------------------------
+# Function definitions
+#-----------------------------------------------------------------------------
+
+def _find_cmd(cmd):
+ """Find the full path to a command using which."""
+
+ path = sp.Popen(['/usr/bin/env', 'which', cmd],
+ stdout=sp.PIPE, stderr=sp.PIPE).communicate()[0]
+ return py3compat.bytes_to_str(path)
+
+
+class ProcessHandler(object):
+ """Execute subprocesses under the control of pexpect.
+ """
+ # Timeout in seconds to wait on each reading of the subprocess' output.
+ # This should not be set too low, to avoid CPU overuse on our side,
+ # since we read in a loop whose period is controlled by this timeout.
+ read_timeout = 0.05
+
+ # Timeout to give a process if we receive SIGINT, between sending the
+ # SIGINT to the process and forcefully terminating it.
+ terminate_timeout = 0.2
+
+ # File object where stdout and stderr of the subprocess will be written
+ logfile = None
+
+ # Shell to call for subprocesses to execute
+ _sh = None
+
+ @property
+ def sh(self):
+ if self._sh is None:
+ self._sh = pexpect.which('sh')
+ if self._sh is None:
+ raise OSError('"sh" shell not found')
+
+ return self._sh
+
+ def __init__(self, logfile=None, read_timeout=None, terminate_timeout=None):
+ """Arguments are used for pexpect calls."""
+ self.read_timeout = (ProcessHandler.read_timeout if read_timeout is
+ None else read_timeout)
+ self.terminate_timeout = (ProcessHandler.terminate_timeout if
+ terminate_timeout is None else
+ terminate_timeout)
+ self.logfile = sys.stdout if logfile is None else logfile
+
+ def getoutput(self, cmd):
+ """Run a command and return its stdout/stderr as a string.
+
+ Parameters
+ ----------
+ cmd : str
+ A command to be executed in the system shell.
+
+ Returns
+ -------
+ output : str
+ A string containing the combination of stdout and stderr from the
+ subprocess, in whatever order the subprocess originally wrote to its
+ file descriptors (so the order of the information in this string is the
+ correct order as would be seen if running the command in a terminal).
+ """
+ try:
+ return pexpect.run(self.sh, args=['-c', cmd]).replace('\r\n', '\n')
+ except KeyboardInterrupt:
+ print('^C', file=sys.stderr, end='')
+
+ def getoutput_pexpect(self, cmd):
+ """Run a command and return its stdout/stderr as a string.
+
+ Parameters
+ ----------
+ cmd : str
+ A command to be executed in the system shell.
+
+ Returns
+ -------
+ output : str
+ A string containing the combination of stdout and stderr from the
+ subprocess, in whatever order the subprocess originally wrote to its
+ file descriptors (so the order of the information in this string is the
+ correct order as would be seen if running the command in a terminal).
+ """
+ try:
+ return pexpect.run(self.sh, args=['-c', cmd]).replace('\r\n', '\n')
+ except KeyboardInterrupt:
+ print('^C', file=sys.stderr, end='')
+
+ def system(self, cmd):
+ """Execute a command in a subshell.
+
+ Parameters
+ ----------
+ cmd : str
+ A command to be executed in the system shell.
+
+ Returns
+ -------
+ int : child's exitstatus
+ """
+ # Get likely encoding for the output.
+ enc = DEFAULT_ENCODING
+
+ # Patterns to match on the output, for pexpect. We read input and
+ # allow either a short timeout or EOF
+ patterns = [pexpect.TIMEOUT, pexpect.EOF]
+ # the index of the EOF pattern in the list.
+ # even though we know it's 1, this call means we don't have to worry if
+ # we change the above list, and forget to change this value:
+ EOF_index = patterns.index(pexpect.EOF)
+ # The size of the output stored so far in the process output buffer.
+ # Since pexpect only appends to this buffer, each time we print we
+ # record how far we've printed, so that next time we only print *new*
+ # content from the buffer.
+ out_size = 0
+ try:
+ # Since we're not really searching the buffer for text patterns, we
+ # can set pexpect's search window to be tiny and it won't matter.
+ # We only search for the 'patterns' timeout or EOF, which aren't in
+ # the text itself.
+ #child = pexpect.spawn(pcmd, searchwindowsize=1)
+ if hasattr(pexpect, 'spawnb'):
+ child = pexpect.spawnb(self.sh, args=['-c', cmd]) # Pexpect-U
+ else:
+ child = pexpect.spawn(self.sh, args=['-c', cmd]) # Vanilla Pexpect
+ flush = sys.stdout.flush
+ while True:
+ # res is the index of the pattern that caused the match, so we
+ # know whether we've finished (if we matched EOF) or not
+ res_idx = child.expect_list(patterns, self.read_timeout)
+ print(child.before[out_size:].decode(enc, 'replace'), end='')
+ flush()
+ if res_idx==EOF_index:
+ break
+ # Update the pointer to what we've already printed
+ out_size = len(child.before)
+ except KeyboardInterrupt:
+ # We need to send ^C to the process. The ascii code for '^C' is 3
+ # (the character is known as ETX for 'End of Text', see
+ # curses.ascii.ETX).
+ child.sendline(chr(3))
+ # Read and print any more output the program might produce on its
+ # way out.
+ try:
+ out_size = len(child.before)
+ child.expect_list(patterns, self.terminate_timeout)
+ print(child.before[out_size:].decode(enc, 'replace'), end='')
+ sys.stdout.flush()
+ except KeyboardInterrupt:
+ # Impatient users tend to type it multiple times
+ pass
+ finally:
+ # Ensure the subprocess really is terminated
+ child.terminate(force=True)
+ # add isalive check, to ensure exitstatus is set:
+ child.isalive()
+
+ # We follow the subprocess pattern, returning either the exit status
+ # as a positive number, or the terminating signal as a negative
+ # number.
+ # on Linux, sh returns 128+n for signals terminating child processes
+ # on BSD (OS X), the signal code is set instead
+ if child.exitstatus is None:
+ # on WIFSIGNALED, pexpect sets signalstatus, leaving exitstatus=None
+ if child.signalstatus is None:
+ # this condition may never occur,
+ # but let's be certain we always return an integer.
+ return 0
+ return -child.signalstatus
+ if child.exitstatus > 128:
+ return -(child.exitstatus - 128)
+ return child.exitstatus
+
+
+# Make system() with a functional interface for outside use. Note that we use
+# getoutput() from the _common utils, which is built on top of popen(). Using
+# pexpect to get subprocess output produces difficult to parse output, since
+# programs think they are talking to a tty and produce highly formatted output
+# (ls is a good example) that makes them hard to parse.
+system = ProcessHandler().system
+
+def check_pid(pid):
+ try:
+ os.kill(pid, 0)
+ except OSError as err:
+ if err.errno == errno.ESRCH:
+ return False
+ elif err.errno == errno.EPERM:
+ # Don't have permission to signal the process - probably means it exists
+ return True
+ raise
+ else:
+ return True
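
A short sketch of how the POSIX ProcessHandler above reports status: normal exits come back as non-negative integers, while deaths by signal are mapped to negative numbers (from sh's 128+n convention or pexpect's signalstatus). Like the implementation itself, this assumes pexpect is installed and an sh is on PATH; the commented values are indicative only.

# Sketch only: exercises the module-level system() alias defined above.
import os
from IPython.utils._process_posix import ProcessHandler, check_pid, system

print(system('exit 3'))         # expected: 3 (normal exit status)
print(system('kill -TERM $$'))  # typically -15 (terminated by SIGTERM)

# A per-call handler with a longer read timeout, via the constructor above.
ph = ProcessHandler(read_timeout=0.1)
print(ph.getoutput('echo hi'))  # combined stdout/stderr, e.g. 'hi\n'

# check_pid() signals the pid with 0 to see whether it still exists.
print(check_pid(os.getpid()))   # True for our own process
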
diff --git a/contrib/python/ipython/py2/IPython/utils/_process_win32.py b/contrib/python/ipython/py2/IPython/utils/_process_win32.py
index 6d7d0f4197..3ac59b2c29 100644
--- a/contrib/python/ipython/py2/IPython/utils/_process_win32.py
+++ b/contrib/python/ipython/py2/IPython/utils/_process_win32.py
@@ -1,192 +1,192 @@
-"""Windows-specific implementation of process utilities.
-
-This file is only meant to be imported by process.py, not by end-users.
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2010-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-from __future__ import print_function
-
-# stdlib
-import os
-import sys
-import ctypes
-
-from ctypes import c_int, POINTER
-from ctypes.wintypes import LPCWSTR, HLOCAL
-from subprocess import STDOUT
-
-# our own imports
-from ._process_common import read_no_interrupt, process_handler, arg_split as py_arg_split
-from . import py3compat
-from .encoding import DEFAULT_ENCODING
-
-#-----------------------------------------------------------------------------
-# Function definitions
-#-----------------------------------------------------------------------------
-
-class AvoidUNCPath(object):
- """A context manager to protect command execution from UNC paths.
-
- In the Win32 API, commands can't be invoked with the cwd being a UNC path.
- This context manager temporarily changes directory to the 'C:' drive on
- entering, and restores the original working directory on exit.
-
- The context manager returns the starting working directory *if* it made a
- change and None otherwise, so that users can apply the necessary adjustment
- to their system calls in the event of a change.
-
- Examples
- --------
- ::
- cmd = 'dir'
- with AvoidUNCPath() as path:
- if path is not None:
- cmd = '"pushd %s &&"%s' % (path, cmd)
- os.system(cmd)
- """
- def __enter__(self):
- self.path = py3compat.getcwd()
- self.is_unc_path = self.path.startswith(r"\\")
- if self.is_unc_path:
- # change to c drive (as cmd.exe cannot handle UNC addresses)
- os.chdir("C:")
- return self.path
- else:
- # We return None to signal that there was no change in the working
- # directory
- return None
-
- def __exit__(self, exc_type, exc_value, traceback):
- if self.is_unc_path:
- os.chdir(self.path)
-
-
-def _find_cmd(cmd):
- """Find the full path to a .bat or .exe using the win32api module."""
- try:
- from win32api import SearchPath
- except ImportError:
- raise ImportError('you need to have pywin32 installed for this to work')
- else:
- PATH = os.environ['PATH']
- extensions = ['.exe', '.com', '.bat', '.py']
- path = None
- for ext in extensions:
- try:
- path = SearchPath(PATH, cmd, ext)[0]
- except:
- pass
- if path is None:
- raise OSError("command %r not found" % cmd)
- else:
- return path
-
-
-def _system_body(p):
- """Callback for _system."""
- enc = DEFAULT_ENCODING
- for line in read_no_interrupt(p.stdout).splitlines():
- line = line.decode(enc, 'replace')
- print(line, file=sys.stdout)
- for line in read_no_interrupt(p.stderr).splitlines():
- line = line.decode(enc, 'replace')
- print(line, file=sys.stderr)
-
- # Wait for the process to finish so we get its returncode
- return p.wait()
-
-
-def system(cmd):
- """Win32 version of os.system() that works with network shares.
-
- Note that this implementation returns None, as meant for use in IPython.
-
- Parameters
- ----------
- cmd : str or list
- A command to be executed in the system shell.
-
- Returns
- -------
- None : we explicitly do NOT return the subprocess status code, as this
- utility is meant to be used extensively in IPython, where any return value
- would trigger :func:`sys.displayhook` calls.
- """
- # The controller provides interactivity with both
- # stdin and stdout
- #import _process_win32_controller
- #_process_win32_controller.system(cmd)
-
- with AvoidUNCPath() as path:
- if path is not None:
- cmd = '"pushd %s &&"%s' % (path, cmd)
- return process_handler(cmd, _system_body)
-
-def getoutput(cmd):
- """Return standard output of executing cmd in a shell.
-
- Accepts the same arguments as os.system().
-
- Parameters
- ----------
- cmd : str or list
- A command to be executed in the system shell.
-
- Returns
- -------
- stdout : str
- """
-
- with AvoidUNCPath() as path:
- if path is not None:
- cmd = '"pushd %s &&"%s' % (path, cmd)
- out = process_handler(cmd, lambda p: p.communicate()[0], STDOUT)
-
- if out is None:
- out = b''
- return py3compat.bytes_to_str(out)
-
-try:
- CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW
- CommandLineToArgvW.arg_types = [LPCWSTR, POINTER(c_int)]
- CommandLineToArgvW.restype = POINTER(LPCWSTR)
- LocalFree = ctypes.windll.kernel32.LocalFree
- LocalFree.res_type = HLOCAL
- LocalFree.arg_types = [HLOCAL]
-
- def arg_split(commandline, posix=False, strict=True):
- """Split a command line's arguments in a shell-like manner.
-
- This is a special version for Windows that uses a ctypes call to CommandLineToArgvW
- to do the argv splitting. The posix parameter is ignored.
-
- If strict=False, process_common.arg_split(...strict=False) is used instead.
- """
- #CommandLineToArgvW returns path to executable if called with empty string.
- if commandline.strip() == "":
- return []
- if not strict:
- # not really a cl-arg, fallback on _process_common
- return py_arg_split(commandline, posix=posix, strict=strict)
- argvn = c_int()
- result_pointer = CommandLineToArgvW(py3compat.cast_unicode(commandline.lstrip()), ctypes.byref(argvn))
- result_array_type = LPCWSTR * argvn.value
- result = [arg for arg in result_array_type.from_address(ctypes.addressof(result_pointer.contents))]
- retval = LocalFree(result_pointer)
- return result
-except AttributeError:
- arg_split = py_arg_split
-
-def check_pid(pid):
- # OpenProcess returns 0 if no such process (of ours) exists
- # positive int otherwise
- return bool(ctypes.windll.kernel32.OpenProcess(1,0,pid))
+"""Windows-specific implementation of process utilities.
+
+This file is only meant to be imported by process.py, not by end-users.
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2010-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+from __future__ import print_function
+
+# stdlib
+import os
+import sys
+import ctypes
+
+from ctypes import c_int, POINTER
+from ctypes.wintypes import LPCWSTR, HLOCAL
+from subprocess import STDOUT
+
+# our own imports
+from ._process_common import read_no_interrupt, process_handler, arg_split as py_arg_split
+from . import py3compat
+from .encoding import DEFAULT_ENCODING
+
+#-----------------------------------------------------------------------------
+# Function definitions
+#-----------------------------------------------------------------------------
+
+class AvoidUNCPath(object):
+ """A context manager to protect command execution from UNC paths.
+
+ In the Win32 API, commands can't be invoked with the cwd being a UNC path.
+ This context manager temporarily changes directory to the 'C:' drive on
+ entering, and restores the original working directory on exit.
+
+ The context manager returns the starting working directory *if* it made a
+ change and None otherwise, so that users can apply the necessary adjustment
+ to their system calls in the event of a change.
+
+ Examples
+ --------
+ ::
+ cmd = 'dir'
+ with AvoidUNCPath() as path:
+ if path is not None:
+ cmd = '"pushd %s &&"%s' % (path, cmd)
+ os.system(cmd)
+ """
+ def __enter__(self):
+ self.path = py3compat.getcwd()
+ self.is_unc_path = self.path.startswith(r"\\")
+ if self.is_unc_path:
+ # change to c drive (as cmd.exe cannot handle UNC addresses)
+ os.chdir("C:")
+ return self.path
+ else:
+ # We return None to signal that there was no change in the working
+ # directory
+ return None
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ if self.is_unc_path:
+ os.chdir(self.path)
+
+
+def _find_cmd(cmd):
+ """Find the full path to a .bat or .exe using the win32api module."""
+ try:
+ from win32api import SearchPath
+ except ImportError:
+ raise ImportError('you need to have pywin32 installed for this to work')
+ else:
+ PATH = os.environ['PATH']
+ extensions = ['.exe', '.com', '.bat', '.py']
+ path = None
+ for ext in extensions:
+ try:
+ path = SearchPath(PATH, cmd, ext)[0]
+ except:
+ pass
+ if path is None:
+ raise OSError("command %r not found" % cmd)
+ else:
+ return path
+
+
+def _system_body(p):
+ """Callback for _system."""
+ enc = DEFAULT_ENCODING
+ for line in read_no_interrupt(p.stdout).splitlines():
+ line = line.decode(enc, 'replace')
+ print(line, file=sys.stdout)
+ for line in read_no_interrupt(p.stderr).splitlines():
+ line = line.decode(enc, 'replace')
+ print(line, file=sys.stderr)
+
+ # Wait for the process to finish so we get its returncode
+ return p.wait()
+
+
+def system(cmd):
+ """Win32 version of os.system() that works with network shares.
+
+ Note that this implementation returns None, as meant for use in IPython.
+
+ Parameters
+ ----------
+ cmd : str or list
+ A command to be executed in the system shell.
+
+ Returns
+ -------
+ None : we explicitly do NOT return the subprocess status code, as this
+ utility is meant to be used extensively in IPython, where any return value
+ would trigger :func:`sys.displayhook` calls.
+ """
+ # The controller provides interactivity with both
+ # stdin and stdout
+ #import _process_win32_controller
+ #_process_win32_controller.system(cmd)
+
+ with AvoidUNCPath() as path:
+ if path is not None:
+ cmd = '"pushd %s &&"%s' % (path, cmd)
+ return process_handler(cmd, _system_body)
+
+def getoutput(cmd):
+ """Return standard output of executing cmd in a shell.
+
+ Accepts the same arguments as os.system().
+
+ Parameters
+ ----------
+ cmd : str or list
+ A command to be executed in the system shell.
+
+ Returns
+ -------
+ stdout : str
+ """
+
+ with AvoidUNCPath() as path:
+ if path is not None:
+ cmd = '"pushd %s &&"%s' % (path, cmd)
+ out = process_handler(cmd, lambda p: p.communicate()[0], STDOUT)
+
+ if out is None:
+ out = b''
+ return py3compat.bytes_to_str(out)
+
+try:
+ CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW
+ CommandLineToArgvW.arg_types = [LPCWSTR, POINTER(c_int)]
+ CommandLineToArgvW.restype = POINTER(LPCWSTR)
+ LocalFree = ctypes.windll.kernel32.LocalFree
+ LocalFree.res_type = HLOCAL
+ LocalFree.arg_types = [HLOCAL]
+
+ def arg_split(commandline, posix=False, strict=True):
+ """Split a command line's arguments in a shell-like manner.
+
+ This is a special version for Windows that uses a ctypes call to CommandLineToArgvW
+ to do the argv splitting. The posix parameter is ignored.
+
+ If strict=False, process_common.arg_split(...strict=False) is used instead.
+ """
+ #CommandLineToArgvW returns path to executable if called with empty string.
+ if commandline.strip() == "":
+ return []
+ if not strict:
+ # not really a cl-arg, fallback on _process_common
+ return py_arg_split(commandline, posix=posix, strict=strict)
+ argvn = c_int()
+ result_pointer = CommandLineToArgvW(py3compat.cast_unicode(commandline.lstrip()), ctypes.byref(argvn))
+ result_array_type = LPCWSTR * argvn.value
+ result = [arg for arg in result_array_type.from_address(ctypes.addressof(result_pointer.contents))]
+ retval = LocalFree(result_pointer)
+ return result
+except AttributeError:
+ arg_split = py_arg_split
+
+def check_pid(pid):
+ # OpenProcess returns 0 if no such process (of ours) exists
+ # positive int otherwise
+ return bool(ctypes.windll.kernel32.OpenProcess(1,0,pid))
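
A hedged sketch of the Windows-side helpers above. It only runs on Windows, and the commented values are indicative: CommandLineToArgvW (not shlex) does the splitting, and cmd.exe formats the captured output.

# Sketch only: Windows-specific, mirrors the wiring in _process_win32 above.
import os
from IPython.utils._process_win32 import arg_split, check_pid, getoutput

# arg_split() delegates to CommandLineToArgvW, so quotes are stripped and the
# posix flag is ignored (strict=False falls back to _process_common instead).
print(arg_split('a "b c" d'))   # e.g. ['a', 'b c', 'd']

# getoutput() merges stderr into stdout (STDOUT is passed to process_handler).
print(getoutput('cd'))          # e.g. the current directory printed by cmd.exe

# check_pid() uses OpenProcess to test whether the pid exists.
print(check_pid(os.getpid()))   # True for our own process
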
diff --git a/contrib/python/ipython/py2/IPython/utils/_process_win32_controller.py b/contrib/python/ipython/py2/IPython/utils/_process_win32_controller.py
index 607e411916..555eec23b3 100644
--- a/contrib/python/ipython/py2/IPython/utils/_process_win32_controller.py
+++ b/contrib/python/ipython/py2/IPython/utils/_process_win32_controller.py
@@ -1,577 +1,577 @@
-"""Windows-specific implementation of process utilities with direct WinAPI.
-
-This file is meant to be used by process.py
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2010-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-from __future__ import print_function
-
-# stdlib
-import os, sys, threading
-import ctypes, msvcrt
-
-# local imports
-from . import py3compat
-
-# Win32 API types needed for the API calls
-from ctypes import POINTER
-from ctypes.wintypes import HANDLE, HLOCAL, LPVOID, WORD, DWORD, BOOL, \
- ULONG, LPCWSTR
-LPDWORD = POINTER(DWORD)
-LPHANDLE = POINTER(HANDLE)
-ULONG_PTR = POINTER(ULONG)
-class SECURITY_ATTRIBUTES(ctypes.Structure):
- _fields_ = [("nLength", DWORD),
- ("lpSecurityDescriptor", LPVOID),
- ("bInheritHandle", BOOL)]
-LPSECURITY_ATTRIBUTES = POINTER(SECURITY_ATTRIBUTES)
-class STARTUPINFO(ctypes.Structure):
- _fields_ = [("cb", DWORD),
- ("lpReserved", LPCWSTR),
- ("lpDesktop", LPCWSTR),
- ("lpTitle", LPCWSTR),
- ("dwX", DWORD),
- ("dwY", DWORD),
- ("dwXSize", DWORD),
- ("dwYSize", DWORD),
- ("dwXCountChars", DWORD),
- ("dwYCountChars", DWORD),
- ("dwFillAttribute", DWORD),
- ("dwFlags", DWORD),
- ("wShowWindow", WORD),
- ("cbReserved2", WORD),
- ("lpReserved2", LPVOID),
- ("hStdInput", HANDLE),
- ("hStdOutput", HANDLE),
- ("hStdError", HANDLE)]
-LPSTARTUPINFO = POINTER(STARTUPINFO)
-class PROCESS_INFORMATION(ctypes.Structure):
- _fields_ = [("hProcess", HANDLE),
- ("hThread", HANDLE),
- ("dwProcessId", DWORD),
- ("dwThreadId", DWORD)]
-LPPROCESS_INFORMATION = POINTER(PROCESS_INFORMATION)
-
-# Win32 API constants needed
-ERROR_HANDLE_EOF = 38
-ERROR_BROKEN_PIPE = 109
-ERROR_NO_DATA = 232
-HANDLE_FLAG_INHERIT = 0x0001
-STARTF_USESTDHANDLES = 0x0100
-CREATE_SUSPENDED = 0x0004
-CREATE_NEW_CONSOLE = 0x0010
-CREATE_NO_WINDOW = 0x08000000
-STILL_ACTIVE = 259
-WAIT_TIMEOUT = 0x0102
-WAIT_FAILED = 0xFFFFFFFF
-INFINITE = 0xFFFFFFFF
-DUPLICATE_SAME_ACCESS = 0x00000002
-ENABLE_ECHO_INPUT = 0x0004
-ENABLE_LINE_INPUT = 0x0002
-ENABLE_PROCESSED_INPUT = 0x0001
-
-# Win32 API functions needed
-GetLastError = ctypes.windll.kernel32.GetLastError
-GetLastError.argtypes = []
-GetLastError.restype = DWORD
-
-CreateFile = ctypes.windll.kernel32.CreateFileW
-CreateFile.argtypes = [LPCWSTR, DWORD, DWORD, LPVOID, DWORD, DWORD, HANDLE]
-CreateFile.restype = HANDLE
-
-CreatePipe = ctypes.windll.kernel32.CreatePipe
-CreatePipe.argtypes = [POINTER(HANDLE), POINTER(HANDLE),
- LPSECURITY_ATTRIBUTES, DWORD]
-CreatePipe.restype = BOOL
-
-CreateProcess = ctypes.windll.kernel32.CreateProcessW
-CreateProcess.argtypes = [LPCWSTR, LPCWSTR, LPSECURITY_ATTRIBUTES,
- LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPCWSTR, LPSTARTUPINFO,
- LPPROCESS_INFORMATION]
-CreateProcess.restype = BOOL
-
-GetExitCodeProcess = ctypes.windll.kernel32.GetExitCodeProcess
-GetExitCodeProcess.argtypes = [HANDLE, LPDWORD]
-GetExitCodeProcess.restype = BOOL
-
-GetCurrentProcess = ctypes.windll.kernel32.GetCurrentProcess
-GetCurrentProcess.argtypes = []
-GetCurrentProcess.restype = HANDLE
-
-ResumeThread = ctypes.windll.kernel32.ResumeThread
-ResumeThread.argtypes = [HANDLE]
-ResumeThread.restype = DWORD
-
-ReadFile = ctypes.windll.kernel32.ReadFile
-ReadFile.argtypes = [HANDLE, LPVOID, DWORD, LPDWORD, LPVOID]
-ReadFile.restype = BOOL
-
-WriteFile = ctypes.windll.kernel32.WriteFile
-WriteFile.argtypes = [HANDLE, LPVOID, DWORD, LPDWORD, LPVOID]
-WriteFile.restype = BOOL
-
-GetConsoleMode = ctypes.windll.kernel32.GetConsoleMode
-GetConsoleMode.argtypes = [HANDLE, LPDWORD]
-GetConsoleMode.restype = BOOL
-
-SetConsoleMode = ctypes.windll.kernel32.SetConsoleMode
-SetConsoleMode.argtypes = [HANDLE, DWORD]
-SetConsoleMode.restype = BOOL
-
-FlushConsoleInputBuffer = ctypes.windll.kernel32.FlushConsoleInputBuffer
-FlushConsoleInputBuffer.argtypes = [HANDLE]
-FlushConsoleInputBuffer.restype = BOOL
-
-WaitForSingleObject = ctypes.windll.kernel32.WaitForSingleObject
-WaitForSingleObject.argtypes = [HANDLE, DWORD]
-WaitForSingleObject.restype = DWORD
-
-DuplicateHandle = ctypes.windll.kernel32.DuplicateHandle
-DuplicateHandle.argtypes = [HANDLE, HANDLE, HANDLE, LPHANDLE,
- DWORD, BOOL, DWORD]
-DuplicateHandle.restype = BOOL
-
-SetHandleInformation = ctypes.windll.kernel32.SetHandleInformation
-SetHandleInformation.argtypes = [HANDLE, DWORD, DWORD]
-SetHandleInformation.restype = BOOL
-
-CloseHandle = ctypes.windll.kernel32.CloseHandle
-CloseHandle.argtypes = [HANDLE]
-CloseHandle.restype = BOOL
-
-CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW
-CommandLineToArgvW.argtypes = [LPCWSTR, POINTER(ctypes.c_int)]
-CommandLineToArgvW.restype = POINTER(LPCWSTR)
-
-LocalFree = ctypes.windll.kernel32.LocalFree
-LocalFree.argtypes = [HLOCAL]
-LocalFree.restype = HLOCAL
-
-class AvoidUNCPath(object):
- """A context manager to protect command execution from UNC paths.
-
- In the Win32 API, commands can't be invoked with the cwd being a UNC path.
- This context manager temporarily changes directory to the 'C:' drive on
- entering, and restores the original working directory on exit.
-
- The context manager returns the starting working directory *if* it made a
- change and None otherwise, so that users can apply the necessary adjustment
- to their system calls in the event of a change.
-
- Examples
- --------
- ::
- cmd = 'dir'
- with AvoidUNCPath() as path:
- if path is not None:
- cmd = '"pushd %s &&"%s' % (path, cmd)
- os.system(cmd)
- """
- def __enter__(self):
- self.path = py3compat.getcwd()
- self.is_unc_path = self.path.startswith(r"\\")
- if self.is_unc_path:
- # change to c drive (as cmd.exe cannot handle UNC addresses)
- os.chdir("C:")
- return self.path
- else:
- # We return None to signal that there was no change in the working
- # directory
- return None
-
- def __exit__(self, exc_type, exc_value, traceback):
- if self.is_unc_path:
- os.chdir(self.path)
-
-
-class Win32ShellCommandController(object):
- """Runs a shell command in a 'with' context.
-
- This implementation is Win32-specific.
-
- Example:
- # Runs the command interactively with default console stdin/stdout
- with ShellCommandController('python -i') as scc:
- scc.run()
-
- # Runs the command using the provided functions for stdin/stdout
- def my_stdout_func(s):
- # print or save the string 's'
- write_to_stdout(s)
- def my_stdin_func():
- # If input is available, return it as a string.
- if input_available():
- return get_input()
- # If no input available, return None after a short delay to
- # keep from blocking.
- else:
- time.sleep(0.01)
- return None
-
- with ShellCommandController('python -i') as scc:
- scc.run(my_stdout_func, my_stdin_func)
- """
-
- def __init__(self, cmd, mergeout = True):
- """Initializes the shell command controller.
-
- The cmd is the program to execute, and mergeout is
- whether to blend stdout and stderr into one output
- in stdout. Merging them together in this fashion more
- reliably keeps stdout and stderr in the correct order
- especially for interactive shell usage.
- """
- self.cmd = cmd
- self.mergeout = mergeout
-
- def __enter__(self):
- cmd = self.cmd
- mergeout = self.mergeout
-
- self.hstdout, self.hstdin, self.hstderr = None, None, None
- self.piProcInfo = None
- try:
- p_hstdout, c_hstdout, p_hstderr, \
- c_hstderr, p_hstdin, c_hstdin = [None]*6
-
- # SECURITY_ATTRIBUTES with inherit handle set to True
- saAttr = SECURITY_ATTRIBUTES()
- saAttr.nLength = ctypes.sizeof(saAttr)
- saAttr.bInheritHandle = True
- saAttr.lpSecurityDescriptor = None
-
- def create_pipe(uninherit):
- """Creates a Windows pipe, which consists of two handles.
-
- The 'uninherit' parameter controls which handle is not
- inherited by the child process.
- """
- handles = HANDLE(), HANDLE()
- if not CreatePipe(ctypes.byref(handles[0]),
- ctypes.byref(handles[1]), ctypes.byref(saAttr), 0):
- raise ctypes.WinError()
- if not SetHandleInformation(handles[uninherit],
- HANDLE_FLAG_INHERIT, 0):
- raise ctypes.WinError()
- return handles[0].value, handles[1].value
-
- p_hstdout, c_hstdout = create_pipe(uninherit=0)
- # 'mergeout' signals that stdout and stderr should be merged.
- # We do that by using one pipe for both of them.
- if mergeout:
- c_hstderr = HANDLE()
- if not DuplicateHandle(GetCurrentProcess(), c_hstdout,
- GetCurrentProcess(), ctypes.byref(c_hstderr),
- 0, True, DUPLICATE_SAME_ACCESS):
- raise ctypes.WinError()
- else:
- p_hstderr, c_hstderr = create_pipe(uninherit=0)
- c_hstdin, p_hstdin = create_pipe(uninherit=1)
-
- # Create the process object
- piProcInfo = PROCESS_INFORMATION()
- siStartInfo = STARTUPINFO()
- siStartInfo.cb = ctypes.sizeof(siStartInfo)
- siStartInfo.hStdInput = c_hstdin
- siStartInfo.hStdOutput = c_hstdout
- siStartInfo.hStdError = c_hstderr
- siStartInfo.dwFlags = STARTF_USESTDHANDLES
- dwCreationFlags = CREATE_SUSPENDED | CREATE_NO_WINDOW # | CREATE_NEW_CONSOLE
-
- if not CreateProcess(None,
- u"cmd.exe /c " + cmd,
- None, None, True, dwCreationFlags,
- None, None, ctypes.byref(siStartInfo),
- ctypes.byref(piProcInfo)):
- raise ctypes.WinError()
-
- # Close this process's versions of the child handles
- CloseHandle(c_hstdin)
- c_hstdin = None
- CloseHandle(c_hstdout)
- c_hstdout = None
- if c_hstderr is not None:
- CloseHandle(c_hstderr)
- c_hstderr = None
-
- # Transfer ownership of the parent handles to the object
- self.hstdin = p_hstdin
- p_hstdin = None
- self.hstdout = p_hstdout
- p_hstdout = None
- if not mergeout:
- self.hstderr = p_hstderr
- p_hstderr = None
- self.piProcInfo = piProcInfo
-
- finally:
- if p_hstdin:
- CloseHandle(p_hstdin)
- if c_hstdin:
- CloseHandle(c_hstdin)
- if p_hstdout:
- CloseHandle(p_hstdout)
- if c_hstdout:
- CloseHandle(c_hstdout)
- if p_hstderr:
- CloseHandle(p_hstderr)
- if c_hstderr:
- CloseHandle(c_hstderr)
-
- return self
-
- def _stdin_thread(self, handle, hprocess, func, stdout_func):
- exitCode = DWORD()
- bytesWritten = DWORD(0)
- while True:
- #print("stdin thread loop start")
- # Get the input string (may be bytes or unicode)
- data = func()
-
- # None signals to poll whether the process has exited
- if data is None:
- #print("checking for process completion")
- if not GetExitCodeProcess(hprocess, ctypes.byref(exitCode)):
- raise ctypes.WinError()
- if exitCode.value != STILL_ACTIVE:
- return
- # TESTING: Does zero-sized writefile help?
- if not WriteFile(handle, "", 0,
- ctypes.byref(bytesWritten), None):
- raise ctypes.WinError()
- continue
- #print("\nGot str %s\n" % repr(data), file=sys.stderr)
-
- # Encode the string to the console encoding
- if isinstance(data, unicode): #FIXME: Python3
- data = data.encode('utf_8')
-
- # What we have now must be a string of bytes
- if not isinstance(data, str): #FIXME: Python3
- raise RuntimeError("internal stdin function string error")
-
- # An empty string signals EOF
- if len(data) == 0:
- return
-
- # In a windows console, sometimes the input is echoed,
- # but sometimes not. How do we determine when to do this?
- stdout_func(data)
- # WriteFile may not accept all the data at once.
- # Loop until everything is processed
- while len(data) != 0:
- #print("Calling writefile")
- if not WriteFile(handle, data, len(data),
- ctypes.byref(bytesWritten), None):
- # This occurs at exit
- if GetLastError() == ERROR_NO_DATA:
- return
- raise ctypes.WinError()
- #print("Called writefile")
- data = data[bytesWritten.value:]
-
- def _stdout_thread(self, handle, func):
- # Allocate the output buffer
- data = ctypes.create_string_buffer(4096)
- while True:
- bytesRead = DWORD(0)
- if not ReadFile(handle, data, 4096,
- ctypes.byref(bytesRead), None):
- le = GetLastError()
- if le == ERROR_BROKEN_PIPE:
- return
- else:
- raise ctypes.WinError()
- # FIXME: Python3
- s = data.value[0:bytesRead.value]
- #print("\nv: %s" % repr(s), file=sys.stderr)
- func(s.decode('utf_8', 'replace'))
-
- def run(self, stdout_func = None, stdin_func = None, stderr_func = None):
- """Runs the process, using the provided functions for I/O.
-
- The function stdin_func should return strings whenever a
- character or characters become available.
- The functions stdout_func and stderr_func are called whenever
- something is printed to stdout or stderr, respectively.
- These functions are called from different threads (but not
- concurrently, because of the GIL).
- """
- if stdout_func is None and stdin_func is None and stderr_func is None:
- return self._run_stdio()
-
- if stderr_func is not None and self.mergeout:
- raise RuntimeError("Shell command was initiated with "
- "merged stdin/stdout, but a separate stderr_func "
- "was provided to the run() method")
-
- # Create a thread for each input/output handle
- stdin_thread = None
- threads = []
- if stdin_func:
- stdin_thread = threading.Thread(target=self._stdin_thread,
- args=(self.hstdin, self.piProcInfo.hProcess,
- stdin_func, stdout_func))
- threads.append(threading.Thread(target=self._stdout_thread,
- args=(self.hstdout, stdout_func)))
- if not self.mergeout:
- if stderr_func is None:
- stderr_func = stdout_func
- threads.append(threading.Thread(target=self._stdout_thread,
- args=(self.hstderr, stderr_func)))
- # Start the I/O threads and the process
- if ResumeThread(self.piProcInfo.hThread) == 0xFFFFFFFF:
- raise ctypes.WinError()
- if stdin_thread is not None:
- stdin_thread.start()
- for thread in threads:
- thread.start()
- # Wait for the process to complete
- if WaitForSingleObject(self.piProcInfo.hProcess, INFINITE) == \
- WAIT_FAILED:
- raise ctypes.WinError()
- # Wait for the I/O threads to complete
- for thread in threads:
- thread.join()
-
- # Wait for the stdin thread to complete
- if stdin_thread is not None:
- stdin_thread.join()
-
- def _stdin_raw_nonblock(self):
- """Use the raw Win32 handle of sys.stdin to do non-blocking reads"""
- # WARNING: This is experimental, and produces inconsistent results.
- # It's possible for the handle not to be appropriate for use
- # with WaitForSingleObject, among other things.
- handle = msvcrt.get_osfhandle(sys.stdin.fileno())
- result = WaitForSingleObject(handle, 100)
- if result == WAIT_FAILED:
- raise ctypes.WinError()
- elif result == WAIT_TIMEOUT:
- print(".", end='')
- return None
- else:
- data = ctypes.create_string_buffer(256)
- bytesRead = DWORD(0)
- print('?', end='')
-
- if not ReadFile(handle, data, 256,
- ctypes.byref(bytesRead), None):
- raise ctypes.WinError()
- # This ensures the non-blocking works with an actual console
- # Not checking the error, so the processing will still work with
- # other handle types
- FlushConsoleInputBuffer(handle)
-
- data = data.value
- data = data.replace('\r\n', '\n')
- data = data.replace('\r', '\n')
- print(repr(data) + " ", end='')
- return data
-
- def _stdin_raw_block(self):
- """Use a blocking stdin read"""
- # The big problem with the blocking read is that it doesn't
- # exit when it's supposed to in all contexts. An extra
- # key-press may be required to trigger the exit.
- try:
- data = sys.stdin.read(1)
- data = data.replace('\r', '\n')
- return data
- except WindowsError as we:
- if we.winerror == ERROR_NO_DATA:
- # This error occurs when the pipe is closed
- return None
- else:
- # Otherwise let the error propagate
- raise we
-
- def _stdout_raw(self, s):
- """Writes the string to stdout"""
- print(s, end='', file=sys.stdout)
- sys.stdout.flush()
-
- def _stderr_raw(self, s):
- """Writes the string to stdout"""
- print(s, end='', file=sys.stderr)
- sys.stderr.flush()
-
- def _run_stdio(self):
- """Runs the process using the system standard I/O.
-
- IMPORTANT: stdin needs to be asynchronous, so the Python
- sys.stdin object is not used. Instead,
- msvcrt.kbhit/getwch are used asynchronously.
- """
- # Disable Line and Echo mode
- #lpMode = DWORD()
- #handle = msvcrt.get_osfhandle(sys.stdin.fileno())
- #if GetConsoleMode(handle, ctypes.byref(lpMode)):
- # set_console_mode = True
- # if not SetConsoleMode(handle, lpMode.value &
- # ~(ENABLE_ECHO_INPUT | ENABLE_LINE_INPUT | ENABLE_PROCESSED_INPUT)):
- # raise ctypes.WinError()
-
- if self.mergeout:
- return self.run(stdout_func = self._stdout_raw,
- stdin_func = self._stdin_raw_block)
- else:
- return self.run(stdout_func = self._stdout_raw,
- stdin_func = self._stdin_raw_block,
- stderr_func = self._stderr_raw)
-
- # Restore the previous console mode
- #if set_console_mode:
- # if not SetConsoleMode(handle, lpMode.value):
- # raise ctypes.WinError()
-
- def __exit__(self, exc_type, exc_value, traceback):
- if self.hstdin:
- CloseHandle(self.hstdin)
- self.hstdin = None
- if self.hstdout:
- CloseHandle(self.hstdout)
- self.hstdout = None
- if self.hstderr:
- CloseHandle(self.hstderr)
- self.hstderr = None
- if self.piProcInfo is not None:
- CloseHandle(self.piProcInfo.hProcess)
- CloseHandle(self.piProcInfo.hThread)
- self.piProcInfo = None
-
-
-def system(cmd):
- """Win32 version of os.system() that works with network shares.
-
- Note that this implementation returns None, as meant for use in IPython.
-
- Parameters
- ----------
- cmd : str
- A command to be executed in the system shell.
-
- Returns
- -------
- None : we explicitly do NOT return the subprocess status code, as this
- utility is meant to be used extensively in IPython, where any return value
- would trigger :func:`sys.displayhook` calls.
- """
- with AvoidUNCPath() as path:
- if path is not None:
- cmd = '"pushd %s &&"%s' % (path, cmd)
- with Win32ShellCommandController(cmd) as scc:
- scc.run()
-
-
-if __name__ == "__main__":
- print("Test starting!")
- #system("cmd")
- system("python -i")
- print("Test finished!")
+"""Windows-specific implementation of process utilities with direct WinAPI.
+
+This file is meant to be used by process.py
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2010-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+from __future__ import print_function
+
+# stdlib
+import os, sys, threading
+import ctypes, msvcrt
+
+# local imports
+from . import py3compat
+
+# Win32 API types needed for the API calls
+from ctypes import POINTER
+from ctypes.wintypes import HANDLE, HLOCAL, LPVOID, WORD, DWORD, BOOL, \
+ ULONG, LPCWSTR
+LPDWORD = POINTER(DWORD)
+LPHANDLE = POINTER(HANDLE)
+ULONG_PTR = POINTER(ULONG)
+class SECURITY_ATTRIBUTES(ctypes.Structure):
+ _fields_ = [("nLength", DWORD),
+ ("lpSecurityDescriptor", LPVOID),
+ ("bInheritHandle", BOOL)]
+LPSECURITY_ATTRIBUTES = POINTER(SECURITY_ATTRIBUTES)
+class STARTUPINFO(ctypes.Structure):
+ _fields_ = [("cb", DWORD),
+ ("lpReserved", LPCWSTR),
+ ("lpDesktop", LPCWSTR),
+ ("lpTitle", LPCWSTR),
+ ("dwX", DWORD),
+ ("dwY", DWORD),
+ ("dwXSize", DWORD),
+ ("dwYSize", DWORD),
+ ("dwXCountChars", DWORD),
+ ("dwYCountChars", DWORD),
+ ("dwFillAttribute", DWORD),
+ ("dwFlags", DWORD),
+ ("wShowWindow", WORD),
+ ("cbReserved2", WORD),
+ ("lpReserved2", LPVOID),
+ ("hStdInput", HANDLE),
+ ("hStdOutput", HANDLE),
+ ("hStdError", HANDLE)]
+LPSTARTUPINFO = POINTER(STARTUPINFO)
+class PROCESS_INFORMATION(ctypes.Structure):
+ _fields_ = [("hProcess", HANDLE),
+ ("hThread", HANDLE),
+ ("dwProcessId", DWORD),
+ ("dwThreadId", DWORD)]
+LPPROCESS_INFORMATION = POINTER(PROCESS_INFORMATION)
+
+# Win32 API constants needed
+ERROR_HANDLE_EOF = 38
+ERROR_BROKEN_PIPE = 109
+ERROR_NO_DATA = 232
+HANDLE_FLAG_INHERIT = 0x0001
+STARTF_USESTDHANDLES = 0x0100
+CREATE_SUSPENDED = 0x0004
+CREATE_NEW_CONSOLE = 0x0010
+CREATE_NO_WINDOW = 0x08000000
+STILL_ACTIVE = 259
+WAIT_TIMEOUT = 0x0102
+WAIT_FAILED = 0xFFFFFFFF
+INFINITE = 0xFFFFFFFF
+DUPLICATE_SAME_ACCESS = 0x00000002
+ENABLE_ECHO_INPUT = 0x0004
+ENABLE_LINE_INPUT = 0x0002
+ENABLE_PROCESSED_INPUT = 0x0001
+
+# Win32 API functions needed
+GetLastError = ctypes.windll.kernel32.GetLastError
+GetLastError.argtypes = []
+GetLastError.restype = DWORD
+
+CreateFile = ctypes.windll.kernel32.CreateFileW
+CreateFile.argtypes = [LPCWSTR, DWORD, DWORD, LPVOID, DWORD, DWORD, HANDLE]
+CreateFile.restype = HANDLE
+
+CreatePipe = ctypes.windll.kernel32.CreatePipe
+CreatePipe.argtypes = [POINTER(HANDLE), POINTER(HANDLE),
+ LPSECURITY_ATTRIBUTES, DWORD]
+CreatePipe.restype = BOOL
+
+CreateProcess = ctypes.windll.kernel32.CreateProcessW
+CreateProcess.argtypes = [LPCWSTR, LPCWSTR, LPSECURITY_ATTRIBUTES,
+ LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPCWSTR, LPSTARTUPINFO,
+ LPPROCESS_INFORMATION]
+CreateProcess.restype = BOOL
+
+GetExitCodeProcess = ctypes.windll.kernel32.GetExitCodeProcess
+GetExitCodeProcess.argtypes = [HANDLE, LPDWORD]
+GetExitCodeProcess.restype = BOOL
+
+GetCurrentProcess = ctypes.windll.kernel32.GetCurrentProcess
+GetCurrentProcess.argtypes = []
+GetCurrentProcess.restype = HANDLE
+
+ResumeThread = ctypes.windll.kernel32.ResumeThread
+ResumeThread.argtypes = [HANDLE]
+ResumeThread.restype = DWORD
+
+ReadFile = ctypes.windll.kernel32.ReadFile
+ReadFile.argtypes = [HANDLE, LPVOID, DWORD, LPDWORD, LPVOID]
+ReadFile.restype = BOOL
+
+WriteFile = ctypes.windll.kernel32.WriteFile
+WriteFile.argtypes = [HANDLE, LPVOID, DWORD, LPDWORD, LPVOID]
+WriteFile.restype = BOOL
+
+GetConsoleMode = ctypes.windll.kernel32.GetConsoleMode
+GetConsoleMode.argtypes = [HANDLE, LPDWORD]
+GetConsoleMode.restype = BOOL
+
+SetConsoleMode = ctypes.windll.kernel32.SetConsoleMode
+SetConsoleMode.argtypes = [HANDLE, DWORD]
+SetConsoleMode.restype = BOOL
+
+FlushConsoleInputBuffer = ctypes.windll.kernel32.FlushConsoleInputBuffer
+FlushConsoleInputBuffer.argtypes = [HANDLE]
+FlushConsoleInputBuffer.restype = BOOL
+
+WaitForSingleObject = ctypes.windll.kernel32.WaitForSingleObject
+WaitForSingleObject.argtypes = [HANDLE, DWORD]
+WaitForSingleObject.restype = DWORD
+
+DuplicateHandle = ctypes.windll.kernel32.DuplicateHandle
+DuplicateHandle.argtypes = [HANDLE, HANDLE, HANDLE, LPHANDLE,
+ DWORD, BOOL, DWORD]
+DuplicateHandle.restype = BOOL
+
+SetHandleInformation = ctypes.windll.kernel32.SetHandleInformation
+SetHandleInformation.argtypes = [HANDLE, DWORD, DWORD]
+SetHandleInformation.restype = BOOL
+
+CloseHandle = ctypes.windll.kernel32.CloseHandle
+CloseHandle.argtypes = [HANDLE]
+CloseHandle.restype = BOOL
+
+CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW
+CommandLineToArgvW.argtypes = [LPCWSTR, POINTER(ctypes.c_int)]
+CommandLineToArgvW.restype = POINTER(LPCWSTR)
+
+LocalFree = ctypes.windll.kernel32.LocalFree
+LocalFree.argtypes = [HLOCAL]
+LocalFree.restype = HLOCAL
+
+class AvoidUNCPath(object):
+ """A context manager to protect command execution from UNC paths.
+
+ In the Win32 API, commands can't be invoked with the cwd being a UNC path.
+ This context manager temporarily changes directory to the 'C:' drive on
+ entering, and restores the original working directory on exit.
+
+ The context manager returns the starting working directory *if* it made a
+ change and None otherwise, so that users can apply the necessary adjustment
+ to their system calls in the event of a change.
+
+ Examples
+ --------
+ ::
+ cmd = 'dir'
+ with AvoidUNCPath() as path:
+ if path is not None:
+ cmd = '"pushd %s &&"%s' % (path, cmd)
+ os.system(cmd)
+ """
+ def __enter__(self):
+ self.path = py3compat.getcwd()
+ self.is_unc_path = self.path.startswith(r"\\")
+ if self.is_unc_path:
+ # change to c drive (as cmd.exe cannot handle UNC addresses)
+ os.chdir("C:")
+ return self.path
+ else:
+ # We return None to signal that there was no change in the working
+ # directory
+ return None
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ if self.is_unc_path:
+ os.chdir(self.path)
+
+
+class Win32ShellCommandController(object):
+ """Runs a shell command in a 'with' context.
+
+ This implementation is Win32-specific.
+
+ Example:
+ # Runs the command interactively with default console stdin/stdout
+ with ShellCommandController('python -i') as scc:
+ scc.run()
+
+ # Runs the command using the provided functions for stdin/stdout
+ def my_stdout_func(s):
+ # print or save the string 's'
+ write_to_stdout(s)
+ def my_stdin_func():
+ # If input is available, return it as a string.
+ if input_available():
+ return get_input()
+ # If no input available, return None after a short delay to
+ # keep from blocking.
+ else:
+ time.sleep(0.01)
+ return None
+
+ with ShellCommandController('python -i') as scc:
+ scc.run(my_stdout_func, my_stdin_func)
+ """
+
+ def __init__(self, cmd, mergeout = True):
+ """Initializes the shell command controller.
+
+ The cmd is the program to execute, and mergeout is
+ whether to blend stdout and stderr into one output
+ in stdout. Merging them together in this fashion more
+ reliably keeps stdout and stderr in the correct order
+ especially for interactive shell usage.
+ """
+ self.cmd = cmd
+ self.mergeout = mergeout
+
+ def __enter__(self):
+ cmd = self.cmd
+ mergeout = self.mergeout
+
+ self.hstdout, self.hstdin, self.hstderr = None, None, None
+ self.piProcInfo = None
+ try:
+ p_hstdout, c_hstdout, p_hstderr, \
+ c_hstderr, p_hstdin, c_hstdin = [None]*6
+
+ # SECURITY_ATTRIBUTES with inherit handle set to True
+ saAttr = SECURITY_ATTRIBUTES()
+ saAttr.nLength = ctypes.sizeof(saAttr)
+ saAttr.bInheritHandle = True
+ saAttr.lpSecurityDescriptor = None
+
+ def create_pipe(uninherit):
+ """Creates a Windows pipe, which consists of two handles.
+
+ The 'uninherit' parameter controls which handle is not
+ inherited by the child process.
+ """
+ handles = HANDLE(), HANDLE()
+ if not CreatePipe(ctypes.byref(handles[0]),
+ ctypes.byref(handles[1]), ctypes.byref(saAttr), 0):
+ raise ctypes.WinError()
+ if not SetHandleInformation(handles[uninherit],
+ HANDLE_FLAG_INHERIT, 0):
+ raise ctypes.WinError()
+ return handles[0].value, handles[1].value
+
+ p_hstdout, c_hstdout = create_pipe(uninherit=0)
+ # 'mergeout' signals that stdout and stderr should be merged.
+ # We do that by using one pipe for both of them.
+ if mergeout:
+ c_hstderr = HANDLE()
+ if not DuplicateHandle(GetCurrentProcess(), c_hstdout,
+ GetCurrentProcess(), ctypes.byref(c_hstderr),
+ 0, True, DUPLICATE_SAME_ACCESS):
+ raise ctypes.WinError()
+ else:
+ p_hstderr, c_hstderr = create_pipe(uninherit=0)
+ c_hstdin, p_hstdin = create_pipe(uninherit=1)
+
+ # Create the process object
+ piProcInfo = PROCESS_INFORMATION()
+ siStartInfo = STARTUPINFO()
+ siStartInfo.cb = ctypes.sizeof(siStartInfo)
+ siStartInfo.hStdInput = c_hstdin
+ siStartInfo.hStdOutput = c_hstdout
+ siStartInfo.hStdError = c_hstderr
+ siStartInfo.dwFlags = STARTF_USESTDHANDLES
+ dwCreationFlags = CREATE_SUSPENDED | CREATE_NO_WINDOW # | CREATE_NEW_CONSOLE
+
+ if not CreateProcess(None,
+ u"cmd.exe /c " + cmd,
+ None, None, True, dwCreationFlags,
+ None, None, ctypes.byref(siStartInfo),
+ ctypes.byref(piProcInfo)):
+ raise ctypes.WinError()
+
+ # Close this process's versions of the child handles
+ CloseHandle(c_hstdin)
+ c_hstdin = None
+ CloseHandle(c_hstdout)
+ c_hstdout = None
+ if c_hstderr is not None:
+ CloseHandle(c_hstderr)
+ c_hstderr = None
+
+ # Transfer ownership of the parent handles to the object
+ self.hstdin = p_hstdin
+ p_hstdin = None
+ self.hstdout = p_hstdout
+ p_hstdout = None
+ if not mergeout:
+ self.hstderr = p_hstderr
+ p_hstderr = None
+ self.piProcInfo = piProcInfo
+
+ finally:
+ if p_hstdin:
+ CloseHandle(p_hstdin)
+ if c_hstdin:
+ CloseHandle(c_hstdin)
+ if p_hstdout:
+ CloseHandle(p_hstdout)
+ if c_hstdout:
+ CloseHandle(c_hstdout)
+ if p_hstderr:
+ CloseHandle(p_hstderr)
+ if c_hstderr:
+ CloseHandle(c_hstderr)
+
+ return self
+
+ def _stdin_thread(self, handle, hprocess, func, stdout_func):
+ exitCode = DWORD()
+ bytesWritten = DWORD(0)
+ while True:
+ #print("stdin thread loop start")
+ # Get the input string (may be bytes or unicode)
+ data = func()
+
+ # None signals to poll whether the process has exited
+ if data is None:
+ #print("checking for process completion")
+ if not GetExitCodeProcess(hprocess, ctypes.byref(exitCode)):
+ raise ctypes.WinError()
+ if exitCode.value != STILL_ACTIVE:
+ return
+ # TESTING: Does zero-sized writefile help?
+ if not WriteFile(handle, "", 0,
+ ctypes.byref(bytesWritten), None):
+ raise ctypes.WinError()
+ continue
+ #print("\nGot str %s\n" % repr(data), file=sys.stderr)
+
+ # Encode the string to the console encoding
+ if isinstance(data, unicode): #FIXME: Python3
+ data = data.encode('utf_8')
+
+ # What we have now must be a string of bytes
+ if not isinstance(data, str): #FIXME: Python3
+ raise RuntimeError("internal stdin function string error")
+
+ # An empty string signals EOF
+ if len(data) == 0:
+ return
+
+ # In a windows console, sometimes the input is echoed,
+ # but sometimes not. How do we determine when to do this?
+ stdout_func(data)
+ # WriteFile may not accept all the data at once.
+ # Loop until everything is processed
+ while len(data) != 0:
+ #print("Calling writefile")
+ if not WriteFile(handle, data, len(data),
+ ctypes.byref(bytesWritten), None):
+ # This occurs at exit
+ if GetLastError() == ERROR_NO_DATA:
+ return
+ raise ctypes.WinError()
+ #print("Called writefile")
+ data = data[bytesWritten.value:]
+
+ def _stdout_thread(self, handle, func):
+ # Allocate the output buffer
+ data = ctypes.create_string_buffer(4096)
+ while True:
+ bytesRead = DWORD(0)
+ if not ReadFile(handle, data, 4096,
+ ctypes.byref(bytesRead), None):
+ le = GetLastError()
+ if le == ERROR_BROKEN_PIPE:
+ return
+ else:
+ raise ctypes.WinError()
+ # FIXME: Python3
+ s = data.value[0:bytesRead.value]
+ #print("\nv: %s" % repr(s), file=sys.stderr)
+ func(s.decode('utf_8', 'replace'))
+
+ def run(self, stdout_func = None, stdin_func = None, stderr_func = None):
+ """Runs the process, using the provided functions for I/O.
+
+ The function stdin_func should return strings whenever a
+ character or characters become available.
+ The functions stdout_func and stderr_func are called whenever
+ something is printed to stdout or stderr, respectively.
+ These functions are called from different threads (but not
+ concurrently, because of the GIL).
+ """
+ if stdout_func is None and stdin_func is None and stderr_func is None:
+ return self._run_stdio()
+
+ if stderr_func is not None and self.mergeout:
+ raise RuntimeError("Shell command was initiated with "
+ "merged stdin/stdout, but a separate stderr_func "
+ "was provided to the run() method")
+
+ # Create a thread for each input/output handle
+ stdin_thread = None
+ threads = []
+ if stdin_func:
+ stdin_thread = threading.Thread(target=self._stdin_thread,
+ args=(self.hstdin, self.piProcInfo.hProcess,
+ stdin_func, stdout_func))
+ threads.append(threading.Thread(target=self._stdout_thread,
+ args=(self.hstdout, stdout_func)))
+ if not self.mergeout:
+ if stderr_func is None:
+ stderr_func = stdout_func
+ threads.append(threading.Thread(target=self._stdout_thread,
+ args=(self.hstderr, stderr_func)))
+ # Start the I/O threads and the process
+ if ResumeThread(self.piProcInfo.hThread) == 0xFFFFFFFF:
+ raise ctypes.WinError()
+ if stdin_thread is not None:
+ stdin_thread.start()
+ for thread in threads:
+ thread.start()
+ # Wait for the process to complete
+ if WaitForSingleObject(self.piProcInfo.hProcess, INFINITE) == \
+ WAIT_FAILED:
+ raise ctypes.WinError()
+ # Wait for the I/O threads to complete
+ for thread in threads:
+ thread.join()
+
+ # Wait for the stdin thread to complete
+ if stdin_thread is not None:
+ stdin_thread.join()
+
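A minimal sketch of driving run() with custom callbacks, assuming the Win32ShellCommandController class above. The callback contract follows _stdin_thread: stdin_func returns None to let the thread poll for process exit, '' to signal EOF, and any other string to be written to the child's stdin. The Queue-based input feeding shown here is only illustrative.

    from __future__ import print_function
    import Queue  # Python 2 standard library

    keystrokes = Queue.Queue()

    def stdin_func():
        # None -> just poll for process exit; '' -> EOF; anything else is sent
        try:
            return keystrokes.get(timeout=0.1)
        except Queue.Empty:
            return None

    def stdout_func(text):
        print(text, end='')

    with Win32ShellCommandController(u"dir") as scc:
        scc.run(stdout_func=stdout_func, stdin_func=stdin_func)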
+ def _stdin_raw_nonblock(self):
+ """Use the raw Win32 handle of sys.stdin to do non-blocking reads"""
+ # WARNING: This is experimental, and produces inconsistent results.
+ # It's possible for the handle not to be appropriate for use
+ # with WaitForSingleObject, among other things.
+ handle = msvcrt.get_osfhandle(sys.stdin.fileno())
+ result = WaitForSingleObject(handle, 100)
+ if result == WAIT_FAILED:
+ raise ctypes.WinError()
+ elif result == WAIT_TIMEOUT:
+ print(".", end='')
+ return None
+ else:
+ data = ctypes.create_string_buffer(256)
+ bytesRead = DWORD(0)
+ print('?', end='')
+
+ if not ReadFile(handle, data, 256,
+ ctypes.byref(bytesRead), None):
+ raise ctypes.WinError()
+ # This ensures the non-blocking works with an actual console
+ # Not checking the error, so the processing will still work with
+ # other handle types
+ FlushConsoleInputBuffer(handle)
+
+ data = data.value
+ data = data.replace('\r\n', '\n')
+ data = data.replace('\r', '\n')
+ print(repr(data) + " ", end='')
+ return data
+
+ def _stdin_raw_block(self):
+ """Use a blocking stdin read"""
+ # The big problem with the blocking read is that it doesn't
+ # exit when it's supposed to in all contexts. An extra
+ # key-press may be required to trigger the exit.
+ try:
+ data = sys.stdin.read(1)
+ data = data.replace('\r', '\n')
+ return data
+ except WindowsError as we:
+ if we.winerror == ERROR_NO_DATA:
+ # This error occurs when the pipe is closed
+ return None
+ else:
+ # Otherwise let the error propagate
+ raise we
+
+ def _stdout_raw(self, s):
+ """Writes the string to stdout"""
+ print(s, end='', file=sys.stdout)
+ sys.stdout.flush()
+
+ def _stderr_raw(self, s):
+ """Writes the string to stdout"""
+ print(s, end='', file=sys.stderr)
+ sys.stderr.flush()
+
+ def _run_stdio(self):
+ """Runs the process using the system standard I/O.
+
+ IMPORTANT: stdin is serviced from its own thread, so the
+ blocking _stdin_raw_block reader is used as stdin_func here;
+ the experimental _stdin_raw_nonblock variant is defined above.
+ """
+ # Disable Line and Echo mode
+ #lpMode = DWORD()
+ #handle = msvcrt.get_osfhandle(sys.stdin.fileno())
+ #if GetConsoleMode(handle, ctypes.byref(lpMode)):
+ # set_console_mode = True
+ # if not SetConsoleMode(handle, lpMode.value &
+ # ~(ENABLE_ECHO_INPUT | ENABLE_LINE_INPUT | ENABLE_PROCESSED_INPUT)):
+ # raise ctypes.WinError()
+
+ if self.mergeout:
+ return self.run(stdout_func = self._stdout_raw,
+ stdin_func = self._stdin_raw_block)
+ else:
+ return self.run(stdout_func = self._stdout_raw,
+ stdin_func = self._stdin_raw_block,
+ stderr_func = self._stderr_raw)
+
+ # Restore the previous console mode
+ #if set_console_mode:
+ # if not SetConsoleMode(handle, lpMode.value):
+ # raise ctypes.WinError()
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ if self.hstdin:
+ CloseHandle(self.hstdin)
+ self.hstdin = None
+ if self.hstdout:
+ CloseHandle(self.hstdout)
+ self.hstdout = None
+ if self.hstderr:
+ CloseHandle(self.hstderr)
+ self.hstderr = None
+ if self.piProcInfo is not None:
+ CloseHandle(self.piProcInfo.hProcess)
+ CloseHandle(self.piProcInfo.hThread)
+ self.piProcInfo = None
+
+
+def system(cmd):
+ """Win32 version of os.system() that works with network shares.
+
+ Note that this implementation returns None, as meant for use in IPython.
+
+ Parameters
+ ----------
+ cmd : str
+ A command to be executed in the system shell.
+
+ Returns
+ -------
+ None : we explicitly do NOT return the subprocess status code, as this
+ utility is meant to be used extensively in IPython, where any return value
+ would trigger :func:`sys.displayhook` calls.
+ """
+ with AvoidUNCPath() as path:
+ if path is not None:
+ cmd = '"pushd %s &&"%s' % (path, cmd)
+ with Win32ShellCommandController(cmd) as scc:
+ scc.run()
+
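As the docstring notes, the exit status is deliberately discarded so that IPython's displayhook has nothing to print; a small illustrative check (the echo command is arbitrary):

    ret = system('echo hello')
    assert ret is None   # os.system would have returned an exit code here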
+
+if __name__ == "__main__":
+ print("Test starting!")
+ #system("cmd")
+ system("python -i")
+ print("Test finished!")
diff --git a/contrib/python/ipython/py2/IPython/utils/_signatures.py b/contrib/python/ipython/py2/IPython/utils/_signatures.py
index 9f403618ce..20f52b98ed 100644
--- a/contrib/python/ipython/py2/IPython/utils/_signatures.py
+++ b/contrib/python/ipython/py2/IPython/utils/_signatures.py
@@ -1,818 +1,818 @@
-"""Function signature objects for callables.
-
-Back port of Python 3.3's function signature tools from the inspect module,
-modified to be compatible with Python 2.7 and 3.2+.
-"""
-
-#-----------------------------------------------------------------------------
-# Python 3.3 stdlib inspect.py is public domain
-#
-# Backports Copyright (C) 2013 Aaron Iles
-# Used under Apache License Version 2.0
+"""Function signature objects for callables.
+
+Back port of Python 3.3's function signature tools from the inspect module,
+modified to be compatible with Python 2.7 and 3.2+.
+"""
+
+#-----------------------------------------------------------------------------
+# Python 3.3 stdlib inspect.py is public domain
#
-# Further Changes are Copyright (C) 2013 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-from __future__ import absolute_import, division, print_function
-import itertools
-import functools
-import re
-import types
+# Backports Copyright (C) 2013 Aaron Iles
+# Used under Apache License Version 2.0
+#
+# Further Changes are Copyright (C) 2013 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+from __future__ import absolute_import, division, print_function
+import itertools
+import functools
+import re
+import types
import inspect
-
-
-# patch for single-file
-# we don't support 2.6, so we can just import OrderedDict
-from collections import OrderedDict
-
-__version__ = '0.3'
-# end patch
-
-__all__ = ['BoundArguments', 'Parameter', 'Signature', 'signature']
-
-
-_WrapperDescriptor = type(type.__call__)
-_MethodWrapper = type(all.__call__)
-
-_NonUserDefinedCallables = (_WrapperDescriptor,
- _MethodWrapper,
- types.BuiltinFunctionType)
-
-
-def formatannotation(annotation, base_module=None):
- if isinstance(annotation, type):
- if annotation.__module__ in ('builtins', '__builtin__', base_module):
- return annotation.__name__
- return annotation.__module__+'.'+annotation.__name__
- return repr(annotation)
-
-
-def _get_user_defined_method(cls, method_name, *nested):
- try:
- if cls is type:
- return
- meth = getattr(cls, method_name)
- for name in nested:
- meth = getattr(meth, name, meth)
- except AttributeError:
- return
- else:
- if not isinstance(meth, _NonUserDefinedCallables):
- # Once '__signature__' will be added to 'C'-level
- # callables, this check won't be necessary
- return meth
-
-
-def signature(obj):
- '''Get a signature object for the passed callable.'''
-
- if not callable(obj):
- raise TypeError('{0!r} is not a callable object'.format(obj))
-
+
+
+# patch for single-file
+# we don't support 2.6, so we can just import OrderedDict
+from collections import OrderedDict
+
+__version__ = '0.3'
+# end patch
+
+__all__ = ['BoundArguments', 'Parameter', 'Signature', 'signature']
+
+
+_WrapperDescriptor = type(type.__call__)
+_MethodWrapper = type(all.__call__)
+
+_NonUserDefinedCallables = (_WrapperDescriptor,
+ _MethodWrapper,
+ types.BuiltinFunctionType)
+
+
+def formatannotation(annotation, base_module=None):
+ if isinstance(annotation, type):
+ if annotation.__module__ in ('builtins', '__builtin__', base_module):
+ return annotation.__name__
+ return annotation.__module__+'.'+annotation.__name__
+ return repr(annotation)
+
+
+def _get_user_defined_method(cls, method_name, *nested):
+ try:
+ if cls is type:
+ return
+ meth = getattr(cls, method_name)
+ for name in nested:
+ meth = getattr(meth, name, meth)
+ except AttributeError:
+ return
+ else:
+ if not isinstance(meth, _NonUserDefinedCallables):
+ # Once '__signature__' will be added to 'C'-level
+ # callables, this check won't be necessary
+ return meth
+
+
+def signature(obj):
+ '''Get a signature object for the passed callable.'''
+
+ if not callable(obj):
+ raise TypeError('{0!r} is not a callable object'.format(obj))
+
if inspect.ismethod(obj):
- if obj.__self__ is None:
- # Unbound method - treat it as a function (no distinction in Py 3)
- obj = obj.__func__
- else:
- # Bound method: trim off the first parameter (typically self or cls)
- sig = signature(obj.__func__)
- return sig.replace(parameters=tuple(sig.parameters.values())[1:])
-
- try:
- sig = obj.__signature__
- except AttributeError:
- pass
- else:
- if sig is not None:
- return sig
-
- try:
- # Was this function wrapped by a decorator?
- wrapped = obj.__wrapped__
- except AttributeError:
- pass
- else:
- return signature(wrapped)
-
+ if obj.__self__ is None:
+ # Unbound method - treat it as a function (no distinction in Py 3)
+ obj = obj.__func__
+ else:
+ # Bound method: trim off the first parameter (typically self or cls)
+ sig = signature(obj.__func__)
+ return sig.replace(parameters=tuple(sig.parameters.values())[1:])
+
+ try:
+ sig = obj.__signature__
+ except AttributeError:
+ pass
+ else:
+ if sig is not None:
+ return sig
+
+ try:
+ # Was this function wrapped by a decorator?
+ wrapped = obj.__wrapped__
+ except AttributeError:
+ pass
+ else:
+ return signature(wrapped)
+
if inspect.isfunction(obj):
- return Signature.from_function(obj)
-
- if isinstance(obj, functools.partial):
- sig = signature(obj.func)
-
- new_params = OrderedDict(sig.parameters.items())
-
- partial_args = obj.args or ()
- partial_keywords = obj.keywords or {}
- try:
- ba = sig.bind_partial(*partial_args, **partial_keywords)
- except TypeError as ex:
- msg = 'partial object {0!r} has incorrect arguments'.format(obj)
- raise ValueError(msg)
-
- for arg_name, arg_value in ba.arguments.items():
- param = new_params[arg_name]
- if arg_name in partial_keywords:
- # We set a new default value, because the following code
- # is correct:
- #
- # >>> def foo(a): print(a)
- # >>> print(partial(partial(foo, a=10), a=20)())
- # 20
- # >>> print(partial(partial(foo, a=10), a=20)(a=30))
- # 30
- #
- # So, with 'partial' objects, passing a keyword argument is
- # like setting a new default value for the corresponding
- # parameter
- #
- # We also mark this parameter with '_partial_kwarg'
- # flag. Later, in '_bind', the 'default' value of this
- # parameter will be added to 'kwargs', to simulate
- # the 'functools.partial' real call.
- new_params[arg_name] = param.replace(default=arg_value,
- _partial_kwarg=True)
-
- elif (param.kind not in (_VAR_KEYWORD, _VAR_POSITIONAL) and
- not param._partial_kwarg):
- new_params.pop(arg_name)
-
- return sig.replace(parameters=new_params.values())
-
- sig = None
- if isinstance(obj, type):
- # obj is a class or a metaclass
-
- # First, let's see if it has an overloaded __call__ defined
- # in its metaclass
- call = _get_user_defined_method(type(obj), '__call__')
- if call is not None:
- sig = signature(call)
- else:
- # Now we check if the 'obj' class has a '__new__' method
- new = _get_user_defined_method(obj, '__new__')
- if new is not None:
- sig = signature(new)
- else:
- # Finally, we should have at least __init__ implemented
- init = _get_user_defined_method(obj, '__init__')
- if init is not None:
- sig = signature(init)
- elif not isinstance(obj, _NonUserDefinedCallables):
- # An object with __call__
- # We also check that the 'obj' is not an instance of
- # _WrapperDescriptor or _MethodWrapper to avoid
- # infinite recursion (and even potential segfault)
- call = _get_user_defined_method(type(obj), '__call__', 'im_func')
- if call is not None:
- sig = signature(call)
-
- if sig is not None:
- return sig
-
- if isinstance(obj, types.BuiltinFunctionType):
- # Raise a nicer error message for builtins
- msg = 'no signature found for builtin function {0!r}'.format(obj)
- raise ValueError(msg)
-
- raise ValueError('callable {0!r} is not supported by signature'.format(obj))
-
-
-class _void(object):
- '''A private marker - used in Parameter & Signature'''
-
-
-class _empty(object):
- pass
-
-
-class _ParameterKind(int):
- def __new__(self, *args, **kwargs):
- obj = int.__new__(self, *args)
- obj._name = kwargs['name']
- return obj
-
- def __str__(self):
- return self._name
-
- def __repr__(self):
- return '<_ParameterKind: {0!r}>'.format(self._name)
-
-
-_POSITIONAL_ONLY = _ParameterKind(0, name='POSITIONAL_ONLY')
-_POSITIONAL_OR_KEYWORD = _ParameterKind(1, name='POSITIONAL_OR_KEYWORD')
-_VAR_POSITIONAL = _ParameterKind(2, name='VAR_POSITIONAL')
-_KEYWORD_ONLY = _ParameterKind(3, name='KEYWORD_ONLY')
-_VAR_KEYWORD = _ParameterKind(4, name='VAR_KEYWORD')
-
-
-class Parameter(object):
- '''Represents a parameter in a function signature.
-
- Has the following public attributes:
-
- * name : str
- The name of the parameter as a string.
- * default : object
- The default value for the parameter if specified. If the
- parameter has no default value, this attribute is not set.
- * annotation
- The annotation for the parameter if specified. If the
- parameter has no annotation, this attribute is not set.
- * kind : str
- Describes how argument values are bound to the parameter.
- Possible values: `Parameter.POSITIONAL_ONLY`,
- `Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`,
- `Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`.
- '''
-
- __slots__ = ('_name', '_kind', '_default', '_annotation', '_partial_kwarg')
-
- POSITIONAL_ONLY = _POSITIONAL_ONLY
- POSITIONAL_OR_KEYWORD = _POSITIONAL_OR_KEYWORD
- VAR_POSITIONAL = _VAR_POSITIONAL
- KEYWORD_ONLY = _KEYWORD_ONLY
- VAR_KEYWORD = _VAR_KEYWORD
-
- empty = _empty
-
- def __init__(self, name, kind, default=_empty, annotation=_empty,
- _partial_kwarg=False):
-
- if kind not in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD,
- _VAR_POSITIONAL, _KEYWORD_ONLY, _VAR_KEYWORD):
- raise ValueError("invalid value for 'Parameter.kind' attribute")
- self._kind = kind
-
- if default is not _empty:
- if kind in (_VAR_POSITIONAL, _VAR_KEYWORD):
- msg = '{0} parameters cannot have default values'.format(kind)
- raise ValueError(msg)
- self._default = default
- self._annotation = annotation
-
- if name is None:
- if kind != _POSITIONAL_ONLY:
- raise ValueError("None is not a valid name for a "
- "non-positional-only parameter")
- self._name = name
- else:
- name = str(name)
- if kind != _POSITIONAL_ONLY and not re.match(r'[a-z_]\w*$', name, re.I):
- msg = '{0!r} is not a valid parameter name'.format(name)
- raise ValueError(msg)
- self._name = name
-
- self._partial_kwarg = _partial_kwarg
-
- @property
- def name(self):
- return self._name
-
- @property
- def default(self):
- return self._default
-
- @property
- def annotation(self):
- return self._annotation
-
- @property
- def kind(self):
- return self._kind
-
- def replace(self, name=_void, kind=_void, annotation=_void,
- default=_void, _partial_kwarg=_void):
- '''Creates a customized copy of the Parameter.'''
-
- if name is _void:
- name = self._name
-
- if kind is _void:
- kind = self._kind
-
- if annotation is _void:
- annotation = self._annotation
-
- if default is _void:
- default = self._default
-
- if _partial_kwarg is _void:
- _partial_kwarg = self._partial_kwarg
-
- return type(self)(name, kind, default=default, annotation=annotation,
- _partial_kwarg=_partial_kwarg)
-
- def __str__(self):
- kind = self.kind
-
- formatted = self._name
- if kind == _POSITIONAL_ONLY:
- if formatted is None:
- formatted = ''
- formatted = '<{0}>'.format(formatted)
-
- # Add annotation and default value
- if self._annotation is not _empty:
- formatted = '{0}:{1}'.format(formatted,
- formatannotation(self._annotation))
-
- if self._default is not _empty:
- formatted = '{0}={1}'.format(formatted, repr(self._default))
-
- if kind == _VAR_POSITIONAL:
- formatted = '*' + formatted
- elif kind == _VAR_KEYWORD:
- formatted = '**' + formatted
-
- return formatted
-
- def __repr__(self):
- return '<{0} at {1:#x} {2!r}>'.format(self.__class__.__name__,
- id(self), self.name)
-
- def __hash__(self):
- msg = "unhashable type: '{0}'".format(self.__class__.__name__)
- raise TypeError(msg)
-
- def __eq__(self, other):
- return (issubclass(other.__class__, Parameter) and
- self._name == other._name and
- self._kind == other._kind and
- self._default == other._default and
- self._annotation == other._annotation)
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
-
-class BoundArguments(object):
- '''Result of :meth:`Signature.bind` call. Holds the mapping of arguments
- to the function's parameters.
-
- Has the following public attributes:
-
- arguments : :class:`collections.OrderedDict`
- An ordered mutable mapping of parameters' names to arguments' values.
- Does not contain arguments' default values.
- signature : :class:`Signature`
- The Signature object that created this instance.
- args : tuple
- Tuple of positional arguments values.
- kwargs : dict
- Dict of keyword arguments values.
- '''
-
- def __init__(self, signature, arguments):
- self.arguments = arguments
- self._signature = signature
-
- @property
- def signature(self):
- return self._signature
-
- @property
- def args(self):
- args = []
- for param_name, param in self._signature.parameters.items():
- if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
- param._partial_kwarg):
- # Keyword arguments mapped by 'functools.partial'
- # (Parameter._partial_kwarg is True) are mapped
- # in 'BoundArguments.kwargs', along with VAR_KEYWORD &
- # KEYWORD_ONLY
- break
-
- try:
- arg = self.arguments[param_name]
- except KeyError:
- # We're done here. Other arguments
- # will be mapped in 'BoundArguments.kwargs'
- break
- else:
- if param.kind == _VAR_POSITIONAL:
- # *args
- args.extend(arg)
- else:
- # plain argument
- args.append(arg)
-
- return tuple(args)
-
- @property
- def kwargs(self):
- kwargs = {}
- kwargs_started = False
- for param_name, param in self._signature.parameters.items():
- if not kwargs_started:
- if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
- param._partial_kwarg):
- kwargs_started = True
- else:
- if param_name not in self.arguments:
- kwargs_started = True
- continue
-
- if not kwargs_started:
- continue
-
- try:
- arg = self.arguments[param_name]
- except KeyError:
- pass
- else:
- if param.kind == _VAR_KEYWORD:
- # **kwargs
- kwargs.update(arg)
- else:
- # plain keyword argument
- kwargs[param_name] = arg
-
- return kwargs
-
- def __hash__(self):
- msg = "unhashable type: '{0}'".format(self.__class__.__name__)
- raise TypeError(msg)
-
- def __eq__(self, other):
- return (issubclass(other.__class__, BoundArguments) and
- self.signature == other.signature and
- self.arguments == other.arguments)
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
-
-class Signature(object):
- '''A Signature object represents the overall signature of a function.
- It stores a Parameter object for each parameter accepted by the
- function, as well as information specific to the function itself.
-
- A Signature object has the following public attributes:
-
- parameters : :class:`collections.OrderedDict`
- An ordered mapping of parameters' names to the corresponding
- Parameter objects (keyword-only arguments are in the same order
- as listed in `code.co_varnames`).
- return_annotation
- The annotation for the return type of the function if specified.
- If the function has no annotation for its return type, this
- attribute is not set.
- '''
-
- __slots__ = ('_return_annotation', '_parameters')
-
- _parameter_cls = Parameter
- _bound_arguments_cls = BoundArguments
-
- empty = _empty
-
- def __init__(self, parameters=None, return_annotation=_empty,
- __validate_parameters__=True):
- '''Constructs Signature from the given list of Parameter
- objects and 'return_annotation'. All arguments are optional.
- '''
-
- if parameters is None:
- params = OrderedDict()
- else:
- if __validate_parameters__:
- params = OrderedDict()
- top_kind = _POSITIONAL_ONLY
-
- for idx, param in enumerate(parameters):
- kind = param.kind
- if kind < top_kind:
- msg = 'wrong parameter order: {0} before {1}'
- msg = msg.format(top_kind, param.kind)
- raise ValueError(msg)
- else:
- top_kind = kind
-
- name = param.name
- if name is None:
- name = str(idx)
- param = param.replace(name=name)
-
- if name in params:
- msg = 'duplicate parameter name: {0!r}'.format(name)
- raise ValueError(msg)
- params[name] = param
- else:
- params = OrderedDict(((param.name, param)
- for param in parameters))
-
- self._parameters = params
- self._return_annotation = return_annotation
-
- @classmethod
- def from_function(cls, func):
- '''Constructs Signature for the given python function'''
-
+ return Signature.from_function(obj)
+
+ if isinstance(obj, functools.partial):
+ sig = signature(obj.func)
+
+ new_params = OrderedDict(sig.parameters.items())
+
+ partial_args = obj.args or ()
+ partial_keywords = obj.keywords or {}
+ try:
+ ba = sig.bind_partial(*partial_args, **partial_keywords)
+ except TypeError as ex:
+ msg = 'partial object {0!r} has incorrect arguments'.format(obj)
+ raise ValueError(msg)
+
+ for arg_name, arg_value in ba.arguments.items():
+ param = new_params[arg_name]
+ if arg_name in partial_keywords:
+ # We set a new default value, because the following code
+ # is correct:
+ #
+ # >>> def foo(a): print(a)
+ # >>> print(partial(partial(foo, a=10), a=20)())
+ # 20
+ # >>> print(partial(partial(foo, a=10), a=20)(a=30))
+ # 30
+ #
+ # So, with 'partial' objects, passing a keyword argument is
+ # like setting a new default value for the corresponding
+ # parameter
+ #
+ # We also mark this parameter with '_partial_kwarg'
+ # flag. Later, in '_bind', the 'default' value of this
+ # parameter will be added to 'kwargs', to simulate
+ # the 'functools.partial' real call.
+ new_params[arg_name] = param.replace(default=arg_value,
+ _partial_kwarg=True)
+
+ elif (param.kind not in (_VAR_KEYWORD, _VAR_POSITIONAL) and
+ not param._partial_kwarg):
+ new_params.pop(arg_name)
+
+ return sig.replace(parameters=new_params.values())
+
+ sig = None
+ if isinstance(obj, type):
+ # obj is a class or a metaclass
+
+ # First, let's see if it has an overloaded __call__ defined
+ # in its metaclass
+ call = _get_user_defined_method(type(obj), '__call__')
+ if call is not None:
+ sig = signature(call)
+ else:
+ # Now we check if the 'obj' class has a '__new__' method
+ new = _get_user_defined_method(obj, '__new__')
+ if new is not None:
+ sig = signature(new)
+ else:
+ # Finally, we should have at least __init__ implemented
+ init = _get_user_defined_method(obj, '__init__')
+ if init is not None:
+ sig = signature(init)
+ elif not isinstance(obj, _NonUserDefinedCallables):
+ # An object with __call__
+ # We also check that the 'obj' is not an instance of
+ # _WrapperDescriptor or _MethodWrapper to avoid
+ # infinite recursion (and even potential segfault)
+ call = _get_user_defined_method(type(obj), '__call__', 'im_func')
+ if call is not None:
+ sig = signature(call)
+
+ if sig is not None:
+ return sig
+
+ if isinstance(obj, types.BuiltinFunctionType):
+ # Raise a nicer error message for builtins
+ msg = 'no signature found for builtin function {0!r}'.format(obj)
+ raise ValueError(msg)
+
+ raise ValueError('callable {0!r} is not supported by signature'.format(obj))
+
+
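A short sketch of the documented behaviour of this signature() backport: plain functions go through Signature.from_function, and functools.partial objects get the pre-bound keyword folded in as a new default. The example function is made up for illustration.

    import functools

    def greet(name, punctuation='!'):
        return name + punctuation

    sig = signature(greet)
    print(str(sig))              # (name, punctuation='!')
    print(list(sig.parameters))  # ['name', 'punctuation']

    # With functools.partial, the pre-bound keyword becomes the new default
    psig = signature(functools.partial(greet, punctuation='?'))
    print(psig.parameters['punctuation'].default)   # '?'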
+class _void(object):
+ '''A private marker - used in Parameter & Signature'''
+
+
+class _empty(object):
+ pass
+
+
+class _ParameterKind(int):
+ def __new__(self, *args, **kwargs):
+ obj = int.__new__(self, *args)
+ obj._name = kwargs['name']
+ return obj
+
+ def __str__(self):
+ return self._name
+
+ def __repr__(self):
+ return '<_ParameterKind: {0!r}>'.format(self._name)
+
+
+_POSITIONAL_ONLY = _ParameterKind(0, name='POSITIONAL_ONLY')
+_POSITIONAL_OR_KEYWORD = _ParameterKind(1, name='POSITIONAL_OR_KEYWORD')
+_VAR_POSITIONAL = _ParameterKind(2, name='VAR_POSITIONAL')
+_KEYWORD_ONLY = _ParameterKind(3, name='KEYWORD_ONLY')
+_VAR_KEYWORD = _ParameterKind(4, name='VAR_KEYWORD')
+
+
+class Parameter(object):
+ '''Represents a parameter in a function signature.
+
+ Has the following public attributes:
+
+ * name : str
+ The name of the parameter as a string.
+ * default : object
+ The default value for the parameter if specified. If the
+ parameter has no default value, this attribute is not set.
+ * annotation
+ The annotation for the parameter if specified. If the
+ parameter has no annotation, this attribute is not set.
+ * kind : str
+ Describes how argument values are bound to the parameter.
+ Possible values: `Parameter.POSITIONAL_ONLY`,
+ `Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`,
+ `Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`.
+ '''
+
+ __slots__ = ('_name', '_kind', '_default', '_annotation', '_partial_kwarg')
+
+ POSITIONAL_ONLY = _POSITIONAL_ONLY
+ POSITIONAL_OR_KEYWORD = _POSITIONAL_OR_KEYWORD
+ VAR_POSITIONAL = _VAR_POSITIONAL
+ KEYWORD_ONLY = _KEYWORD_ONLY
+ VAR_KEYWORD = _VAR_KEYWORD
+
+ empty = _empty
+
+ def __init__(self, name, kind, default=_empty, annotation=_empty,
+ _partial_kwarg=False):
+
+ if kind not in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD,
+ _VAR_POSITIONAL, _KEYWORD_ONLY, _VAR_KEYWORD):
+ raise ValueError("invalid value for 'Parameter.kind' attribute")
+ self._kind = kind
+
+ if default is not _empty:
+ if kind in (_VAR_POSITIONAL, _VAR_KEYWORD):
+ msg = '{0} parameters cannot have default values'.format(kind)
+ raise ValueError(msg)
+ self._default = default
+ self._annotation = annotation
+
+ if name is None:
+ if kind != _POSITIONAL_ONLY:
+ raise ValueError("None is not a valid name for a "
+ "non-positional-only parameter")
+ self._name = name
+ else:
+ name = str(name)
+ if kind != _POSITIONAL_ONLY and not re.match(r'[a-z_]\w*$', name, re.I):
+ msg = '{0!r} is not a valid parameter name'.format(name)
+ raise ValueError(msg)
+ self._name = name
+
+ self._partial_kwarg = _partial_kwarg
+
+ @property
+ def name(self):
+ return self._name
+
+ @property
+ def default(self):
+ return self._default
+
+ @property
+ def annotation(self):
+ return self._annotation
+
+ @property
+ def kind(self):
+ return self._kind
+
+ def replace(self, name=_void, kind=_void, annotation=_void,
+ default=_void, _partial_kwarg=_void):
+ '''Creates a customized copy of the Parameter.'''
+
+ if name is _void:
+ name = self._name
+
+ if kind is _void:
+ kind = self._kind
+
+ if annotation is _void:
+ annotation = self._annotation
+
+ if default is _void:
+ default = self._default
+
+ if _partial_kwarg is _void:
+ _partial_kwarg = self._partial_kwarg
+
+ return type(self)(name, kind, default=default, annotation=annotation,
+ _partial_kwarg=_partial_kwarg)
+
+ def __str__(self):
+ kind = self.kind
+
+ formatted = self._name
+ if kind == _POSITIONAL_ONLY:
+ if formatted is None:
+ formatted = ''
+ formatted = '<{0}>'.format(formatted)
+
+ # Add annotation and default value
+ if self._annotation is not _empty:
+ formatted = '{0}:{1}'.format(formatted,
+ formatannotation(self._annotation))
+
+ if self._default is not _empty:
+ formatted = '{0}={1}'.format(formatted, repr(self._default))
+
+ if kind == _VAR_POSITIONAL:
+ formatted = '*' + formatted
+ elif kind == _VAR_KEYWORD:
+ formatted = '**' + formatted
+
+ return formatted
+
+ def __repr__(self):
+ return '<{0} at {1:#x} {2!r}>'.format(self.__class__.__name__,
+ id(self), self.name)
+
+ def __hash__(self):
+ msg = "unhashable type: '{0}'".format(self.__class__.__name__)
+ raise TypeError(msg)
+
+ def __eq__(self, other):
+ return (issubclass(other.__class__, Parameter) and
+ self._name == other._name and
+ self._kind == other._kind and
+ self._default == other._default and
+ self._annotation == other._annotation)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+
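An illustration of the Parameter API documented above: replace() returns a modified copy, and var-keyword parameters render with a '**' prefix. The parameter names are arbitrary.

    p = Parameter('options', Parameter.VAR_KEYWORD)
    print(str(p))     # **options
    print(p.kind)     # VAR_KEYWORD

    q = p.replace(name='extras')
    print(str(q))     # **extras
    print(p is q)     # False -- replace() builds a new Parameter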
+class BoundArguments(object):
+ '''Result of :meth:`Signature.bind` call. Holds the mapping of arguments
+ to the function's parameters.
+
+ Has the following public attributes:
+
+ arguments : :class:`collections.OrderedDict`
+ An ordered mutable mapping of parameters' names to arguments' values.
+ Does not contain arguments' default values.
+ signature : :class:`Signature`
+ The Signature object that created this instance.
+ args : tuple
+ Tuple of positional arguments values.
+ kwargs : dict
+ Dict of keyword arguments values.
+ '''
+
+ def __init__(self, signature, arguments):
+ self.arguments = arguments
+ self._signature = signature
+
+ @property
+ def signature(self):
+ return self._signature
+
+ @property
+ def args(self):
+ args = []
+ for param_name, param in self._signature.parameters.items():
+ if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
+ param._partial_kwarg):
+ # Keyword arguments mapped by 'functools.partial'
+ # (Parameter._partial_kwarg is True) are mapped
+ # in 'BoundArguments.kwargs', along with VAR_KEYWORD &
+ # KEYWORD_ONLY
+ break
+
+ try:
+ arg = self.arguments[param_name]
+ except KeyError:
+ # We're done here. Other arguments
+ # will be mapped in 'BoundArguments.kwargs'
+ break
+ else:
+ if param.kind == _VAR_POSITIONAL:
+ # *args
+ args.extend(arg)
+ else:
+ # plain argument
+ args.append(arg)
+
+ return tuple(args)
+
+ @property
+ def kwargs(self):
+ kwargs = {}
+ kwargs_started = False
+ for param_name, param in self._signature.parameters.items():
+ if not kwargs_started:
+ if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
+ param._partial_kwarg):
+ kwargs_started = True
+ else:
+ if param_name not in self.arguments:
+ kwargs_started = True
+ continue
+
+ if not kwargs_started:
+ continue
+
+ try:
+ arg = self.arguments[param_name]
+ except KeyError:
+ pass
+ else:
+ if param.kind == _VAR_KEYWORD:
+ # **kwargs
+ kwargs.update(arg)
+ else:
+ # plain keyword argument
+ kwargs[param_name] = arg
+
+ return kwargs
+
+ def __hash__(self):
+ msg = "unhashable type: '{0}'".format(self.__class__.__name__)
+ raise TypeError(msg)
+
+ def __eq__(self, other):
+ return (issubclass(other.__class__, BoundArguments) and
+ self.signature == other.signature and
+ self.arguments == other.arguments)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+
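A sketch of the args/kwargs split described above. It relies on signature() and Signature.bind(), which are defined elsewhere in this module; note that a positional-or-keyword value passed by keyword still lands in .args when every earlier parameter is bound, so only the **headers catch-all ends up in .kwargs here. The connect() function is invented for illustration.

    def connect(host, port=80, **headers):
        pass

    ba = signature(connect).bind('example.org', port=8080, accept='text/html')
    print(ba.args)     # ('example.org', 8080)
    print(ba.kwargs)   # {'accept': 'text/html'}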
+class Signature(object):
+ '''A Signature object represents the overall signature of a function.
+ It stores a Parameter object for each parameter accepted by the
+ function, as well as information specific to the function itself.
+
+ A Signature object has the following public attributes:
+
+ parameters : :class:`collections.OrderedDict`
+ An ordered mapping of parameters' names to the corresponding
+ Parameter objects (keyword-only arguments are in the same order
+ as listed in `code.co_varnames`).
+ return_annotation
+ The annotation for the return type of the function if specified.
+ If the function has no annotation for its return type, this
+ attribute is not set.
+ '''
+
+ __slots__ = ('_return_annotation', '_parameters')
+
+ _parameter_cls = Parameter
+ _bound_arguments_cls = BoundArguments
+
+ empty = _empty
+
+ def __init__(self, parameters=None, return_annotation=_empty,
+ __validate_parameters__=True):
+ '''Constructs Signature from the given list of Parameter
+ objects and 'return_annotation'. All arguments are optional.
+ '''
+
+ if parameters is None:
+ params = OrderedDict()
+ else:
+ if __validate_parameters__:
+ params = OrderedDict()
+ top_kind = _POSITIONAL_ONLY
+
+ for idx, param in enumerate(parameters):
+ kind = param.kind
+ if kind < top_kind:
+ msg = 'wrong parameter order: {0} before {1}'
+ msg = msg.format(top_kind, param.kind)
+ raise ValueError(msg)
+ else:
+ top_kind = kind
+
+ name = param.name
+ if name is None:
+ name = str(idx)
+ param = param.replace(name=name)
+
+ if name in params:
+ msg = 'duplicate parameter name: {0!r}'.format(name)
+ raise ValueError(msg)
+ params[name] = param
+ else:
+ params = OrderedDict(((param.name, param)
+ for param in parameters))
+
+ self._parameters = params
+ self._return_annotation = return_annotation
+
+ @classmethod
+ def from_function(cls, func):
+ '''Constructs Signature for the given python function'''
+
if not inspect.isfunction(func):
- raise TypeError('{0!r} is not a Python function'.format(func))
-
- Parameter = cls._parameter_cls
-
- # Parameter information.
- func_code = func.__code__
- pos_count = func_code.co_argcount
- arg_names = func_code.co_varnames
- positional = tuple(arg_names[:pos_count])
- keyword_only_count = getattr(func_code, 'co_kwonlyargcount', 0)
- keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)]
- annotations = getattr(func, '__annotations__', {})
- defaults = func.__defaults__
- kwdefaults = getattr(func, '__kwdefaults__', None)
-
- if defaults:
- pos_default_count = len(defaults)
- else:
- pos_default_count = 0
-
- parameters = []
-
- # Non-keyword-only parameters w/o defaults.
- non_default_count = pos_count - pos_default_count
- for name in positional[:non_default_count]:
- annotation = annotations.get(name, _empty)
- parameters.append(Parameter(name, annotation=annotation,
- kind=_POSITIONAL_OR_KEYWORD))
-
- # ... w/ defaults.
- for offset, name in enumerate(positional[non_default_count:]):
- annotation = annotations.get(name, _empty)
- parameters.append(Parameter(name, annotation=annotation,
- kind=_POSITIONAL_OR_KEYWORD,
- default=defaults[offset]))
-
- # *args
- if func_code.co_flags & 0x04:
- name = arg_names[pos_count + keyword_only_count]
- annotation = annotations.get(name, _empty)
- parameters.append(Parameter(name, annotation=annotation,
- kind=_VAR_POSITIONAL))
-
- # Keyword-only parameters.
- for name in keyword_only:
- default = _empty
- if kwdefaults is not None:
- default = kwdefaults.get(name, _empty)
-
- annotation = annotations.get(name, _empty)
- parameters.append(Parameter(name, annotation=annotation,
- kind=_KEYWORD_ONLY,
- default=default))
- # **kwargs
- if func_code.co_flags & 0x08:
- index = pos_count + keyword_only_count
- if func_code.co_flags & 0x04:
- index += 1
-
- name = arg_names[index]
- annotation = annotations.get(name, _empty)
- parameters.append(Parameter(name, annotation=annotation,
- kind=_VAR_KEYWORD))
-
- return cls(parameters,
- return_annotation=annotations.get('return', _empty),
- __validate_parameters__=False)
-
- @property
- def parameters(self):
- try:
- return types.MappingProxyType(self._parameters)
- except AttributeError:
- return OrderedDict(self._parameters.items())
-
- @property
- def return_annotation(self):
- return self._return_annotation
-
- def replace(self, parameters=_void, return_annotation=_void):
- '''Creates a customized copy of the Signature.
- Pass 'parameters' and/or 'return_annotation' arguments
- to override them in the new copy.
- '''
-
- if parameters is _void:
- parameters = self.parameters.values()
-
- if return_annotation is _void:
- return_annotation = self._return_annotation
-
- return type(self)(parameters,
- return_annotation=return_annotation)
-
- def __hash__(self):
- msg = "unhashable type: '{0}'".format(self.__class__.__name__)
- raise TypeError(msg)
-
- def __eq__(self, other):
- if (not issubclass(type(other), Signature) or
- self.return_annotation != other.return_annotation or
- len(self.parameters) != len(other.parameters)):
- return False
-
- other_positions = dict((param, idx)
- for idx, param in enumerate(other.parameters.keys()))
-
- for idx, (param_name, param) in enumerate(self.parameters.items()):
- if param.kind == _KEYWORD_ONLY:
- try:
- other_param = other.parameters[param_name]
- except KeyError:
- return False
- else:
- if param != other_param:
- return False
- else:
- try:
- other_idx = other_positions[param_name]
- except KeyError:
- return False
- else:
- if (idx != other_idx or
- param != other.parameters[param_name]):
- return False
-
- return True
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def _bind(self, args, kwargs, partial=False):
- '''Private method. Don't use directly.'''
-
- arguments = OrderedDict()
-
- parameters = iter(self.parameters.values())
- parameters_ex = ()
- arg_vals = iter(args)
-
- if partial:
- # Support for binding arguments to 'functools.partial' objects.
- # See 'functools.partial' case in 'signature()' implementation
- # for details.
- for param_name, param in self.parameters.items():
- if (param._partial_kwarg and param_name not in kwargs):
- # Simulating 'functools.partial' behavior
- kwargs[param_name] = param.default
-
- while True:
- # Let's iterate through the positional arguments and corresponding
- # parameters
- try:
- arg_val = next(arg_vals)
- except StopIteration:
- # No more positional arguments
- try:
- param = next(parameters)
- except StopIteration:
- # No more parameters. That's it. Just need to check that
- # we have no `kwargs` after this while loop
- break
- else:
- if param.kind == _VAR_POSITIONAL:
- # That's OK, just empty *args. Let's start parsing
- # kwargs
- break
- elif param.name in kwargs:
- if param.kind == _POSITIONAL_ONLY:
- msg = '{arg!r} parameter is positional only, ' \
- 'but was passed as a keyword'
- msg = msg.format(arg=param.name)
- raise TypeError(msg)
- parameters_ex = (param,)
- break
- elif (param.kind == _VAR_KEYWORD or
- param.default is not _empty):
- # That's fine too - we have a default value for this
- # parameter. So, lets start parsing `kwargs`, starting
- # with the current parameter
- parameters_ex = (param,)
- break
- else:
- if partial:
- parameters_ex = (param,)
- break
- else:
- msg = '{arg!r} parameter lacking default value'
- msg = msg.format(arg=param.name)
- raise TypeError(msg)
- else:
- # We have a positional argument to process
- try:
- param = next(parameters)
- except StopIteration:
- raise TypeError('too many positional arguments')
- else:
- if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
- # Looks like we have no parameter for this positional
- # argument
- raise TypeError('too many positional arguments')
-
- if param.kind == _VAR_POSITIONAL:
- # We have an '*args'-like argument, let's fill it with
- # all positional arguments we have left and move on to
- # the next phase
- values = [arg_val]
- values.extend(arg_vals)
- arguments[param.name] = tuple(values)
- break
-
- if param.name in kwargs:
- raise TypeError('multiple values for argument '
- '{arg!r}'.format(arg=param.name))
-
- arguments[param.name] = arg_val
-
- # Now, we iterate through the remaining parameters to process
- # keyword arguments
- kwargs_param = None
- for param in itertools.chain(parameters_ex, parameters):
- if param.kind == _POSITIONAL_ONLY:
- # This should never happen in case of a properly built
- # Signature object (but let's have this check here
- # to ensure correct behaviour just in case)
- raise TypeError('{arg!r} parameter is positional only, '
- 'but was passed as a keyword'. \
- format(arg=param.name))
-
- if param.kind == _VAR_KEYWORD:
- # Memorize that we have a '**kwargs'-like parameter
- kwargs_param = param
- continue
-
- param_name = param.name
- try:
- arg_val = kwargs.pop(param_name)
- except KeyError:
- # We have no value for this parameter. It's fine though,
- # if it has a default value, or it is an '*args'-like
- # parameter, left alone by the processing of positional
- # arguments.
- if (not partial and param.kind != _VAR_POSITIONAL and
- param.default is _empty):
- raise TypeError('{arg!r} parameter lacking default value'. \
- format(arg=param_name))
-
- else:
- arguments[param_name] = arg_val
-
- if kwargs:
- if kwargs_param is not None:
- # Process our '**kwargs'-like parameter
- arguments[kwargs_param.name] = kwargs
- else:
- raise TypeError('too many keyword arguments')
-
- return self._bound_arguments_cls(self, arguments)
-
- def bind(self, *args, **kwargs):
- '''Get a :class:`BoundArguments` object, that maps the passed `args`
- and `kwargs` to the function's signature. Raises :exc:`TypeError`
- if the passed arguments can not be bound.
- '''
- return self._bind(args, kwargs)
-
- def bind_partial(self, *args, **kwargs):
- '''Get a :class:`BoundArguments` object, that partially maps the
- passed `args` and `kwargs` to the function's signature.
- Raises :exc:`TypeError` if the passed arguments can not be bound.
- '''
- return self._bind(args, kwargs, partial=True)
-
- def __str__(self):
- result = []
- render_kw_only_separator = True
- for idx, param in enumerate(self.parameters.values()):
- formatted = str(param)
-
- kind = param.kind
- if kind == _VAR_POSITIONAL:
- # OK, we have an '*args'-like parameter, so we won't need
- # a '*' to separate keyword-only arguments
- render_kw_only_separator = False
- elif kind == _KEYWORD_ONLY and render_kw_only_separator:
- # We have a keyword-only parameter to render and we haven't
- # rendered an '*args'-like parameter before, so add a '*'
- # separator to the parameters list ("foo(arg1, *, arg2)" case)
- result.append('*')
- # This condition should be only triggered once, so
- # reset the flag
- render_kw_only_separator = False
-
- result.append(formatted)
-
- rendered = '({0})'.format(', '.join(result))
-
- if self.return_annotation is not _empty:
- anno = formatannotation(self.return_annotation)
- rendered += ' -> {0}'.format(anno)
-
- return rendered
-
+ raise TypeError('{0!r} is not a Python function'.format(func))
+
+ Parameter = cls._parameter_cls
+
+ # Parameter information.
+ func_code = func.__code__
+ pos_count = func_code.co_argcount
+ arg_names = func_code.co_varnames
+ positional = tuple(arg_names[:pos_count])
+ keyword_only_count = getattr(func_code, 'co_kwonlyargcount', 0)
+ keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)]
+ annotations = getattr(func, '__annotations__', {})
+ defaults = func.__defaults__
+ kwdefaults = getattr(func, '__kwdefaults__', None)
+
+ if defaults:
+ pos_default_count = len(defaults)
+ else:
+ pos_default_count = 0
+
+ parameters = []
+
+ # Non-keyword-only parameters w/o defaults.
+ non_default_count = pos_count - pos_default_count
+ for name in positional[:non_default_count]:
+ annotation = annotations.get(name, _empty)
+ parameters.append(Parameter(name, annotation=annotation,
+ kind=_POSITIONAL_OR_KEYWORD))
+
+ # ... w/ defaults.
+ for offset, name in enumerate(positional[non_default_count:]):
+ annotation = annotations.get(name, _empty)
+ parameters.append(Parameter(name, annotation=annotation,
+ kind=_POSITIONAL_OR_KEYWORD,
+ default=defaults[offset]))
+
+ # *args
+ if func_code.co_flags & 0x04:
+ name = arg_names[pos_count + keyword_only_count]
+ annotation = annotations.get(name, _empty)
+ parameters.append(Parameter(name, annotation=annotation,
+ kind=_VAR_POSITIONAL))
+
+ # Keyword-only parameters.
+ for name in keyword_only:
+ default = _empty
+ if kwdefaults is not None:
+ default = kwdefaults.get(name, _empty)
+
+ annotation = annotations.get(name, _empty)
+ parameters.append(Parameter(name, annotation=annotation,
+ kind=_KEYWORD_ONLY,
+ default=default))
+ # **kwargs
+ if func_code.co_flags & 0x08:
+ index = pos_count + keyword_only_count
+ if func_code.co_flags & 0x04:
+ index += 1
+
+ name = arg_names[index]
+ annotation = annotations.get(name, _empty)
+ parameters.append(Parameter(name, annotation=annotation,
+ kind=_VAR_KEYWORD))
+
+ return cls(parameters,
+ return_annotation=annotations.get('return', _empty),
+ __validate_parameters__=False)
+
+ @property
+ def parameters(self):
+ try:
+ return types.MappingProxyType(self._parameters)
+ except AttributeError:
+ return OrderedDict(self._parameters.items())
+
+ @property
+ def return_annotation(self):
+ return self._return_annotation
+
+ def replace(self, parameters=_void, return_annotation=_void):
+ '''Creates a customized copy of the Signature.
+ Pass 'parameters' and/or 'return_annotation' arguments
+ to override them in the new copy.
+ '''
+
+ if parameters is _void:
+ parameters = self.parameters.values()
+
+ if return_annotation is _void:
+ return_annotation = self._return_annotation
+
+ return type(self)(parameters,
+ return_annotation=return_annotation)
+
+ def __hash__(self):
+ msg = "unhashable type: '{0}'".format(self.__class__.__name__)
+ raise TypeError(msg)
+
+ def __eq__(self, other):
+ if (not issubclass(type(other), Signature) or
+ self.return_annotation != other.return_annotation or
+ len(self.parameters) != len(other.parameters)):
+ return False
+
+ other_positions = dict((param, idx)
+ for idx, param in enumerate(other.parameters.keys()))
+
+ for idx, (param_name, param) in enumerate(self.parameters.items()):
+ if param.kind == _KEYWORD_ONLY:
+ try:
+ other_param = other.parameters[param_name]
+ except KeyError:
+ return False
+ else:
+ if param != other_param:
+ return False
+ else:
+ try:
+ other_idx = other_positions[param_name]
+ except KeyError:
+ return False
+ else:
+ if (idx != other_idx or
+ param != other.parameters[param_name]):
+ return False
+
+ return True
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def _bind(self, args, kwargs, partial=False):
+ '''Private method. Don't use directly.'''
+
+ arguments = OrderedDict()
+
+ parameters = iter(self.parameters.values())
+ parameters_ex = ()
+ arg_vals = iter(args)
+
+ if partial:
+ # Support for binding arguments to 'functools.partial' objects.
+ # See 'functools.partial' case in 'signature()' implementation
+ # for details.
+ for param_name, param in self.parameters.items():
+ if (param._partial_kwarg and param_name not in kwargs):
+ # Simulating 'functools.partial' behavior
+ kwargs[param_name] = param.default
+
+ while True:
+ # Let's iterate through the positional arguments and corresponding
+ # parameters
+ try:
+ arg_val = next(arg_vals)
+ except StopIteration:
+ # No more positional arguments
+ try:
+ param = next(parameters)
+ except StopIteration:
+ # No more parameters. That's it. Just need to check that
+ # we have no `kwargs` after this while loop
+ break
+ else:
+ if param.kind == _VAR_POSITIONAL:
+ # That's OK, just empty *args. Let's start parsing
+ # kwargs
+ break
+ elif param.name in kwargs:
+ if param.kind == _POSITIONAL_ONLY:
+ msg = '{arg!r} parameter is positional only, ' \
+ 'but was passed as a keyword'
+ msg = msg.format(arg=param.name)
+ raise TypeError(msg)
+ parameters_ex = (param,)
+ break
+ elif (param.kind == _VAR_KEYWORD or
+ param.default is not _empty):
+ # That's fine too - we have a default value for this
+ # parameter. So, lets start parsing `kwargs`, starting
+ # with the current parameter
+ parameters_ex = (param,)
+ break
+ else:
+ if partial:
+ parameters_ex = (param,)
+ break
+ else:
+ msg = '{arg!r} parameter lacking default value'
+ msg = msg.format(arg=param.name)
+ raise TypeError(msg)
+ else:
+ # We have a positional argument to process
+ try:
+ param = next(parameters)
+ except StopIteration:
+ raise TypeError('too many positional arguments')
+ else:
+ if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
+ # Looks like we have no parameter for this positional
+ # argument
+ raise TypeError('too many positional arguments')
+
+ if param.kind == _VAR_POSITIONAL:
+ # We have an '*args'-like argument, let's fill it with
+ # all positional arguments we have left and move on to
+ # the next phase
+ values = [arg_val]
+ values.extend(arg_vals)
+ arguments[param.name] = tuple(values)
+ break
+
+ if param.name in kwargs:
+ raise TypeError('multiple values for argument '
+ '{arg!r}'.format(arg=param.name))
+
+ arguments[param.name] = arg_val
+
+ # Now, we iterate through the remaining parameters to process
+ # keyword arguments
+ kwargs_param = None
+ for param in itertools.chain(parameters_ex, parameters):
+ if param.kind == _POSITIONAL_ONLY:
+ # This should never happen in case of a properly built
+ # Signature object (but let's have this check here
+ # to ensure correct behaviour just in case)
+ raise TypeError('{arg!r} parameter is positional only, '
+ 'but was passed as a keyword'. \
+ format(arg=param.name))
+
+ if param.kind == _VAR_KEYWORD:
+ # Memorize that we have a '**kwargs'-like parameter
+ kwargs_param = param
+ continue
+
+ param_name = param.name
+ try:
+ arg_val = kwargs.pop(param_name)
+ except KeyError:
+ # We have no value for this parameter. It's fine though,
+ # if it has a default value, or it is an '*args'-like
+ # parameter, left alone by the processing of positional
+ # arguments.
+ if (not partial and param.kind != _VAR_POSITIONAL and
+ param.default is _empty):
+ raise TypeError('{arg!r} parameter lacking default value'. \
+ format(arg=param_name))
+
+ else:
+ arguments[param_name] = arg_val
+
+ if kwargs:
+ if kwargs_param is not None:
+ # Process our '**kwargs'-like parameter
+ arguments[kwargs_param.name] = kwargs
+ else:
+ raise TypeError('too many keyword arguments')
+
+ return self._bound_arguments_cls(self, arguments)
+
+ def bind(self, *args, **kwargs):
+ '''Get a :class:`BoundArguments` object, that maps the passed `args`
+ and `kwargs` to the function's signature. Raises :exc:`TypeError`
+ if the passed arguments can not be bound.
+ '''
+ return self._bind(args, kwargs)
+
+ def bind_partial(self, *args, **kwargs):
+ '''Get a :class:`BoundArguments` object, that partially maps the
+ passed `args` and `kwargs` to the function's signature.
+ Raises :exc:`TypeError` if the passed arguments can not be bound.
+ '''
+ return self._bind(args, kwargs, partial=True)
+
+ def __str__(self):
+ result = []
+ render_kw_only_separator = True
+ for idx, param in enumerate(self.parameters.values()):
+ formatted = str(param)
+
+ kind = param.kind
+ if kind == _VAR_POSITIONAL:
+ # OK, we have an '*args'-like parameter, so we won't need
+ # a '*' to separate keyword-only arguments
+ render_kw_only_separator = False
+ elif kind == _KEYWORD_ONLY and render_kw_only_separator:
+ # We have a keyword-only parameter to render and we haven't
+ # rendered an '*args'-like parameter before, so add a '*'
+ # separator to the parameters list ("foo(arg1, *, arg2)" case)
+ result.append('*')
+ # This condition should be only triggered once, so
+ # reset the flag
+ render_kw_only_separator = False
+
+ result.append(formatted)
+
+ rendered = '({0})'.format(', '.join(result))
+
+ if self.return_annotation is not _empty:
+ anno = formatannotation(self.return_annotation)
+ rendered += ' -> {0}'.format(anno)
+
+ return rendered
+
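A sketch tying the Signature pieces together: a hand-built Signature renders the lone '*' separator described in __str__, and bind_partial() tolerates an incomplete argument set. The parameter names are made up for illustration.

    params = [Parameter('path', Parameter.POSITIONAL_OR_KEYWORD),
              Parameter('mode', Parameter.KEYWORD_ONLY, default='r')]
    sig = Signature(params)
    print(str(sig))                  # (path, *, mode='r')

    ba = sig.bind_partial(mode='w')  # 'path' can be supplied later
    print(ba.kwargs)                 # {'mode': 'w'}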
diff --git a/contrib/python/ipython/py2/IPython/utils/_sysinfo.py b/contrib/python/ipython/py2/IPython/utils/_sysinfo.py
index f3422bbb3c..21dd2fcceb 100644
--- a/contrib/python/ipython/py2/IPython/utils/_sysinfo.py
+++ b/contrib/python/ipython/py2/IPython/utils/_sysinfo.py
@@ -1,2 +1,2 @@
-# GENERATED BY setup.py
+# GENERATED BY setup.py
commit = u"2348ebbe4"
diff --git a/contrib/python/ipython/py2/IPython/utils/_tokenize_py2.py b/contrib/python/ipython/py2/IPython/utils/_tokenize_py2.py
index ffd7cc5e71..195df96ee5 100644
--- a/contrib/python/ipython/py2/IPython/utils/_tokenize_py2.py
+++ b/contrib/python/ipython/py2/IPython/utils/_tokenize_py2.py
@@ -1,439 +1,439 @@
-"""Patched version of standard library tokenize, to deal with various bugs.
-
-Patches
-
-- Relevant parts of Gareth Rees' patch for Python issue #12691 (untokenizing),
- manually applied.
-- Newlines in comments and blank lines should be either NL or NEWLINE, depending
- on whether they are in a multi-line statement. Filed as Python issue #17061.
-
--------------------------------------------------------------------------------
-Tokenization help for Python programs.
-
-generate_tokens(readline) is a generator that breaks a stream of
-text into Python tokens. It accepts a readline-like method which is called
-repeatedly to get the next line of input (or "" for EOF). It generates
-5-tuples with these members:
-
- the token type (see token.py)
- the token (a string)
- the starting (row, column) indices of the token (a 2-tuple of ints)
- the ending (row, column) indices of the token (a 2-tuple of ints)
- the original line (string)
-
-It is designed to match the working of the Python tokenizer exactly, except
-that it produces COMMENT tokens for comments and gives type OP for all
-operators
-
-Older entry points
- tokenize_loop(readline, tokeneater)
- tokenize(readline, tokeneater=printtoken)
-are the same, except instead of generating tokens, tokeneater is a callback
-function to which the 5 fields described above are passed as 5 arguments,
-each time a new token is found."""
-from __future__ import print_function
-
-__author__ = 'Ka-Ping Yee <ping@lfw.org>'
-__credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
- 'Skip Montanaro, Raymond Hettinger')
-
-import string, re
-from token import *
-
-import token
-__all__ = [x for x in dir(token) if not x.startswith("_")]
-__all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
-del x
-del token
-
-__all__ += ["TokenError"]
-
-COMMENT = N_TOKENS
-tok_name[COMMENT] = 'COMMENT'
-NL = N_TOKENS + 1
-tok_name[NL] = 'NL'
-N_TOKENS += 2
-
-def group(*choices): return '(' + '|'.join(choices) + ')'
-def any(*choices): return group(*choices) + '*'
-def maybe(*choices): return group(*choices) + '?'
-
-Whitespace = r'[ \f\t]*'
-Comment = r'#[^\r\n]*'
-Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
-Name = r'[a-zA-Z_]\w*'
-
-Hexnumber = r'0[xX][\da-fA-F]+[lL]?'
-Octnumber = r'(0[oO][0-7]+)|(0[0-7]*)[lL]?'
-Binnumber = r'0[bB][01]+[lL]?'
-Decnumber = r'[1-9]\d*[lL]?'
-Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)
-Exponent = r'[eE][-+]?\d+'
-Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
-Expfloat = r'\d+' + Exponent
-Floatnumber = group(Pointfloat, Expfloat)
-Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
-Number = group(Imagnumber, Floatnumber, Intnumber)
-
-# Tail end of ' string.
-Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
-# Tail end of " string.
-Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
-# Tail end of ''' string.
-Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
-# Tail end of """ string.
-Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
-Triple = group("[uUbB]?[rR]?'''", '[uUbB]?[rR]?"""')
-# Single-line ' or " string.
-String = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
- r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
-
-# Because of leftmost-then-longest match semantics, be sure to put the
-# longest operators first (e.g., if = came before ==, == would get
-# recognized as two instances of =).
-Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
- r"//=?",
- r"[+\-*/%&|^=<>]=?",
- r"~")
-
-Bracket = '[][(){}]'
-Special = group(r'\r?\n', r'[:;.,`@]')
-Funny = group(Operator, Bracket, Special)
-
-PlainToken = group(Number, Funny, String, Name)
-Token = Ignore + PlainToken
-
-# First (or only) line of ' or " string.
-ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
- group("'", r'\\\r?\n'),
- r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
- group('"', r'\\\r?\n'))
-PseudoExtras = group(r'\\\r?\n', Comment, Triple)
-PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
-
-tokenprog, pseudoprog, single3prog, double3prog = map(
- re.compile, (Token, PseudoToken, Single3, Double3))
-endprogs = {"'": re.compile(Single), '"': re.compile(Double),
- "'''": single3prog, '"""': double3prog,
- "r'''": single3prog, 'r"""': double3prog,
- "u'''": single3prog, 'u"""': double3prog,
- "ur'''": single3prog, 'ur"""': double3prog,
- "R'''": single3prog, 'R"""': double3prog,
- "U'''": single3prog, 'U"""': double3prog,
- "uR'''": single3prog, 'uR"""': double3prog,
- "Ur'''": single3prog, 'Ur"""': double3prog,
- "UR'''": single3prog, 'UR"""': double3prog,
- "b'''": single3prog, 'b"""': double3prog,
- "br'''": single3prog, 'br"""': double3prog,
- "B'''": single3prog, 'B"""': double3prog,
- "bR'''": single3prog, 'bR"""': double3prog,
- "Br'''": single3prog, 'Br"""': double3prog,
- "BR'''": single3prog, 'BR"""': double3prog,
- 'r': None, 'R': None, 'u': None, 'U': None,
- 'b': None, 'B': None}
-
-triple_quoted = {}
-for t in ("'''", '"""',
- "r'''", 'r"""', "R'''", 'R"""',
- "u'''", 'u"""', "U'''", 'U"""',
- "ur'''", 'ur"""', "Ur'''", 'Ur"""',
- "uR'''", 'uR"""', "UR'''", 'UR"""',
- "b'''", 'b"""', "B'''", 'B"""',
- "br'''", 'br"""', "Br'''", 'Br"""',
- "bR'''", 'bR"""', "BR'''", 'BR"""'):
- triple_quoted[t] = t
-single_quoted = {}
-for t in ("'", '"',
- "r'", 'r"', "R'", 'R"',
- "u'", 'u"', "U'", 'U"',
- "ur'", 'ur"', "Ur'", 'Ur"',
- "uR'", 'uR"', "UR'", 'UR"',
- "b'", 'b"', "B'", 'B"',
- "br'", 'br"', "Br'", 'Br"',
- "bR'", 'bR"', "BR'", 'BR"' ):
- single_quoted[t] = t
-
-tabsize = 8
-
-class TokenError(Exception): pass
-
-class StopTokenizing(Exception): pass
-
-def printtoken(type, token, srow_scol, erow_ecol, line): # for testing
- srow, scol = srow_scol
- erow, ecol = erow_ecol
- print("%d,%d-%d,%d:\t%s\t%s" % \
- (srow, scol, erow, ecol, tok_name[type], repr(token)))
-
-def tokenize(readline, tokeneater=printtoken):
- """
- The tokenize() function accepts two parameters: one representing the
- input stream, and one providing an output mechanism for tokenize().
-
- The first parameter, readline, must be a callable object which provides
- the same interface as the readline() method of built-in file objects.
- Each call to the function should return one line of input as a string.
-
- The second parameter, tokeneater, must also be a callable object. It is
- called once for each token, with five arguments, corresponding to the
- tuples generated by generate_tokens().
- """
- try:
- tokenize_loop(readline, tokeneater)
- except StopTokenizing:
- pass
-
-# backwards compatible interface
-def tokenize_loop(readline, tokeneater):
- for token_info in generate_tokens(readline):
- tokeneater(*token_info)
-
-class Untokenizer:
-
- def __init__(self):
- self.tokens = []
- self.prev_row = 1
- self.prev_col = 0
-
-    def add_whitespace(self, tok_type, start):
- row, col = start
- assert row >= self.prev_row
- col_offset = col - self.prev_col
- if col_offset > 0:
- self.tokens.append(" " * col_offset)
- elif row > self.prev_row and tok_type not in (NEWLINE, NL, ENDMARKER):
- # Line was backslash-continued
- self.tokens.append(" ")
-
- def untokenize(self, tokens):
- iterable = iter(tokens)
- for t in iterable:
- if len(t) == 2:
- self.compat(t, iterable)
- break
- tok_type, token, start, end = t[:4]
-            self.add_whitespace(tok_type, start)
- self.tokens.append(token)
- self.prev_row, self.prev_col = end
- if tok_type in (NEWLINE, NL):
- self.prev_row += 1
- self.prev_col = 0
- return "".join(self.tokens)
-
- def compat(self, token, iterable):
- # This import is here to avoid problems when the itertools
- # module is not built yet and tokenize is imported.
- from itertools import chain
- startline = False
- prevstring = False
- indents = []
- toks_append = self.tokens.append
- for tok in chain([token], iterable):
- toknum, tokval = tok[:2]
-
- if toknum in (NAME, NUMBER):
- tokval += ' '
-
- # Insert a space between two consecutive strings
- if toknum == STRING:
- if prevstring:
- tokval = ' ' + tokval
- prevstring = True
- else:
- prevstring = False
-
- if toknum == INDENT:
- indents.append(tokval)
- continue
- elif toknum == DEDENT:
- indents.pop()
- continue
- elif toknum in (NEWLINE, NL):
- startline = True
- elif startline and indents:
- toks_append(indents[-1])
- startline = False
- toks_append(tokval)
-
-def untokenize(iterable):
- """Transform tokens back into Python source code.
-
- Each element returned by the iterable must be a token sequence
- with at least two elements, a token number and token value. If
- only two tokens are passed, the resulting output is poor.
-
- Round-trip invariant for full input:
- Untokenized source will match input source exactly
-
-    Round-trip invariant for limited input:
-        # Output text will tokenize back to the input
- t1 = [tok[:2] for tok in generate_tokens(f.readline)]
- newcode = untokenize(t1)
- readline = iter(newcode.splitlines(1)).next
- t2 = [tok[:2] for tok in generate_tokens(readline)]
- assert t1 == t2
- """
- ut = Untokenizer()
- return ut.untokenize(iterable)
-
-def generate_tokens(readline):
- """
-    The generate_tokens() generator requires one argument, readline, which
- must be a callable object which provides the same interface as the
- readline() method of built-in file objects. Each call to the function
- should return one line of input as a string. Alternately, readline
- can be a callable function terminating with StopIteration:
- readline = open(myfile).next # Example of alternate readline
-
- The generator produces 5-tuples with these members: the token type; the
- token string; a 2-tuple (srow, scol) of ints specifying the row and
- column where the token begins in the source; a 2-tuple (erow, ecol) of
- ints specifying the row and column where the token ends in the source;
- and the line on which the token was found. The line passed is the
- logical line; continuation lines are included.
- """
- lnum = parenlev = continued = 0
- namechars, numchars = string.ascii_letters + '_', '0123456789'
- contstr, needcont = '', 0
- contline = None
- indents = [0]
-
- while 1: # loop over lines in stream
- try:
- line = readline()
- except StopIteration:
- line = ''
- lnum += 1
- pos, max = 0, len(line)
-
- if contstr: # continued string
- if not line:
- raise TokenError("EOF in multi-line string", strstart)
- endmatch = endprog.match(line)
- if endmatch:
- pos = end = endmatch.end(0)
- yield (STRING, contstr + line[:end],
- strstart, (lnum, end), contline + line)
- contstr, needcont = '', 0
- contline = None
- elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
- yield (ERRORTOKEN, contstr + line,
- strstart, (lnum, len(line)), contline)
- contstr = ''
- contline = None
- continue
- else:
- contstr = contstr + line
- contline = contline + line
- continue
-
- elif parenlev == 0 and not continued: # new statement
- if not line: break
- column = 0
- while pos < max: # measure leading whitespace
- if line[pos] == ' ':
- column += 1
- elif line[pos] == '\t':
- column = (column//tabsize + 1)*tabsize
- elif line[pos] == '\f':
- column = 0
- else:
- break
- pos += 1
- if pos == max:
- break
-
- if line[pos] in '#\r\n': # skip comments or blank lines
- if line[pos] == '#':
- comment_token = line[pos:].rstrip('\r\n')
- nl_pos = pos + len(comment_token)
- yield (COMMENT, comment_token,
- (lnum, pos), (lnum, pos + len(comment_token)), line)
- yield (NEWLINE, line[nl_pos:],
- (lnum, nl_pos), (lnum, len(line)), line)
- else:
- yield (NEWLINE, line[pos:],
- (lnum, pos), (lnum, len(line)), line)
- continue
-
- if column > indents[-1]: # count indents or dedents
- indents.append(column)
- yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
- while column < indents[-1]:
- if column not in indents:
- raise IndentationError(
- "unindent does not match any outer indentation level",
- ("<tokenize>", lnum, pos, line))
- indents = indents[:-1]
- yield (DEDENT, '', (lnum, pos), (lnum, pos), line)
-
- else: # continued statement
- if not line:
- raise TokenError("EOF in multi-line statement", (lnum, 0))
- continued = 0
-
- while pos < max:
- pseudomatch = pseudoprog.match(line, pos)
- if pseudomatch: # scan for tokens
- start, end = pseudomatch.span(1)
- spos, epos, pos = (lnum, start), (lnum, end), end
- token, initial = line[start:end], line[start]
-
- if initial in numchars or \
- (initial == '.' and token != '.'): # ordinary number
- yield (NUMBER, token, spos, epos, line)
- elif initial in '\r\n':
- yield (NL if parenlev > 0 else NEWLINE,
- token, spos, epos, line)
- elif initial == '#':
- assert not token.endswith("\n")
- yield (COMMENT, token, spos, epos, line)
- elif token in triple_quoted:
- endprog = endprogs[token]
- endmatch = endprog.match(line, pos)
- if endmatch: # all on one line
- pos = endmatch.end(0)
- token = line[start:pos]
- yield (STRING, token, spos, (lnum, pos), line)
- else:
- strstart = (lnum, start) # multiple lines
- contstr = line[start:]
- contline = line
- break
- elif initial in single_quoted or \
- token[:2] in single_quoted or \
- token[:3] in single_quoted:
- if token[-1] == '\n': # continued string
- strstart = (lnum, start)
- endprog = (endprogs[initial] or endprogs[token[1]] or
- endprogs[token[2]])
- contstr, needcont = line[start:], 1
- contline = line
- break
- else: # ordinary string
- yield (STRING, token, spos, epos, line)
- elif initial in namechars: # ordinary name
- yield (NAME, token, spos, epos, line)
- elif initial == '\\': # continued stmt
- continued = 1
- else:
- if initial in '([{':
- parenlev += 1
- elif initial in ')]}':
- parenlev -= 1
- yield (OP, token, spos, epos, line)
- else:
- yield (ERRORTOKEN, line[pos],
- (lnum, pos), (lnum, pos+1), line)
- pos += 1
-
- for indent in indents[1:]: # pop remaining indent levels
- yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
- yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
-
-if __name__ == '__main__': # testing
- import sys
- if len(sys.argv) > 1:
- tokenize(open(sys.argv[1]).readline)
- else:
- tokenize(sys.stdin.readline)
+"""Patched version of standard library tokenize, to deal with various bugs.
+
+Patches:
+
+- Relevant parts of Gareth Rees' patch for Python issue #12691 (untokenizing),
+ manually applied.
+- Newlines in comments and blank lines should be either NL or NEWLINE, depending
+ on whether they are in a multi-line statement. Filed as Python issue #17061.
+
+-------------------------------------------------------------------------------
+Tokenization help for Python programs.
+
+generate_tokens(readline) is a generator that breaks a stream of
+text into Python tokens. It accepts a readline-like method which is called
+repeatedly to get the next line of input (or "" for EOF). It generates
+5-tuples with these members:
+
+ the token type (see token.py)
+ the token (a string)
+ the starting (row, column) indices of the token (a 2-tuple of ints)
+ the ending (row, column) indices of the token (a 2-tuple of ints)
+ the original line (string)
+
+It is designed to match the working of the Python tokenizer exactly, except
+that it produces COMMENT tokens for comments and gives type OP for all
+operators
+
+Older entry points
+ tokenize_loop(readline, tokeneater)
+ tokenize(readline, tokeneater=printtoken)
+are the same, except instead of generating tokens, tokeneater is a callback
+function to which the 5 fields described above are passed as 5 arguments,
+each time a new token is found."""
+from __future__ import print_function
+
+__author__ = 'Ka-Ping Yee <ping@lfw.org>'
+__credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
+ 'Skip Montanaro, Raymond Hettinger')
+
+import string, re
+from token import *
+
+import token
+__all__ = [x for x in dir(token) if not x.startswith("_")]
+__all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
+del x
+del token
+
+__all__ += ["TokenError"]
+
+COMMENT = N_TOKENS
+tok_name[COMMENT] = 'COMMENT'
+NL = N_TOKENS + 1
+tok_name[NL] = 'NL'
+N_TOKENS += 2
+
+def group(*choices): return '(' + '|'.join(choices) + ')'
+def any(*choices): return group(*choices) + '*'
+def maybe(*choices): return group(*choices) + '?'
+
+Whitespace = r'[ \f\t]*'
+Comment = r'#[^\r\n]*'
+Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
+Name = r'[a-zA-Z_]\w*'
+
+Hexnumber = r'0[xX][\da-fA-F]+[lL]?'
+Octnumber = r'(0[oO][0-7]+)|(0[0-7]*)[lL]?'
+Binnumber = r'0[bB][01]+[lL]?'
+Decnumber = r'[1-9]\d*[lL]?'
+Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)
+Exponent = r'[eE][-+]?\d+'
+Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
+Expfloat = r'\d+' + Exponent
+Floatnumber = group(Pointfloat, Expfloat)
+Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
+Number = group(Imagnumber, Floatnumber, Intnumber)
+
+# Tail end of ' string.
+Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
+# Tail end of " string.
+Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
+# Tail end of ''' string.
+Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
+# Tail end of """ string.
+Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
+Triple = group("[uUbB]?[rR]?'''", '[uUbB]?[rR]?"""')
+# Single-line ' or " string.
+String = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
+ r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
+
+# Because of leftmost-then-longest match semantics, be sure to put the
+# longest operators first (e.g., if = came before ==, == would get
+# recognized as two instances of =).
+Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
+ r"//=?",
+ r"[+\-*/%&|^=<>]=?",
+ r"~")
+
+Bracket = '[][(){}]'
+Special = group(r'\r?\n', r'[:;.,`@]')
+Funny = group(Operator, Bracket, Special)
+
+PlainToken = group(Number, Funny, String, Name)
+Token = Ignore + PlainToken
+
+# First (or only) line of ' or " string.
+ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
+ group("'", r'\\\r?\n'),
+ r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
+ group('"', r'\\\r?\n'))
+PseudoExtras = group(r'\\\r?\n', Comment, Triple)
+PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
+
+tokenprog, pseudoprog, single3prog, double3prog = map(
+ re.compile, (Token, PseudoToken, Single3, Double3))
+endprogs = {"'": re.compile(Single), '"': re.compile(Double),
+ "'''": single3prog, '"""': double3prog,
+ "r'''": single3prog, 'r"""': double3prog,
+ "u'''": single3prog, 'u"""': double3prog,
+ "ur'''": single3prog, 'ur"""': double3prog,
+ "R'''": single3prog, 'R"""': double3prog,
+ "U'''": single3prog, 'U"""': double3prog,
+ "uR'''": single3prog, 'uR"""': double3prog,
+ "Ur'''": single3prog, 'Ur"""': double3prog,
+ "UR'''": single3prog, 'UR"""': double3prog,
+ "b'''": single3prog, 'b"""': double3prog,
+ "br'''": single3prog, 'br"""': double3prog,
+ "B'''": single3prog, 'B"""': double3prog,
+ "bR'''": single3prog, 'bR"""': double3prog,
+ "Br'''": single3prog, 'Br"""': double3prog,
+ "BR'''": single3prog, 'BR"""': double3prog,
+ 'r': None, 'R': None, 'u': None, 'U': None,
+ 'b': None, 'B': None}
+
+triple_quoted = {}
+for t in ("'''", '"""',
+ "r'''", 'r"""', "R'''", 'R"""',
+ "u'''", 'u"""', "U'''", 'U"""',
+ "ur'''", 'ur"""', "Ur'''", 'Ur"""',
+ "uR'''", 'uR"""', "UR'''", 'UR"""',
+ "b'''", 'b"""', "B'''", 'B"""',
+ "br'''", 'br"""', "Br'''", 'Br"""',
+ "bR'''", 'bR"""', "BR'''", 'BR"""'):
+ triple_quoted[t] = t
+single_quoted = {}
+for t in ("'", '"',
+ "r'", 'r"', "R'", 'R"',
+ "u'", 'u"', "U'", 'U"',
+ "ur'", 'ur"', "Ur'", 'Ur"',
+ "uR'", 'uR"', "UR'", 'UR"',
+ "b'", 'b"', "B'", 'B"',
+ "br'", 'br"', "Br'", 'Br"',
+ "bR'", 'bR"', "BR'", 'BR"' ):
+ single_quoted[t] = t
+
+tabsize = 8
+
+class TokenError(Exception): pass
+
+class StopTokenizing(Exception): pass
+
+def printtoken(type, token, srow_scol, erow_ecol, line): # for testing
+ srow, scol = srow_scol
+ erow, ecol = erow_ecol
+ print("%d,%d-%d,%d:\t%s\t%s" % \
+ (srow, scol, erow, ecol, tok_name[type], repr(token)))
+
+def tokenize(readline, tokeneater=printtoken):
+ """
+ The tokenize() function accepts two parameters: one representing the
+ input stream, and one providing an output mechanism for tokenize().
+
+ The first parameter, readline, must be a callable object which provides
+ the same interface as the readline() method of built-in file objects.
+ Each call to the function should return one line of input as a string.
+
+ The second parameter, tokeneater, must also be a callable object. It is
+ called once for each token, with five arguments, corresponding to the
+ tuples generated by generate_tokens().
+ """
+ try:
+ tokenize_loop(readline, tokeneater)
+ except StopTokenizing:
+ pass
+
+# backwards compatible interface
+def tokenize_loop(readline, tokeneater):
+ for token_info in generate_tokens(readline):
+ tokeneater(*token_info)
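
Editorial note (not part of the vendored file): the callback entry points above can be driven by any readline-like callable. A minimal sketch, assuming Python 2 with this module's names in scope; the StringIO source and the name_eater callback are illustrative only:

    from StringIO import StringIO   # Python 2 in-memory text stream

    def name_eater(ttype, tok, start, end, line):
        # tokeneater callback: receives the same 5 fields generate_tokens yields
        if ttype == NAME:
            print("NAME %r at %s" % (tok, start))

    tokenize(StringIO(u"answer = 42\n").readline, name_eater)
    # -> NAME u'answer' at (1, 0)
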
+
+class Untokenizer:
+
+ def __init__(self):
+ self.tokens = []
+ self.prev_row = 1
+ self.prev_col = 0
+
+    def add_whitespace(self, tok_type, start):
+ row, col = start
+ assert row >= self.prev_row
+ col_offset = col - self.prev_col
+ if col_offset > 0:
+ self.tokens.append(" " * col_offset)
+ elif row > self.prev_row and tok_type not in (NEWLINE, NL, ENDMARKER):
+ # Line was backslash-continued
+ self.tokens.append(" ")
+
+ def untokenize(self, tokens):
+ iterable = iter(tokens)
+ for t in iterable:
+ if len(t) == 2:
+ self.compat(t, iterable)
+ break
+ tok_type, token, start, end = t[:4]
+            self.add_whitespace(tok_type, start)
+ self.tokens.append(token)
+ self.prev_row, self.prev_col = end
+ if tok_type in (NEWLINE, NL):
+ self.prev_row += 1
+ self.prev_col = 0
+ return "".join(self.tokens)
+
+ def compat(self, token, iterable):
+ # This import is here to avoid problems when the itertools
+ # module is not built yet and tokenize is imported.
+ from itertools import chain
+ startline = False
+ prevstring = False
+ indents = []
+ toks_append = self.tokens.append
+ for tok in chain([token], iterable):
+ toknum, tokval = tok[:2]
+
+ if toknum in (NAME, NUMBER):
+ tokval += ' '
+
+ # Insert a space between two consecutive strings
+ if toknum == STRING:
+ if prevstring:
+ tokval = ' ' + tokval
+ prevstring = True
+ else:
+ prevstring = False
+
+ if toknum == INDENT:
+ indents.append(tokval)
+ continue
+ elif toknum == DEDENT:
+ indents.pop()
+ continue
+ elif toknum in (NEWLINE, NL):
+ startline = True
+ elif startline and indents:
+ toks_append(indents[-1])
+ startline = False
+ toks_append(tokval)
+
+def untokenize(iterable):
+ """Transform tokens back into Python source code.
+
+ Each element returned by the iterable must be a token sequence
+ with at least two elements, a token number and token value. If
+ only two tokens are passed, the resulting output is poor.
+
+ Round-trip invariant for full input:
+ Untokenized source will match input source exactly
+
+    Round-trip invariant for limited input:
+        # Output text will tokenize back to the input
+ t1 = [tok[:2] for tok in generate_tokens(f.readline)]
+ newcode = untokenize(t1)
+ readline = iter(newcode.splitlines(1)).next
+ t2 = [tok[:2] for tok in generate_tokens(readline)]
+ assert t1 == t2
+ """
+ ut = Untokenizer()
+ return ut.untokenize(iterable)
+
+def generate_tokens(readline):
+ """
+    The generate_tokens() generator requires one argument, readline, which
+ must be a callable object which provides the same interface as the
+ readline() method of built-in file objects. Each call to the function
+ should return one line of input as a string. Alternately, readline
+ can be a callable function terminating with StopIteration:
+ readline = open(myfile).next # Example of alternate readline
+
+ The generator produces 5-tuples with these members: the token type; the
+ token string; a 2-tuple (srow, scol) of ints specifying the row and
+ column where the token begins in the source; a 2-tuple (erow, ecol) of
+ ints specifying the row and column where the token ends in the source;
+ and the line on which the token was found. The line passed is the
+ logical line; continuation lines are included.
+ """
+ lnum = parenlev = continued = 0
+ namechars, numchars = string.ascii_letters + '_', '0123456789'
+ contstr, needcont = '', 0
+ contline = None
+ indents = [0]
+
+ while 1: # loop over lines in stream
+ try:
+ line = readline()
+ except StopIteration:
+ line = ''
+ lnum += 1
+ pos, max = 0, len(line)
+
+ if contstr: # continued string
+ if not line:
+ raise TokenError("EOF in multi-line string", strstart)
+ endmatch = endprog.match(line)
+ if endmatch:
+ pos = end = endmatch.end(0)
+ yield (STRING, contstr + line[:end],
+ strstart, (lnum, end), contline + line)
+ contstr, needcont = '', 0
+ contline = None
+ elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
+ yield (ERRORTOKEN, contstr + line,
+ strstart, (lnum, len(line)), contline)
+ contstr = ''
+ contline = None
+ continue
+ else:
+ contstr = contstr + line
+ contline = contline + line
+ continue
+
+ elif parenlev == 0 and not continued: # new statement
+ if not line: break
+ column = 0
+ while pos < max: # measure leading whitespace
+ if line[pos] == ' ':
+ column += 1
+ elif line[pos] == '\t':
+ column = (column//tabsize + 1)*tabsize
+ elif line[pos] == '\f':
+ column = 0
+ else:
+ break
+ pos += 1
+ if pos == max:
+ break
+
+ if line[pos] in '#\r\n': # skip comments or blank lines
+ if line[pos] == '#':
+ comment_token = line[pos:].rstrip('\r\n')
+ nl_pos = pos + len(comment_token)
+ yield (COMMENT, comment_token,
+ (lnum, pos), (lnum, pos + len(comment_token)), line)
+ yield (NEWLINE, line[nl_pos:],
+ (lnum, nl_pos), (lnum, len(line)), line)
+ else:
+ yield (NEWLINE, line[pos:],
+ (lnum, pos), (lnum, len(line)), line)
+ continue
+
+ if column > indents[-1]: # count indents or dedents
+ indents.append(column)
+ yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
+ while column < indents[-1]:
+ if column not in indents:
+ raise IndentationError(
+ "unindent does not match any outer indentation level",
+ ("<tokenize>", lnum, pos, line))
+ indents = indents[:-1]
+ yield (DEDENT, '', (lnum, pos), (lnum, pos), line)
+
+ else: # continued statement
+ if not line:
+ raise TokenError("EOF in multi-line statement", (lnum, 0))
+ continued = 0
+
+ while pos < max:
+ pseudomatch = pseudoprog.match(line, pos)
+ if pseudomatch: # scan for tokens
+ start, end = pseudomatch.span(1)
+ spos, epos, pos = (lnum, start), (lnum, end), end
+ token, initial = line[start:end], line[start]
+
+ if initial in numchars or \
+ (initial == '.' and token != '.'): # ordinary number
+ yield (NUMBER, token, spos, epos, line)
+ elif initial in '\r\n':
+ yield (NL if parenlev > 0 else NEWLINE,
+ token, spos, epos, line)
+ elif initial == '#':
+ assert not token.endswith("\n")
+ yield (COMMENT, token, spos, epos, line)
+ elif token in triple_quoted:
+ endprog = endprogs[token]
+ endmatch = endprog.match(line, pos)
+ if endmatch: # all on one line
+ pos = endmatch.end(0)
+ token = line[start:pos]
+ yield (STRING, token, spos, (lnum, pos), line)
+ else:
+ strstart = (lnum, start) # multiple lines
+ contstr = line[start:]
+ contline = line
+ break
+ elif initial in single_quoted or \
+ token[:2] in single_quoted or \
+ token[:3] in single_quoted:
+ if token[-1] == '\n': # continued string
+ strstart = (lnum, start)
+ endprog = (endprogs[initial] or endprogs[token[1]] or
+ endprogs[token[2]])
+ contstr, needcont = line[start:], 1
+ contline = line
+ break
+ else: # ordinary string
+ yield (STRING, token, spos, epos, line)
+ elif initial in namechars: # ordinary name
+ yield (NAME, token, spos, epos, line)
+ elif initial == '\\': # continued stmt
+ continued = 1
+ else:
+ if initial in '([{':
+ parenlev += 1
+ elif initial in ')]}':
+ parenlev -= 1
+ yield (OP, token, spos, epos, line)
+ else:
+ yield (ERRORTOKEN, line[pos],
+ (lnum, pos), (lnum, pos+1), line)
+ pos += 1
+
+ for indent in indents[1:]: # pop remaining indent levels
+ yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
+ yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
+
+if __name__ == '__main__': # testing
+ import sys
+ if len(sys.argv) > 1:
+ tokenize(open(sys.argv[1]).readline)
+ else:
+ tokenize(sys.stdin.readline)
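
Editorial note between the two patched files (not part of the commit): a minimal sketch of the generator/untokenize round trip that the docstrings above describe, assuming Python 2 with this module's names in scope; the sample source is illustrative only:

    from StringIO import StringIO   # Python 2 in-memory text stream

    src = u"def f(a, b):\n    return a + b\n"
    toks = list(generate_tokens(StringIO(src).readline))

    # Full 5-tuples: the untokenizing patch reproduces the source exactly.
    assert untokenize(toks) == src

    # (type, string) pairs only: spacing may differ, but re-tokenizing the
    # output yields the same (type, string) sequence.
    pairs = [t[:2] for t in toks]
    regenerated = generate_tokens(StringIO(untokenize(pairs)).readline)
    assert pairs == [t[:2] for t in regenerated]
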
diff --git a/contrib/python/ipython/py2/IPython/utils/_tokenize_py3.py b/contrib/python/ipython/py2/IPython/utils/_tokenize_py3.py
index ca85023c32..ee1fd9e639 100644
--- a/contrib/python/ipython/py2/IPython/utils/_tokenize_py3.py
+++ b/contrib/python/ipython/py2/IPython/utils/_tokenize_py3.py
@@ -1,595 +1,595 @@
-"""Patched version of standard library tokenize, to deal with various bugs.
-
-Based on Python 3.2 code.
-
-Patches:
-
-- Gareth Rees' patch for Python issue #12691 (untokenizing)
- - Except we don't encode the output of untokenize
- - Python 2 compatible syntax, so that it can be byte-compiled at installation
-- Newlines in comments and blank lines should be either NL or NEWLINE, depending
- on whether they are in a multi-line statement. Filed as Python issue #17061.
-- Export generate_tokens & TokenError
-- u and rb literals are allowed under Python 3.3 and above.
-
-------------------------------------------------------------------------------
-Tokenization help for Python programs.
-
-tokenize(readline) is a generator that breaks a stream of bytes into
-Python tokens. It decodes the bytes according to PEP-0263 for
-determining source file encoding.
-
-It accepts a readline-like method which is called repeatedly to get the
-next line of input (or b"" for EOF). It generates 5-tuples with these
-members:
-
- the token type (see token.py)
- the token (a string)
- the starting (row, column) indices of the token (a 2-tuple of ints)
- the ending (row, column) indices of the token (a 2-tuple of ints)
- the original line (string)
-
-It is designed to match the working of the Python tokenizer exactly, except
-that it produces COMMENT tokens for comments and gives type OP for all
-operators. Additionally, all token lists start with an ENCODING token
-which tells you which encoding was used to decode the bytes stream.
-"""
-from __future__ import absolute_import
-
-__author__ = 'Ka-Ping Yee <ping@lfw.org>'
-__credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
- 'Skip Montanaro, Raymond Hettinger, Trent Nelson, '
- 'Michael Foord')
-import builtins
-import re
-import sys
-from token import *
-from codecs import lookup, BOM_UTF8
-import collections
-from io import TextIOWrapper
-cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")
-
-import token
-__all__ = token.__all__ + ["COMMENT", "tokenize", "detect_encoding",
- "NL", "untokenize", "ENCODING", "TokenInfo"]
-del token
-
-__all__ += ["generate_tokens", "TokenError"]
-
-COMMENT = N_TOKENS
-tok_name[COMMENT] = 'COMMENT'
-NL = N_TOKENS + 1
-tok_name[NL] = 'NL'
-ENCODING = N_TOKENS + 2
-tok_name[ENCODING] = 'ENCODING'
-N_TOKENS += 3
-
-class TokenInfo(collections.namedtuple('TokenInfo', 'type string start end line')):
- def __repr__(self):
- annotated_type = '%d (%s)' % (self.type, tok_name[self.type])
- return ('TokenInfo(type=%s, string=%r, start=%r, end=%r, line=%r)' %
- self._replace(type=annotated_type))
-
-def group(*choices): return '(' + '|'.join(choices) + ')'
-def any(*choices): return group(*choices) + '*'
-def maybe(*choices): return group(*choices) + '?'
-
-# Note: we use unicode matching for names ("\w") but ascii matching for
-# number literals.
-Whitespace = r'[ \f\t]*'
-Comment = r'#[^\r\n]*'
-Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
-Name = r'\w+'
-
-Hexnumber = r'0[xX][0-9a-fA-F]+'
-Binnumber = r'0[bB][01]+'
-Octnumber = r'0[oO][0-7]+'
-Decnumber = r'(?:0+|[1-9][0-9]*)'
-Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)
-Exponent = r'[eE][-+]?[0-9]+'
-Pointfloat = group(r'[0-9]+\.[0-9]*', r'\.[0-9]+') + maybe(Exponent)
-Expfloat = r'[0-9]+' + Exponent
-Floatnumber = group(Pointfloat, Expfloat)
-Imagnumber = group(r'[0-9]+[jJ]', Floatnumber + r'[jJ]')
-Number = group(Imagnumber, Floatnumber, Intnumber)
-
-if sys.version_info.minor >= 3:
- StringPrefix = r'(?:[bB][rR]?|[rR][bB]?|[uU])?'
-else:
- StringPrefix = r'(?:[bB]?[rR]?)?'
-
-# Tail end of ' string.
-Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
-# Tail end of " string.
-Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
-# Tail end of ''' string.
-Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
-# Tail end of """ string.
-Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
-Triple = group(StringPrefix + "'''", StringPrefix + '"""')
-# Single-line ' or " string.
-String = group(StringPrefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
- StringPrefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
-
-# Because of leftmost-then-longest match semantics, be sure to put the
-# longest operators first (e.g., if = came before ==, == would get
-# recognized as two instances of =).
-Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"!=",
- r"//=?", r"->",
- r"[+\-*/%&|^=<>]=?",
- r"~")
-
-Bracket = '[][(){}]'
-Special = group(r'\r?\n', r'\.\.\.', r'[:;.,@]')
-Funny = group(Operator, Bracket, Special)
-
-PlainToken = group(Number, Funny, String, Name)
-Token = Ignore + PlainToken
-
-# First (or only) line of ' or " string.
-ContStr = group(StringPrefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
- group("'", r'\\\r?\n'),
- StringPrefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
- group('"', r'\\\r?\n'))
-PseudoExtras = group(r'\\\r?\n', Comment, Triple)
-PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
-
-def _compile(expr):
- return re.compile(expr, re.UNICODE)
-
-tokenprog, pseudoprog, single3prog, double3prog = map(
- _compile, (Token, PseudoToken, Single3, Double3))
-endprogs = {"'": _compile(Single), '"': _compile(Double),
- "'''": single3prog, '"""': double3prog,
- "r'''": single3prog, 'r"""': double3prog,
- "b'''": single3prog, 'b"""': double3prog,
- "R'''": single3prog, 'R"""': double3prog,
- "B'''": single3prog, 'B"""': double3prog,
- "br'''": single3prog, 'br"""': double3prog,
- "bR'''": single3prog, 'bR"""': double3prog,
- "Br'''": single3prog, 'Br"""': double3prog,
- "BR'''": single3prog, 'BR"""': double3prog,
- 'r': None, 'R': None, 'b': None, 'B': None}
-
-triple_quoted = {}
-for t in ("'''", '"""',
- "r'''", 'r"""', "R'''", 'R"""',
- "b'''", 'b"""', "B'''", 'B"""',
- "br'''", 'br"""', "Br'''", 'Br"""',
- "bR'''", 'bR"""', "BR'''", 'BR"""'):
- triple_quoted[t] = t
-single_quoted = {}
-for t in ("'", '"',
- "r'", 'r"', "R'", 'R"',
- "b'", 'b"', "B'", 'B"',
- "br'", 'br"', "Br'", 'Br"',
- "bR'", 'bR"', "BR'", 'BR"' ):
- single_quoted[t] = t
-
-if sys.version_info.minor >= 3:
- # Python 3.3
- for _prefix in ['rb', 'rB', 'Rb', 'RB', 'u', 'U']:
- _t2 = _prefix+'"""'
- endprogs[_t2] = double3prog
- triple_quoted[_t2] = _t2
- _t1 = _prefix + "'''"
- endprogs[_t1] = single3prog
- triple_quoted[_t1] = _t1
- single_quoted[_prefix+'"'] = _prefix+'"'
- single_quoted[_prefix+"'"] = _prefix+"'"
- del _prefix, _t2, _t1
- endprogs['u'] = None
- endprogs['U'] = None
-
-del _compile
-
-tabsize = 8
-
-class TokenError(Exception): pass
-
-class StopTokenizing(Exception): pass
-
-
-class Untokenizer:
-
- def __init__(self):
- self.tokens = []
- self.prev_row = 1
- self.prev_col = 0
- self.encoding = 'utf-8'
-
- def add_whitespace(self, tok_type, start):
- row, col = start
- assert row >= self.prev_row
- col_offset = col - self.prev_col
- if col_offset > 0:
- self.tokens.append(" " * col_offset)
- elif row > self.prev_row and tok_type not in (NEWLINE, NL, ENDMARKER):
- # Line was backslash-continued.
- self.tokens.append(" ")
-
- def untokenize(self, tokens):
- iterable = iter(tokens)
- for t in iterable:
- if len(t) == 2:
- self.compat(t, iterable)
- break
- tok_type, token, start, end = t[:4]
- if tok_type == ENCODING:
- self.encoding = token
- continue
- self.add_whitespace(tok_type, start)
- self.tokens.append(token)
- self.prev_row, self.prev_col = end
- if tok_type in (NEWLINE, NL):
- self.prev_row += 1
- self.prev_col = 0
- return "".join(self.tokens)
-
- def compat(self, token, iterable):
- # This import is here to avoid problems when the itertools
- # module is not built yet and tokenize is imported.
- from itertools import chain
- startline = False
- prevstring = False
- indents = []
- toks_append = self.tokens.append
-
- for tok in chain([token], iterable):
- toknum, tokval = tok[:2]
- if toknum == ENCODING:
- self.encoding = tokval
- continue
-
- if toknum in (NAME, NUMBER):
- tokval += ' '
-
- # Insert a space between two consecutive strings
- if toknum == STRING:
- if prevstring:
- tokval = ' ' + tokval
- prevstring = True
- else:
- prevstring = False
-
- if toknum == INDENT:
- indents.append(tokval)
- continue
- elif toknum == DEDENT:
- indents.pop()
- continue
- elif toknum in (NEWLINE, NL):
- startline = True
- elif startline and indents:
- toks_append(indents[-1])
- startline = False
- toks_append(tokval)
-
-
-def untokenize(tokens):
- """
-    Convert ``tokens`` (an iterable) back into Python source code. Unlike
-    the standard library version, the result is returned as a str rather
-    than being encoded to bytes (see the patch notes above); any ENCODING
-    token encountered is recorded but not applied to the output.
-
- The result is guaranteed to tokenize back to match the input so that
- the conversion is lossless and round-trips are assured. The
- guarantee applies only to the token type and token string as the
- spacing between tokens (column positions) may change.
-
- :func:`untokenize` has two modes. If the input tokens are sequences
- of length 2 (``type``, ``string``) then spaces are added as necessary to
- preserve the round-trip property.
-
- If the input tokens are sequences of length 4 or more (``type``,
- ``string``, ``start``, ``end``), as returned by :func:`tokenize`, then
- spaces are added so that each token appears in the result at the
- position indicated by ``start`` and ``end``, if possible.
- """
- return Untokenizer().untokenize(tokens)
-
-
-def _get_normal_name(orig_enc):
- """Imitates get_normal_name in tokenizer.c."""
- # Only care about the first 12 characters.
- enc = orig_enc[:12].lower().replace("_", "-")
- if enc == "utf-8" or enc.startswith("utf-8-"):
- return "utf-8"
- if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
- enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
- return "iso-8859-1"
- return orig_enc
-
-def detect_encoding(readline):
- """
- The detect_encoding() function is used to detect the encoding that should
-    be used to decode a Python source file. It requires one argument, readline,
- in the same way as the tokenize() generator.
-
- It will call readline a maximum of twice, and return the encoding used
- (as a string) and a list of any lines (left as bytes) it has read in.
-
- It detects the encoding from the presence of a utf-8 bom or an encoding
- cookie as specified in pep-0263. If both a bom and a cookie are present,
- but disagree, a SyntaxError will be raised. If the encoding cookie is an
- invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found,
- 'utf-8-sig' is returned.
-
- If no encoding is specified, then the default of 'utf-8' will be returned.
- """
- bom_found = False
- encoding = None
- default = 'utf-8'
- def read_or_stop():
- try:
- return readline()
- except StopIteration:
- return b''
-
- def find_cookie(line):
- try:
- # Decode as UTF-8. Either the line is an encoding declaration,
- # in which case it should be pure ASCII, or it must be UTF-8
- # per default encoding.
- line_string = line.decode('utf-8')
- except UnicodeDecodeError:
- raise SyntaxError("invalid or missing encoding declaration")
-
- matches = cookie_re.findall(line_string)
- if not matches:
- return None
- encoding = _get_normal_name(matches[0])
- try:
- codec = lookup(encoding)
- except LookupError:
- # This behaviour mimics the Python interpreter
- raise SyntaxError("unknown encoding: " + encoding)
-
- if bom_found:
- if encoding != 'utf-8':
- # This behaviour mimics the Python interpreter
- raise SyntaxError('encoding problem: utf-8')
- encoding += '-sig'
- return encoding
-
- first = read_or_stop()
- if first.startswith(BOM_UTF8):
- bom_found = True
- first = first[3:]
- default = 'utf-8-sig'
- if not first:
- return default, []
-
- encoding = find_cookie(first)
- if encoding:
- return encoding, [first]
-
- second = read_or_stop()
- if not second:
- return default, [first]
-
- encoding = find_cookie(second)
- if encoding:
- return encoding, [first, second]
-
- return default, [first, second]
-
-
-def open(filename):
- """Open a file in read only mode using the encoding detected by
- detect_encoding().
- """
- buffer = builtins.open(filename, 'rb')
- encoding, lines = detect_encoding(buffer.readline)
- buffer.seek(0)
- text = TextIOWrapper(buffer, encoding, line_buffering=True)
- text.mode = 'r'
- return text
-
-
-def tokenize(readline):
- """
-    The tokenize() generator requires one argument, readline, which
- must be a callable object which provides the same interface as the
- readline() method of built-in file objects. Each call to the function
- should return one line of input as bytes. Alternately, readline
- can be a callable function terminating with StopIteration:
- readline = open(myfile, 'rb').__next__ # Example of alternate readline
-
- The generator produces 5-tuples with these members: the token type; the
- token string; a 2-tuple (srow, scol) of ints specifying the row and
- column where the token begins in the source; a 2-tuple (erow, ecol) of
- ints specifying the row and column where the token ends in the source;
- and the line on which the token was found. The line passed is the
- logical line; continuation lines are included.
-
- The first token sequence will always be an ENCODING token
- which tells you which encoding was used to decode the bytes stream.
- """
- # This import is here to avoid problems when the itertools module is not
- # built yet and tokenize is imported.
- from itertools import chain, repeat
- encoding, consumed = detect_encoding(readline)
- rl_gen = iter(readline, b"")
- empty = repeat(b"")
- return _tokenize(chain(consumed, rl_gen, empty).__next__, encoding)
-
-
-def _tokenize(readline, encoding):
- lnum = parenlev = continued = 0
- numchars = '0123456789'
- contstr, needcont = '', 0
- contline = None
- indents = [0]
-
- if encoding is not None:
- if encoding == "utf-8-sig":
- # BOM will already have been stripped.
- encoding = "utf-8"
- yield TokenInfo(ENCODING, encoding, (0, 0), (0, 0), '')
- while True: # loop over lines in stream
- try:
- line = readline()
- except StopIteration:
- line = b''
-
- if encoding is not None:
- line = line.decode(encoding)
- lnum += 1
- pos, max = 0, len(line)
-
- if contstr: # continued string
- if not line:
- raise TokenError("EOF in multi-line string", strstart)
- endmatch = endprog.match(line)
- if endmatch:
- pos = end = endmatch.end(0)
- yield TokenInfo(STRING, contstr + line[:end],
- strstart, (lnum, end), contline + line)
- contstr, needcont = '', 0
- contline = None
- elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
- yield TokenInfo(ERRORTOKEN, contstr + line,
- strstart, (lnum, len(line)), contline)
- contstr = ''
- contline = None
- continue
- else:
- contstr = contstr + line
- contline = contline + line
- continue
-
- elif parenlev == 0 and not continued: # new statement
- if not line: break
- column = 0
- while pos < max: # measure leading whitespace
- if line[pos] == ' ':
- column += 1
- elif line[pos] == '\t':
- column = (column//tabsize + 1)*tabsize
- elif line[pos] == '\f':
- column = 0
- else:
- break
- pos += 1
- if pos == max:
- break
-
- if line[pos] in '#\r\n': # skip comments or blank lines
- if line[pos] == '#':
- comment_token = line[pos:].rstrip('\r\n')
- nl_pos = pos + len(comment_token)
- yield TokenInfo(COMMENT, comment_token,
- (lnum, pos), (lnum, pos + len(comment_token)), line)
- yield TokenInfo(NEWLINE, line[nl_pos:],
- (lnum, nl_pos), (lnum, len(line)), line)
- else:
- yield TokenInfo(NEWLINE, line[pos:],
- (lnum, pos), (lnum, len(line)), line)
- continue
-
- if column > indents[-1]: # count indents or dedents
- indents.append(column)
- yield TokenInfo(INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
- while column < indents[-1]:
- if column not in indents:
- raise IndentationError(
- "unindent does not match any outer indentation level",
- ("<tokenize>", lnum, pos, line))
- indents = indents[:-1]
- yield TokenInfo(DEDENT, '', (lnum, pos), (lnum, pos), line)
-
- else: # continued statement
- if not line:
- raise TokenError("EOF in multi-line statement", (lnum, 0))
- continued = 0
-
- while pos < max:
- pseudomatch = pseudoprog.match(line, pos)
- if pseudomatch: # scan for tokens
- start, end = pseudomatch.span(1)
- spos, epos, pos = (lnum, start), (lnum, end), end
- token, initial = line[start:end], line[start]
-
- if (initial in numchars or # ordinary number
- (initial == '.' and token != '.' and token != '...')):
- yield TokenInfo(NUMBER, token, spos, epos, line)
- elif initial in '\r\n':
- yield TokenInfo(NL if parenlev > 0 else NEWLINE,
- token, spos, epos, line)
- elif initial == '#':
- assert not token.endswith("\n")
- yield TokenInfo(COMMENT, token, spos, epos, line)
- elif token in triple_quoted:
- endprog = endprogs[token]
- endmatch = endprog.match(line, pos)
- if endmatch: # all on one line
- pos = endmatch.end(0)
- token = line[start:pos]
- yield TokenInfo(STRING, token, spos, (lnum, pos), line)
- else:
- strstart = (lnum, start) # multiple lines
- contstr = line[start:]
- contline = line
- break
- elif initial in single_quoted or \
- token[:2] in single_quoted or \
- token[:3] in single_quoted:
- if token[-1] == '\n': # continued string
- strstart = (lnum, start)
- endprog = (endprogs[initial] or endprogs[token[1]] or
- endprogs[token[2]])
- contstr, needcont = line[start:], 1
- contline = line
- break
- else: # ordinary string
- yield TokenInfo(STRING, token, spos, epos, line)
- elif initial.isidentifier(): # ordinary name
- yield TokenInfo(NAME, token, spos, epos, line)
- elif initial == '\\': # continued stmt
- continued = 1
- else:
- if initial in '([{':
- parenlev += 1
- elif initial in ')]}':
- parenlev -= 1
- yield TokenInfo(OP, token, spos, epos, line)
- else:
- yield TokenInfo(ERRORTOKEN, line[pos],
- (lnum, pos), (lnum, pos+1), line)
- pos += 1
-
- for indent in indents[1:]: # pop remaining indent levels
- yield TokenInfo(DEDENT, '', (lnum, 0), (lnum, 0), '')
- yield TokenInfo(ENDMARKER, '', (lnum, 0), (lnum, 0), '')
-
-
-# An undocumented, backwards compatible, API for all the places in the standard
-# library that expect to be able to use tokenize with strings
-def generate_tokens(readline):
- return _tokenize(readline, None)
-
-if __name__ == "__main__":
- # Quick sanity check
- s = b'''def parseline(self, line):
- """Parse the line into a command name and a string containing
- the arguments. Returns a tuple containing (command, args, line).
- 'command' and 'args' may be None if the line couldn't be parsed.
- """
- line = line.strip()
- if not line:
- return None, None, line
- elif line[0] == '?':
- line = 'help ' + line[1:]
- elif line[0] == '!':
- if hasattr(self, 'do_shell'):
- line = 'shell ' + line[1:]
- else:
- return None, None, line
- i, n = 0, len(line)
- while i < n and line[i] in self.identchars: i = i+1
- cmd, arg = line[:i], line[i:].strip()
- return cmd, arg, line
- '''
- for tok in tokenize(iter(s.splitlines()).__next__):
- print(tok)
+"""Patched version of standard library tokenize, to deal with various bugs.
+
+Based on Python 3.2 code.
+
+Patches:
+
+- Gareth Rees' patch for Python issue #12691 (untokenizing)
+ - Except we don't encode the output of untokenize
+ - Python 2 compatible syntax, so that it can be byte-compiled at installation
+- Newlines in comments and blank lines should be either NL or NEWLINE, depending
+ on whether they are in a multi-line statement. Filed as Python issue #17061.
+- Export generate_tokens & TokenError
+- u and rb literals are allowed under Python 3.3 and above.
+
+------------------------------------------------------------------------------
+Tokenization help for Python programs.
+
+tokenize(readline) is a generator that breaks a stream of bytes into
+Python tokens. It decodes the bytes according to PEP-0263 for
+determining source file encoding.
+
+It accepts a readline-like method which is called repeatedly to get the
+next line of input (or b"" for EOF). It generates 5-tuples with these
+members:
+
+ the token type (see token.py)
+ the token (a string)
+ the starting (row, column) indices of the token (a 2-tuple of ints)
+ the ending (row, column) indices of the token (a 2-tuple of ints)
+ the original line (string)
+
+It is designed to match the working of the Python tokenizer exactly, except
+that it produces COMMENT tokens for comments and gives type OP for all
+operators. Additionally, all token lists start with an ENCODING token
+which tells you which encoding was used to decode the bytes stream.
+"""
+from __future__ import absolute_import
+
+__author__ = 'Ka-Ping Yee <ping@lfw.org>'
+__credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
+ 'Skip Montanaro, Raymond Hettinger, Trent Nelson, '
+ 'Michael Foord')
+import builtins
+import re
+import sys
+from token import *
+from codecs import lookup, BOM_UTF8
+import collections
+from io import TextIOWrapper
+cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")
+
+import token
+__all__ = token.__all__ + ["COMMENT", "tokenize", "detect_encoding",
+ "NL", "untokenize", "ENCODING", "TokenInfo"]
+del token
+
+__all__ += ["generate_tokens", "TokenError"]
+
+COMMENT = N_TOKENS
+tok_name[COMMENT] = 'COMMENT'
+NL = N_TOKENS + 1
+tok_name[NL] = 'NL'
+ENCODING = N_TOKENS + 2
+tok_name[ENCODING] = 'ENCODING'
+N_TOKENS += 3
+
+class TokenInfo(collections.namedtuple('TokenInfo', 'type string start end line')):
+ def __repr__(self):
+ annotated_type = '%d (%s)' % (self.type, tok_name[self.type])
+ return ('TokenInfo(type=%s, string=%r, start=%r, end=%r, line=%r)' %
+ self._replace(type=annotated_type))
+
+def group(*choices): return '(' + '|'.join(choices) + ')'
+def any(*choices): return group(*choices) + '*'
+def maybe(*choices): return group(*choices) + '?'
+
+# Note: we use unicode matching for names ("\w") but ascii matching for
+# number literals.
+Whitespace = r'[ \f\t]*'
+Comment = r'#[^\r\n]*'
+Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
+Name = r'\w+'
+
+Hexnumber = r'0[xX][0-9a-fA-F]+'
+Binnumber = r'0[bB][01]+'
+Octnumber = r'0[oO][0-7]+'
+Decnumber = r'(?:0+|[1-9][0-9]*)'
+Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)
+Exponent = r'[eE][-+]?[0-9]+'
+Pointfloat = group(r'[0-9]+\.[0-9]*', r'\.[0-9]+') + maybe(Exponent)
+Expfloat = r'[0-9]+' + Exponent
+Floatnumber = group(Pointfloat, Expfloat)
+Imagnumber = group(r'[0-9]+[jJ]', Floatnumber + r'[jJ]')
+Number = group(Imagnumber, Floatnumber, Intnumber)
+
+if sys.version_info.minor >= 3:
+ StringPrefix = r'(?:[bB][rR]?|[rR][bB]?|[uU])?'
+else:
+ StringPrefix = r'(?:[bB]?[rR]?)?'
+
+# Tail end of ' string.
+Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
+# Tail end of " string.
+Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
+# Tail end of ''' string.
+Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
+# Tail end of """ string.
+Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
+Triple = group(StringPrefix + "'''", StringPrefix + '"""')
+# Single-line ' or " string.
+String = group(StringPrefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
+ StringPrefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
+
+# Because of leftmost-then-longest match semantics, be sure to put the
+# longest operators first (e.g., if = came before ==, == would get
+# recognized as two instances of =).
+Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"!=",
+ r"//=?", r"->",
+ r"[+\-*/%&|^=<>]=?",
+ r"~")
+
+Bracket = '[][(){}]'
+Special = group(r'\r?\n', r'\.\.\.', r'[:;.,@]')
+Funny = group(Operator, Bracket, Special)
+
+PlainToken = group(Number, Funny, String, Name)
+Token = Ignore + PlainToken
+
+# First (or only) line of ' or " string.
+ContStr = group(StringPrefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
+ group("'", r'\\\r?\n'),
+ StringPrefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
+ group('"', r'\\\r?\n'))
+PseudoExtras = group(r'\\\r?\n', Comment, Triple)
+PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
+
+def _compile(expr):
+ return re.compile(expr, re.UNICODE)
+
+tokenprog, pseudoprog, single3prog, double3prog = map(
+ _compile, (Token, PseudoToken, Single3, Double3))
+endprogs = {"'": _compile(Single), '"': _compile(Double),
+ "'''": single3prog, '"""': double3prog,
+ "r'''": single3prog, 'r"""': double3prog,
+ "b'''": single3prog, 'b"""': double3prog,
+ "R'''": single3prog, 'R"""': double3prog,
+ "B'''": single3prog, 'B"""': double3prog,
+ "br'''": single3prog, 'br"""': double3prog,
+ "bR'''": single3prog, 'bR"""': double3prog,
+ "Br'''": single3prog, 'Br"""': double3prog,
+ "BR'''": single3prog, 'BR"""': double3prog,
+ 'r': None, 'R': None, 'b': None, 'B': None}
+
+triple_quoted = {}
+for t in ("'''", '"""',
+ "r'''", 'r"""', "R'''", 'R"""',
+ "b'''", 'b"""', "B'''", 'B"""',
+ "br'''", 'br"""', "Br'''", 'Br"""',
+ "bR'''", 'bR"""', "BR'''", 'BR"""'):
+ triple_quoted[t] = t
+single_quoted = {}
+for t in ("'", '"',
+ "r'", 'r"', "R'", 'R"',
+ "b'", 'b"', "B'", 'B"',
+ "br'", 'br"', "Br'", 'Br"',
+ "bR'", 'bR"', "BR'", 'BR"' ):
+ single_quoted[t] = t
+
+if sys.version_info.minor >= 3:
+ # Python 3.3
+ for _prefix in ['rb', 'rB', 'Rb', 'RB', 'u', 'U']:
+ _t2 = _prefix+'"""'
+ endprogs[_t2] = double3prog
+ triple_quoted[_t2] = _t2
+ _t1 = _prefix + "'''"
+ endprogs[_t1] = single3prog
+ triple_quoted[_t1] = _t1
+ single_quoted[_prefix+'"'] = _prefix+'"'
+ single_quoted[_prefix+"'"] = _prefix+"'"
+ del _prefix, _t2, _t1
+ endprogs['u'] = None
+ endprogs['U'] = None
+
+del _compile
+
+tabsize = 8
+
+class TokenError(Exception): pass
+
+class StopTokenizing(Exception): pass
+
+
+class Untokenizer:
+
+ def __init__(self):
+ self.tokens = []
+ self.prev_row = 1
+ self.prev_col = 0
+ self.encoding = 'utf-8'
+
+ def add_whitespace(self, tok_type, start):
+ row, col = start
+ assert row >= self.prev_row
+ col_offset = col - self.prev_col
+ if col_offset > 0:
+ self.tokens.append(" " * col_offset)
+ elif row > self.prev_row and tok_type not in (NEWLINE, NL, ENDMARKER):
+ # Line was backslash-continued.
+ self.tokens.append(" ")
+
+ def untokenize(self, tokens):
+ iterable = iter(tokens)
+ for t in iterable:
+ if len(t) == 2:
+ self.compat(t, iterable)
+ break
+ tok_type, token, start, end = t[:4]
+ if tok_type == ENCODING:
+ self.encoding = token
+ continue
+ self.add_whitespace(tok_type, start)
+ self.tokens.append(token)
+ self.prev_row, self.prev_col = end
+ if tok_type in (NEWLINE, NL):
+ self.prev_row += 1
+ self.prev_col = 0
+ return "".join(self.tokens)
+
+ def compat(self, token, iterable):
+ # This import is here to avoid problems when the itertools
+ # module is not built yet and tokenize is imported.
+ from itertools import chain
+ startline = False
+ prevstring = False
+ indents = []
+ toks_append = self.tokens.append
+
+ for tok in chain([token], iterable):
+ toknum, tokval = tok[:2]
+ if toknum == ENCODING:
+ self.encoding = tokval
+ continue
+
+ if toknum in (NAME, NUMBER):
+ tokval += ' '
+
+ # Insert a space between two consecutive strings
+ if toknum == STRING:
+ if prevstring:
+ tokval = ' ' + tokval
+ prevstring = True
+ else:
+ prevstring = False
+
+ if toknum == INDENT:
+ indents.append(tokval)
+ continue
+ elif toknum == DEDENT:
+ indents.pop()
+ continue
+ elif toknum in (NEWLINE, NL):
+ startline = True
+ elif startline and indents:
+ toks_append(indents[-1])
+ startline = False
+ toks_append(tokval)
+
+
+def untokenize(tokens):
+ """
+    Convert ``tokens`` (an iterable) back into Python source code. Unlike
+    the standard library version, the result is returned as a str rather
+    than being encoded to bytes (see the patch notes above); any ENCODING
+    token encountered is recorded but not applied to the output.
+
+ The result is guaranteed to tokenize back to match the input so that
+ the conversion is lossless and round-trips are assured. The
+ guarantee applies only to the token type and token string as the
+ spacing between tokens (column positions) may change.
+
+ :func:`untokenize` has two modes. If the input tokens are sequences
+ of length 2 (``type``, ``string``) then spaces are added as necessary to
+ preserve the round-trip property.
+
+ If the input tokens are sequences of length 4 or more (``type``,
+ ``string``, ``start``, ``end``), as returned by :func:`tokenize`, then
+ spaces are added so that each token appears in the result at the
+ position indicated by ``start`` and ``end``, if possible.
+ """
+ return Untokenizer().untokenize(tokens)
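
Editorial sketch (not part of the vendored file) of the two untokenize modes described above, assuming this module's names are in scope and using generate_tokens (defined below); the sample source is illustrative only:

    import io

    src = "def f(a, b):\n    return a + b\n"
    toks = list(generate_tokens(io.StringIO(src).readline))

    # 5-tuples (TokenInfo): positions are honoured, the source round-trips
    # exactly, and no bytes encoding step is applied.
    assert untokenize(toks) == src

    # 2-tuples: spacing may change, but the (type, string) stream survives
    # a second tokenization of the output.
    pairs = [t[:2] for t in toks]
    again = generate_tokens(io.StringIO(untokenize(pairs)).readline)
    assert pairs == [t[:2] for t in again]
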
+
+
+def _get_normal_name(orig_enc):
+ """Imitates get_normal_name in tokenizer.c."""
+ # Only care about the first 12 characters.
+ enc = orig_enc[:12].lower().replace("_", "-")
+ if enc == "utf-8" or enc.startswith("utf-8-"):
+ return "utf-8"
+ if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
+ enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
+ return "iso-8859-1"
+ return orig_enc
+
+def detect_encoding(readline):
+ """
+ The detect_encoding() function is used to detect the encoding that should
+    be used to decode a Python source file. It requires one argument, readline,
+ in the same way as the tokenize() generator.
+
+ It will call readline a maximum of twice, and return the encoding used
+ (as a string) and a list of any lines (left as bytes) it has read in.
+
+ It detects the encoding from the presence of a utf-8 bom or an encoding
+ cookie as specified in pep-0263. If both a bom and a cookie are present,
+ but disagree, a SyntaxError will be raised. If the encoding cookie is an
+ invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found,
+ 'utf-8-sig' is returned.
+
+ If no encoding is specified, then the default of 'utf-8' will be returned.
+ """
+ bom_found = False
+ encoding = None
+ default = 'utf-8'
+ def read_or_stop():
+ try:
+ return readline()
+ except StopIteration:
+ return b''
+
+ def find_cookie(line):
+ try:
+ # Decode as UTF-8. Either the line is an encoding declaration,
+ # in which case it should be pure ASCII, or it must be UTF-8
+ # per default encoding.
+ line_string = line.decode('utf-8')
+ except UnicodeDecodeError:
+ raise SyntaxError("invalid or missing encoding declaration")
+
+ matches = cookie_re.findall(line_string)
+ if not matches:
+ return None
+ encoding = _get_normal_name(matches[0])
+ try:
+ codec = lookup(encoding)
+ except LookupError:
+ # This behaviour mimics the Python interpreter
+ raise SyntaxError("unknown encoding: " + encoding)
+
+ if bom_found:
+ if encoding != 'utf-8':
+ # This behaviour mimics the Python interpreter
+ raise SyntaxError('encoding problem: utf-8')
+ encoding += '-sig'
+ return encoding
+
+ first = read_or_stop()
+ if first.startswith(BOM_UTF8):
+ bom_found = True
+ first = first[3:]
+ default = 'utf-8-sig'
+ if not first:
+ return default, []
+
+ encoding = find_cookie(first)
+ if encoding:
+ return encoding, [first]
+
+ second = read_or_stop()
+ if not second:
+ return default, [first]
+
+ encoding = find_cookie(second)
+ if encoding:
+ return encoding, [first, second]
+
+ return default, [first, second]
+
+
+def open(filename):
+ """Open a file in read only mode using the encoding detected by
+ detect_encoding().
+ """
+ buffer = builtins.open(filename, 'rb')
+ encoding, lines = detect_encoding(buffer.readline)
+ buffer.seek(0)
+ text = TextIOWrapper(buffer, encoding, line_buffering=True)
+ text.mode = 'r'
+ return text
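
Editorial sketch (not part of the vendored file): open() above builds on detect_encoding(), which can also be driven directly from an in-memory bytes stream; the coding cookie below is illustrative only:

    import io

    sample = b"# -*- coding: latin-1 -*-\nname = 'caf\xe9'\n"
    enc, lines_read = detect_encoding(io.BytesIO(sample).readline)
    assert enc == 'iso-8859-1'                         # 'latin-1' is normalised
    assert lines_read == [b"# -*- coding: latin-1 -*-\n"]
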
+
+
+def tokenize(readline):
+ """
+    The tokenize() generator requires one argument, readline, which
+ must be a callable object which provides the same interface as the
+ readline() method of built-in file objects. Each call to the function
+ should return one line of input as bytes. Alternately, readline
+ can be a callable function terminating with StopIteration:
+ readline = open(myfile, 'rb').__next__ # Example of alternate readline
+
+ The generator produces 5-tuples with these members: the token type; the
+ token string; a 2-tuple (srow, scol) of ints specifying the row and
+ column where the token begins in the source; a 2-tuple (erow, ecol) of
+ ints specifying the row and column where the token ends in the source;
+ and the line on which the token was found. The line passed is the
+ logical line; continuation lines are included.
+
+ The first token sequence will always be an ENCODING token
+ which tells you which encoding was used to decode the bytes stream.
+ """
+ # This import is here to avoid problems when the itertools module is not
+ # built yet and tokenize is imported.
+ from itertools import chain, repeat
+ encoding, consumed = detect_encoding(readline)
+ rl_gen = iter(readline, b"")
+ empty = repeat(b"")
+ return _tokenize(chain(consumed, rl_gen, empty).__next__, encoding)
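
Editorial sketch (not part of the vendored file) of the byte-stream entry point and its leading ENCODING token, also showing the NEWLINE-after-comment behaviour this patch introduces; the sample source is illustrative only:

    import io

    source = b"# comment\nx = 1\n"
    toks = list(tokenize(io.BytesIO(source).readline))
    assert toks[0].type == ENCODING and toks[0].string == 'utf-8'
    # Patched behaviour (issue #17061): the newline after a top-level comment
    # is NEWLINE here, where the unpatched tokenize yields NL.
    assert toks[1].type == COMMENT and toks[2].type == NEWLINE
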
+
+
+def _tokenize(readline, encoding):
+ lnum = parenlev = continued = 0
+ numchars = '0123456789'
+ contstr, needcont = '', 0
+ contline = None
+ indents = [0]
+
+ if encoding is not None:
+ if encoding == "utf-8-sig":
+ # BOM will already have been stripped.
+ encoding = "utf-8"
+ yield TokenInfo(ENCODING, encoding, (0, 0), (0, 0), '')
+ while True: # loop over lines in stream
+ try:
+ line = readline()
+ except StopIteration:
+ line = b''
+
+ if encoding is not None:
+ line = line.decode(encoding)
+ lnum += 1
+ pos, max = 0, len(line)
+
+ if contstr: # continued string
+ if not line:
+ raise TokenError("EOF in multi-line string", strstart)
+ endmatch = endprog.match(line)
+ if endmatch:
+ pos = end = endmatch.end(0)
+ yield TokenInfo(STRING, contstr + line[:end],
+ strstart, (lnum, end), contline + line)
+ contstr, needcont = '', 0
+ contline = None
+ elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
+ yield TokenInfo(ERRORTOKEN, contstr + line,
+ strstart, (lnum, len(line)), contline)
+ contstr = ''
+ contline = None
+ continue
+ else:
+ contstr = contstr + line
+ contline = contline + line
+ continue
+
+ elif parenlev == 0 and not continued: # new statement
+ if not line: break
+ column = 0
+ while pos < max: # measure leading whitespace
+ if line[pos] == ' ':
+ column += 1
+ elif line[pos] == '\t':
+ column = (column//tabsize + 1)*tabsize
+ elif line[pos] == '\f':
+ column = 0
+ else:
+ break
+ pos += 1
+ if pos == max:
+ break
+
+ if line[pos] in '#\r\n': # skip comments or blank lines
+ if line[pos] == '#':
+ comment_token = line[pos:].rstrip('\r\n')
+ nl_pos = pos + len(comment_token)
+ yield TokenInfo(COMMENT, comment_token,
+ (lnum, pos), (lnum, pos + len(comment_token)), line)
+ yield TokenInfo(NEWLINE, line[nl_pos:],
+ (lnum, nl_pos), (lnum, len(line)), line)
+ else:
+ yield TokenInfo(NEWLINE, line[pos:],
+ (lnum, pos), (lnum, len(line)), line)
+ continue
+
+ if column > indents[-1]: # count indents or dedents
+ indents.append(column)
+ yield TokenInfo(INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
+ while column < indents[-1]:
+ if column not in indents:
+ raise IndentationError(
+ "unindent does not match any outer indentation level",
+ ("<tokenize>", lnum, pos, line))
+ indents = indents[:-1]
+ yield TokenInfo(DEDENT, '', (lnum, pos), (lnum, pos), line)
+
+ else: # continued statement
+ if not line:
+ raise TokenError("EOF in multi-line statement", (lnum, 0))
+ continued = 0
+
+ while pos < max:
+ pseudomatch = pseudoprog.match(line, pos)
+ if pseudomatch: # scan for tokens
+ start, end = pseudomatch.span(1)
+ spos, epos, pos = (lnum, start), (lnum, end), end
+ token, initial = line[start:end], line[start]
+
+ if (initial in numchars or # ordinary number
+ (initial == '.' and token != '.' and token != '...')):
+ yield TokenInfo(NUMBER, token, spos, epos, line)
+ elif initial in '\r\n':
+ yield TokenInfo(NL if parenlev > 0 else NEWLINE,
+ token, spos, epos, line)
+ elif initial == '#':
+ assert not token.endswith("\n")
+ yield TokenInfo(COMMENT, token, spos, epos, line)
+ elif token in triple_quoted:
+ endprog = endprogs[token]
+ endmatch = endprog.match(line, pos)
+ if endmatch: # all on one line
+ pos = endmatch.end(0)
+ token = line[start:pos]
+ yield TokenInfo(STRING, token, spos, (lnum, pos), line)
+ else:
+ strstart = (lnum, start) # multiple lines
+ contstr = line[start:]
+ contline = line
+ break
+ elif initial in single_quoted or \
+ token[:2] in single_quoted or \
+ token[:3] in single_quoted:
+ if token[-1] == '\n': # continued string
+ strstart = (lnum, start)
+ endprog = (endprogs[initial] or endprogs[token[1]] or
+ endprogs[token[2]])
+ contstr, needcont = line[start:], 1
+ contline = line
+ break
+ else: # ordinary string
+ yield TokenInfo(STRING, token, spos, epos, line)
+ elif initial.isidentifier(): # ordinary name
+ yield TokenInfo(NAME, token, spos, epos, line)
+ elif initial == '\\': # continued stmt
+ continued = 1
+ else:
+ if initial in '([{':
+ parenlev += 1
+ elif initial in ')]}':
+ parenlev -= 1
+ yield TokenInfo(OP, token, spos, epos, line)
+ else:
+ yield TokenInfo(ERRORTOKEN, line[pos],
+ (lnum, pos), (lnum, pos+1), line)
+ pos += 1
+
+ for indent in indents[1:]: # pop remaining indent levels
+ yield TokenInfo(DEDENT, '', (lnum, 0), (lnum, 0), '')
+ yield TokenInfo(ENDMARKER, '', (lnum, 0), (lnum, 0), '')
+
+
+# An undocumented, backwards compatible, API for all the places in the standard
+# library that expect to be able to use tokenize with strings
+def generate_tokens(readline):
+ return _tokenize(readline, None)
+
+if __name__ == "__main__":
+ # Quick sanity check
+ s = b'''def parseline(self, line):
+ """Parse the line into a command name and a string containing
+ the arguments. Returns a tuple containing (command, args, line).
+ 'command' and 'args' may be None if the line couldn't be parsed.
+ """
+ line = line.strip()
+ if not line:
+ return None, None, line
+ elif line[0] == '?':
+ line = 'help ' + line[1:]
+ elif line[0] == '!':
+ if hasattr(self, 'do_shell'):
+ line = 'shell ' + line[1:]
+ else:
+ return None, None, line
+ i, n = 0, len(line)
+ while i < n and line[i] in self.identchars: i = i+1
+ cmd, arg = line[:i], line[i:].strip()
+ return cmd, arg, line
+ '''
+ for tok in tokenize(iter(s.splitlines()).__next__):
+ print(tok)
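
A minimal usage sketch of the detect_encoding/tokenize helpers restored above. They track the standard library's Python 3 ``tokenize`` API closely, so the stdlib module is used here to keep the example runnable; the vendored module path can be substituted where available.

import io
import tokenize as tok   # stdlib module exposing the same detect_encoding/tokenize interface

source = b"# -*- coding: latin-1 -*-\nx = 1\n"
buf = io.BytesIO(source)

# detect_encoding reads at most two lines and returns (encoding, lines_consumed)
encoding, consumed = tok.detect_encoding(buf.readline)
print(encoding)    # normalized encoding name taken from the coding cookie
print(consumed)    # the raw byte line(s) read while searching for the cookie

# tokenize yields an ENCODING token first, then the regular token stream
buf.seek(0)
for token in tok.tokenize(buf.readline):
    print(token)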
diff --git a/contrib/python/ipython/py2/IPython/utils/capture.py b/contrib/python/ipython/py2/IPython/utils/capture.py
index bb241b0fad..d8f919568c 100644
--- a/contrib/python/ipython/py2/IPython/utils/capture.py
+++ b/contrib/python/ipython/py2/IPython/utils/capture.py
@@ -1,176 +1,176 @@
-# encoding: utf-8
-"""IO capturing utilities."""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import print_function, absolute_import
-
-import sys
-
-from IPython.utils.py3compat import PY3
-
-if PY3:
- from io import StringIO
-else:
- from StringIO import StringIO
-
-#-----------------------------------------------------------------------------
-# Classes and functions
-#-----------------------------------------------------------------------------
-
-
-class RichOutput(object):
+# encoding: utf-8
+"""IO capturing utilities."""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import print_function, absolute_import
+
+import sys
+
+from IPython.utils.py3compat import PY3
+
+if PY3:
+ from io import StringIO
+else:
+ from StringIO import StringIO
+
+#-----------------------------------------------------------------------------
+# Classes and functions
+#-----------------------------------------------------------------------------
+
+
+class RichOutput(object):
def __init__(self, data=None, metadata=None, transient=None, update=False):
- self.data = data or {}
- self.metadata = metadata or {}
+ self.data = data or {}
+ self.metadata = metadata or {}
self.transient = transient or {}
self.update = update
- def display(self):
- from IPython.display import publish_display_data
+ def display(self):
+ from IPython.display import publish_display_data
publish_display_data(data=self.data, metadata=self.metadata,
transient=self.transient, update=self.update)
- def _repr_mime_(self, mime):
- if mime not in self.data:
- return
- data = self.data[mime]
- if mime in self.metadata:
- return data, self.metadata[mime]
- else:
- return data
-
- def _repr_html_(self):
- return self._repr_mime_("text/html")
-
- def _repr_latex_(self):
- return self._repr_mime_("text/latex")
-
- def _repr_json_(self):
- return self._repr_mime_("application/json")
-
- def _repr_javascript_(self):
- return self._repr_mime_("application/javascript")
-
- def _repr_png_(self):
- return self._repr_mime_("image/png")
-
- def _repr_jpeg_(self):
- return self._repr_mime_("image/jpeg")
-
- def _repr_svg_(self):
- return self._repr_mime_("image/svg+xml")
-
-
-class CapturedIO(object):
- """Simple object for containing captured stdout/err and rich display StringIO objects
-
- Each instance `c` has three attributes:
-
- - ``c.stdout`` : standard output as a string
- - ``c.stderr`` : standard error as a string
- - ``c.outputs``: a list of rich display outputs
-
- Additionally, there's a ``c.show()`` method which will print all of the
- above in the same order, and can be invoked simply via ``c()``.
- """
-
- def __init__(self, stdout, stderr, outputs=None):
- self._stdout = stdout
- self._stderr = stderr
- if outputs is None:
- outputs = []
- self._outputs = outputs
-
- def __str__(self):
- return self.stdout
-
- @property
- def stdout(self):
- "Captured standard output"
- if not self._stdout:
- return ''
- return self._stdout.getvalue()
-
- @property
- def stderr(self):
- "Captured standard error"
- if not self._stderr:
- return ''
- return self._stderr.getvalue()
-
- @property
- def outputs(self):
- """A list of the captured rich display outputs, if any.
-
- If you have a CapturedIO object ``c``, these can be displayed in IPython
- using::
-
- from IPython.display import display
- for o in c.outputs:
- display(o)
- """
+ def _repr_mime_(self, mime):
+ if mime not in self.data:
+ return
+ data = self.data[mime]
+ if mime in self.metadata:
+ return data, self.metadata[mime]
+ else:
+ return data
+
+ def _repr_html_(self):
+ return self._repr_mime_("text/html")
+
+ def _repr_latex_(self):
+ return self._repr_mime_("text/latex")
+
+ def _repr_json_(self):
+ return self._repr_mime_("application/json")
+
+ def _repr_javascript_(self):
+ return self._repr_mime_("application/javascript")
+
+ def _repr_png_(self):
+ return self._repr_mime_("image/png")
+
+ def _repr_jpeg_(self):
+ return self._repr_mime_("image/jpeg")
+
+ def _repr_svg_(self):
+ return self._repr_mime_("image/svg+xml")
+
+
+class CapturedIO(object):
+ """Simple object for containing captured stdout/err and rich display StringIO objects
+
+ Each instance `c` has three attributes:
+
+ - ``c.stdout`` : standard output as a string
+ - ``c.stderr`` : standard error as a string
+ - ``c.outputs``: a list of rich display outputs
+
+ Additionally, there's a ``c.show()`` method which will print all of the
+ above in the same order, and can be invoked simply via ``c()``.
+ """
+
+ def __init__(self, stdout, stderr, outputs=None):
+ self._stdout = stdout
+ self._stderr = stderr
+ if outputs is None:
+ outputs = []
+ self._outputs = outputs
+
+ def __str__(self):
+ return self.stdout
+
+ @property
+ def stdout(self):
+ "Captured standard output"
+ if not self._stdout:
+ return ''
+ return self._stdout.getvalue()
+
+ @property
+ def stderr(self):
+ "Captured standard error"
+ if not self._stderr:
+ return ''
+ return self._stderr.getvalue()
+
+ @property
+ def outputs(self):
+ """A list of the captured rich display outputs, if any.
+
+ If you have a CapturedIO object ``c``, these can be displayed in IPython
+ using::
+
+ from IPython.display import display
+ for o in c.outputs:
+ display(o)
+ """
return [ RichOutput(**kargs) for kargs in self._outputs ]
- def show(self):
- """write my output to sys.stdout/err as appropriate"""
- sys.stdout.write(self.stdout)
- sys.stderr.write(self.stderr)
- sys.stdout.flush()
- sys.stderr.flush()
+ def show(self):
+ """write my output to sys.stdout/err as appropriate"""
+ sys.stdout.write(self.stdout)
+ sys.stderr.write(self.stderr)
+ sys.stdout.flush()
+ sys.stderr.flush()
for kargs in self._outputs:
RichOutput(**kargs).display()
- __call__ = show
-
-
-class capture_output(object):
- """context manager for capturing stdout/err"""
- stdout = True
- stderr = True
- display = True
-
- def __init__(self, stdout=True, stderr=True, display=True):
- self.stdout = stdout
- self.stderr = stderr
- self.display = display
- self.shell = None
-
- def __enter__(self):
- from IPython.core.getipython import get_ipython
- from IPython.core.displaypub import CapturingDisplayPublisher
+ __call__ = show
+
+
+class capture_output(object):
+ """context manager for capturing stdout/err"""
+ stdout = True
+ stderr = True
+ display = True
+
+ def __init__(self, stdout=True, stderr=True, display=True):
+ self.stdout = stdout
+ self.stderr = stderr
+ self.display = display
+ self.shell = None
+
+ def __enter__(self):
+ from IPython.core.getipython import get_ipython
+ from IPython.core.displaypub import CapturingDisplayPublisher
from IPython.core.displayhook import CapturingDisplayHook
- self.sys_stdout = sys.stdout
- self.sys_stderr = sys.stderr
-
- if self.display:
- self.shell = get_ipython()
- if self.shell is None:
- self.save_display_pub = None
- self.display = False
-
- stdout = stderr = outputs = None
- if self.stdout:
- stdout = sys.stdout = StringIO()
- if self.stderr:
- stderr = sys.stderr = StringIO()
- if self.display:
- self.save_display_pub = self.shell.display_pub
- self.shell.display_pub = CapturingDisplayPublisher()
- outputs = self.shell.display_pub.outputs
+ self.sys_stdout = sys.stdout
+ self.sys_stderr = sys.stderr
+
+ if self.display:
+ self.shell = get_ipython()
+ if self.shell is None:
+ self.save_display_pub = None
+ self.display = False
+
+ stdout = stderr = outputs = None
+ if self.stdout:
+ stdout = sys.stdout = StringIO()
+ if self.stderr:
+ stderr = sys.stderr = StringIO()
+ if self.display:
+ self.save_display_pub = self.shell.display_pub
+ self.shell.display_pub = CapturingDisplayPublisher()
+ outputs = self.shell.display_pub.outputs
self.save_display_hook = sys.displayhook
sys.displayhook = CapturingDisplayHook(shell=self.shell,
outputs=outputs)
- return CapturedIO(stdout, stderr, outputs)
+ return CapturedIO(stdout, stderr, outputs)
- def __exit__(self, exc_type, exc_value, traceback):
- sys.stdout = self.sys_stdout
- sys.stderr = self.sys_stderr
- if self.display and self.shell:
- self.shell.display_pub = self.save_display_pub
+ def __exit__(self, exc_type, exc_value, traceback):
+ sys.stdout = self.sys_stdout
+ sys.stderr = self.sys_stderr
+ if self.display and self.shell:
+ self.shell.display_pub = self.save_display_pub
sys.displayhook = self.save_display_hook
-
-
+
+
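A brief usage sketch (assuming IPython is importable) of the ``capture_output`` context manager defined above: it returns a ``CapturedIO`` whose ``stdout``/``stderr`` properties hold the captured text and whose ``show()`` replays it.

from IPython.utils.capture import capture_output

with capture_output() as captured:
    print("hello from inside the context")

# Nothing reached the real stdout yet; the text was captured instead.
assert captured.stdout == "hello from inside the context\n"
assert captured.stderr == ""

captured.show()   # replays captured stdout/stderr (and rich display outputs, if any)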
diff --git a/contrib/python/ipython/py2/IPython/utils/coloransi.py b/contrib/python/ipython/py2/IPython/utils/coloransi.py
index 597c69fe11..bc8e8377f7 100644
--- a/contrib/python/ipython/py2/IPython/utils/coloransi.py
+++ b/contrib/python/ipython/py2/IPython/utils/coloransi.py
@@ -1,187 +1,187 @@
-# -*- coding: utf-8 -*-
-"""Tools for coloring text in ANSI terminals.
-"""
-
-#*****************************************************************************
-# Copyright (C) 2002-2006 Fernando Perez. <fperez@colorado.edu>
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#*****************************************************************************
-
-__all__ = ['TermColors','InputTermColors','ColorScheme','ColorSchemeTable']
-
-import os
-
-from IPython.utils.ipstruct import Struct
-
-color_templates = (
- # Dark colors
- ("Black" , "0;30"),
- ("Red" , "0;31"),
- ("Green" , "0;32"),
- ("Brown" , "0;33"),
- ("Blue" , "0;34"),
- ("Purple" , "0;35"),
- ("Cyan" , "0;36"),
- ("LightGray" , "0;37"),
- # Light colors
- ("DarkGray" , "1;30"),
- ("LightRed" , "1;31"),
- ("LightGreen" , "1;32"),
- ("Yellow" , "1;33"),
- ("LightBlue" , "1;34"),
- ("LightPurple" , "1;35"),
- ("LightCyan" , "1;36"),
- ("White" , "1;37"),
- # Blinking colors. Probably should not be used in anything serious.
- ("BlinkBlack" , "5;30"),
- ("BlinkRed" , "5;31"),
- ("BlinkGreen" , "5;32"),
- ("BlinkYellow" , "5;33"),
- ("BlinkBlue" , "5;34"),
- ("BlinkPurple" , "5;35"),
- ("BlinkCyan" , "5;36"),
- ("BlinkLightGray", "5;37"),
- )
-
-def make_color_table(in_class):
- """Build a set of color attributes in a class.
-
- Helper function for building the :class:`TermColors` and
- :class`InputTermColors`.
- """
- for name,value in color_templates:
- setattr(in_class,name,in_class._base % value)
-
-class TermColors:
- """Color escape sequences.
-
- This class defines the escape sequences for all the standard (ANSI?)
- colors in terminals. Also defines a NoColor escape which is just the null
- string, suitable for defining 'dummy' color schemes in terminals which get
- confused by color escapes.
-
- This class should be used as a mixin for building color schemes."""
-
- NoColor = '' # for color schemes in color-less terminals.
- Normal = '\033[0m' # Reset normal coloring
- _base = '\033[%sm' # Template for all other colors
-
-# Build the actual color table as a set of class attributes:
-make_color_table(TermColors)
-
-class InputTermColors:
- """Color escape sequences for input prompts.
-
- This class is similar to TermColors, but the escapes are wrapped in \001
- and \002 so that readline can properly know the length of each line and
- can wrap lines accordingly. Use this class for any colored text which
- needs to be used in input prompts, such as in calls to raw_input().
-
- This class defines the escape sequences for all the standard (ANSI?)
- colors in terminals. Also defines a NoColor escape which is just the null
- string, suitable for defining 'dummy' color schemes in terminals which get
- confused by color escapes.
-
- This class should be used as a mixin for building color schemes."""
-
- NoColor = '' # for color schemes in color-less terminals.
-
- if os.name == 'nt' and os.environ.get('TERM','dumb') == 'emacs':
- # (X)emacs on W32 gets confused with \001 and \002 so we remove them
- Normal = '\033[0m' # Reset normal coloring
- _base = '\033[%sm' # Template for all other colors
- else:
- Normal = '\001\033[0m\002' # Reset normal coloring
- _base = '\001\033[%sm\002' # Template for all other colors
-
-# Build the actual color table as a set of class attributes:
-make_color_table(InputTermColors)
-
-class NoColors:
- """This defines all the same names as the colour classes, but maps them to
- empty strings, so it can easily be substituted to turn off colours."""
- NoColor = ''
- Normal = ''
-
-for name, value in color_templates:
- setattr(NoColors, name, '')
-
-class ColorScheme:
- """Generic color scheme class. Just a name and a Struct."""
- def __init__(self,__scheme_name_,colordict=None,**colormap):
- self.name = __scheme_name_
- if colordict is None:
- self.colors = Struct(**colormap)
- else:
- self.colors = Struct(colordict)
-
- def copy(self,name=None):
- """Return a full copy of the object, optionally renaming it."""
- if name is None:
- name = self.name
- return ColorScheme(name, self.colors.dict())
-
-class ColorSchemeTable(dict):
- """General class to handle tables of color schemes.
-
- It's basically a dict of color schemes with a couple of shorthand
- attributes and some convenient methods.
-
- active_scheme_name -> obvious
- active_colors -> actual color table of the active scheme"""
-
- def __init__(self, scheme_list=None, default_scheme=''):
- """Create a table of color schemes.
-
- The table can be created empty and manually filled or it can be
- created with a list of valid color schemes AND the specification for
- the default active scheme.
- """
-
- # create object attributes to be set later
- self.active_scheme_name = ''
- self.active_colors = None
-
- if scheme_list:
- if default_scheme == '':
- raise ValueError('you must specify the default color scheme')
- for scheme in scheme_list:
- self.add_scheme(scheme)
- self.set_active_scheme(default_scheme)
-
- def copy(self):
- """Return full copy of object"""
- return ColorSchemeTable(self.values(),self.active_scheme_name)
-
- def add_scheme(self,new_scheme):
- """Add a new color scheme to the table."""
- if not isinstance(new_scheme,ColorScheme):
- raise ValueError('ColorSchemeTable only accepts ColorScheme instances')
- self[new_scheme.name] = new_scheme
-
- def set_active_scheme(self,scheme,case_sensitive=0):
- """Set the currently active scheme.
-
- Names are by default compared in a case-insensitive way, but this can
- be changed by setting the parameter case_sensitive to true."""
-
- scheme_names = list(self.keys())
- if case_sensitive:
- valid_schemes = scheme_names
- scheme_test = scheme
- else:
- valid_schemes = [s.lower() for s in scheme_names]
- scheme_test = scheme.lower()
- try:
- scheme_idx = valid_schemes.index(scheme_test)
- except ValueError:
- raise ValueError('Unrecognized color scheme: ' + scheme + \
- '\nValid schemes: '+str(scheme_names).replace("'', ",''))
- else:
- active = scheme_names[scheme_idx]
- self.active_scheme_name = active
- self.active_colors = self[active].colors
- # Now allow using '' as an index for the current active scheme
- self[''] = self[active]
+# -*- coding: utf-8 -*-
+"""Tools for coloring text in ANSI terminals.
+"""
+
+#*****************************************************************************
+# Copyright (C) 2002-2006 Fernando Perez. <fperez@colorado.edu>
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#*****************************************************************************
+
+__all__ = ['TermColors','InputTermColors','ColorScheme','ColorSchemeTable']
+
+import os
+
+from IPython.utils.ipstruct import Struct
+
+color_templates = (
+ # Dark colors
+ ("Black" , "0;30"),
+ ("Red" , "0;31"),
+ ("Green" , "0;32"),
+ ("Brown" , "0;33"),
+ ("Blue" , "0;34"),
+ ("Purple" , "0;35"),
+ ("Cyan" , "0;36"),
+ ("LightGray" , "0;37"),
+ # Light colors
+ ("DarkGray" , "1;30"),
+ ("LightRed" , "1;31"),
+ ("LightGreen" , "1;32"),
+ ("Yellow" , "1;33"),
+ ("LightBlue" , "1;34"),
+ ("LightPurple" , "1;35"),
+ ("LightCyan" , "1;36"),
+ ("White" , "1;37"),
+ # Blinking colors. Probably should not be used in anything serious.
+ ("BlinkBlack" , "5;30"),
+ ("BlinkRed" , "5;31"),
+ ("BlinkGreen" , "5;32"),
+ ("BlinkYellow" , "5;33"),
+ ("BlinkBlue" , "5;34"),
+ ("BlinkPurple" , "5;35"),
+ ("BlinkCyan" , "5;36"),
+ ("BlinkLightGray", "5;37"),
+ )
+
+def make_color_table(in_class):
+ """Build a set of color attributes in a class.
+
+ Helper function for building the :class:`TermColors` and
+ :class:`InputTermColors`.
+ """
+ for name,value in color_templates:
+ setattr(in_class,name,in_class._base % value)
+
+class TermColors:
+ """Color escape sequences.
+
+ This class defines the escape sequences for all the standard (ANSI?)
+ colors in terminals. Also defines a NoColor escape which is just the null
+ string, suitable for defining 'dummy' color schemes in terminals which get
+ confused by color escapes.
+
+ This class should be used as a mixin for building color schemes."""
+
+ NoColor = '' # for color schemes in color-less terminals.
+ Normal = '\033[0m' # Reset normal coloring
+ _base = '\033[%sm' # Template for all other colors
+
+# Build the actual color table as a set of class attributes:
+make_color_table(TermColors)
+
+class InputTermColors:
+ """Color escape sequences for input prompts.
+
+ This class is similar to TermColors, but the escapes are wrapped in \001
+ and \002 so that readline can properly know the length of each line and
+ can wrap lines accordingly. Use this class for any colored text which
+ needs to be used in input prompts, such as in calls to raw_input().
+
+ This class defines the escape sequences for all the standard (ANSI?)
+ colors in terminals. Also defines a NoColor escape which is just the null
+ string, suitable for defining 'dummy' color schemes in terminals which get
+ confused by color escapes.
+
+ This class should be used as a mixin for building color schemes."""
+
+ NoColor = '' # for color schemes in color-less terminals.
+
+ if os.name == 'nt' and os.environ.get('TERM','dumb') == 'emacs':
+ # (X)emacs on W32 gets confused with \001 and \002 so we remove them
+ Normal = '\033[0m' # Reset normal coloring
+ _base = '\033[%sm' # Template for all other colors
+ else:
+ Normal = '\001\033[0m\002' # Reset normal coloring
+ _base = '\001\033[%sm\002' # Template for all other colors
+
+# Build the actual color table as a set of class attributes:
+make_color_table(InputTermColors)
+
+class NoColors:
+ """This defines all the same names as the colour classes, but maps them to
+ empty strings, so it can easily be substituted to turn off colours."""
+ NoColor = ''
+ Normal = ''
+
+for name, value in color_templates:
+ setattr(NoColors, name, '')
+
+class ColorScheme:
+ """Generic color scheme class. Just a name and a Struct."""
+ def __init__(self,__scheme_name_,colordict=None,**colormap):
+ self.name = __scheme_name_
+ if colordict is None:
+ self.colors = Struct(**colormap)
+ else:
+ self.colors = Struct(colordict)
+
+ def copy(self,name=None):
+ """Return a full copy of the object, optionally renaming it."""
+ if name is None:
+ name = self.name
+ return ColorScheme(name, self.colors.dict())
+
+class ColorSchemeTable(dict):
+ """General class to handle tables of color schemes.
+
+ It's basically a dict of color schemes with a couple of shorthand
+ attributes and some convenient methods.
+
+ active_scheme_name -> obvious
+ active_colors -> actual color table of the active scheme"""
+
+ def __init__(self, scheme_list=None, default_scheme=''):
+ """Create a table of color schemes.
+
+ The table can be created empty and manually filled or it can be
+ created with a list of valid color schemes AND the specification for
+ the default active scheme.
+ """
+
+ # create object attributes to be set later
+ self.active_scheme_name = ''
+ self.active_colors = None
+
+ if scheme_list:
+ if default_scheme == '':
+ raise ValueError('you must specify the default color scheme')
+ for scheme in scheme_list:
+ self.add_scheme(scheme)
+ self.set_active_scheme(default_scheme)
+
+ def copy(self):
+ """Return full copy of object"""
+ return ColorSchemeTable(self.values(),self.active_scheme_name)
+
+ def add_scheme(self,new_scheme):
+ """Add a new color scheme to the table."""
+ if not isinstance(new_scheme,ColorScheme):
+ raise ValueError('ColorSchemeTable only accepts ColorScheme instances')
+ self[new_scheme.name] = new_scheme
+
+ def set_active_scheme(self,scheme,case_sensitive=0):
+ """Set the currently active scheme.
+
+ Names are by default compared in a case-insensitive way, but this can
+ be changed by setting the parameter case_sensitive to true."""
+
+ scheme_names = list(self.keys())
+ if case_sensitive:
+ valid_schemes = scheme_names
+ scheme_test = scheme
+ else:
+ valid_schemes = [s.lower() for s in scheme_names]
+ scheme_test = scheme.lower()
+ try:
+ scheme_idx = valid_schemes.index(scheme_test)
+ except ValueError:
+ raise ValueError('Unrecognized color scheme: ' + scheme + \
+ '\nValid schemes: '+str(scheme_names).replace("'', ",''))
+ else:
+ active = scheme_names[scheme_idx]
+ self.active_scheme_name = active
+ self.active_colors = self[active].colors
+ # Now allow using '' as an index for the current active scheme
+ self[''] = self[active]
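A small sketch of how the pieces above fit together (assuming IPython is importable; the scheme and key names are illustrative only): ``TermColors`` exposes the escape strings built by ``make_color_table``, and ``ColorSchemeTable`` selects one ``ColorScheme`` as active.

from IPython.utils.coloransi import TermColors, ColorScheme, ColorSchemeTable

# Direct use of the escape strings added by make_color_table:
print(TermColors.Red + "error text" + TermColors.Normal)

# Build a small table of schemes and switch between them:
plain = ColorScheme('plain', prompt=TermColors.NoColor)
loud  = ColorScheme('loud',  prompt=TermColors.LightRed)
table = ColorSchemeTable([plain, loud], default_scheme='plain')

table.set_active_scheme('LOUD')        # names are matched case-insensitively by default
print(table.active_colors.prompt + ">>> " + TermColors.Normal)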
diff --git a/contrib/python/ipython/py2/IPython/utils/contexts.py b/contrib/python/ipython/py2/IPython/utils/contexts.py
index 358dfe8b29..4d379b0eda 100644
--- a/contrib/python/ipython/py2/IPython/utils/contexts.py
+++ b/contrib/python/ipython/py2/IPython/utils/contexts.py
@@ -1,66 +1,66 @@
-# encoding: utf-8
-"""Miscellaneous context managers.
-"""
-
+# encoding: utf-8
+"""Miscellaneous context managers.
+"""
+
import warnings
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-class preserve_keys(object):
- """Preserve a set of keys in a dictionary.
-
- Upon entering the context manager the current values of the keys
- will be saved. Upon exiting, the dictionary will be updated to
- restore the original value of the preserved keys. Preserved keys
- which did not exist when entering the context manager will be
- deleted.
-
- Examples
- --------
-
- >>> d = {'a': 1, 'b': 2, 'c': 3}
- >>> with preserve_keys(d, 'b', 'c', 'd'):
- ... del d['a']
- ... del d['b'] # will be reset to 2
- ... d['c'] = None # will be reset to 3
- ... d['d'] = 4 # will be deleted
- ... d['e'] = 5
- ... print(sorted(d.items()))
- ...
- [('c', None), ('d', 4), ('e', 5)]
- >>> print(sorted(d.items()))
- [('b', 2), ('c', 3), ('e', 5)]
- """
-
- def __init__(self, dictionary, *keys):
- self.dictionary = dictionary
- self.keys = keys
-
- def __enter__(self):
- # Actions to perform upon exiting.
- to_delete = []
- to_update = {}
-
- d = self.dictionary
- for k in self.keys:
- if k in d:
- to_update[k] = d[k]
- else:
- to_delete.append(k)
-
- self.to_delete = to_delete
- self.to_update = to_update
-
- def __exit__(self, *exc_info):
- d = self.dictionary
-
- for k in self.to_delete:
- d.pop(k, None)
- d.update(self.to_update)
-
-
-class NoOpContext(object):
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+class preserve_keys(object):
+ """Preserve a set of keys in a dictionary.
+
+ Upon entering the context manager the current values of the keys
+ will be saved. Upon exiting, the dictionary will be updated to
+ restore the original value of the preserved keys. Preserved keys
+ which did not exist when entering the context manager will be
+ deleted.
+
+ Examples
+ --------
+
+ >>> d = {'a': 1, 'b': 2, 'c': 3}
+ >>> with preserve_keys(d, 'b', 'c', 'd'):
+ ... del d['a']
+ ... del d['b'] # will be reset to 2
+ ... d['c'] = None # will be reset to 3
+ ... d['d'] = 4 # will be deleted
+ ... d['e'] = 5
+ ... print(sorted(d.items()))
+ ...
+ [('c', None), ('d', 4), ('e', 5)]
+ >>> print(sorted(d.items()))
+ [('b', 2), ('c', 3), ('e', 5)]
+ """
+
+ def __init__(self, dictionary, *keys):
+ self.dictionary = dictionary
+ self.keys = keys
+
+ def __enter__(self):
+ # Actions to perform upon exiting.
+ to_delete = []
+ to_update = {}
+
+ d = self.dictionary
+ for k in self.keys:
+ if k in d:
+ to_update[k] = d[k]
+ else:
+ to_delete.append(k)
+
+ self.to_delete = to_delete
+ self.to_update = to_update
+
+ def __exit__(self, *exc_info):
+ d = self.dictionary
+
+ for k in self.to_delete:
+ d.pop(k, None)
+ d.update(self.to_update)
+
+
+class NoOpContext(object):
"""
Deprecated
@@ -70,5 +70,5 @@ class NoOpContext(object):
warnings.warn("""NoOpContext is deprecated since IPython 5.0 """,
DeprecationWarning, stacklevel=2)
- def __enter__(self): pass
- def __exit__(self, type, value, traceback): pass
+ def __enter__(self): pass
+ def __exit__(self, type, value, traceback): pass
diff --git a/contrib/python/ipython/py2/IPython/utils/daemonize.py b/contrib/python/ipython/py2/IPython/utils/daemonize.py
index f093cf67cb..a1bfaa193b 100644
--- a/contrib/python/ipython/py2/IPython/utils/daemonize.py
+++ b/contrib/python/ipython/py2/IPython/utils/daemonize.py
@@ -1,4 +1,4 @@
-from warnings import warn
-
-warn("IPython.utils.daemonize has moved to ipyparallel.apps.daemonize")
-from ipyparallel.apps.daemonize import daemonize
+from warnings import warn
+
+warn("IPython.utils.daemonize has moved to ipyparallel.apps.daemonize")
+from ipyparallel.apps.daemonize import daemonize
diff --git a/contrib/python/ipython/py2/IPython/utils/data.py b/contrib/python/ipython/py2/IPython/utils/data.py
index 36a8aabd95..308a692559 100644
--- a/contrib/python/ipython/py2/IPython/utils/data.py
+++ b/contrib/python/ipython/py2/IPython/utils/data.py
@@ -1,37 +1,37 @@
-# encoding: utf-8
-"""Utilities for working with data structures like lists, dicts and tuples.
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-from .py3compat import xrange
-
-def uniq_stable(elems):
- """uniq_stable(elems) -> list
-
- Return from an iterable, a list of all the unique elements in the input,
- but maintaining the order in which they first appear.
-
- Note: All elements in the input must be hashable for this routine
- to work, as it internally uses a set for efficiency reasons.
- """
- seen = set()
- return [x for x in elems if x not in seen and not seen.add(x)]
-
-
-def flatten(seq):
- """Flatten a list of lists (NOT recursive, only works for 2d lists)."""
-
- return [x for subseq in seq for x in subseq]
-
-
-def chop(seq, size):
- """Chop a sequence into chunks of the given size."""
- return [seq[i:i+size] for i in xrange(0,len(seq),size)]
-
-
+# encoding: utf-8
+"""Utilities for working with data structures like lists, dicts and tuples.
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+from .py3compat import xrange
+
+def uniq_stable(elems):
+ """uniq_stable(elems) -> list
+
+ Return from an iterable, a list of all the unique elements in the input,
+ but maintaining the order in which they first appear.
+
+ Note: All elements in the input must be hashable for this routine
+ to work, as it internally uses a set for efficiency reasons.
+ """
+ seen = set()
+ return [x for x in elems if x not in seen and not seen.add(x)]
+
+
+def flatten(seq):
+ """Flatten a list of lists (NOT recursive, only works for 2d lists)."""
+
+ return [x for subseq in seq for x in subseq]
+
+
+def chop(seq, size):
+ """Chop a sequence into chunks of the given size."""
+ return [seq[i:i+size] for i in xrange(0,len(seq),size)]
+
+
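For illustration (assuming IPython is importable), the three helpers above behave as follows:

from IPython.utils.data import uniq_stable, flatten, chop

print(uniq_stable([3, 1, 3, 2, 1]))    # [3, 1, 2]       - first occurrences, order kept
print(flatten([[1, 2], [3], [4, 5]]))  # [1, 2, 3, 4, 5] - flattens one level only
print(chop(list(range(7)), 3))         # [[0, 1, 2], [3, 4, 5], [6]]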
diff --git a/contrib/python/ipython/py2/IPython/utils/decorators.py b/contrib/python/ipython/py2/IPython/utils/decorators.py
index 79be8ca1e6..c26485553c 100644
--- a/contrib/python/ipython/py2/IPython/utils/decorators.py
+++ b/contrib/python/ipython/py2/IPython/utils/decorators.py
@@ -1,58 +1,58 @@
-# encoding: utf-8
-"""Decorators that don't go anywhere else.
-
-This module contains misc. decorators that don't really go with another module
-in :mod:`IPython.utils`. Beore putting something here please see if it should
-go into another topical module in :mod:`IPython.utils`.
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-def flag_calls(func):
- """Wrap a function to detect and flag when it gets called.
-
- This is a decorator which takes a function and wraps it in a function with
- a 'called' attribute. wrapper.called is initialized to False.
-
- The wrapper.called attribute is set to False right before each call to the
- wrapped function, so if the call fails it remains False. After the call
- completes, wrapper.called is set to True and the output is returned.
-
- Testing for truth in wrapper.called allows you to determine if a call to
- func() was attempted and succeeded."""
-
- # don't wrap twice
- if hasattr(func, 'called'):
- return func
-
- def wrapper(*args,**kw):
- wrapper.called = False
- out = func(*args,**kw)
- wrapper.called = True
- return out
-
- wrapper.called = False
- wrapper.__doc__ = func.__doc__
- return wrapper
-
-def undoc(func):
- """Mark a function or class as undocumented.
-
- This is found by inspecting the AST, so for now it must be used directly
- as @undoc, not as e.g. @decorators.undoc
- """
- return func
-
+# encoding: utf-8
+"""Decorators that don't go anywhere else.
+
+This module contains misc. decorators that don't really go with another module
+in :mod:`IPython.utils`. Before putting something here please see if it should
+go into another topical module in :mod:`IPython.utils`.
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+def flag_calls(func):
+ """Wrap a function to detect and flag when it gets called.
+
+ This is a decorator which takes a function and wraps it in a function with
+ a 'called' attribute. wrapper.called is initialized to False.
+
+ The wrapper.called attribute is set to False right before each call to the
+ wrapped function, so if the call fails it remains False. After the call
+ completes, wrapper.called is set to True and the output is returned.
+
+ Testing for truth in wrapper.called allows you to determine if a call to
+ func() was attempted and succeeded."""
+
+ # don't wrap twice
+ if hasattr(func, 'called'):
+ return func
+
+ def wrapper(*args,**kw):
+ wrapper.called = False
+ out = func(*args,**kw)
+ wrapper.called = True
+ return out
+
+ wrapper.called = False
+ wrapper.__doc__ = func.__doc__
+ return wrapper
+
+def undoc(func):
+ """Mark a function or class as undocumented.
+
+ This is found by inspecting the AST, so for now it must be used directly
+ as @undoc, not as e.g. @decorators.undoc
+ """
+ return func
+
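A short sketch of ``flag_calls`` in use; the decorated function here is purely illustrative.

from IPython.utils.decorators import flag_calls

@flag_calls
def setup():          # hypothetical function, used only to demonstrate the flag
    return 42

print(setup.called)           # False - no call attempted yet
result = setup()
print(result, setup.called)   # 42 True - set only after the call completed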
diff --git a/contrib/python/ipython/py2/IPython/utils/dir2.py b/contrib/python/ipython/py2/IPython/utils/dir2.py
index fb0cd719ef..f6f164f9b1 100644
--- a/contrib/python/ipython/py2/IPython/utils/dir2.py
+++ b/contrib/python/ipython/py2/IPython/utils/dir2.py
@@ -1,51 +1,51 @@
-# encoding: utf-8
-"""A fancy version of Python's builtin :func:`dir` function.
-"""
-
+# encoding: utf-8
+"""A fancy version of Python's builtin :func:`dir` function.
+"""
+
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
-
+
import inspect
-from .py3compat import string_types
-
-
-def safe_hasattr(obj, attr):
- """In recent versions of Python, hasattr() only catches AttributeError.
- This catches all errors.
- """
- try:
- getattr(obj, attr)
- return True
- except:
- return False
-
-
-def dir2(obj):
- """dir2(obj) -> list of strings
-
- Extended version of the Python builtin dir(), which does a few extra
- checks.
-
- This version is guaranteed to return only a list of true strings, whereas
- dir() returns anything that objects inject into themselves, even if they
- are later not really valid for attribute access (many extension libraries
- have such bugs).
- """
-
- # Start building the attribute list via dir(), and then complete it
- # with a few extra special-purpose calls.
-
- try:
- words = set(dir(obj))
- except Exception:
- # TypeError: dir(obj) does not return a list
- words = set()
-
- # filter out non-string attributes which may be stuffed by dir() calls
- # and poor coding in third-party modules
-
- words = [w for w in words if isinstance(w, string_types)]
- return sorted(words)
+from .py3compat import string_types
+
+
+def safe_hasattr(obj, attr):
+ """In recent versions of Python, hasattr() only catches AttributeError.
+ This catches all errors.
+ """
+ try:
+ getattr(obj, attr)
+ return True
+ except:
+ return False
+
+
+def dir2(obj):
+ """dir2(obj) -> list of strings
+
+ Extended version of the Python builtin dir(), which does a few extra
+ checks.
+
+ This version is guaranteed to return only a list of true strings, whereas
+ dir() returns anything that objects inject into themselves, even if they
+ are later not really valid for attribute access (many extension libraries
+ have such bugs).
+ """
+
+ # Start building the attribute list via dir(), and then complete it
+ # with a few extra special-purpose calls.
+
+ try:
+ words = set(dir(obj))
+ except Exception:
+ # TypeError: dir(obj) does not return a list
+ words = set()
+
+ # filter out non-string attributes which may be stuffed by dir() calls
+ # and poor coding in third-party modules
+
+ words = [w for w in words if isinstance(w, string_types)]
+ return sorted(words)
def get_real_method(obj, name):
diff --git a/contrib/python/ipython/py2/IPython/utils/encoding.py b/contrib/python/ipython/py2/IPython/utils/encoding.py
index ba8ca09534..387a24700c 100644
--- a/contrib/python/ipython/py2/IPython/utils/encoding.py
+++ b/contrib/python/ipython/py2/IPython/utils/encoding.py
@@ -1,71 +1,71 @@
-# coding: utf-8
-"""
-Utilities for dealing with text encodings
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2012 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-import sys
-import locale
-import warnings
-
-# to deal with the possibility of sys.std* not being a stream at all
-def get_stream_enc(stream, default=None):
- """Return the given stream's encoding or a default.
-
- There are cases where ``sys.std*`` might not actually be a stream, so
- check for the encoding attribute prior to returning it, and return
- a default if it doesn't exist or evaluates as False. ``default``
- is None if not provided.
- """
- if not hasattr(stream, 'encoding') or not stream.encoding:
- return default
- else:
- return stream.encoding
-
-# Less conservative replacement for sys.getdefaultencoding, that will try
-# to match the environment.
-# Defined here as central function, so if we find better choices, we
-# won't need to make changes all over IPython.
-def getdefaultencoding(prefer_stream=True):
- """Return IPython's guess for the default encoding for bytes as text.
-
- If prefer_stream is True (default), asks for stdin.encoding first,
- to match the calling Terminal, but that is often None for subprocesses.
-
- Then fall back on locale.getpreferredencoding(),
- which should be a sensible platform default (that respects LANG environment),
- and finally to sys.getdefaultencoding() which is the most conservative option,
- and usually ASCII on Python 2 or UTF8 on Python 3.
- """
- enc = None
- if prefer_stream:
- enc = get_stream_enc(sys.stdin)
- if not enc or enc=='ascii':
- try:
- # There are reports of getpreferredencoding raising errors
- # in some cases, which may well be fixed, but let's be conservative here.
- enc = locale.getpreferredencoding()
- except Exception:
- pass
- enc = enc or sys.getdefaultencoding()
- # On windows `cp0` can be returned to indicate that there is no code page.
- # Since cp0 is an invalid encoding return instead cp1252 which is the
- # Western European default.
- if enc == 'cp0':
- warnings.warn(
- "Invalid code page cp0 detected - using cp1252 instead."
- "If cp1252 is incorrect please ensure a valid code page "
- "is defined for the process.", RuntimeWarning)
- return 'cp1252'
- return enc
-
-DEFAULT_ENCODING = getdefaultencoding()
+# coding: utf-8
+"""
+Utilities for dealing with text encodings
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2012 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+import sys
+import locale
+import warnings
+
+# to deal with the possibility of sys.std* not being a stream at all
+def get_stream_enc(stream, default=None):
+ """Return the given stream's encoding or a default.
+
+ There are cases where ``sys.std*`` might not actually be a stream, so
+ check for the encoding attribute prior to returning it, and return
+ a default if it doesn't exist or evaluates as False. ``default``
+ is None if not provided.
+ """
+ if not hasattr(stream, 'encoding') or not stream.encoding:
+ return default
+ else:
+ return stream.encoding
+
+# Less conservative replacement for sys.getdefaultencoding, that will try
+# to match the environment.
+# Defined here as central function, so if we find better choices, we
+# won't need to make changes all over IPython.
+def getdefaultencoding(prefer_stream=True):
+ """Return IPython's guess for the default encoding for bytes as text.
+
+ If prefer_stream is True (default), asks for stdin.encoding first,
+ to match the calling Terminal, but that is often None for subprocesses.
+
+ Then fall back on locale.getpreferredencoding(),
+ which should be a sensible platform default (that respects LANG environment),
+ and finally to sys.getdefaultencoding() which is the most conservative option,
+ and usually ASCII on Python 2 or UTF8 on Python 3.
+ """
+ enc = None
+ if prefer_stream:
+ enc = get_stream_enc(sys.stdin)
+ if not enc or enc=='ascii':
+ try:
+ # There are reports of getpreferredencoding raising errors
+ # in some cases, which may well be fixed, but let's be conservative here.
+ enc = locale.getpreferredencoding()
+ except Exception:
+ pass
+ enc = enc or sys.getdefaultencoding()
+ # On windows `cp0` can be returned to indicate that there is no code page.
+ # Since cp0 is an invalid encoding return instead cp1252 which is the
+ # Western European default.
+ if enc == 'cp0':
+ warnings.warn(
+ "Invalid code page cp0 detected - using cp1252 instead."
+ "If cp1252 is incorrect please ensure a valid code page "
+ "is defined for the process.", RuntimeWarning)
+ return 'cp1252'
+ return enc
+
+DEFAULT_ENCODING = getdefaultencoding()
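A usage sketch of the encoding helpers above; the values printed depend on the terminal and locale of the running process.

import sys
from IPython.utils.encoding import (get_stream_enc, getdefaultencoding,
                                    DEFAULT_ENCODING)

print(get_stream_enc(sys.stdout, default='utf-8'))  # stream encoding, or the fallback
print(getdefaultencoding())                         # stdin -> locale -> sys default
print(DEFAULT_ENCODING)                             # computed once at import time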
diff --git a/contrib/python/ipython/py2/IPython/utils/eventful.py b/contrib/python/ipython/py2/IPython/utils/eventful.py
index e954a45e0a..fc0f7aee4f 100644
--- a/contrib/python/ipython/py2/IPython/utils/eventful.py
+++ b/contrib/python/ipython/py2/IPython/utils/eventful.py
@@ -1,7 +1,7 @@
-from __future__ import absolute_import
-
-from warnings import warn
-
-warn("IPython.utils.eventful has moved to traitlets.eventful")
-
-from traitlets.eventful import *
+from __future__ import absolute_import
+
+from warnings import warn
+
+warn("IPython.utils.eventful has moved to traitlets.eventful")
+
+from traitlets.eventful import *
diff --git a/contrib/python/ipython/py2/IPython/utils/frame.py b/contrib/python/ipython/py2/IPython/utils/frame.py
index ebf9e47bf9..76ccc71c44 100644
--- a/contrib/python/ipython/py2/IPython/utils/frame.py
+++ b/contrib/python/ipython/py2/IPython/utils/frame.py
@@ -1,98 +1,98 @@
-# encoding: utf-8
-"""
-Utilities for working with stack frames.
-"""
-from __future__ import print_function
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import sys
-from IPython.utils import py3compat
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-@py3compat.doctest_refactor_print
-def extract_vars(*names,**kw):
- """Extract a set of variables by name from another frame.
-
- Parameters
- ----------
- *names : str
- One or more variable names which will be extracted from the caller's
- frame.
-
- depth : integer, optional
- How many frames in the stack to walk when looking for your variables.
- The default is 0, which will use the frame where the call was made.
-
-
- Examples
- --------
- ::
-
- In [2]: def func(x):
- ...: y = 1
- ...: print(sorted(extract_vars('x','y').items()))
- ...:
-
- In [3]: func('hello')
- [('x', 'hello'), ('y', 1)]
- """
-
- depth = kw.get('depth',0)
-
- callerNS = sys._getframe(depth+1).f_locals
- return dict((k,callerNS[k]) for k in names)
-
-
-def extract_vars_above(*names):
- """Extract a set of variables by name from another frame.
-
- Similar to extractVars(), but with a specified depth of 1, so that names
- are exctracted exactly from above the caller.
-
- This is simply a convenience function so that the very common case (for us)
- of skipping exactly 1 frame doesn't have to construct a special dict for
- keyword passing."""
-
- callerNS = sys._getframe(2).f_locals
- return dict((k,callerNS[k]) for k in names)
-
-
-def debugx(expr,pre_msg=''):
- """Print the value of an expression from the caller's frame.
-
- Takes an expression, evaluates it in the caller's frame and prints both
- the given expression and the resulting value (as well as a debug mark
- indicating the name of the calling function. The input must be of a form
- suitable for eval().
-
- An optional message can be passed, which will be prepended to the printed
- expr->value pair."""
-
- cf = sys._getframe(1)
- print('[DBG:%s] %s%s -> %r' % (cf.f_code.co_name,pre_msg,expr,
- eval(expr,cf.f_globals,cf.f_locals)))
-
-
-# deactivate it by uncommenting the following line, which makes it a no-op
-#def debugx(expr,pre_msg=''): pass
-
-def extract_module_locals(depth=0):
- """Returns (module, locals) of the function `depth` frames away from the caller"""
- f = sys._getframe(depth + 1)
- global_ns = f.f_globals
- module = sys.modules[global_ns['__name__']]
- return (module, f.f_locals)
-
+# encoding: utf-8
+"""
+Utilities for working with stack frames.
+"""
+from __future__ import print_function
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import sys
+from IPython.utils import py3compat
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+@py3compat.doctest_refactor_print
+def extract_vars(*names,**kw):
+ """Extract a set of variables by name from another frame.
+
+ Parameters
+ ----------
+ *names : str
+ One or more variable names which will be extracted from the caller's
+ frame.
+
+ depth : integer, optional
+ How many frames in the stack to walk when looking for your variables.
+ The default is 0, which will use the frame where the call was made.
+
+
+ Examples
+ --------
+ ::
+
+ In [2]: def func(x):
+ ...: y = 1
+ ...: print(sorted(extract_vars('x','y').items()))
+ ...:
+
+ In [3]: func('hello')
+ [('x', 'hello'), ('y', 1)]
+ """
+
+ depth = kw.get('depth',0)
+
+ callerNS = sys._getframe(depth+1).f_locals
+ return dict((k,callerNS[k]) for k in names)
+
+
+def extract_vars_above(*names):
+ """Extract a set of variables by name from another frame.
+
+ Similar to extractVars(), but with a specified depth of 1, so that names
+ are exctracted exactly from above the caller.
+
+ This is simply a convenience function so that the very common case (for us)
+ of skipping exactly 1 frame doesn't have to construct a special dict for
+ keyword passing."""
+
+ callerNS = sys._getframe(2).f_locals
+ return dict((k,callerNS[k]) for k in names)
+
+
+def debugx(expr,pre_msg=''):
+ """Print the value of an expression from the caller's frame.
+
+ Takes an expression, evaluates it in the caller's frame and prints both
+ the given expression and the resulting value (as well as a debug mark
+ indicating the name of the calling function. The input must be of a form
+ suitable for eval().
+
+ An optional message can be passed, which will be prepended to the printed
+ expr->value pair."""
+
+ cf = sys._getframe(1)
+ print('[DBG:%s] %s%s -> %r' % (cf.f_code.co_name,pre_msg,expr,
+ eval(expr,cf.f_globals,cf.f_locals)))
+
+
+# deactivate it by uncommenting the following line, which makes it a no-op
+#def debugx(expr,pre_msg=''): pass
+
+def extract_module_locals(depth=0):
+ """Returns (module, locals) of the function `depth` frames away from the caller"""
+ f = sys._getframe(depth + 1)
+ global_ns = f.f_globals
+ module = sys.modules[global_ns['__name__']]
+ return (module, f.f_locals)
+
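``extract_vars`` above already carries its own doctest; here is a quick sketch of ``debugx`` and ``extract_module_locals`` (the ``calc`` helper is illustrative).

from IPython.utils.frame import debugx, extract_module_locals

def calc():                    # illustrative caller
    x = 10
    debugx('x * 2')            # prints e.g.: [DBG:calc] x * 2 -> 20
    module, local_ns = extract_module_locals(0)
    print(module.__name__, sorted(local_ns))   # the caller's module and local names

calc()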
diff --git a/contrib/python/ipython/py2/IPython/utils/generics.py b/contrib/python/ipython/py2/IPython/utils/generics.py
index ff856a7e55..5ffdc86ebd 100644
--- a/contrib/python/ipython/py2/IPython/utils/generics.py
+++ b/contrib/python/ipython/py2/IPython/utils/generics.py
@@ -1,34 +1,34 @@
-# encoding: utf-8
-"""Generic functions for extending IPython.
-
-See http://pypi.python.org/pypi/simplegeneric.
-"""
-
-from IPython.core.error import TryNext
-from simplegeneric import generic
-
-
-@generic
-def inspect_object(obj):
- """Called when you do obj?"""
- raise TryNext
-
-
-@generic
-def complete_object(obj, prev_completions):
- """Custom completer dispatching for python objects.
-
- Parameters
- ----------
- obj : object
- The object to complete.
- prev_completions : list
- List of attributes discovered so far.
-
- This should return the list of attributes in obj. If you only wish to
- add to the attributes already discovered normally, return
- own_attrs + prev_completions.
- """
- raise TryNext
-
-
+# encoding: utf-8
+"""Generic functions for extending IPython.
+
+See http://pypi.python.org/pypi/simplegeneric.
+"""
+
+from IPython.core.error import TryNext
+from simplegeneric import generic
+
+
+@generic
+def inspect_object(obj):
+ """Called when you do obj?"""
+ raise TryNext
+
+
+@generic
+def complete_object(obj, prev_completions):
+ """Custom completer dispatching for python objects.
+
+ Parameters
+ ----------
+ obj : object
+ The object to complete.
+ prev_completions : list
+ List of attributes discovered so far.
+
+ This should return the list of attributes in obj. If you only wish to
+ add to the attributes already discovered normally, return
+ own_attrs + prev_completions.
+ """
+ raise TryNext
+
+
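A sketch of how the generic hooks above are extended: ``simplegeneric`` functions are specialized per type with ``when_type``. The ``MyThing`` class and the attribute names are illustrative only.

from IPython.utils.generics import complete_object

class MyThing(object):
    """Illustrative class whose attributes are produced dynamically."""

@complete_object.when_type(MyThing)
def complete_mything(obj, prev_completions):
    # Return extra names on top of what was already discovered normally.
    return ['dynamic_attr', 'another_attr'] + prev_completions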
diff --git a/contrib/python/ipython/py2/IPython/utils/importstring.py b/contrib/python/ipython/py2/IPython/utils/importstring.py
index 2c7a2a167e..c8e1840eb3 100644
--- a/contrib/python/ipython/py2/IPython/utils/importstring.py
+++ b/contrib/python/ipython/py2/IPython/utils/importstring.py
@@ -1,39 +1,39 @@
-# encoding: utf-8
-"""
-A simple utility to import something by its string name.
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-
-def import_item(name):
- """Import and return ``bar`` given the string ``foo.bar``.
-
- Calling ``bar = import_item("foo.bar")`` is the functional equivalent of
- executing the code ``from foo import bar``.
-
- Parameters
- ----------
- name : string
- The fully qualified name of the module/package being imported.
-
- Returns
- -------
- mod : module object
- The module that was imported.
- """
-
- parts = name.rsplit('.', 1)
- if len(parts) == 2:
- # called with 'foo.bar....'
- package, obj = parts
- module = __import__(package, fromlist=[obj])
- try:
- pak = getattr(module, obj)
- except AttributeError:
- raise ImportError('No module named %s' % obj)
- return pak
- else:
- # called with un-dotted string
- return __import__(parts[0])
+# encoding: utf-8
+"""
+A simple utility to import something by its string name.
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+
+def import_item(name):
+ """Import and return ``bar`` given the string ``foo.bar``.
+
+ Calling ``bar = import_item("foo.bar")`` is the functional equivalent of
+ executing the code ``from foo import bar``.
+
+ Parameters
+ ----------
+ name : string
+ The fully qualified name of the module/package being imported.
+
+ Returns
+ -------
+ mod : module object
+ The module that was imported.
+ """
+
+ parts = name.rsplit('.', 1)
+ if len(parts) == 2:
+ # called with 'foo.bar....'
+ package, obj = parts
+ module = __import__(package, fromlist=[obj])
+ try:
+ pak = getattr(module, obj)
+ except AttributeError:
+ raise ImportError('No module named %s' % obj)
+ return pak
+ else:
+ # called with un-dotted string
+ return __import__(parts[0])
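For illustration, ``import_item`` resolves dotted names at runtime, mirroring ``from foo import bar``:

from IPython.utils.importstring import import_item

join = import_item('os.path.join')   # equivalent to: from os.path import join
print(join('a', 'b'))                # 'a/b' (or 'a\\b' on Windows)

os_mod = import_item('os')           # un-dotted names import the module itself
print(os_mod.__name__)               # 'os'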
diff --git a/contrib/python/ipython/py2/IPython/utils/io.py b/contrib/python/ipython/py2/IPython/utils/io.py
index c316c73bcd..036d6e3926 100644
--- a/contrib/python/ipython/py2/IPython/utils/io.py
+++ b/contrib/python/ipython/py2/IPython/utils/io.py
@@ -1,95 +1,95 @@
-# encoding: utf-8
-"""
-IO related utilities.
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import print_function
-from __future__ import absolute_import
-
-
-import atexit
-import os
-import sys
-import tempfile
+# encoding: utf-8
+"""
+IO related utilities.
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import print_function
+from __future__ import absolute_import
+
+
+import atexit
+import os
+import sys
+import tempfile
import warnings
-from warnings import warn
+from warnings import warn
from IPython.utils.decorators import undoc
-from .capture import CapturedIO, capture_output
-from .py3compat import string_types, input, PY3
-
+from .capture import CapturedIO, capture_output
+from .py3compat import string_types, input, PY3
+
@undoc
-class IOStream:
-
+class IOStream:
+
def __init__(self, stream, fallback=None):
warn('IOStream is deprecated since IPython 5.0, use sys.{stdin,stdout,stderr} instead',
DeprecationWarning, stacklevel=2)
- if not hasattr(stream,'write') or not hasattr(stream,'flush'):
- if fallback is not None:
- stream = fallback
- else:
- raise ValueError("fallback required, but not specified")
- self.stream = stream
- self._swrite = stream.write
-
- # clone all methods not overridden:
- def clone(meth):
- return not hasattr(self, meth) and not meth.startswith('_')
- for meth in filter(clone, dir(stream)):
+ if not hasattr(stream,'write') or not hasattr(stream,'flush'):
+ if fallback is not None:
+ stream = fallback
+ else:
+ raise ValueError("fallback required, but not specified")
+ self.stream = stream
+ self._swrite = stream.write
+
+ # clone all methods not overridden:
+ def clone(meth):
+ return not hasattr(self, meth) and not meth.startswith('_')
+ for meth in filter(clone, dir(stream)):
try:
val = getattr(stream, meth)
except AttributeError:
pass
else:
setattr(self, meth, val)
-
- def __repr__(self):
- cls = self.__class__
- tpl = '{mod}.{cls}({args})'
- return tpl.format(mod=cls.__module__, cls=cls.__name__, args=self.stream)
-
- def write(self,data):
+
+ def __repr__(self):
+ cls = self.__class__
+ tpl = '{mod}.{cls}({args})'
+ return tpl.format(mod=cls.__module__, cls=cls.__name__, args=self.stream)
+
+ def write(self,data):
warn('IOStream is deprecated since IPython 5.0, use sys.{stdin,stdout,stderr} instead',
DeprecationWarning, stacklevel=2)
- try:
- self._swrite(data)
- except:
- try:
- # print handles some unicode issues which may trip a plain
- # write() call. Emulate write() by using an empty end
- # argument.
- print(data, end='', file=self.stream)
- except:
- # if we get here, something is seriously broken.
- print('ERROR - failed to write data to stream:', self.stream,
- file=sys.stderr)
-
- def writelines(self, lines):
+ try:
+ self._swrite(data)
+ except:
+ try:
+ # print handles some unicode issues which may trip a plain
+ # write() call. Emulate write() by using an empty end
+ # argument.
+ print(data, end='', file=self.stream)
+ except:
+ # if we get here, something is seriously broken.
+ print('ERROR - failed to write data to stream:', self.stream,
+ file=sys.stderr)
+
+ def writelines(self, lines):
warn('IOStream is deprecated since IPython 5.0, use sys.{stdin,stdout,stderr} instead',
DeprecationWarning, stacklevel=2)
- if isinstance(lines, string_types):
- lines = [lines]
- for line in lines:
- self.write(line)
-
- # This class used to have a writeln method, but regular files and streams
- # in Python don't have this method. We need to keep this completely
- # compatible so we removed it.
-
- @property
- def closed(self):
- return self.stream.closed
-
- def close(self):
- pass
-
-# setup stdin/stdout/stderr to sys.stdin/sys.stdout/sys.stderr
+ if isinstance(lines, string_types):
+ lines = [lines]
+ for line in lines:
+ self.write(line)
+
+ # This class used to have a writeln method, but regular files and streams
+ # in Python don't have this method. We need to keep this completely
+ # compatible so we removed it.
+
+ @property
+ def closed(self):
+ return self.stream.closed
+
+ def close(self):
+ pass
+
+# setup stdin/stdout/stderr to sys.stdin/sys.stdout/sys.stderr
devnull = open(os.devnull, 'w')
-atexit.register(devnull.close)
-
+atexit.register(devnull.close)
+
# io.std* are deprecated, but don't show our own deprecation warnings
# during initialization of the deprecated API.
with warnings.catch_warnings():
@@ -98,149 +98,149 @@ with warnings.catch_warnings():
stdout = IOStream(sys.stdout, fallback=devnull)
stderr = IOStream(sys.stderr, fallback=devnull)
-class Tee(object):
- """A class to duplicate an output stream to stdout/err.
-
- This works in a manner very similar to the Unix 'tee' command.
-
- When the object is closed or deleted, it closes the original file given to
- it for duplication.
- """
- # Inspired by:
- # http://mail.python.org/pipermail/python-list/2007-May/442737.html
-
- def __init__(self, file_or_name, mode="w", channel='stdout'):
- """Construct a new Tee object.
-
- Parameters
- ----------
- file_or_name : filename or open filehandle (writable)
- File that will be duplicated
-
- mode : optional, valid mode for open().
- If a filename was give, open with this mode.
-
- channel : str, one of ['stdout', 'stderr']
- """
- if channel not in ['stdout', 'stderr']:
- raise ValueError('Invalid channel spec %s' % channel)
-
- if hasattr(file_or_name, 'write') and hasattr(file_or_name, 'seek'):
- self.file = file_or_name
- else:
- self.file = open(file_or_name, mode)
- self.channel = channel
- self.ostream = getattr(sys, channel)
- setattr(sys, channel, self)
- self._closed = False
-
- def close(self):
- """Close the file and restore the channel."""
- self.flush()
- setattr(sys, self.channel, self.ostream)
- self.file.close()
- self._closed = True
-
- def write(self, data):
- """Write data to both channels."""
- self.file.write(data)
- self.ostream.write(data)
- self.ostream.flush()
-
- def flush(self):
- """Flush both channels."""
- self.file.flush()
- self.ostream.flush()
-
- def __del__(self):
- if not self._closed:
- self.close()
-
-
-def ask_yes_no(prompt, default=None, interrupt=None):
- """Asks a question and returns a boolean (y/n) answer.
-
- If default is given (one of 'y','n'), it is used if the user input is
- empty. If interrupt is given (one of 'y','n'), it is used if the user
- presses Ctrl-C. Otherwise the question is repeated until an answer is
- given.
-
- An EOF is treated as the default answer. If there is no default, an
- exception is raised to prevent infinite loops.
-
- Valid answers are: y/yes/n/no (match is not case sensitive)."""
-
- answers = {'y':True,'n':False,'yes':True,'no':False}
- ans = None
- while ans not in answers.keys():
- try:
- ans = input(prompt+' ').lower()
- if not ans: # response was an empty string
- ans = default
- except KeyboardInterrupt:
- if interrupt:
- ans = interrupt
- except EOFError:
- if default in answers.keys():
- ans = default
- print()
- else:
- raise
-
- return answers[ans]
-
-
-def temp_pyfile(src, ext='.py'):
- """Make a temporary python file, return filename and filehandle.
-
- Parameters
- ----------
- src : string or list of strings (no need for ending newlines if list)
- Source code to be written to the file.
-
- ext : optional, string
- Extension for the generated file.
-
- Returns
- -------
- (filename, open filehandle)
- It is the caller's responsibility to close the open file and unlink it.
- """
- fname = tempfile.mkstemp(ext)[1]
- f = open(fname,'w')
- f.write(src)
- f.flush()
- return fname, f
-
-def atomic_writing(*args, **kwargs):
- """DEPRECATED: moved to notebook.services.contents.fileio"""
- warn("IPython.utils.io.atomic_writing has moved to notebook.services.contents.fileio")
- from notebook.services.contents.fileio import atomic_writing
- return atomic_writing(*args, **kwargs)
-
-def raw_print(*args, **kw):
- """Raw print to sys.__stdout__, otherwise identical interface to print()."""
-
- print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'),
- file=sys.__stdout__)
- sys.__stdout__.flush()
-
-
-def raw_print_err(*args, **kw):
- """Raw print to sys.__stderr__, otherwise identical interface to print()."""
-
- print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'),
- file=sys.__stderr__)
- sys.__stderr__.flush()
-
-
-# Short aliases for quick debugging, do NOT use these in production code.
-rprint = raw_print
-rprinte = raw_print_err
-
-
-def unicode_std_stream(stream='stdout'):
- """DEPRECATED, moved to nbconvert.utils.io"""
- warn("IPython.utils.io.unicode_std_stream has moved to nbconvert.utils.io")
- from nbconvert.utils.io import unicode_std_stream
- return unicode_std_stream(stream)
+class Tee(object):
+ """A class to duplicate an output stream to stdout/err.
+
+ This works in a manner very similar to the Unix 'tee' command.
+
+ When the object is closed or deleted, it closes the original file given to
+ it for duplication.
+ """
+ # Inspired by:
+ # http://mail.python.org/pipermail/python-list/2007-May/442737.html
+
+ def __init__(self, file_or_name, mode="w", channel='stdout'):
+ """Construct a new Tee object.
+
+ Parameters
+ ----------
+ file_or_name : filename or open filehandle (writable)
+ File that will be duplicated
+
+ mode : optional, valid mode for open().
+          If a filename was given, open with this mode.
+
+ channel : str, one of ['stdout', 'stderr']
+ """
+ if channel not in ['stdout', 'stderr']:
+ raise ValueError('Invalid channel spec %s' % channel)
+
+ if hasattr(file_or_name, 'write') and hasattr(file_or_name, 'seek'):
+ self.file = file_or_name
+ else:
+ self.file = open(file_or_name, mode)
+ self.channel = channel
+ self.ostream = getattr(sys, channel)
+ setattr(sys, channel, self)
+ self._closed = False
+
+ def close(self):
+ """Close the file and restore the channel."""
+ self.flush()
+ setattr(sys, self.channel, self.ostream)
+ self.file.close()
+ self._closed = True
+
+ def write(self, data):
+ """Write data to both channels."""
+ self.file.write(data)
+ self.ostream.write(data)
+ self.ostream.flush()
+
+ def flush(self):
+ """Flush both channels."""
+ self.file.flush()
+ self.ostream.flush()
+
+ def __del__(self):
+ if not self._closed:
+ self.close()
+
+
+def ask_yes_no(prompt, default=None, interrupt=None):
+ """Asks a question and returns a boolean (y/n) answer.
+
+ If default is given (one of 'y','n'), it is used if the user input is
+ empty. If interrupt is given (one of 'y','n'), it is used if the user
+ presses Ctrl-C. Otherwise the question is repeated until an answer is
+ given.
+
+ An EOF is treated as the default answer. If there is no default, an
+ exception is raised to prevent infinite loops.
+
+ Valid answers are: y/yes/n/no (match is not case sensitive)."""
+
+ answers = {'y':True,'n':False,'yes':True,'no':False}
+ ans = None
+ while ans not in answers.keys():
+ try:
+ ans = input(prompt+' ').lower()
+ if not ans: # response was an empty string
+ ans = default
+ except KeyboardInterrupt:
+ if interrupt:
+ ans = interrupt
+ except EOFError:
+ if default in answers.keys():
+ ans = default
+ print()
+ else:
+ raise
+
+ return answers[ans]
+
+
+def temp_pyfile(src, ext='.py'):
+ """Make a temporary python file, return filename and filehandle.
+
+ Parameters
+ ----------
+ src : string or list of strings (no need for ending newlines if list)
+ Source code to be written to the file.
+
+ ext : optional, string
+ Extension for the generated file.
+
+ Returns
+ -------
+ (filename, open filehandle)
+ It is the caller's responsibility to close the open file and unlink it.
+ """
+ fname = tempfile.mkstemp(ext)[1]
+ f = open(fname,'w')
+ f.write(src)
+ f.flush()
+ return fname, f
+
+def atomic_writing(*args, **kwargs):
+ """DEPRECATED: moved to notebook.services.contents.fileio"""
+ warn("IPython.utils.io.atomic_writing has moved to notebook.services.contents.fileio")
+ from notebook.services.contents.fileio import atomic_writing
+ return atomic_writing(*args, **kwargs)
+
+def raw_print(*args, **kw):
+ """Raw print to sys.__stdout__, otherwise identical interface to print()."""
+
+ print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'),
+ file=sys.__stdout__)
+ sys.__stdout__.flush()
+
+
+def raw_print_err(*args, **kw):
+ """Raw print to sys.__stderr__, otherwise identical interface to print()."""
+
+ print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'),
+ file=sys.__stderr__)
+ sys.__stderr__.flush()
+
+
+# Short aliases for quick debugging, do NOT use these in production code.
+rprint = raw_print
+rprinte = raw_print_err
+
+
+def unicode_std_stream(stream='stdout'):
+ """DEPRECATED, moved to nbconvert.utils.io"""
+ warn("IPython.utils.io.unicode_std_stream has moved to nbconvert.utils.io")
+ from nbconvert.utils.io import unicode_std_stream
+ return unicode_std_stream(stream)
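To make the restored `Tee` and `ask_yes_no` helpers concrete, a small hedged sketch; the 'session.log' file name is arbitrary and chosen only for the example:

    from IPython.utils.io import Tee, ask_yes_no

    # Duplicate everything printed to stdout into a file until close(),
    # which also restores the original sys.stdout.
    tee = Tee('session.log', mode='w', channel='stdout')
    print('this line reaches both the terminal and session.log')
    tee.close()

    # ask_yes_no keeps prompting until it sees y/yes/n/no (case-insensitive);
    # an empty answer or EOF falls back to `default`, Ctrl-C to `interrupt`.
    if ask_yes_no('Remove the log file?', default='n', interrupt='n'):
        import os
        os.remove('session.log')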
diff --git a/contrib/python/ipython/py2/IPython/utils/ipstruct.py b/contrib/python/ipython/py2/IPython/utils/ipstruct.py
index e17760b4f9..e2b3e8fa4c 100644
--- a/contrib/python/ipython/py2/IPython/utils/ipstruct.py
+++ b/contrib/python/ipython/py2/IPython/utils/ipstruct.py
@@ -1,391 +1,391 @@
-# encoding: utf-8
-"""A dict subclass that supports attribute style access.
-
-Authors:
-
-* Fernando Perez (original)
-* Brian Granger (refactoring to a dict subclass)
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-__all__ = ['Struct']
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-
-class Struct(dict):
- """A dict subclass with attribute style access.
-
- This dict subclass has a a few extra features:
-
- * Attribute style access.
- * Protection of class members (like keys, items) when using attribute
- style access.
- * The ability to restrict assignment to only existing keys.
- * Intelligent merging.
- * Overloaded operators.
- """
- _allownew = True
- def __init__(self, *args, **kw):
- """Initialize with a dictionary, another Struct, or data.
-
- Parameters
- ----------
- args : dict, Struct
- Initialize with one dict or Struct
- kw : dict
- Initialize with key, value pairs.
-
- Examples
- --------
-
- >>> s = Struct(a=10,b=30)
- >>> s.a
- 10
- >>> s.b
- 30
- >>> s2 = Struct(s,c=30)
- >>> sorted(s2.keys())
- ['a', 'b', 'c']
- """
- object.__setattr__(self, '_allownew', True)
- dict.__init__(self, *args, **kw)
-
- def __setitem__(self, key, value):
- """Set an item with check for allownew.
-
- Examples
- --------
-
- >>> s = Struct()
- >>> s['a'] = 10
- >>> s.allow_new_attr(False)
- >>> s['a'] = 10
- >>> s['a']
- 10
- >>> try:
- ... s['b'] = 20
- ... except KeyError:
- ... print('this is not allowed')
- ...
- this is not allowed
- """
- if not self._allownew and key not in self:
- raise KeyError(
- "can't create new attribute %s when allow_new_attr(False)" % key)
- dict.__setitem__(self, key, value)
-
- def __setattr__(self, key, value):
- """Set an attr with protection of class members.
-
- This calls :meth:`self.__setitem__` but convert :exc:`KeyError` to
- :exc:`AttributeError`.
-
- Examples
- --------
-
- >>> s = Struct()
- >>> s.a = 10
- >>> s.a
- 10
- >>> try:
- ... s.get = 10
- ... except AttributeError:
- ... print("you can't set a class member")
- ...
- you can't set a class member
- """
- # If key is an str it might be a class member or instance var
- if isinstance(key, str):
- # I can't simply call hasattr here because it calls getattr, which
- # calls self.__getattr__, which returns True for keys in
- # self._data. But I only want keys in the class and in
- # self.__dict__
- if key in self.__dict__ or hasattr(Struct, key):
- raise AttributeError(
- 'attr %s is a protected member of class Struct.' % key
- )
- try:
- self.__setitem__(key, value)
- except KeyError as e:
- raise AttributeError(e)
-
- def __getattr__(self, key):
- """Get an attr by calling :meth:`dict.__getitem__`.
-
- Like :meth:`__setattr__`, this method converts :exc:`KeyError` to
- :exc:`AttributeError`.
-
- Examples
- --------
-
- >>> s = Struct(a=10)
- >>> s.a
- 10
- >>> type(s.get)
- <... 'builtin_function_or_method'>
- >>> try:
- ... s.b
- ... except AttributeError:
- ... print("I don't have that key")
- ...
- I don't have that key
- """
- try:
- result = self[key]
- except KeyError:
- raise AttributeError(key)
- else:
- return result
-
- def __iadd__(self, other):
- """s += s2 is a shorthand for s.merge(s2).
-
- Examples
- --------
-
- >>> s = Struct(a=10,b=30)
- >>> s2 = Struct(a=20,c=40)
- >>> s += s2
- >>> sorted(s.keys())
- ['a', 'b', 'c']
- """
- self.merge(other)
- return self
-
- def __add__(self,other):
- """s + s2 -> New Struct made from s.merge(s2).
-
- Examples
- --------
-
- >>> s1 = Struct(a=10,b=30)
- >>> s2 = Struct(a=20,c=40)
- >>> s = s1 + s2
- >>> sorted(s.keys())
- ['a', 'b', 'c']
- """
- sout = self.copy()
- sout.merge(other)
- return sout
-
- def __sub__(self,other):
- """s1 - s2 -> remove keys in s2 from s1.
-
- Examples
- --------
-
- >>> s1 = Struct(a=10,b=30)
- >>> s2 = Struct(a=40)
- >>> s = s1 - s2
- >>> s
- {'b': 30}
- """
- sout = self.copy()
- sout -= other
- return sout
-
- def __isub__(self,other):
- """Inplace remove keys from self that are in other.
-
- Examples
- --------
-
- >>> s1 = Struct(a=10,b=30)
- >>> s2 = Struct(a=40)
- >>> s1 -= s2
- >>> s1
- {'b': 30}
- """
- for k in other.keys():
- if k in self:
- del self[k]
- return self
-
- def __dict_invert(self, data):
- """Helper function for merge.
-
- Takes a dictionary whose values are lists and returns a dict with
- the elements of each list as keys and the original keys as values.
- """
- outdict = {}
- for k,lst in data.items():
- if isinstance(lst, str):
- lst = lst.split()
- for entry in lst:
- outdict[entry] = k
- return outdict
-
- def dict(self):
- return self
-
- def copy(self):
- """Return a copy as a Struct.
-
- Examples
- --------
-
- >>> s = Struct(a=10,b=30)
- >>> s2 = s.copy()
- >>> type(s2) is Struct
- True
- """
- return Struct(dict.copy(self))
-
- def hasattr(self, key):
- """hasattr function available as a method.
-
- Implemented like has_key.
-
- Examples
- --------
-
- >>> s = Struct(a=10)
- >>> s.hasattr('a')
- True
- >>> s.hasattr('b')
- False
- >>> s.hasattr('get')
- False
- """
- return key in self
-
- def allow_new_attr(self, allow = True):
- """Set whether new attributes can be created in this Struct.
-
- This can be used to catch typos by verifying that the attribute user
- tries to change already exists in this Struct.
- """
- object.__setattr__(self, '_allownew', allow)
-
- def merge(self, __loc_data__=None, __conflict_solve=None, **kw):
- """Merge two Structs with customizable conflict resolution.
-
- This is similar to :meth:`update`, but much more flexible. First, a
- dict is made from data+key=value pairs. When merging this dict with
- the Struct S, the optional dictionary 'conflict' is used to decide
- what to do.
-
- If conflict is not given, the default behavior is to preserve any keys
- with their current value (the opposite of the :meth:`update` method's
- behavior).
-
- Parameters
- ----------
- __loc_data : dict, Struct
- The data to merge into self
- __conflict_solve : dict
- The conflict policy dict. The keys are binary functions used to
- resolve the conflict and the values are lists of strings naming
- the keys the conflict resolution function applies to. Instead of
- a list of strings a space separated string can be used, like
- 'a b c'.
- kw : dict
- Additional key, value pairs to merge in
-
- Notes
- -----
-
- The `__conflict_solve` dict is a dictionary of binary functions which will be used to
- solve key conflicts. Here is an example::
-
- __conflict_solve = dict(
- func1=['a','b','c'],
- func2=['d','e']
- )
-
- In this case, the function :func:`func1` will be used to resolve
- keys 'a', 'b' and 'c' and the function :func:`func2` will be used for
- keys 'd' and 'e'. This could also be written as::
-
- __conflict_solve = dict(func1='a b c',func2='d e')
-
- These functions will be called for each key they apply to with the
- form::
-
- func1(self['a'], other['a'])
-
- The return value is used as the final merged value.
-
- As a convenience, merge() provides five (the most commonly needed)
- pre-defined policies: preserve, update, add, add_flip and add_s. The
- easiest explanation is their implementation::
-
- preserve = lambda old,new: old
- update = lambda old,new: new
- add = lambda old,new: old + new
- add_flip = lambda old,new: new + old # note change of order!
- add_s = lambda old,new: old + ' ' + new # only for str!
-
- You can use those four words (as strings) as keys instead
- of defining them as functions, and the merge method will substitute
- the appropriate functions for you.
-
- For more complicated conflict resolution policies, you still need to
- construct your own functions.
-
- Examples
- --------
-
- This show the default policy:
-
- >>> s = Struct(a=10,b=30)
- >>> s2 = Struct(a=20,c=40)
- >>> s.merge(s2)
- >>> sorted(s.items())
- [('a', 10), ('b', 30), ('c', 40)]
-
- Now, show how to specify a conflict dict:
-
- >>> s = Struct(a=10,b=30)
- >>> s2 = Struct(a=20,b=40)
- >>> conflict = {'update':'a','add':'b'}
- >>> s.merge(s2,conflict)
- >>> sorted(s.items())
- [('a', 20), ('b', 70)]
- """
-
- data_dict = dict(__loc_data__,**kw)
-
- # policies for conflict resolution: two argument functions which return
- # the value that will go in the new struct
- preserve = lambda old,new: old
- update = lambda old,new: new
- add = lambda old,new: old + new
- add_flip = lambda old,new: new + old # note change of order!
- add_s = lambda old,new: old + ' ' + new
-
- # default policy is to keep current keys when there's a conflict
- conflict_solve = dict.fromkeys(self, preserve)
-
- # the conflict_solve dictionary is given by the user 'inverted': we
- # need a name-function mapping, it comes as a function -> names
- # dict. Make a local copy (b/c we'll make changes), replace user
- # strings for the three builtin policies and invert it.
- if __conflict_solve:
- inv_conflict_solve_user = __conflict_solve.copy()
- for name, func in [('preserve',preserve), ('update',update),
- ('add',add), ('add_flip',add_flip),
- ('add_s',add_s)]:
- if name in inv_conflict_solve_user.keys():
- inv_conflict_solve_user[func] = inv_conflict_solve_user[name]
- del inv_conflict_solve_user[name]
- conflict_solve.update(self.__dict_invert(inv_conflict_solve_user))
- for key in data_dict:
- if key not in self:
- self[key] = data_dict[key]
- else:
- self[key] = conflict_solve[key](self[key],data_dict[key])
-
+# encoding: utf-8
+"""A dict subclass that supports attribute style access.
+
+Authors:
+
+* Fernando Perez (original)
+* Brian Granger (refactoring to a dict subclass)
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+__all__ = ['Struct']
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+
+class Struct(dict):
+ """A dict subclass with attribute style access.
+
+    This dict subclass has a few extra features:
+
+ * Attribute style access.
+ * Protection of class members (like keys, items) when using attribute
+ style access.
+ * The ability to restrict assignment to only existing keys.
+ * Intelligent merging.
+ * Overloaded operators.
+ """
+ _allownew = True
+ def __init__(self, *args, **kw):
+ """Initialize with a dictionary, another Struct, or data.
+
+ Parameters
+ ----------
+ args : dict, Struct
+ Initialize with one dict or Struct
+ kw : dict
+ Initialize with key, value pairs.
+
+ Examples
+ --------
+
+ >>> s = Struct(a=10,b=30)
+ >>> s.a
+ 10
+ >>> s.b
+ 30
+ >>> s2 = Struct(s,c=30)
+ >>> sorted(s2.keys())
+ ['a', 'b', 'c']
+ """
+ object.__setattr__(self, '_allownew', True)
+ dict.__init__(self, *args, **kw)
+
+ def __setitem__(self, key, value):
+ """Set an item with check for allownew.
+
+ Examples
+ --------
+
+ >>> s = Struct()
+ >>> s['a'] = 10
+ >>> s.allow_new_attr(False)
+ >>> s['a'] = 10
+ >>> s['a']
+ 10
+ >>> try:
+ ... s['b'] = 20
+ ... except KeyError:
+ ... print('this is not allowed')
+ ...
+ this is not allowed
+ """
+ if not self._allownew and key not in self:
+ raise KeyError(
+ "can't create new attribute %s when allow_new_attr(False)" % key)
+ dict.__setitem__(self, key, value)
+
+ def __setattr__(self, key, value):
+ """Set an attr with protection of class members.
+
+        This calls :meth:`self.__setitem__` but converts :exc:`KeyError` to
+ :exc:`AttributeError`.
+
+ Examples
+ --------
+
+ >>> s = Struct()
+ >>> s.a = 10
+ >>> s.a
+ 10
+ >>> try:
+ ... s.get = 10
+ ... except AttributeError:
+ ... print("you can't set a class member")
+ ...
+ you can't set a class member
+ """
+ # If key is an str it might be a class member or instance var
+ if isinstance(key, str):
+ # I can't simply call hasattr here because it calls getattr, which
+ # calls self.__getattr__, which returns True for keys in
+ # self._data. But I only want keys in the class and in
+ # self.__dict__
+ if key in self.__dict__ or hasattr(Struct, key):
+ raise AttributeError(
+ 'attr %s is a protected member of class Struct.' % key
+ )
+ try:
+ self.__setitem__(key, value)
+ except KeyError as e:
+ raise AttributeError(e)
+
+ def __getattr__(self, key):
+ """Get an attr by calling :meth:`dict.__getitem__`.
+
+ Like :meth:`__setattr__`, this method converts :exc:`KeyError` to
+ :exc:`AttributeError`.
+
+ Examples
+ --------
+
+ >>> s = Struct(a=10)
+ >>> s.a
+ 10
+ >>> type(s.get)
+ <... 'builtin_function_or_method'>
+ >>> try:
+ ... s.b
+ ... except AttributeError:
+ ... print("I don't have that key")
+ ...
+ I don't have that key
+ """
+ try:
+ result = self[key]
+ except KeyError:
+ raise AttributeError(key)
+ else:
+ return result
+
+ def __iadd__(self, other):
+ """s += s2 is a shorthand for s.merge(s2).
+
+ Examples
+ --------
+
+ >>> s = Struct(a=10,b=30)
+ >>> s2 = Struct(a=20,c=40)
+ >>> s += s2
+ >>> sorted(s.keys())
+ ['a', 'b', 'c']
+ """
+ self.merge(other)
+ return self
+
+ def __add__(self,other):
+ """s + s2 -> New Struct made from s.merge(s2).
+
+ Examples
+ --------
+
+ >>> s1 = Struct(a=10,b=30)
+ >>> s2 = Struct(a=20,c=40)
+ >>> s = s1 + s2
+ >>> sorted(s.keys())
+ ['a', 'b', 'c']
+ """
+ sout = self.copy()
+ sout.merge(other)
+ return sout
+
+ def __sub__(self,other):
+ """s1 - s2 -> remove keys in s2 from s1.
+
+ Examples
+ --------
+
+ >>> s1 = Struct(a=10,b=30)
+ >>> s2 = Struct(a=40)
+ >>> s = s1 - s2
+ >>> s
+ {'b': 30}
+ """
+ sout = self.copy()
+ sout -= other
+ return sout
+
+ def __isub__(self,other):
+ """Inplace remove keys from self that are in other.
+
+ Examples
+ --------
+
+ >>> s1 = Struct(a=10,b=30)
+ >>> s2 = Struct(a=40)
+ >>> s1 -= s2
+ >>> s1
+ {'b': 30}
+ """
+ for k in other.keys():
+ if k in self:
+ del self[k]
+ return self
+
+ def __dict_invert(self, data):
+ """Helper function for merge.
+
+ Takes a dictionary whose values are lists and returns a dict with
+ the elements of each list as keys and the original keys as values.
+ """
+ outdict = {}
+ for k,lst in data.items():
+ if isinstance(lst, str):
+ lst = lst.split()
+ for entry in lst:
+ outdict[entry] = k
+ return outdict
+
+ def dict(self):
+ return self
+
+ def copy(self):
+ """Return a copy as a Struct.
+
+ Examples
+ --------
+
+ >>> s = Struct(a=10,b=30)
+ >>> s2 = s.copy()
+ >>> type(s2) is Struct
+ True
+ """
+ return Struct(dict.copy(self))
+
+ def hasattr(self, key):
+ """hasattr function available as a method.
+
+ Implemented like has_key.
+
+ Examples
+ --------
+
+ >>> s = Struct(a=10)
+ >>> s.hasattr('a')
+ True
+ >>> s.hasattr('b')
+ False
+ >>> s.hasattr('get')
+ False
+ """
+ return key in self
+
+ def allow_new_attr(self, allow = True):
+ """Set whether new attributes can be created in this Struct.
+
+        This can be used to catch typos by verifying that the attribute the user
+ tries to change already exists in this Struct.
+ """
+ object.__setattr__(self, '_allownew', allow)
+
+ def merge(self, __loc_data__=None, __conflict_solve=None, **kw):
+ """Merge two Structs with customizable conflict resolution.
+
+ This is similar to :meth:`update`, but much more flexible. First, a
+ dict is made from data+key=value pairs. When merging this dict with
+ the Struct S, the optional dictionary 'conflict' is used to decide
+ what to do.
+
+ If conflict is not given, the default behavior is to preserve any keys
+ with their current value (the opposite of the :meth:`update` method's
+ behavior).
+
+ Parameters
+ ----------
+ __loc_data : dict, Struct
+ The data to merge into self
+ __conflict_solve : dict
+ The conflict policy dict. The keys are binary functions used to
+ resolve the conflict and the values are lists of strings naming
+ the keys the conflict resolution function applies to. Instead of
+ a list of strings a space separated string can be used, like
+ 'a b c'.
+ kw : dict
+ Additional key, value pairs to merge in
+
+ Notes
+ -----
+
+ The `__conflict_solve` dict is a dictionary of binary functions which will be used to
+ solve key conflicts. Here is an example::
+
+ __conflict_solve = dict(
+ func1=['a','b','c'],
+ func2=['d','e']
+ )
+
+ In this case, the function :func:`func1` will be used to resolve
+ keys 'a', 'b' and 'c' and the function :func:`func2` will be used for
+ keys 'd' and 'e'. This could also be written as::
+
+ __conflict_solve = dict(func1='a b c',func2='d e')
+
+ These functions will be called for each key they apply to with the
+ form::
+
+ func1(self['a'], other['a'])
+
+ The return value is used as the final merged value.
+
+ As a convenience, merge() provides five (the most commonly needed)
+ pre-defined policies: preserve, update, add, add_flip and add_s. The
+ easiest explanation is their implementation::
+
+ preserve = lambda old,new: old
+ update = lambda old,new: new
+ add = lambda old,new: old + new
+ add_flip = lambda old,new: new + old # note change of order!
+ add_s = lambda old,new: old + ' ' + new # only for str!
+
+        You can use those five words (as strings) as keys instead
+ of defining them as functions, and the merge method will substitute
+ the appropriate functions for you.
+
+ For more complicated conflict resolution policies, you still need to
+ construct your own functions.
+
+ Examples
+ --------
+
+        This shows the default policy:
+
+ >>> s = Struct(a=10,b=30)
+ >>> s2 = Struct(a=20,c=40)
+ >>> s.merge(s2)
+ >>> sorted(s.items())
+ [('a', 10), ('b', 30), ('c', 40)]
+
+ Now, show how to specify a conflict dict:
+
+ >>> s = Struct(a=10,b=30)
+ >>> s2 = Struct(a=20,b=40)
+ >>> conflict = {'update':'a','add':'b'}
+ >>> s.merge(s2,conflict)
+ >>> sorted(s.items())
+ [('a', 20), ('b', 70)]
+ """
+
+ data_dict = dict(__loc_data__,**kw)
+
+ # policies for conflict resolution: two argument functions which return
+ # the value that will go in the new struct
+ preserve = lambda old,new: old
+ update = lambda old,new: new
+ add = lambda old,new: old + new
+ add_flip = lambda old,new: new + old # note change of order!
+ add_s = lambda old,new: old + ' ' + new
+
+ # default policy is to keep current keys when there's a conflict
+ conflict_solve = dict.fromkeys(self, preserve)
+
+ # the conflict_solve dictionary is given by the user 'inverted': we
+ # need a name-function mapping, it comes as a function -> names
+ # dict. Make a local copy (b/c we'll make changes), replace user
+ # strings for the three builtin policies and invert it.
+ if __conflict_solve:
+ inv_conflict_solve_user = __conflict_solve.copy()
+ for name, func in [('preserve',preserve), ('update',update),
+ ('add',add), ('add_flip',add_flip),
+ ('add_s',add_s)]:
+ if name in inv_conflict_solve_user.keys():
+ inv_conflict_solve_user[func] = inv_conflict_solve_user[name]
+ del inv_conflict_solve_user[name]
+ conflict_solve.update(self.__dict_invert(inv_conflict_solve_user))
+ for key in data_dict:
+ if key not in self:
+ self[key] = data_dict[key]
+ else:
+ self[key] = conflict_solve[key](self[key],data_dict[key])
+
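A brief sketch of the attribute-style access and the merge policies documented in the restored Struct class; the key names and values are arbitrary:

    from IPython.utils.ipstruct import Struct

    s = Struct(a=10, b=30)
    s.c = 5                      # attribute assignment creates key 'c'
    s.allow_new_attr(False)      # unknown keys now raise instead of being created

    other = Struct(a=20, b=40)
    # Preserve the existing 'a', add the two 'b' values together;
    # the conflict dict is passed positionally, as in the docstring example.
    s.merge(other, {'preserve': 'a', 'add': 'b'})
    print(sorted(s.items()))     # [('a', 10), ('b', 70), ('c', 5)]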
diff --git a/contrib/python/ipython/py2/IPython/utils/jsonutil.py b/contrib/python/ipython/py2/IPython/utils/jsonutil.py
index 4bb400ca1e..c3ee93859e 100644
--- a/contrib/python/ipython/py2/IPython/utils/jsonutil.py
+++ b/contrib/python/ipython/py2/IPython/utils/jsonutil.py
@@ -1,5 +1,5 @@
-from warnings import warn
-
-warn("IPython.utils.jsonutil has moved to jupyter_client.jsonutil")
-
-from jupyter_client.jsonutil import *
+from warnings import warn
+
+warn("IPython.utils.jsonutil has moved to jupyter_client.jsonutil")
+
+from jupyter_client.jsonutil import *
diff --git a/contrib/python/ipython/py2/IPython/utils/localinterfaces.py b/contrib/python/ipython/py2/IPython/utils/localinterfaces.py
index f90564def5..89b8fdeb54 100644
--- a/contrib/python/ipython/py2/IPython/utils/localinterfaces.py
+++ b/contrib/python/ipython/py2/IPython/utils/localinterfaces.py
@@ -1,5 +1,5 @@
-from warnings import warn
-
-warn("IPython.utils.localinterfaces has moved to jupyter_client.localinterfaces")
-
-from jupyter_client.localinterfaces import *
+from warnings import warn
+
+warn("IPython.utils.localinterfaces has moved to jupyter_client.localinterfaces")
+
+from jupyter_client.localinterfaces import *
diff --git a/contrib/python/ipython/py2/IPython/utils/log.py b/contrib/python/ipython/py2/IPython/utils/log.py
index 422bb9b343..3eb9bdadd8 100644
--- a/contrib/python/ipython/py2/IPython/utils/log.py
+++ b/contrib/python/ipython/py2/IPython/utils/log.py
@@ -1,7 +1,7 @@
-from __future__ import absolute_import
-
-from warnings import warn
-
-warn("IPython.utils.log has moved to traitlets.log")
-
-from traitlets.log import *
+from __future__ import absolute_import
+
+from warnings import warn
+
+warn("IPython.utils.log has moved to traitlets.log")
+
+from traitlets.log import *
diff --git a/contrib/python/ipython/py2/IPython/utils/module_paths.py b/contrib/python/ipython/py2/IPython/utils/module_paths.py
index fc2c7f07c0..45a711c0b4 100644
--- a/contrib/python/ipython/py2/IPython/utils/module_paths.py
+++ b/contrib/python/ipython/py2/IPython/utils/module_paths.py
@@ -1,125 +1,125 @@
-"""Utility functions for finding modules
-
-Utility functions for finding modules on sys.path.
-
-`find_mod` finds named module on sys.path.
-
-`get_init` helper function that finds __init__ file in a directory.
-
-`find_module` variant of imp.find_module in std_lib that only returns
-path to module and not an open file object as well.
-
-
-
-"""
-#-----------------------------------------------------------------------------
-# Copyright (c) 2011, the IPython Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-from __future__ import print_function
-
-# Stdlib imports
-import imp
-import os
-
-# Third-party imports
-
-# Our own imports
-
-
-#-----------------------------------------------------------------------------
-# Globals and constants
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Local utilities
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Classes and functions
-#-----------------------------------------------------------------------------
-def find_module(name, path=None):
- """imp.find_module variant that only return path of module.
-
- The `imp.find_module` returns a filehandle that we are not interested in.
- Also we ignore any bytecode files that `imp.find_module` finds.
-
- Parameters
- ----------
- name : str
- name of module to locate
- path : list of str
- list of paths to search for `name`. If path=None then search sys.path
-
- Returns
- -------
- filename : str
- Return full path of module or None if module is missing or does not have
- .py or .pyw extension
- """
- if name is None:
- return None
- try:
- file, filename, _ = imp.find_module(name, path)
- except ImportError:
- return None
- if file is None:
- return filename
- else:
- file.close()
- if os.path.splitext(filename)[1] in [".py", ".pyc"]:
- return filename
- else:
- return None
-
-def get_init(dirname):
- """Get __init__ file path for module directory
-
- Parameters
- ----------
- dirname : str
- Find the __init__ file in directory `dirname`
-
- Returns
- -------
- init_path : str
- Path to __init__ file
- """
- fbase = os.path.join(dirname, "__init__")
- for ext in [".py", ".pyw"]:
- fname = fbase + ext
- if os.path.isfile(fname):
- return fname
-
-
-def find_mod(module_name):
- """Find module `module_name` on sys.path
-
- Return the path to module `module_name`. If `module_name` refers to
- a module directory then return path to __init__ file. Return full
- path of module or None if module is missing or does not have .py or .pyw
- extension. We are not interested in running bytecode.
-
- Parameters
- ----------
- module_name : str
-
- Returns
- -------
- modulepath : str
- Path to module `module_name`.
- """
- parts = module_name.split(".")
- basepath = find_module(parts[0])
- for submodname in parts[1:]:
- basepath = find_module(submodname, [basepath])
- if basepath and os.path.isdir(basepath):
- basepath = get_init(basepath)
- return basepath
+"""Utility functions for finding modules
+
+Utility functions for finding modules on sys.path.
+
+`find_mod` finds named module on sys.path.
+
+`get_init` helper function that finds __init__ file in a directory.
+
+`find_module` variant of imp.find_module in std_lib that only returns
+path to module and not an open file object as well.
+
+
+
+"""
+#-----------------------------------------------------------------------------
+# Copyright (c) 2011, the IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+from __future__ import print_function
+
+# Stdlib imports
+import imp
+import os
+
+# Third-party imports
+
+# Our own imports
+
+
+#-----------------------------------------------------------------------------
+# Globals and constants
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Local utilities
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Classes and functions
+#-----------------------------------------------------------------------------
+def find_module(name, path=None):
+    """imp.find_module variant that only returns the path of a module.
+
+ The `imp.find_module` returns a filehandle that we are not interested in.
+ Also we ignore any bytecode files that `imp.find_module` finds.
+
+ Parameters
+ ----------
+ name : str
+ name of module to locate
+ path : list of str
+ list of paths to search for `name`. If path=None then search sys.path
+
+ Returns
+ -------
+ filename : str
+ Return full path of module or None if module is missing or does not have
+ .py or .pyw extension
+ """
+ if name is None:
+ return None
+ try:
+ file, filename, _ = imp.find_module(name, path)
+ except ImportError:
+ return None
+ if file is None:
+ return filename
+ else:
+ file.close()
+ if os.path.splitext(filename)[1] in [".py", ".pyc"]:
+ return filename
+ else:
+ return None
+
+def get_init(dirname):
+ """Get __init__ file path for module directory
+
+ Parameters
+ ----------
+ dirname : str
+ Find the __init__ file in directory `dirname`
+
+ Returns
+ -------
+ init_path : str
+ Path to __init__ file
+ """
+ fbase = os.path.join(dirname, "__init__")
+ for ext in [".py", ".pyw"]:
+ fname = fbase + ext
+ if os.path.isfile(fname):
+ return fname
+
+
+def find_mod(module_name):
+ """Find module `module_name` on sys.path
+
+ Return the path to module `module_name`. If `module_name` refers to
+ a module directory then return path to __init__ file. Return full
+ path of module or None if module is missing or does not have .py or .pyw
+ extension. We are not interested in running bytecode.
+
+ Parameters
+ ----------
+ module_name : str
+
+ Returns
+ -------
+ modulepath : str
+ Path to module `module_name`.
+ """
+ parts = module_name.split(".")
+ basepath = find_module(parts[0])
+ for submodname in parts[1:]:
+ basepath = find_module(submodname, [basepath])
+ if basepath and os.path.isdir(basepath):
+ basepath = get_init(basepath)
+ return basepath
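A hedged sketch of the module-location helpers restored above; the printed paths depend entirely on the local Python installation:

    from IPython.utils.module_paths import find_mod, find_module

    print(find_module('os'))            # full path to os.py, or None if not found
    print(find_mod('email.mime.text'))  # walks the dotted name down to text.py
    print(find_mod('email'))            # a package resolves to its __init__ file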
diff --git a/contrib/python/ipython/py2/IPython/utils/openpy.py b/contrib/python/ipython/py2/IPython/utils/openpy.py
index f55f254bc1..0a7cc0f00e 100644
--- a/contrib/python/ipython/py2/IPython/utils/openpy.py
+++ b/contrib/python/ipython/py2/IPython/utils/openpy.py
@@ -1,249 +1,249 @@
-"""
-Tools to open .py files as Unicode, using the encoding specified within the file,
-as per PEP 263.
-
-Much of the code is taken from the tokenize module in Python 3.2.
-"""
-from __future__ import absolute_import
-
-import io
-from io import TextIOWrapper, BytesIO
-import os.path
-import re
-
-from .py3compat import unicode_type
-
-cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)", re.UNICODE)
-cookie_comment_re = re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)", re.UNICODE)
-
-try:
- # Available in Python 3
- from tokenize import detect_encoding
-except ImportError:
- from codecs import lookup, BOM_UTF8
-
- # Copied from Python 3.2 tokenize
- def _get_normal_name(orig_enc):
- """Imitates get_normal_name in tokenizer.c."""
- # Only care about the first 12 characters.
- enc = orig_enc[:12].lower().replace("_", "-")
- if enc == "utf-8" or enc.startswith("utf-8-"):
- return "utf-8"
- if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
- enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
- return "iso-8859-1"
- return orig_enc
-
- # Copied from Python 3.2 tokenize
- def detect_encoding(readline):
- """
- The detect_encoding() function is used to detect the encoding that should
- be used to decode a Python source file. It requires one argment, readline,
- in the same way as the tokenize() generator.
-
- It will call readline a maximum of twice, and return the encoding used
- (as a string) and a list of any lines (left as bytes) it has read in.
-
- It detects the encoding from the presence of a utf-8 bom or an encoding
- cookie as specified in pep-0263. If both a bom and a cookie are present,
- but disagree, a SyntaxError will be raised. If the encoding cookie is an
- invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found,
- 'utf-8-sig' is returned.
-
- If no encoding is specified, then the default of 'utf-8' will be returned.
- """
- bom_found = False
- encoding = None
- default = 'utf-8'
- def read_or_stop():
- try:
- return readline()
- except StopIteration:
- return b''
-
- def find_cookie(line):
- try:
- line_string = line.decode('ascii')
- except UnicodeDecodeError:
- return None
-
- matches = cookie_re.findall(line_string)
- if not matches:
- return None
- encoding = _get_normal_name(matches[0])
- try:
- codec = lookup(encoding)
- except LookupError:
- # This behaviour mimics the Python interpreter
- raise SyntaxError("unknown encoding: " + encoding)
-
- if bom_found:
- if codec.name != 'utf-8':
- # This behaviour mimics the Python interpreter
- raise SyntaxError('encoding problem: utf-8')
- encoding += '-sig'
- return encoding
-
- first = read_or_stop()
- if first.startswith(BOM_UTF8):
- bom_found = True
- first = first[3:]
- default = 'utf-8-sig'
- if not first:
- return default, []
-
- encoding = find_cookie(first)
- if encoding:
- return encoding, [first]
-
- second = read_or_stop()
- if not second:
- return default, [first]
-
- encoding = find_cookie(second)
- if encoding:
- return encoding, [first, second]
-
- return default, [first, second]
-
-try:
- # Available in Python 3.2 and above.
- from tokenize import open
-except ImportError:
- # Copied from Python 3.2 tokenize
- def open(filename):
- """Open a file in read only mode using the encoding detected by
- detect_encoding().
- """
- buffer = io.open(filename, 'rb') # Tweaked to use io.open for Python 2
- encoding, lines = detect_encoding(buffer.readline)
- buffer.seek(0)
- text = TextIOWrapper(buffer, encoding, line_buffering=True)
- text.mode = 'r'
- return text
-
-def source_to_unicode(txt, errors='replace', skip_encoding_cookie=True):
- """Converts a bytes string with python source code to unicode.
-
- Unicode strings are passed through unchanged. Byte strings are checked
- for the python source file encoding cookie to determine encoding.
- txt can be either a bytes buffer or a string containing the source
- code.
- """
- if isinstance(txt, unicode_type):
- return txt
- if isinstance(txt, bytes):
- buffer = BytesIO(txt)
- else:
- buffer = txt
+"""
+Tools to open .py files as Unicode, using the encoding specified within the file,
+as per PEP 263.
+
+Much of the code is taken from the tokenize module in Python 3.2.
+"""
+from __future__ import absolute_import
+
+import io
+from io import TextIOWrapper, BytesIO
+import os.path
+import re
+
+from .py3compat import unicode_type
+
+cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)", re.UNICODE)
+cookie_comment_re = re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)", re.UNICODE)
+
+try:
+ # Available in Python 3
+ from tokenize import detect_encoding
+except ImportError:
+ from codecs import lookup, BOM_UTF8
+
+ # Copied from Python 3.2 tokenize
+ def _get_normal_name(orig_enc):
+ """Imitates get_normal_name in tokenizer.c."""
+ # Only care about the first 12 characters.
+ enc = orig_enc[:12].lower().replace("_", "-")
+ if enc == "utf-8" or enc.startswith("utf-8-"):
+ return "utf-8"
+ if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
+ enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
+ return "iso-8859-1"
+ return orig_enc
+
+ # Copied from Python 3.2 tokenize
+ def detect_encoding(readline):
+ """
+ The detect_encoding() function is used to detect the encoding that should
+        be used to decode a Python source file. It requires one argument, readline,
+ in the same way as the tokenize() generator.
+
+ It will call readline a maximum of twice, and return the encoding used
+ (as a string) and a list of any lines (left as bytes) it has read in.
+
+ It detects the encoding from the presence of a utf-8 bom or an encoding
+ cookie as specified in pep-0263. If both a bom and a cookie are present,
+ but disagree, a SyntaxError will be raised. If the encoding cookie is an
+ invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found,
+ 'utf-8-sig' is returned.
+
+ If no encoding is specified, then the default of 'utf-8' will be returned.
+ """
+ bom_found = False
+ encoding = None
+ default = 'utf-8'
+ def read_or_stop():
+ try:
+ return readline()
+ except StopIteration:
+ return b''
+
+ def find_cookie(line):
+ try:
+ line_string = line.decode('ascii')
+ except UnicodeDecodeError:
+ return None
+
+ matches = cookie_re.findall(line_string)
+ if not matches:
+ return None
+ encoding = _get_normal_name(matches[0])
+ try:
+ codec = lookup(encoding)
+ except LookupError:
+ # This behaviour mimics the Python interpreter
+ raise SyntaxError("unknown encoding: " + encoding)
+
+ if bom_found:
+ if codec.name != 'utf-8':
+ # This behaviour mimics the Python interpreter
+ raise SyntaxError('encoding problem: utf-8')
+ encoding += '-sig'
+ return encoding
+
+ first = read_or_stop()
+ if first.startswith(BOM_UTF8):
+ bom_found = True
+ first = first[3:]
+ default = 'utf-8-sig'
+ if not first:
+ return default, []
+
+ encoding = find_cookie(first)
+ if encoding:
+ return encoding, [first]
+
+ second = read_or_stop()
+ if not second:
+ return default, [first]
+
+ encoding = find_cookie(second)
+ if encoding:
+ return encoding, [first, second]
+
+ return default, [first, second]
+
+try:
+ # Available in Python 3.2 and above.
+ from tokenize import open
+except ImportError:
+ # Copied from Python 3.2 tokenize
+ def open(filename):
+ """Open a file in read only mode using the encoding detected by
+ detect_encoding().
+ """
+ buffer = io.open(filename, 'rb') # Tweaked to use io.open for Python 2
+ encoding, lines = detect_encoding(buffer.readline)
+ buffer.seek(0)
+ text = TextIOWrapper(buffer, encoding, line_buffering=True)
+ text.mode = 'r'
+ return text
+
+def source_to_unicode(txt, errors='replace', skip_encoding_cookie=True):
+ """Converts a bytes string with python source code to unicode.
+
+ Unicode strings are passed through unchanged. Byte strings are checked
+ for the python source file encoding cookie to determine encoding.
+ txt can be either a bytes buffer or a string containing the source
+ code.
+ """
+ if isinstance(txt, unicode_type):
+ return txt
+ if isinstance(txt, bytes):
+ buffer = BytesIO(txt)
+ else:
+ buffer = txt
+ try:
+ encoding, _ = detect_encoding(buffer.readline)
+ except SyntaxError:
+ encoding = "ascii"
+ buffer.seek(0)
+ text = TextIOWrapper(buffer, encoding, errors=errors, line_buffering=True)
+ text.mode = 'r'
+ if skip_encoding_cookie:
+ return u"".join(strip_encoding_cookie(text))
+ else:
+ return text.read()
+
+def strip_encoding_cookie(filelike):
+ """Generator to pull lines from a text-mode file, skipping the encoding
+ cookie if it is found in the first two lines.
+ """
+ it = iter(filelike)
+ try:
+ first = next(it)
+ if not cookie_comment_re.match(first):
+ yield first
+ second = next(it)
+ if not cookie_comment_re.match(second):
+ yield second
+ except StopIteration:
+ return
+
+ for line in it:
+ yield line
+
+def read_py_file(filename, skip_encoding_cookie=True):
+ """Read a Python file, using the encoding declared inside the file.
+
+ Parameters
+ ----------
+ filename : str
+ The path to the file to read.
+ skip_encoding_cookie : bool
+ If True (the default), and the encoding declaration is found in the first
+ two lines, that line will be excluded from the output - compiling a
+ unicode string with an encoding declaration is a SyntaxError in Python 2.
+
+ Returns
+ -------
+ A unicode string containing the contents of the file.
+ """
+ with open(filename) as f: # the open function defined in this module.
+ if skip_encoding_cookie:
+ return "".join(strip_encoding_cookie(f))
+ else:
+ return f.read()
+
+def read_py_url(url, errors='replace', skip_encoding_cookie=True):
+ """Read a Python file from a URL, using the encoding declared inside the file.
+
+ Parameters
+ ----------
+ url : str
+ The URL from which to fetch the file.
+ errors : str
+ How to handle decoding errors in the file. Options are the same as for
+ bytes.decode(), but here 'replace' is the default.
+ skip_encoding_cookie : bool
+ If True (the default), and the encoding declaration is found in the first
+ two lines, that line will be excluded from the output - compiling a
+ unicode string with an encoding declaration is a SyntaxError in Python 2.
+
+ Returns
+ -------
+ A unicode string containing the contents of the file.
+ """
+ # Deferred import for faster start
+ try:
+ from urllib.request import urlopen # Py 3
+ except ImportError:
+ from urllib import urlopen
+ response = urlopen(url)
+ buffer = io.BytesIO(response.read())
+ return source_to_unicode(buffer, errors, skip_encoding_cookie)
+
+def _list_readline(x):
+ """Given a list, returns a readline() function that returns the next element
+ with each call.
+ """
+ x = iter(x)
+ def readline():
+ return next(x)
+ return readline
+
+# Code for going between .py files and cached .pyc files ----------------------
+
+try: # Python 3.2, see PEP 3147
try:
- encoding, _ = detect_encoding(buffer.readline)
- except SyntaxError:
- encoding = "ascii"
- buffer.seek(0)
- text = TextIOWrapper(buffer, encoding, errors=errors, line_buffering=True)
- text.mode = 'r'
- if skip_encoding_cookie:
- return u"".join(strip_encoding_cookie(text))
- else:
- return text.read()
-
-def strip_encoding_cookie(filelike):
- """Generator to pull lines from a text-mode file, skipping the encoding
- cookie if it is found in the first two lines.
- """
- it = iter(filelike)
- try:
- first = next(it)
- if not cookie_comment_re.match(first):
- yield first
- second = next(it)
- if not cookie_comment_re.match(second):
- yield second
- except StopIteration:
- return
-
- for line in it:
- yield line
-
-def read_py_file(filename, skip_encoding_cookie=True):
- """Read a Python file, using the encoding declared inside the file.
-
- Parameters
- ----------
- filename : str
- The path to the file to read.
- skip_encoding_cookie : bool
- If True (the default), and the encoding declaration is found in the first
- two lines, that line will be excluded from the output - compiling a
- unicode string with an encoding declaration is a SyntaxError in Python 2.
-
- Returns
- -------
- A unicode string containing the contents of the file.
- """
- with open(filename) as f: # the open function defined in this module.
- if skip_encoding_cookie:
- return "".join(strip_encoding_cookie(f))
- else:
- return f.read()
-
-def read_py_url(url, errors='replace', skip_encoding_cookie=True):
- """Read a Python file from a URL, using the encoding declared inside the file.
-
- Parameters
- ----------
- url : str
- The URL from which to fetch the file.
- errors : str
- How to handle decoding errors in the file. Options are the same as for
- bytes.decode(), but here 'replace' is the default.
- skip_encoding_cookie : bool
- If True (the default), and the encoding declaration is found in the first
- two lines, that line will be excluded from the output - compiling a
- unicode string with an encoding declaration is a SyntaxError in Python 2.
-
- Returns
- -------
- A unicode string containing the contents of the file.
- """
- # Deferred import for faster start
- try:
- from urllib.request import urlopen # Py 3
- except ImportError:
- from urllib import urlopen
- response = urlopen(url)
- buffer = io.BytesIO(response.read())
- return source_to_unicode(buffer, errors, skip_encoding_cookie)
-
-def _list_readline(x):
- """Given a list, returns a readline() function that returns the next element
- with each call.
- """
- x = iter(x)
- def readline():
- return next(x)
- return readline
-
-# Code for going between .py files and cached .pyc files ----------------------
-
-try: # Python 3.2, see PEP 3147
- try:
- from importlib.util import source_from_cache, cache_from_source
- except ImportError :
- ## deprecated since 3.4
- from imp import source_from_cache, cache_from_source
-except ImportError:
- # Python <= 3.1: .pyc files go next to .py
- def source_from_cache(path):
- basename, ext = os.path.splitext(path)
- if ext not in ('.pyc', '.pyo'):
- raise ValueError('Not a cached Python file extension', ext)
- # Should we look for .pyw files?
- return basename + '.py'
-
- def cache_from_source(path, debug_override=None):
- if debug_override is None:
- debug_override = __debug__
- basename, ext = os.path.splitext(path)
- return basename + '.pyc' if debug_override else '.pyo'
+ from importlib.util import source_from_cache, cache_from_source
+ except ImportError :
+ ## deprecated since 3.4
+ from imp import source_from_cache, cache_from_source
+except ImportError:
+ # Python <= 3.1: .pyc files go next to .py
+ def source_from_cache(path):
+ basename, ext = os.path.splitext(path)
+ if ext not in ('.pyc', '.pyo'):
+ raise ValueError('Not a cached Python file extension', ext)
+ # Should we look for .pyw files?
+ return basename + '.py'
+
+ def cache_from_source(path, debug_override=None):
+ if debug_override is None:
+ debug_override = __debug__
+ basename, ext = os.path.splitext(path)
+        return basename + ('.pyc' if debug_override else '.pyo')
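
The fallback above only matters on interpreters without importlib.util; on Python 3.2+ the two helpers implement the PEP 3147 __pycache__ layout. A minimal sketch of the round trip, assuming a POSIX path and a CPython 3 interpreter (the cache tag in the output depends on the exact version):

    from importlib.util import cache_from_source, source_from_cache

    # Source file -> cached bytecode path inside __pycache__.
    pyc = cache_from_source('pkg/module.py')
    print(pyc)                        # e.g. pkg/__pycache__/module.cpython-39.pyc

    # Cached bytecode path -> source file; raises ValueError for paths that do
    # not follow the __pycache__ naming scheme.
    print(source_from_cache(pyc))     # pkg/module.py
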
diff --git a/contrib/python/ipython/py2/IPython/utils/path.py b/contrib/python/ipython/py2/IPython/utils/path.py
index 800e0e13ee..fa850812c7 100644
--- a/contrib/python/ipython/py2/IPython/utils/path.py
+++ b/contrib/python/ipython/py2/IPython/utils/path.py
@@ -1,447 +1,447 @@
-# encoding: utf-8
-"""
-Utilities for path handling.
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import os
-import sys
-import errno
-import shutil
-import random
-import glob
-from warnings import warn
-from hashlib import md5
-
-from IPython.utils.process import system
-from IPython.utils import py3compat
-from IPython.utils.decorators import undoc
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-fs_encoding = sys.getfilesystemencoding()
-
-def _writable_dir(path):
- """Whether `path` is a directory, to which the user has write access."""
- return os.path.isdir(path) and os.access(path, os.W_OK)
-
-if sys.platform == 'win32':
- def _get_long_path_name(path):
- """Get a long path name (expand ~) on Windows using ctypes.
-
- Examples
- --------
-
- >>> get_long_path_name('c:\\docume~1')
- u'c:\\\\Documents and Settings'
-
- """
- try:
- import ctypes
- except ImportError:
- raise ImportError('you need to have ctypes installed for this to work')
- _GetLongPathName = ctypes.windll.kernel32.GetLongPathNameW
- _GetLongPathName.argtypes = [ctypes.c_wchar_p, ctypes.c_wchar_p,
- ctypes.c_uint ]
-
- buf = ctypes.create_unicode_buffer(260)
- rv = _GetLongPathName(path, buf, 260)
- if rv == 0 or rv > 260:
- return path
- else:
- return buf.value
-else:
- def _get_long_path_name(path):
- """Dummy no-op."""
- return path
-
-
-
-def get_long_path_name(path):
- """Expand a path into its long form.
-
- On Windows this expands any ~ in the paths. On other platforms, it is
- a null operation.
- """
- return _get_long_path_name(path)
-
-
-def unquote_filename(name, win32=(sys.platform=='win32')):
- """ On Windows, remove leading and trailing quotes from filenames.
+# encoding: utf-8
+"""
+Utilities for path handling.
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import os
+import sys
+import errno
+import shutil
+import random
+import glob
+from warnings import warn
+from hashlib import md5
+
+from IPython.utils.process import system
+from IPython.utils import py3compat
+from IPython.utils.decorators import undoc
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+fs_encoding = sys.getfilesystemencoding()
+
+def _writable_dir(path):
+ """Whether `path` is a directory, to which the user has write access."""
+ return os.path.isdir(path) and os.access(path, os.W_OK)
+
+if sys.platform == 'win32':
+ def _get_long_path_name(path):
+ """Get a long path name (expand ~) on Windows using ctypes.
+
+ Examples
+ --------
+
+ >>> get_long_path_name('c:\\docume~1')
+ u'c:\\\\Documents and Settings'
+
+ """
+ try:
+ import ctypes
+ except ImportError:
+ raise ImportError('you need to have ctypes installed for this to work')
+ _GetLongPathName = ctypes.windll.kernel32.GetLongPathNameW
+ _GetLongPathName.argtypes = [ctypes.c_wchar_p, ctypes.c_wchar_p,
+ ctypes.c_uint ]
+
+ buf = ctypes.create_unicode_buffer(260)
+ rv = _GetLongPathName(path, buf, 260)
+ if rv == 0 or rv > 260:
+ return path
+ else:
+ return buf.value
+else:
+ def _get_long_path_name(path):
+ """Dummy no-op."""
+ return path
+
+
+
+def get_long_path_name(path):
+ """Expand a path into its long form.
+
+ On Windows this expands any ~ in the paths. On other platforms, it is
+ a null operation.
+ """
+ return _get_long_path_name(path)
+
+
+def unquote_filename(name, win32=(sys.platform=='win32')):
+ """ On Windows, remove leading and trailing quotes from filenames.
This function has been deprecated and should not be used any more:
unquoting is now taken care of by :func:`IPython.utils.process.arg_split`.
- """
+ """
warn("'unquote_filename' is deprecated since IPython 5.0 and should not "
"be used anymore", DeprecationWarning, stacklevel=2)
- if win32:
- if name.startswith(("'", '"')) and name.endswith(("'", '"')):
- name = name[1:-1]
- return name
-
-
-def compress_user(path):
- """Reverse of :func:`os.path.expanduser`
+ if win32:
+ if name.startswith(("'", '"')) and name.endswith(("'", '"')):
+ name = name[1:-1]
+ return name
+
+
+def compress_user(path):
+ """Reverse of :func:`os.path.expanduser`
"""
- path = py3compat.unicode_to_str(path, sys.getfilesystemencoding())
- home = os.path.expanduser('~')
- if path.startswith(home):
- path = "~" + path[len(home):]
- return path
-
-def get_py_filename(name, force_win32=None):
- """Return a valid python filename in the current directory.
-
- If the given name is not a file, it adds '.py' and searches again.
- Raises IOError with an informative message if the file isn't found.
- """
-
- name = os.path.expanduser(name)
+ path = py3compat.unicode_to_str(path, sys.getfilesystemencoding())
+ home = os.path.expanduser('~')
+ if path.startswith(home):
+ path = "~" + path[len(home):]
+ return path
+
+def get_py_filename(name, force_win32=None):
+ """Return a valid python filename in the current directory.
+
+ If the given name is not a file, it adds '.py' and searches again.
+ Raises IOError with an informative message if the file isn't found.
+ """
+
+ name = os.path.expanduser(name)
if force_win32 is not None:
warn("The 'force_win32' argument to 'get_py_filename' is deprecated "
"since IPython 5.0 and should not be used anymore",
DeprecationWarning, stacklevel=2)
- if not os.path.isfile(name) and not name.endswith('.py'):
- name += '.py'
- if os.path.isfile(name):
- return name
- else:
- raise IOError('File `%r` not found.' % name)
-
-
-def filefind(filename, path_dirs=None):
- """Find a file by looking through a sequence of paths.
-
- This iterates through a sequence of paths looking for a file and returns
-    the full, absolute path of the first occurrence of the file. If no set of
- path dirs is given, the filename is tested as is, after running through
- :func:`expandvars` and :func:`expanduser`. Thus a simple call::
-
- filefind('myfile.txt')
-
- will find the file in the current working dir, but::
-
- filefind('~/myfile.txt')
-
-    will find the file in the user's home directory. This function does not
- automatically try any paths, such as the cwd or the user's home directory.
-
- Parameters
- ----------
- filename : str
- The filename to look for.
- path_dirs : str, None or sequence of str
- The sequence of paths to look for the file in. If None, the filename
-        needs to be absolute or be in the cwd. If a string, the string is
-        put into a sequence and searched. If a sequence, walk through
- each element and join with ``filename``, calling :func:`expandvars`
- and :func:`expanduser` before testing for existence.
-
- Returns
- -------
- Raises :exc:`IOError` or returns absolute path to file.
- """
-
- # If paths are quoted, abspath gets confused, strip them...
- filename = filename.strip('"').strip("'")
- # If the input is an absolute path, just check it exists
- if os.path.isabs(filename) and os.path.isfile(filename):
- return filename
-
- if path_dirs is None:
- path_dirs = ("",)
- elif isinstance(path_dirs, py3compat.string_types):
- path_dirs = (path_dirs,)
-
- for path in path_dirs:
- if path == '.': path = py3compat.getcwd()
- testname = expand_path(os.path.join(path, filename))
- if os.path.isfile(testname):
- return os.path.abspath(testname)
-
- raise IOError("File %r does not exist in any of the search paths: %r" %
- (filename, path_dirs) )
-
-
-class HomeDirError(Exception):
- pass
-
-
-def get_home_dir(require_writable=False):
- """Return the 'home' directory, as a unicode string.
-
- Uses os.path.expanduser('~'), and checks for writability.
-
- See stdlib docs for how this is determined.
- $HOME is first priority on *ALL* platforms.
-
- Parameters
- ----------
-
- require_writable : bool [default: False]
- if True:
- guarantees the return value is a writable directory, otherwise
- raises HomeDirError
- if False:
- The path is resolved, but it is not guaranteed to exist or be writable.
- """
-
- homedir = os.path.expanduser('~')
- # Next line will make things work even when /home/ is a symlink to
- # /usr/home as it is on FreeBSD, for example
- homedir = os.path.realpath(homedir)
-
- if not _writable_dir(homedir) and os.name == 'nt':
- # expanduser failed, use the registry to get the 'My Documents' folder.
- try:
- try:
- import winreg as wreg # Py 3
- except ImportError:
- import _winreg as wreg # Py 2
- key = wreg.OpenKey(
- wreg.HKEY_CURRENT_USER,
-                r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
- )
- homedir = wreg.QueryValueEx(key,'Personal')[0]
- key.Close()
- except:
- pass
-
- if (not require_writable) or _writable_dir(homedir):
- return py3compat.cast_unicode(homedir, fs_encoding)
- else:
- raise HomeDirError('%s is not a writable dir, '
- 'set $HOME environment variable to override' % homedir)
-
-def get_xdg_dir():
- """Return the XDG_CONFIG_HOME, if it is defined and exists, else None.
-
- This is only for non-OS X posix (Linux,Unix,etc.) systems.
- """
-
- env = os.environ
-
- if os.name == 'posix' and sys.platform != 'darwin':
- # Linux, Unix, AIX, etc.
- # use ~/.config if empty OR not set
- xdg = env.get("XDG_CONFIG_HOME", None) or os.path.join(get_home_dir(), '.config')
- if xdg and _writable_dir(xdg):
- return py3compat.cast_unicode(xdg, fs_encoding)
-
- return None
-
-
-def get_xdg_cache_dir():
- """Return the XDG_CACHE_HOME, if it is defined and exists, else None.
-
- This is only for non-OS X posix (Linux,Unix,etc.) systems.
- """
-
- env = os.environ
-
- if os.name == 'posix' and sys.platform != 'darwin':
- # Linux, Unix, AIX, etc.
- # use ~/.cache if empty OR not set
- xdg = env.get("XDG_CACHE_HOME", None) or os.path.join(get_home_dir(), '.cache')
- if xdg and _writable_dir(xdg):
- return py3compat.cast_unicode(xdg, fs_encoding)
-
- return None
-
-
-@undoc
-def get_ipython_dir():
+ if not os.path.isfile(name) and not name.endswith('.py'):
+ name += '.py'
+ if os.path.isfile(name):
+ return name
+ else:
+ raise IOError('File `%r` not found.' % name)
+
+
+def filefind(filename, path_dirs=None):
+ """Find a file by looking through a sequence of paths.
+
+ This iterates through a sequence of paths looking for a file and returns
+    the full, absolute path of the first occurrence of the file. If no set of
+ path dirs is given, the filename is tested as is, after running through
+ :func:`expandvars` and :func:`expanduser`. Thus a simple call::
+
+ filefind('myfile.txt')
+
+ will find the file in the current working dir, but::
+
+ filefind('~/myfile.txt')
+
+    will find the file in the user's home directory. This function does not
+ automatically try any paths, such as the cwd or the user's home directory.
+
+ Parameters
+ ----------
+ filename : str
+ The filename to look for.
+ path_dirs : str, None or sequence of str
+ The sequence of paths to look for the file in. If None, the filename
+        needs to be absolute or be in the cwd. If a string, the string is
+        put into a sequence and searched. If a sequence, walk through
+ each element and join with ``filename``, calling :func:`expandvars`
+ and :func:`expanduser` before testing for existence.
+
+ Returns
+ -------
+ Raises :exc:`IOError` or returns absolute path to file.
+ """
+
+ # If paths are quoted, abspath gets confused, strip them...
+ filename = filename.strip('"').strip("'")
+ # If the input is an absolute path, just check it exists
+ if os.path.isabs(filename) and os.path.isfile(filename):
+ return filename
+
+ if path_dirs is None:
+ path_dirs = ("",)
+ elif isinstance(path_dirs, py3compat.string_types):
+ path_dirs = (path_dirs,)
+
+ for path in path_dirs:
+ if path == '.': path = py3compat.getcwd()
+ testname = expand_path(os.path.join(path, filename))
+ if os.path.isfile(testname):
+ return os.path.abspath(testname)
+
+ raise IOError("File %r does not exist in any of the search paths: %r" %
+ (filename, path_dirs) )
+
+
+class HomeDirError(Exception):
+ pass
+
+
+def get_home_dir(require_writable=False):
+ """Return the 'home' directory, as a unicode string.
+
+ Uses os.path.expanduser('~'), and checks for writability.
+
+ See stdlib docs for how this is determined.
+ $HOME is first priority on *ALL* platforms.
+
+ Parameters
+ ----------
+
+ require_writable : bool [default: False]
+ if True:
+ guarantees the return value is a writable directory, otherwise
+ raises HomeDirError
+ if False:
+ The path is resolved, but it is not guaranteed to exist or be writable.
+ """
+
+ homedir = os.path.expanduser('~')
+ # Next line will make things work even when /home/ is a symlink to
+ # /usr/home as it is on FreeBSD, for example
+ homedir = os.path.realpath(homedir)
+
+ if not _writable_dir(homedir) and os.name == 'nt':
+ # expanduser failed, use the registry to get the 'My Documents' folder.
+ try:
+ try:
+ import winreg as wreg # Py 3
+ except ImportError:
+ import _winreg as wreg # Py 2
+ key = wreg.OpenKey(
+ wreg.HKEY_CURRENT_USER,
+                r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
+ )
+ homedir = wreg.QueryValueEx(key,'Personal')[0]
+ key.Close()
+ except:
+ pass
+
+ if (not require_writable) or _writable_dir(homedir):
+ return py3compat.cast_unicode(homedir, fs_encoding)
+ else:
+ raise HomeDirError('%s is not a writable dir, '
+ 'set $HOME environment variable to override' % homedir)
+
+def get_xdg_dir():
+ """Return the XDG_CONFIG_HOME, if it is defined and exists, else None.
+
+ This is only for non-OS X posix (Linux,Unix,etc.) systems.
+ """
+
+ env = os.environ
+
+ if os.name == 'posix' and sys.platform != 'darwin':
+ # Linux, Unix, AIX, etc.
+ # use ~/.config if empty OR not set
+ xdg = env.get("XDG_CONFIG_HOME", None) or os.path.join(get_home_dir(), '.config')
+ if xdg and _writable_dir(xdg):
+ return py3compat.cast_unicode(xdg, fs_encoding)
+
+ return None
+
+
+def get_xdg_cache_dir():
+ """Return the XDG_CACHE_HOME, if it is defined and exists, else None.
+
+ This is only for non-OS X posix (Linux,Unix,etc.) systems.
+ """
+
+ env = os.environ
+
+ if os.name == 'posix' and sys.platform != 'darwin':
+ # Linux, Unix, AIX, etc.
+ # use ~/.cache if empty OR not set
+ xdg = env.get("XDG_CACHE_HOME", None) or os.path.join(get_home_dir(), '.cache')
+ if xdg and _writable_dir(xdg):
+ return py3compat.cast_unicode(xdg, fs_encoding)
+
+ return None
+
+
+@undoc
+def get_ipython_dir():
warn("get_ipython_dir has moved to the IPython.paths module since IPython 4.0.", stacklevel=2)
- from IPython.paths import get_ipython_dir
- return get_ipython_dir()
-
-@undoc
-def get_ipython_cache_dir():
+ from IPython.paths import get_ipython_dir
+ return get_ipython_dir()
+
+@undoc
+def get_ipython_cache_dir():
warn("get_ipython_cache_dir has moved to the IPython.paths module since IPython 4.0.", stacklevel=2)
- from IPython.paths import get_ipython_cache_dir
- return get_ipython_cache_dir()
-
-@undoc
-def get_ipython_package_dir():
+ from IPython.paths import get_ipython_cache_dir
+ return get_ipython_cache_dir()
+
+@undoc
+def get_ipython_package_dir():
warn("get_ipython_package_dir has moved to the IPython.paths module since IPython 4.0.", stacklevel=2)
- from IPython.paths import get_ipython_package_dir
- return get_ipython_package_dir()
-
-@undoc
-def get_ipython_module_path(module_str):
+ from IPython.paths import get_ipython_package_dir
+ return get_ipython_package_dir()
+
+@undoc
+def get_ipython_module_path(module_str):
warn("get_ipython_module_path has moved to the IPython.paths module since IPython 4.0.", stacklevel=2)
- from IPython.paths import get_ipython_module_path
- return get_ipython_module_path(module_str)
-
-@undoc
-def locate_profile(profile='default'):
+ from IPython.paths import get_ipython_module_path
+ return get_ipython_module_path(module_str)
+
+@undoc
+def locate_profile(profile='default'):
warn("locate_profile has moved to the IPython.paths module since IPython 4.0.", stacklevel=2)
- from IPython.paths import locate_profile
- return locate_profile(profile=profile)
-
-def expand_path(s):
- """Expand $VARS and ~names in a string, like a shell
-
- :Examples:
-
- In [2]: os.environ['FOO']='test'
-
- In [3]: expand_path('variable FOO is $FOO')
- Out[3]: 'variable FOO is test'
- """
-    # This is a pretty subtle hack. When expanduser is given a UNC path
-    # on Windows (\\server\share$\%username%), os.path.expandvars removes
-    # the $ to get (\\server\share\%username%), apparently treating a lone
-    # $ as an empty variable. But we need the $ to remain there (it
-    # indicates a hidden share).
- if os.name=='nt':
- s = s.replace('$\\', 'IPYTHON_TEMP')
- s = os.path.expandvars(os.path.expanduser(s))
- if os.name=='nt':
- s = s.replace('IPYTHON_TEMP', '$\\')
- return s
-
-
-def unescape_glob(string):
- """Unescape glob pattern in `string`."""
- def unescape(s):
- for pattern in '*[]!?':
- s = s.replace(r'\{0}'.format(pattern), pattern)
- return s
- return '\\'.join(map(unescape, string.split('\\\\')))
-
-
-def shellglob(args):
- """
- Do glob expansion for each element in `args` and return a flattened list.
-
- Unmatched glob pattern will remain as-is in the returned list.
-
- """
- expanded = []
- # Do not unescape backslash in Windows as it is interpreted as
- # path separator:
- unescape = unescape_glob if sys.platform != 'win32' else lambda x: x
- for a in args:
- expanded.extend(glob.glob(a) or [unescape(a)])
- return expanded
-
-
-def target_outdated(target,deps):
- """Determine whether a target is out of date.
-
- target_outdated(target,deps) -> 1/0
-
- deps: list of filenames which MUST exist.
- target: single filename which may or may not exist.
-
- If target doesn't exist or is older than any file listed in deps, return
- true, otherwise return false.
- """
- try:
- target_time = os.path.getmtime(target)
- except os.error:
- return 1
- for dep in deps:
- dep_time = os.path.getmtime(dep)
- if dep_time > target_time:
- #print "For target",target,"Dep failed:",dep # dbg
- #print "times (dep,tar):",dep_time,target_time # dbg
- return 1
- return 0
-
-
-def target_update(target,deps,cmd):
- """Update a target with a given command given a list of dependencies.
-
- target_update(target,deps,cmd) -> runs cmd if target is outdated.
-
- This is just a wrapper around target_outdated() which calls the given
- command if target is outdated."""
-
- if target_outdated(target,deps):
- system(cmd)
-
-@undoc
-def filehash(path):
- """Make an MD5 hash of a file, ignoring any differences in line
- ending characters."""
+ from IPython.paths import locate_profile
+ return locate_profile(profile=profile)
+
+def expand_path(s):
+ """Expand $VARS and ~names in a string, like a shell
+
+ :Examples:
+
+ In [2]: os.environ['FOO']='test'
+
+ In [3]: expand_path('variable FOO is $FOO')
+ Out[3]: 'variable FOO is test'
+ """
+    # This is a pretty subtle hack. When expanduser is given a UNC path
+    # on Windows (\\server\share$\%username%), os.path.expandvars removes
+    # the $ to get (\\server\share\%username%), apparently treating a lone
+    # $ as an empty variable. But we need the $ to remain there (it
+    # indicates a hidden share).
+ if os.name=='nt':
+ s = s.replace('$\\', 'IPYTHON_TEMP')
+ s = os.path.expandvars(os.path.expanduser(s))
+ if os.name=='nt':
+ s = s.replace('IPYTHON_TEMP', '$\\')
+ return s
+
+
+def unescape_glob(string):
+ """Unescape glob pattern in `string`."""
+ def unescape(s):
+ for pattern in '*[]!?':
+ s = s.replace(r'\{0}'.format(pattern), pattern)
+ return s
+ return '\\'.join(map(unescape, string.split('\\\\')))
+
+
+def shellglob(args):
+ """
+ Do glob expansion for each element in `args` and return a flattened list.
+
+ Unmatched glob pattern will remain as-is in the returned list.
+
+ """
+ expanded = []
+ # Do not unescape backslash in Windows as it is interpreted as
+ # path separator:
+ unescape = unescape_glob if sys.platform != 'win32' else lambda x: x
+ for a in args:
+ expanded.extend(glob.glob(a) or [unescape(a)])
+ return expanded
+
+
+def target_outdated(target,deps):
+ """Determine whether a target is out of date.
+
+ target_outdated(target,deps) -> 1/0
+
+ deps: list of filenames which MUST exist.
+ target: single filename which may or may not exist.
+
+ If target doesn't exist or is older than any file listed in deps, return
+ true, otherwise return false.
+ """
+ try:
+ target_time = os.path.getmtime(target)
+ except os.error:
+ return 1
+ for dep in deps:
+ dep_time = os.path.getmtime(dep)
+ if dep_time > target_time:
+ #print "For target",target,"Dep failed:",dep # dbg
+ #print "times (dep,tar):",dep_time,target_time # dbg
+ return 1
+ return 0
+
+
+def target_update(target,deps,cmd):
+ """Update a target with a given command given a list of dependencies.
+
+ target_update(target,deps,cmd) -> runs cmd if target is outdated.
+
+ This is just a wrapper around target_outdated() which calls the given
+ command if target is outdated."""
+
+ if target_outdated(target,deps):
+ system(cmd)
+
+@undoc
+def filehash(path):
+ """Make an MD5 hash of a file, ignoring any differences in line
+ ending characters."""
warn("filehash() is deprecated since IPython 4.0", DeprecationWarning, stacklevel=2)
- with open(path, "rU") as f:
- return md5(py3compat.str_to_bytes(f.read())).hexdigest()
-
-ENOLINK = 1998
-
-def link(src, dst):
- """Hard links ``src`` to ``dst``, returning 0 or errno.
-
- Note that the special errno ``ENOLINK`` will be returned if ``os.link`` isn't
- supported by the operating system.
- """
-
- if not hasattr(os, "link"):
- return ENOLINK
- link_errno = 0
- try:
- os.link(src, dst)
- except OSError as e:
- link_errno = e.errno
- return link_errno
-
-
-def link_or_copy(src, dst):
- """Attempts to hardlink ``src`` to ``dst``, copying if the link fails.
-
- Attempts to maintain the semantics of ``shutil.copy``.
-
- Because ``os.link`` does not overwrite files, a unique temporary file
- will be used if the target already exists, then that file will be moved
- into place.
- """
-
- if os.path.isdir(dst):
- dst = os.path.join(dst, os.path.basename(src))
-
- link_errno = link(src, dst)
- if link_errno == errno.EEXIST:
- if os.stat(src).st_ino == os.stat(dst).st_ino:
- # dst is already a hard link to the correct file, so we don't need
- # to do anything else. If we try to link and rename the file
- # anyway, we get duplicate files - see http://bugs.python.org/issue21876
- return
-
- new_dst = dst + "-temp-%04X" %(random.randint(1, 16**4), )
- try:
- link_or_copy(src, new_dst)
- except:
- try:
- os.remove(new_dst)
- except OSError:
- pass
- raise
- os.rename(new_dst, dst)
- elif link_errno != 0:
- # Either link isn't supported, or the filesystem doesn't support
- # linking, or 'src' and 'dst' are on different filesystems.
- shutil.copy(src, dst)
-
-def ensure_dir_exists(path, mode=0o755):
- """ensure that a directory exists
-
- If it doesn't exist, try to create it and protect against a race condition
- if another process is doing the same.
-
- The default permissions are 755, which differ from os.makedirs default of 777.
- """
- if not os.path.exists(path):
- try:
- os.makedirs(path, mode=mode)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
- elif not os.path.isdir(path):
- raise IOError("%r exists but is not a directory" % path)
+ with open(path, "rU") as f:
+ return md5(py3compat.str_to_bytes(f.read())).hexdigest()
+
+ENOLINK = 1998
+
+def link(src, dst):
+ """Hard links ``src`` to ``dst``, returning 0 or errno.
+
+ Note that the special errno ``ENOLINK`` will be returned if ``os.link`` isn't
+ supported by the operating system.
+ """
+
+ if not hasattr(os, "link"):
+ return ENOLINK
+ link_errno = 0
+ try:
+ os.link(src, dst)
+ except OSError as e:
+ link_errno = e.errno
+ return link_errno
+
+
+def link_or_copy(src, dst):
+ """Attempts to hardlink ``src`` to ``dst``, copying if the link fails.
+
+ Attempts to maintain the semantics of ``shutil.copy``.
+
+ Because ``os.link`` does not overwrite files, a unique temporary file
+ will be used if the target already exists, then that file will be moved
+ into place.
+ """
+
+ if os.path.isdir(dst):
+ dst = os.path.join(dst, os.path.basename(src))
+
+ link_errno = link(src, dst)
+ if link_errno == errno.EEXIST:
+ if os.stat(src).st_ino == os.stat(dst).st_ino:
+ # dst is already a hard link to the correct file, so we don't need
+ # to do anything else. If we try to link and rename the file
+ # anyway, we get duplicate files - see http://bugs.python.org/issue21876
+ return
+
+ new_dst = dst + "-temp-%04X" %(random.randint(1, 16**4), )
+ try:
+ link_or_copy(src, new_dst)
+ except:
+ try:
+ os.remove(new_dst)
+ except OSError:
+ pass
+ raise
+ os.rename(new_dst, dst)
+ elif link_errno != 0:
+ # Either link isn't supported, or the filesystem doesn't support
+ # linking, or 'src' and 'dst' are on different filesystems.
+ shutil.copy(src, dst)
+
+def ensure_dir_exists(path, mode=0o755):
+ """ensure that a directory exists
+
+ If it doesn't exist, try to create it and protect against a race condition
+ if another process is doing the same.
+
+ The default permissions are 755, which differ from os.makedirs default of 777.
+ """
+ if not os.path.exists(path):
+ try:
+ os.makedirs(path, mode=mode)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+ elif not os.path.isdir(path):
+ raise IOError("%r exists but is not a directory" % path)
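
The helpers above compose naturally. Here is a minimal sketch of typical use, assuming this vendored IPython.utils.path is importable; the ~/.myapp directory and config.ini filename are made up purely for illustration:

    import os
    from IPython.utils.path import (filefind, ensure_dir_exists, link_or_copy,
                                    compress_user)

    cfg_dir = os.path.expanduser('~/.myapp')   # hypothetical app directory
    ensure_dir_exists(cfg_dir)                 # mode 0o755, tolerant of races

    try:
        # Search the cwd and the app directory; raises IOError when not found.
        cfg = filefind('config.ini', ['.', cfg_dir])
    except IOError:
        cfg = None

    if cfg is not None:
        # Hard-link into the app directory, falling back to a plain copy when
        # the OS or filesystem does not support links.
        link_or_copy(cfg, os.path.join(cfg_dir, 'config.cached.ini'))
        print(compress_user(cfg))              # home prefix shown as '~'
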
diff --git a/contrib/python/ipython/py2/IPython/utils/pickleutil.py b/contrib/python/ipython/py2/IPython/utils/pickleutil.py
index 9111ad73c0..665ff09f2d 100644
--- a/contrib/python/ipython/py2/IPython/utils/pickleutil.py
+++ b/contrib/python/ipython/py2/IPython/utils/pickleutil.py
@@ -1,5 +1,5 @@
-from warnings import warn
-
-warn("IPython.utils.pickleutil has moved to ipykernel.pickleutil")
-
-from ipykernel.pickleutil import *
+from warnings import warn
+
+warn("IPython.utils.pickleutil has moved to ipykernel.pickleutil")
+
+from ipykernel.pickleutil import *
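
The shim above forwards everything to ipykernel.pickleutil while warning on import. A quick way to observe that, assuming ipykernel is installed next to this vendored IPython and the module has not already been imported in the process:

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        import IPython.utils.pickleutil as pickleutil   # emits the "has moved" warning

    print([str(w.message) for w in caught])
    # After the star import the module exposes whatever ipykernel.pickleutil
    # defines (historically the can/uncan serialization helpers).
    print([name for name in dir(pickleutil) if not name.startswith('_')][:5])
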
diff --git a/contrib/python/ipython/py2/IPython/utils/process.py b/contrib/python/ipython/py2/IPython/utils/process.py
index ca85a03b5c..a274f43f3a 100644
--- a/contrib/python/ipython/py2/IPython/utils/process.py
+++ b/contrib/python/ipython/py2/IPython/utils/process.py
@@ -1,106 +1,106 @@
-# encoding: utf-8
-"""
-Utilities for working with external processes.
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import print_function
-
-import os
-import sys
-
-if sys.platform == 'win32':
- from ._process_win32 import system, getoutput, arg_split, check_pid
-elif sys.platform == 'cli':
- from ._process_cli import system, getoutput, arg_split, check_pid
-else:
- from ._process_posix import system, getoutput, arg_split, check_pid
-
-from ._process_common import getoutputerror, get_output_error_code, process_handler
-from . import py3compat
-
-
-class FindCmdError(Exception):
- pass
-
-
-def find_cmd(cmd):
- """Find absolute path to executable cmd in a cross platform manner.
-
- This function tries to determine the full path to a command line program
- using `which` on Unix/Linux/OS X and `win32api` on Windows. Most of the
-    time it will use the version that is first on the user's `PATH`.
-
- Warning, don't use this to find IPython command line programs as there
- is a risk you will find the wrong one. Instead find those using the
- following code and looking for the application itself::
-
- from IPython.utils.path import get_ipython_module_path
- from IPython.utils.process import pycmd2argv
- argv = pycmd2argv(get_ipython_module_path('IPython.terminal.ipapp'))
-
- Parameters
- ----------
- cmd : str
- The command line program to look for.
- """
- path = py3compat.which(cmd)
- if path is None:
- raise FindCmdError('command could not be found: %s' % cmd)
- return path
-
-
-def is_cmd_found(cmd):
- """Check whether executable `cmd` exists or not and return a bool."""
- try:
- find_cmd(cmd)
- return True
- except FindCmdError:
- return False
-
-
-def pycmd2argv(cmd):
- r"""Take the path of a python command and return a list (argv-style).
-
- This only works on Python based command line programs and will find the
- location of the ``python`` executable using ``sys.executable`` to make
- sure the right version is used.
-
- For a given path ``cmd``, this returns [cmd] if cmd's extension is .exe,
-    .com or .bat, and [sys.executable, cmd] otherwise.
-
- Parameters
- ----------
- cmd : string
- The path of the command.
-
- Returns
- -------
- argv-style list.
- """
- ext = os.path.splitext(cmd)[1]
- if ext in ['.exe', '.com', '.bat']:
- return [cmd]
- else:
- return [sys.executable, cmd]
-
-
-def abbrev_cwd():
- """ Return abbreviated version of cwd, e.g. d:mydir """
- cwd = py3compat.getcwd().replace('\\','/')
- drivepart = ''
- tail = cwd
- if sys.platform == 'win32':
- if len(cwd) < 4:
- return cwd
- drivepart,tail = os.path.splitdrive(cwd)
-
-
- parts = tail.split('/')
- if len(parts) > 2:
- tail = '/'.join(parts[-2:])
-
- return (drivepart + (
- cwd == '/' and '/' or tail))
+# encoding: utf-8
+"""
+Utilities for working with external processes.
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import print_function
+
+import os
+import sys
+
+if sys.platform == 'win32':
+ from ._process_win32 import system, getoutput, arg_split, check_pid
+elif sys.platform == 'cli':
+ from ._process_cli import system, getoutput, arg_split, check_pid
+else:
+ from ._process_posix import system, getoutput, arg_split, check_pid
+
+from ._process_common import getoutputerror, get_output_error_code, process_handler
+from . import py3compat
+
+
+class FindCmdError(Exception):
+ pass
+
+
+def find_cmd(cmd):
+ """Find absolute path to executable cmd in a cross platform manner.
+
+ This function tries to determine the full path to a command line program
+ using `which` on Unix/Linux/OS X and `win32api` on Windows. Most of the
+    time it will use the version that is first on the user's `PATH`.
+
+ Warning, don't use this to find IPython command line programs as there
+ is a risk you will find the wrong one. Instead find those using the
+ following code and looking for the application itself::
+
+ from IPython.utils.path import get_ipython_module_path
+ from IPython.utils.process import pycmd2argv
+ argv = pycmd2argv(get_ipython_module_path('IPython.terminal.ipapp'))
+
+ Parameters
+ ----------
+ cmd : str
+ The command line program to look for.
+ """
+ path = py3compat.which(cmd)
+ if path is None:
+ raise FindCmdError('command could not be found: %s' % cmd)
+ return path
+
+
+def is_cmd_found(cmd):
+ """Check whether executable `cmd` exists or not and return a bool."""
+ try:
+ find_cmd(cmd)
+ return True
+ except FindCmdError:
+ return False
+
+
+def pycmd2argv(cmd):
+ r"""Take the path of a python command and return a list (argv-style).
+
+ This only works on Python based command line programs and will find the
+ location of the ``python`` executable using ``sys.executable`` to make
+ sure the right version is used.
+
+ For a given path ``cmd``, this returns [cmd] if cmd's extension is .exe,
+    .com or .bat, and [sys.executable, cmd] otherwise.
+
+ Parameters
+ ----------
+ cmd : string
+ The path of the command.
+
+ Returns
+ -------
+ argv-style list.
+ """
+ ext = os.path.splitext(cmd)[1]
+ if ext in ['.exe', '.com', '.bat']:
+ return [cmd]
+ else:
+ return [sys.executable, cmd]
+
+
+def abbrev_cwd():
+ """ Return abbreviated version of cwd, e.g. d:mydir """
+ cwd = py3compat.getcwd().replace('\\','/')
+ drivepart = ''
+ tail = cwd
+ if sys.platform == 'win32':
+ if len(cwd) < 4:
+ return cwd
+ drivepart,tail = os.path.splitdrive(cwd)
+
+
+ parts = tail.split('/')
+ if len(parts) > 2:
+ tail = '/'.join(parts[-2:])
+
+ return (drivepart + (
+ cwd == '/' and '/' or tail))
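
A small sketch of how the helpers above are meant to be used, assuming the vendored IPython.utils.process is importable; 'git' is only an example command, not something the module requires:

    from IPython.utils.process import find_cmd, FindCmdError, arg_split, abbrev_cwd

    # find_cmd raises FindCmdError when the program is missing;
    # is_cmd_found wraps the same check and returns a bool instead.
    try:
        print(find_cmd('git'))          # absolute path, resolved via py3compat.which
    except FindCmdError:
        print('git is not on PATH')

    # Platform-aware command-line tokenization.
    print(arg_split('git log --oneline -n 5'))
    # ['git', 'log', '--oneline', '-n', '5']

    print(abbrev_cwd())                 # e.g. 'src/myproject' for a deeply nested cwd
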
diff --git a/contrib/python/ipython/py2/IPython/utils/py3compat.py b/contrib/python/ipython/py2/IPython/utils/py3compat.py
index adaac362a7..88602e5342 100644
--- a/contrib/python/ipython/py2/IPython/utils/py3compat.py
+++ b/contrib/python/ipython/py2/IPython/utils/py3compat.py
@@ -1,336 +1,336 @@
-# coding: utf-8
-"""Compatibility tricks for Python 3. Mainly to do with unicode."""
-import functools
-import os
-import sys
-import re
-import shutil
-import types
+# coding: utf-8
+"""Compatibility tricks for Python 3. Mainly to do with unicode."""
+import functools
+import os
+import sys
+import re
+import shutil
+import types
import platform
-
-from .encoding import DEFAULT_ENCODING
-
-def no_code(x, encoding=None):
- return x
-
-def decode(s, encoding=None):
- encoding = encoding or DEFAULT_ENCODING
- return s.decode(encoding, "replace")
-
-def encode(u, encoding=None):
- encoding = encoding or DEFAULT_ENCODING
- return u.encode(encoding, "replace")
-
-
-def cast_unicode(s, encoding=None):
- if isinstance(s, bytes):
- return decode(s, encoding)
- return s
-
-def cast_bytes(s, encoding=None):
- if not isinstance(s, bytes):
- return encode(s, encoding)
- return s
-
-def buffer_to_bytes(buf):
- """Cast a buffer object to bytes"""
- if not isinstance(buf, bytes):
- buf = bytes(buf)
- return buf
-
-def _modify_str_or_docstring(str_change_func):
- @functools.wraps(str_change_func)
- def wrapper(func_or_str):
- if isinstance(func_or_str, string_types):
- func = None
- doc = func_or_str
- else:
- func = func_or_str
- doc = func.__doc__
-
- # PYTHONOPTIMIZE=2 strips docstrings, so they can disappear unexpectedly
- if doc is not None:
- doc = str_change_func(doc)
-
- if func:
- func.__doc__ = doc
- return func
- return doc
- return wrapper
-
-def safe_unicode(e):
- """unicode(e) with various fallbacks. Used for exceptions, which may not be
- safe to call unicode() on.
- """
- try:
- return unicode_type(e)
- except UnicodeError:
- pass
-
- try:
- return str_to_unicode(str(e))
- except UnicodeError:
- pass
-
- try:
- return str_to_unicode(repr(e))
- except UnicodeError:
- pass
-
- return u'Unrecoverably corrupt evalue'
-
-# shutil.which from Python 3.4
-def _shutil_which(cmd, mode=os.F_OK | os.X_OK, path=None):
- """Given a command, mode, and a PATH string, return the path which
- conforms to the given mode on the PATH, or None if there is no such
- file.
-
- `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
- of os.environ.get("PATH"), or can be overridden with a custom search
- path.
-
- This is a backport of shutil.which from Python 3.4
- """
- # Check that a given file can be accessed with the correct mode.
- # Additionally check that `file` is not a directory, as on Windows
- # directories pass the os.access check.
- def _access_check(fn, mode):
- return (os.path.exists(fn) and os.access(fn, mode)
- and not os.path.isdir(fn))
-
- # If we're given a path with a directory part, look it up directly rather
- # than referring to PATH directories. This includes checking relative to the
- # current directory, e.g. ./script
- if os.path.dirname(cmd):
- if _access_check(cmd, mode):
- return cmd
- return None
-
- if path is None:
- path = os.environ.get("PATH", os.defpath)
- if not path:
- return None
- path = path.split(os.pathsep)
-
- if sys.platform == "win32":
- # The current directory takes precedence on Windows.
- if not os.curdir in path:
- path.insert(0, os.curdir)
-
- # PATHEXT is necessary to check on Windows.
- pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
- # See if the given file matches any of the expected path extensions.
- # This will allow us to short circuit when given "python.exe".
- # If it does match, only test that one, otherwise we have to try
- # others.
- if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
- files = [cmd]
- else:
- files = [cmd + ext for ext in pathext]
- else:
- # On other platforms you don't have things like PATHEXT to tell you
- # what file suffixes are executable, so just pass on cmd as-is.
- files = [cmd]
-
- seen = set()
- for dir in path:
- normdir = os.path.normcase(dir)
- if not normdir in seen:
- seen.add(normdir)
- for thefile in files:
- name = os.path.join(dir, thefile)
- if _access_check(name, mode):
- return name
- return None
-
-if sys.version_info[0] >= 3:
- PY3 = True
-
- # keep reference to builtin_mod because the kernel overrides that value
- # to forward requests to a frontend.
- def input(prompt=''):
- return builtin_mod.input(prompt)
-
- builtin_mod_name = "builtins"
- import builtins as builtin_mod
-
- str_to_unicode = no_code
- unicode_to_str = no_code
- str_to_bytes = encode
- bytes_to_str = decode
- cast_bytes_py2 = no_code
- cast_unicode_py2 = no_code
- buffer_to_bytes_py2 = no_code
-
- string_types = (str,)
- unicode_type = str
-
- which = shutil.which
-
- def isidentifier(s, dotted=False):
- if dotted:
- return all(isidentifier(a) for a in s.split("."))
- return s.isidentifier()
-
- xrange = range
- def iteritems(d): return iter(d.items())
- def itervalues(d): return iter(d.values())
- getcwd = os.getcwd
-
- MethodType = types.MethodType
-
- def execfile(fname, glob, loc=None, compiler=None):
- loc = loc if (loc is not None) else glob
- with open(fname, 'rb') as f:
- compiler = compiler or compile
- exec(compiler(f.read(), fname, 'exec'), glob, loc)
-
- # Refactor print statements in doctests.
- _print_statement_re = re.compile(r"\bprint (?P<expr>.*)$", re.MULTILINE)
- def _print_statement_sub(match):
- expr = match.groups('expr')
- return "print(%s)" % expr
-
- @_modify_str_or_docstring
- def doctest_refactor_print(doc):
- """Refactor 'print x' statements in a doctest to print(x) style. 2to3
- unfortunately doesn't pick up on our doctests.
-
- Can accept a string or a function, so it can be used as a decorator."""
- return _print_statement_re.sub(_print_statement_sub, doc)
-
- # Abstract u'abc' syntax:
- @_modify_str_or_docstring
- def u_format(s):
- """"{u}'abc'" --> "'abc'" (Python 3)
-
- Accepts a string or a function, so it can be used as a decorator."""
- return s.format(u='')
-
- def get_closure(f):
- """Get a function's closure attribute"""
- return f.__closure__
-
-else:
- PY3 = False
-
- # keep reference to builtin_mod because the kernel overrides that value
- # to forward requests to a frontend.
- def input(prompt=''):
- return builtin_mod.raw_input(prompt)
-
- builtin_mod_name = "__builtin__"
- import __builtin__ as builtin_mod
-
- str_to_unicode = decode
- unicode_to_str = encode
- str_to_bytes = no_code
- bytes_to_str = no_code
- cast_bytes_py2 = cast_bytes
- cast_unicode_py2 = cast_unicode
- buffer_to_bytes_py2 = buffer_to_bytes
-
- string_types = (str, unicode)
- unicode_type = unicode
-
- import re
- _name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$")
- def isidentifier(s, dotted=False):
- if dotted:
- return all(isidentifier(a) for a in s.split("."))
- return bool(_name_re.match(s))
-
- xrange = xrange
- def iteritems(d): return d.iteritems()
- def itervalues(d): return d.itervalues()
- getcwd = os.getcwdu
-
- def MethodType(func, instance):
- return types.MethodType(func, instance, type(instance))
-
- def doctest_refactor_print(func_or_str):
- return func_or_str
-
- def get_closure(f):
- """Get a function's closure attribute"""
- return f.func_closure
-
- which = _shutil_which
-
- # Abstract u'abc' syntax:
- @_modify_str_or_docstring
- def u_format(s):
- """"{u}'abc'" --> "u'abc'" (Python 2)
-
- Accepts a string or a function, so it can be used as a decorator."""
- return s.format(u='u')
-
- if sys.platform == 'win32':
- def execfile(fname, glob=None, loc=None, compiler=None):
- loc = loc if (loc is not None) else glob
- scripttext = builtin_mod.open(fname).read()+ '\n'
- # compile converts unicode filename to str assuming
- # ascii. Let's do the conversion before calling compile
- if isinstance(fname, unicode):
- filename = unicode_to_str(fname)
- else:
- filename = fname
- compiler = compiler or compile
- exec(compiler(scripttext, filename, 'exec'), glob, loc)
-
- else:
- def execfile(fname, glob=None, loc=None, compiler=None):
- if isinstance(fname, unicode):
- filename = fname.encode(sys.getfilesystemencoding())
- else:
- filename = fname
- where = [ns for ns in [glob, loc] if ns is not None]
- if compiler is None:
- builtin_mod.execfile(filename, *where)
- else:
- scripttext = builtin_mod.open(fname).read().rstrip() + '\n'
- exec(compiler(scripttext, filename, 'exec'), glob, loc)
-
-
-PY2 = not PY3
+
+from .encoding import DEFAULT_ENCODING
+
+def no_code(x, encoding=None):
+ return x
+
+def decode(s, encoding=None):
+ encoding = encoding or DEFAULT_ENCODING
+ return s.decode(encoding, "replace")
+
+def encode(u, encoding=None):
+ encoding = encoding or DEFAULT_ENCODING
+ return u.encode(encoding, "replace")
+
+
+def cast_unicode(s, encoding=None):
+ if isinstance(s, bytes):
+ return decode(s, encoding)
+ return s
+
+def cast_bytes(s, encoding=None):
+ if not isinstance(s, bytes):
+ return encode(s, encoding)
+ return s
+
+def buffer_to_bytes(buf):
+ """Cast a buffer object to bytes"""
+ if not isinstance(buf, bytes):
+ buf = bytes(buf)
+ return buf
+
+def _modify_str_or_docstring(str_change_func):
+ @functools.wraps(str_change_func)
+ def wrapper(func_or_str):
+ if isinstance(func_or_str, string_types):
+ func = None
+ doc = func_or_str
+ else:
+ func = func_or_str
+ doc = func.__doc__
+
+ # PYTHONOPTIMIZE=2 strips docstrings, so they can disappear unexpectedly
+ if doc is not None:
+ doc = str_change_func(doc)
+
+ if func:
+ func.__doc__ = doc
+ return func
+ return doc
+ return wrapper
+
+def safe_unicode(e):
+ """unicode(e) with various fallbacks. Used for exceptions, which may not be
+ safe to call unicode() on.
+ """
+ try:
+ return unicode_type(e)
+ except UnicodeError:
+ pass
+
+ try:
+ return str_to_unicode(str(e))
+ except UnicodeError:
+ pass
+
+ try:
+ return str_to_unicode(repr(e))
+ except UnicodeError:
+ pass
+
+ return u'Unrecoverably corrupt evalue'
+
+# shutil.which from Python 3.4
+def _shutil_which(cmd, mode=os.F_OK | os.X_OK, path=None):
+ """Given a command, mode, and a PATH string, return the path which
+ conforms to the given mode on the PATH, or None if there is no such
+ file.
+
+ `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
+ of os.environ.get("PATH"), or can be overridden with a custom search
+ path.
+
+ This is a backport of shutil.which from Python 3.4
+ """
+ # Check that a given file can be accessed with the correct mode.
+ # Additionally check that `file` is not a directory, as on Windows
+ # directories pass the os.access check.
+ def _access_check(fn, mode):
+ return (os.path.exists(fn) and os.access(fn, mode)
+ and not os.path.isdir(fn))
+
+ # If we're given a path with a directory part, look it up directly rather
+ # than referring to PATH directories. This includes checking relative to the
+ # current directory, e.g. ./script
+ if os.path.dirname(cmd):
+ if _access_check(cmd, mode):
+ return cmd
+ return None
+
+ if path is None:
+ path = os.environ.get("PATH", os.defpath)
+ if not path:
+ return None
+ path = path.split(os.pathsep)
+
+ if sys.platform == "win32":
+ # The current directory takes precedence on Windows.
+ if not os.curdir in path:
+ path.insert(0, os.curdir)
+
+ # PATHEXT is necessary to check on Windows.
+ pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
+ # See if the given file matches any of the expected path extensions.
+ # This will allow us to short circuit when given "python.exe".
+ # If it does match, only test that one, otherwise we have to try
+ # others.
+ if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
+ files = [cmd]
+ else:
+ files = [cmd + ext for ext in pathext]
+ else:
+ # On other platforms you don't have things like PATHEXT to tell you
+ # what file suffixes are executable, so just pass on cmd as-is.
+ files = [cmd]
+
+ seen = set()
+ for dir in path:
+ normdir = os.path.normcase(dir)
+ if not normdir in seen:
+ seen.add(normdir)
+ for thefile in files:
+ name = os.path.join(dir, thefile)
+ if _access_check(name, mode):
+ return name
+ return None
+
+if sys.version_info[0] >= 3:
+ PY3 = True
+
+ # keep reference to builtin_mod because the kernel overrides that value
+ # to forward requests to a frontend.
+ def input(prompt=''):
+ return builtin_mod.input(prompt)
+
+ builtin_mod_name = "builtins"
+ import builtins as builtin_mod
+
+ str_to_unicode = no_code
+ unicode_to_str = no_code
+ str_to_bytes = encode
+ bytes_to_str = decode
+ cast_bytes_py2 = no_code
+ cast_unicode_py2 = no_code
+ buffer_to_bytes_py2 = no_code
+
+ string_types = (str,)
+ unicode_type = str
+
+ which = shutil.which
+
+ def isidentifier(s, dotted=False):
+ if dotted:
+ return all(isidentifier(a) for a in s.split("."))
+ return s.isidentifier()
+
+ xrange = range
+ def iteritems(d): return iter(d.items())
+ def itervalues(d): return iter(d.values())
+ getcwd = os.getcwd
+
+ MethodType = types.MethodType
+
+ def execfile(fname, glob, loc=None, compiler=None):
+ loc = loc if (loc is not None) else glob
+ with open(fname, 'rb') as f:
+ compiler = compiler or compile
+ exec(compiler(f.read(), fname, 'exec'), glob, loc)
+
+ # Refactor print statements in doctests.
+ _print_statement_re = re.compile(r"\bprint (?P<expr>.*)$", re.MULTILINE)
+ def _print_statement_sub(match):
+ expr = match.groups('expr')
+ return "print(%s)" % expr
+
+ @_modify_str_or_docstring
+ def doctest_refactor_print(doc):
+ """Refactor 'print x' statements in a doctest to print(x) style. 2to3
+ unfortunately doesn't pick up on our doctests.
+
+ Can accept a string or a function, so it can be used as a decorator."""
+ return _print_statement_re.sub(_print_statement_sub, doc)
+
+ # Abstract u'abc' syntax:
+ @_modify_str_or_docstring
+ def u_format(s):
+ """"{u}'abc'" --> "'abc'" (Python 3)
+
+ Accepts a string or a function, so it can be used as a decorator."""
+ return s.format(u='')
+
+ def get_closure(f):
+ """Get a function's closure attribute"""
+ return f.__closure__
+
+else:
+ PY3 = False
+
+ # keep reference to builtin_mod because the kernel overrides that value
+ # to forward requests to a frontend.
+ def input(prompt=''):
+ return builtin_mod.raw_input(prompt)
+
+ builtin_mod_name = "__builtin__"
+ import __builtin__ as builtin_mod
+
+ str_to_unicode = decode
+ unicode_to_str = encode
+ str_to_bytes = no_code
+ bytes_to_str = no_code
+ cast_bytes_py2 = cast_bytes
+ cast_unicode_py2 = cast_unicode
+ buffer_to_bytes_py2 = buffer_to_bytes
+
+ string_types = (str, unicode)
+ unicode_type = unicode
+
+ import re
+ _name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$")
+ def isidentifier(s, dotted=False):
+ if dotted:
+ return all(isidentifier(a) for a in s.split("."))
+ return bool(_name_re.match(s))
+
+ xrange = xrange
+ def iteritems(d): return d.iteritems()
+ def itervalues(d): return d.itervalues()
+ getcwd = os.getcwdu
+
+ def MethodType(func, instance):
+ return types.MethodType(func, instance, type(instance))
+
+ def doctest_refactor_print(func_or_str):
+ return func_or_str
+
+ def get_closure(f):
+ """Get a function's closure attribute"""
+ return f.func_closure
+
+ which = _shutil_which
+
+ # Abstract u'abc' syntax:
+ @_modify_str_or_docstring
+ def u_format(s):
+ """"{u}'abc'" --> "u'abc'" (Python 2)
+
+ Accepts a string or a function, so it can be used as a decorator."""
+ return s.format(u='u')
+
+ if sys.platform == 'win32':
+ def execfile(fname, glob=None, loc=None, compiler=None):
+ loc = loc if (loc is not None) else glob
+ scripttext = builtin_mod.open(fname).read()+ '\n'
+ # compile converts unicode filename to str assuming
+ # ascii. Let's do the conversion before calling compile
+ if isinstance(fname, unicode):
+ filename = unicode_to_str(fname)
+ else:
+ filename = fname
+ compiler = compiler or compile
+ exec(compiler(scripttext, filename, 'exec'), glob, loc)
+
+ else:
+ def execfile(fname, glob=None, loc=None, compiler=None):
+ if isinstance(fname, unicode):
+ filename = fname.encode(sys.getfilesystemencoding())
+ else:
+ filename = fname
+ where = [ns for ns in [glob, loc] if ns is not None]
+ if compiler is None:
+ builtin_mod.execfile(filename, *where)
+ else:
+ scripttext = builtin_mod.open(fname).read().rstrip() + '\n'
+ exec(compiler(scripttext, filename, 'exec'), glob, loc)
+
+
+PY2 = not PY3
PYPY = platform.python_implementation() == "PyPy"
-
-
-def annotate(**kwargs):
- """Python 3 compatible function annotation for Python 2."""
- if not kwargs:
- raise ValueError('annotations must be provided as keyword arguments')
- def dec(f):
- if hasattr(f, '__annotations__'):
- for k, v in kwargs.items():
- f.__annotations__[k] = v
- else:
- f.__annotations__ = kwargs
- return f
- return dec
-
-
-# Parts below taken from six:
-# Copyright (c) 2010-2013 Benjamin Peterson
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-def with_metaclass(meta, *bases):
- """Create a base class with a metaclass."""
- return meta("_NewBase", bases, {})
+
+
+def annotate(**kwargs):
+ """Python 3 compatible function annotation for Python 2."""
+ if not kwargs:
+ raise ValueError('annotations must be provided as keyword arguments')
+ def dec(f):
+ if hasattr(f, '__annotations__'):
+ for k, v in kwargs.items():
+ f.__annotations__[k] = v
+ else:
+ f.__annotations__ = kwargs
+ return f
+ return dec
+
+
+# Parts below taken from six:
+# Copyright (c) 2010-2013 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ return meta("_NewBase", bases, {})
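
The module above is mostly a table of aliases; the two helpers that are easiest to get wrong are with_metaclass and annotate. A minimal sketch of both, using throwaway names (Meta, Base, add) that are not part of IPython:

    from IPython.utils.py3compat import (with_metaclass, annotate,
                                         string_types, cast_unicode)

    class Meta(type):
        """A do-nothing metaclass, just to show the wiring."""

    # One class statement that picks up the metaclass on both Python 2 and 3.
    class Base(with_metaclass(Meta)):
        pass

    @annotate(x=int, y=int)     # populates __annotations__ even on Python 2
    def add(x, y):
        return x + y

    print(isinstance(Base, Meta))       # True
    print(add.__annotations__)          # {'x': int, 'y': int} (repr varies by version)
    print(isinstance(cast_unicode(b'abc'), string_types))   # True
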
diff --git a/contrib/python/ipython/py2/IPython/utils/rlineimpl.py b/contrib/python/ipython/py2/IPython/utils/rlineimpl.py
index f8c4e84334..e1cf03942c 100644
--- a/contrib/python/ipython/py2/IPython/utils/rlineimpl.py
+++ b/contrib/python/ipython/py2/IPython/utils/rlineimpl.py
@@ -1,74 +1,74 @@
-# -*- coding: utf-8 -*-
-""" Imports and provides the 'correct' version of readline for the platform.
-
-Readline is used throughout IPython as::
-
- import IPython.utils.rlineimpl as readline
-
-In addition to normal readline stuff, this module provides have_readline
-boolean and _outputfile variable used in IPython.utils.
-"""
-
-import sys
-import warnings
-
-_rlmod_names = ['gnureadline', 'readline']
-
-have_readline = False
-for _rlmod_name in _rlmod_names:
- try:
- # import readline as _rl
- _rl = __import__(_rlmod_name)
- # from readline import *
- globals().update({k:v for k,v in _rl.__dict__.items() if not k.startswith('_')})
- except ImportError:
- pass
- else:
- have_readline = True
- break
-
-if have_readline and (sys.platform == 'win32' or sys.platform == 'cli'):
- try:
- _outputfile=_rl.GetOutputFile()
- except AttributeError:
- warnings.warn("Failed GetOutputFile")
- have_readline = False
-
-# Test to see if libedit is being used instead of GNU readline.
-# Thanks to Boyd Waters for the original patch.
-uses_libedit = False
-
-if have_readline:
- # Official Python docs state that 'libedit' is in the docstring for libedit readline:
- uses_libedit = _rl.__doc__ and 'libedit' in _rl.__doc__
- # Note that many non-System Pythons also do not use proper readline,
- # but do not report libedit at all, nor are they linked dynamically against libedit.
- # known culprits of this include: EPD, Fink
- # There is not much we can do to detect this, until we find a specific failure
- # case, rather than relying on the readline module to self-identify as broken.
-
-if uses_libedit and sys.platform == 'darwin':
- _rl.parse_and_bind("bind ^I rl_complete")
- warnings.warn('\n'.join(['', "*"*78,
- "libedit detected - readline will not be well behaved, including but not limited to:",
- " * crashes on tab completion",
- " * incorrect history navigation",
- " * corrupting long-lines",
- " * failure to wrap or indent lines properly",
- "It is highly recommended that you install gnureadline, which is installable with:",
- " pip install gnureadline",
- "*"*78]),
- RuntimeWarning)
-
-# the clear_history() function was only introduced in Python 2.4 and is
-# actually optional in the readline API, so we must explicitly check for its
-# existence. Some known platforms actually don't have it. This thread:
-# http://mail.python.org/pipermail/python-dev/2003-August/037845.html
-# has the original discussion.
-
-if have_readline:
- try:
- _rl.clear_history
- except AttributeError:
- def clear_history(): pass
- _rl.clear_history = clear_history
+# -*- coding: utf-8 -*-
+""" Imports and provides the 'correct' version of readline for the platform.
+
+Readline is used throughout IPython as::
+
+ import IPython.utils.rlineimpl as readline
+
+In addition to normal readline stuff, this module provides have_readline
+boolean and _outputfile variable used in IPython.utils.
+"""
+
+import sys
+import warnings
+
+_rlmod_names = ['gnureadline', 'readline']
+
+have_readline = False
+for _rlmod_name in _rlmod_names:
+ try:
+ # import readline as _rl
+ _rl = __import__(_rlmod_name)
+ # from readline import *
+ globals().update({k:v for k,v in _rl.__dict__.items() if not k.startswith('_')})
+ except ImportError:
+ pass
+ else:
+ have_readline = True
+ break
+
+if have_readline and (sys.platform == 'win32' or sys.platform == 'cli'):
+ try:
+ _outputfile=_rl.GetOutputFile()
+ except AttributeError:
+ warnings.warn("Failed GetOutputFile")
+ have_readline = False
+
+# Test to see if libedit is being used instead of GNU readline.
+# Thanks to Boyd Waters for the original patch.
+uses_libedit = False
+
+if have_readline:
+ # Official Python docs state that 'libedit' is in the docstring for libedit readline:
+ uses_libedit = _rl.__doc__ and 'libedit' in _rl.__doc__
+ # Note that many non-System Pythons also do not use proper readline,
+ # but do not report libedit at all, nor are they linked dynamically against libedit.
+ # known culprits of this include: EPD, Fink
+ # There is not much we can do to detect this, until we find a specific failure
+ # case, rather than relying on the readline module to self-identify as broken.
+
+if uses_libedit and sys.platform == 'darwin':
+ _rl.parse_and_bind("bind ^I rl_complete")
+ warnings.warn('\n'.join(['', "*"*78,
+ "libedit detected - readline will not be well behaved, including but not limited to:",
+ " * crashes on tab completion",
+ " * incorrect history navigation",
+ " * corrupting long-lines",
+ " * failure to wrap or indent lines properly",
+ "It is highly recommended that you install gnureadline, which is installable with:",
+ " pip install gnureadline",
+ "*"*78]),
+ RuntimeWarning)
+
+# the clear_history() function was only introduced in Python 2.4 and is
+# actually optional in the readline API, so we must explicitly check for its
+# existence. Some known platforms actually don't have it. This thread:
+# http://mail.python.org/pipermail/python-dev/2003-August/037845.html
+# has the original discussion.
+
+if have_readline:
+ try:
+ _rl.clear_history
+ except AttributeError:
+ def clear_history(): pass
+ _rl.clear_history = clear_history
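For context, a minimal usage sketch of the module restored above (not part of the diff; the key binding and the fallback branch are illustrative). It follows the import style named in the module docstring:

    import IPython.utils.rlineimpl as readline

    if readline.have_readline:
        readline.parse_and_bind('tab: complete')  # standard readline call, re-exported above
        readline.clear_history()                  # always present; may be the patched-in no-op
    else:
        print('no readline available; line editing will be limited')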
diff --git a/contrib/python/ipython/py2/IPython/utils/sentinel.py b/contrib/python/ipython/py2/IPython/utils/sentinel.py
index 7af2558c1a..dc57a2591c 100644
--- a/contrib/python/ipython/py2/IPython/utils/sentinel.py
+++ b/contrib/python/ipython/py2/IPython/utils/sentinel.py
@@ -1,17 +1,17 @@
-"""Sentinel class for constants with useful reprs"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-class Sentinel(object):
-
- def __init__(self, name, module, docstring=None):
- self.name = name
- self.module = module
- if docstring:
- self.__doc__ = docstring
-
-
- def __repr__(self):
- return str(self.module)+'.'+self.name
-
+"""Sentinel class for constants with useful reprs"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+class Sentinel(object):
+
+ def __init__(self, name, module, docstring=None):
+ self.name = name
+ self.module = module
+ if docstring:
+ self.__doc__ = docstring
+
+
+ def __repr__(self):
+ return str(self.module)+'.'+self.name
+
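A hedged usage sketch of the Sentinel class above (the module and constant names are invented for the example); its only job is to be a unique object with a readable repr:

    from IPython.utils.sentinel import Sentinel

    MISSING = Sentinel('MISSING', 'mymodule', 'Marker for "no value supplied".')

    def lookup(mapping, key, default=MISSING):
        # identity comparison is safe because MISSING is a unique object
        if default is MISSING:
            return mapping[key]
        return mapping.get(key, default)

    print(repr(MISSING))   # -> mymodule.MISSING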
diff --git a/contrib/python/ipython/py2/IPython/utils/shimmodule.py b/contrib/python/ipython/py2/IPython/utils/shimmodule.py
index c2cf6c6de7..8b74f5011a 100644
--- a/contrib/python/ipython/py2/IPython/utils/shimmodule.py
+++ b/contrib/python/ipython/py2/IPython/utils/shimmodule.py
@@ -1,92 +1,92 @@
-"""A shim module for deprecated imports
-"""
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-import sys
-import types
-
-from .importstring import import_item
-
-class ShimWarning(Warning):
- """A warning to show when a module has moved, and a shim is in its place."""
-
-class ShimImporter(object):
- """Import hook for a shim.
-
- This ensures that submodule imports return the real target module,
- not a clone that will confuse `is` and `isinstance` checks.
- """
- def __init__(self, src, mirror):
- self.src = src
- self.mirror = mirror
-
- def _mirror_name(self, fullname):
- """get the name of the mirrored module"""
-
- return self.mirror + fullname[len(self.src):]
-
- def find_module(self, fullname, path=None):
- """Return self if we should be used to import the module."""
- if fullname.startswith(self.src + '.'):
- mirror_name = self._mirror_name(fullname)
- try:
- mod = import_item(mirror_name)
- except ImportError:
- return
- else:
- if not isinstance(mod, types.ModuleType):
- # not a module
- return None
- return self
-
- def load_module(self, fullname):
- """Import the mirrored module, and insert it into sys.modules"""
- mirror_name = self._mirror_name(fullname)
- mod = import_item(mirror_name)
- sys.modules[fullname] = mod
- return mod
-
-
-class ShimModule(types.ModuleType):
-
- def __init__(self, *args, **kwargs):
- self._mirror = kwargs.pop("mirror")
- src = kwargs.pop("src", None)
- if src:
- kwargs['name'] = src.rsplit('.', 1)[-1]
- super(ShimModule, self).__init__(*args, **kwargs)
-        # add import hook for descendant modules
- if src:
- sys.meta_path.append(
- ShimImporter(src=src, mirror=self._mirror)
- )
-
- @property
- def __path__(self):
- return []
-
- @property
- def __spec__(self):
- """Don't produce __spec__ until requested"""
- return __import__(self._mirror).__spec__
-
- def __dir__(self):
- return dir(__import__(self._mirror))
-
- @property
- def __all__(self):
- """Ensure __all__ is always defined"""
- mod = __import__(self._mirror)
- try:
- return mod.__all__
- except AttributeError:
- return [name for name in dir(mod) if not name.startswith('_')]
-
- def __getattr__(self, key):
- # Use the equivalent of import_item(name), see below
- name = "%s.%s" % (self._mirror, key)
- try:
- return import_item(name)
- except ImportError:
- raise AttributeError(key)
+"""A shim module for deprecated imports
+"""
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import sys
+import types
+
+from .importstring import import_item
+
+class ShimWarning(Warning):
+ """A warning to show when a module has moved, and a shim is in its place."""
+
+class ShimImporter(object):
+ """Import hook for a shim.
+
+ This ensures that submodule imports return the real target module,
+ not a clone that will confuse `is` and `isinstance` checks.
+ """
+ def __init__(self, src, mirror):
+ self.src = src
+ self.mirror = mirror
+
+ def _mirror_name(self, fullname):
+ """get the name of the mirrored module"""
+
+ return self.mirror + fullname[len(self.src):]
+
+ def find_module(self, fullname, path=None):
+ """Return self if we should be used to import the module."""
+ if fullname.startswith(self.src + '.'):
+ mirror_name = self._mirror_name(fullname)
+ try:
+ mod = import_item(mirror_name)
+ except ImportError:
+ return
+ else:
+ if not isinstance(mod, types.ModuleType):
+ # not a module
+ return None
+ return self
+
+ def load_module(self, fullname):
+ """Import the mirrored module, and insert it into sys.modules"""
+ mirror_name = self._mirror_name(fullname)
+ mod = import_item(mirror_name)
+ sys.modules[fullname] = mod
+ return mod
+
+
+class ShimModule(types.ModuleType):
+
+ def __init__(self, *args, **kwargs):
+ self._mirror = kwargs.pop("mirror")
+ src = kwargs.pop("src", None)
+ if src:
+ kwargs['name'] = src.rsplit('.', 1)[-1]
+ super(ShimModule, self).__init__(*args, **kwargs)
+        # add import hook for descendant modules
+ if src:
+ sys.meta_path.append(
+ ShimImporter(src=src, mirror=self._mirror)
+ )
+
+ @property
+ def __path__(self):
+ return []
+
+ @property
+ def __spec__(self):
+ """Don't produce __spec__ until requested"""
+ return __import__(self._mirror).__spec__
+
+ def __dir__(self):
+ return dir(__import__(self._mirror))
+
+ @property
+ def __all__(self):
+ """Ensure __all__ is always defined"""
+ mod = __import__(self._mirror)
+ try:
+ return mod.__all__
+ except AttributeError:
+ return [name for name in dir(mod) if not name.startswith('_')]
+
+ def __getattr__(self, key):
+ # Use the equivalent of import_item(name), see below
+ name = "%s.%s" % (self._mirror, key)
+ try:
+ return import_item(name)
+ except ImportError:
+ raise AttributeError(key)
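A short sketch of how a shim is typically wired up (the package names old_pkg and new_pkg are placeholders): the deprecated import path is replaced in sys.modules by a ShimModule, and the ShimImporter registered in its constructor redirects submodule imports as well.

    import sys
    import warnings
    from IPython.utils.shimmodule import ShimModule, ShimWarning

    warnings.warn("old_pkg has moved to new_pkg", ShimWarning)
    sys.modules['old_pkg'] = ShimModule(src='old_pkg', mirror='new_pkg')

    # From here on, `import old_pkg` returns the shim, `old_pkg.thing` resolves
    # to `new_pkg.thing`, and `import old_pkg.sub` loads `new_pkg.sub`.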
diff --git a/contrib/python/ipython/py2/IPython/utils/signatures.py b/contrib/python/ipython/py2/IPython/utils/signatures.py
index 4d0eb74a7e..dedc51cfda 100644
--- a/contrib/python/ipython/py2/IPython/utils/signatures.py
+++ b/contrib/python/ipython/py2/IPython/utils/signatures.py
@@ -1,11 +1,11 @@
-"""Function signature objects for callables.
-
-Use the standard library version if available, as it is more up to date.
-Fallback on backport otherwise.
-"""
-
-
-try:
- from inspect import BoundArguments, Parameter, Signature, signature
-except ImportError:
- from ._signatures import BoundArguments, Parameter, Signature, signature
+"""Function signature objects for callables.
+
+Use the standard library version if available, as it is more up to date.
+Fallback on backport otherwise.
+"""
+
+
+try:
+ from inspect import BoundArguments, Parameter, Signature, signature
+except ImportError:
+ from ._signatures import BoundArguments, Parameter, Signature, signature
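Usage is identical whichever branch of the import succeeds; a small sketch:

    from IPython.utils.signatures import signature

    def greet(name, punctuation='!'):
        return name + punctuation

    sig = signature(greet)
    print(sig)                   # (name, punctuation='!')
    print(list(sig.parameters))  # ['name', 'punctuation']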
diff --git a/contrib/python/ipython/py2/IPython/utils/strdispatch.py b/contrib/python/ipython/py2/IPython/utils/strdispatch.py
index a6183404e7..d6bf510535 100644
--- a/contrib/python/ipython/py2/IPython/utils/strdispatch.py
+++ b/contrib/python/ipython/py2/IPython/utils/strdispatch.py
@@ -1,68 +1,68 @@
-"""String dispatch class to match regexps and dispatch commands.
-"""
-
-# Stdlib imports
-import re
-
-# Our own modules
-from IPython.core.hooks import CommandChainDispatcher
-
-# Code begins
-class StrDispatch(object):
- """Dispatch (lookup) a set of strings / regexps for match.
-
- Example:
-
- >>> dis = StrDispatch()
- >>> dis.add_s('hei',34, priority = 4)
- >>> dis.add_s('hei',123, priority = 2)
- >>> dis.add_re('h.i', 686)
- >>> print(list(dis.flat_matches('hei')))
- [123, 34, 686]
- """
-
- def __init__(self):
- self.strs = {}
- self.regexs = {}
-
- def add_s(self, s, obj, priority= 0 ):
- """ Adds a target 'string' for dispatching """
-
- chain = self.strs.get(s, CommandChainDispatcher())
- chain.add(obj,priority)
- self.strs[s] = chain
-
- def add_re(self, regex, obj, priority= 0 ):
- """ Adds a target regexp for dispatching """
-
- chain = self.regexs.get(regex, CommandChainDispatcher())
- chain.add(obj,priority)
- self.regexs[regex] = chain
-
- def dispatch(self, key):
- """ Get a seq of Commandchain objects that match key """
- if key in self.strs:
- yield self.strs[key]
-
- for r, obj in self.regexs.items():
- if re.match(r, key):
- yield obj
- else:
- #print "nomatch",key # dbg
- pass
-
- def __repr__(self):
- return "<Strdispatch %s, %s>" % (self.strs, self.regexs)
-
- def s_matches(self, key):
- if key not in self.strs:
- return
- for el in self.strs[key]:
- yield el[1]
-
- def flat_matches(self, key):
- """ Yield all 'value' targets, without priority """
- for val in self.dispatch(key):
- for el in val:
- yield el[1] # only value, no priority
- return
+"""String dispatch class to match regexps and dispatch commands.
+"""
+
+# Stdlib imports
+import re
+
+# Our own modules
+from IPython.core.hooks import CommandChainDispatcher
+
+# Code begins
+class StrDispatch(object):
+ """Dispatch (lookup) a set of strings / regexps for match.
+
+ Example:
+
+ >>> dis = StrDispatch()
+ >>> dis.add_s('hei',34, priority = 4)
+ >>> dis.add_s('hei',123, priority = 2)
+ >>> dis.add_re('h.i', 686)
+ >>> print(list(dis.flat_matches('hei')))
+ [123, 34, 686]
+ """
+
+ def __init__(self):
+ self.strs = {}
+ self.regexs = {}
+
+ def add_s(self, s, obj, priority= 0 ):
+ """ Adds a target 'string' for dispatching """
+
+ chain = self.strs.get(s, CommandChainDispatcher())
+ chain.add(obj,priority)
+ self.strs[s] = chain
+
+ def add_re(self, regex, obj, priority= 0 ):
+ """ Adds a target regexp for dispatching """
+
+ chain = self.regexs.get(regex, CommandChainDispatcher())
+ chain.add(obj,priority)
+ self.regexs[regex] = chain
+
+ def dispatch(self, key):
+ """ Get a seq of Commandchain objects that match key """
+ if key in self.strs:
+ yield self.strs[key]
+
+ for r, obj in self.regexs.items():
+ if re.match(r, key):
+ yield obj
+ else:
+ #print "nomatch",key # dbg
+ pass
+
+ def __repr__(self):
+ return "<Strdispatch %s, %s>" % (self.strs, self.regexs)
+
+ def s_matches(self, key):
+ if key not in self.strs:
+ return
+ for el in self.strs[key]:
+ yield el[1]
+
+ def flat_matches(self, key):
+ """ Yield all 'value' targets, without priority """
+ for val in self.dispatch(key):
+ for el in val:
+ yield el[1] # only value, no priority
+ return
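Beyond the doctest in the class docstring, a brief sketch of the two lookup paths (the handler values here are plain strings for illustration): s_matches() consults only exact-string entries, while flat_matches() also runs the registered regexps.

    from IPython.utils.strdispatch import StrDispatch

    dis = StrDispatch()
    dis.add_s('cd', 'cd_handler', priority=1)
    dis.add_re(r'%\w+', 'magic_handler')

    print(list(dis.s_matches('cd')))         # ['cd_handler']
    print(list(dis.flat_matches('%time')))   # ['magic_handler']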
diff --git a/contrib/python/ipython/py2/IPython/utils/sysinfo.py b/contrib/python/ipython/py2/IPython/utils/sysinfo.py
index 51ca68d9cf..db7f2914d4 100644
--- a/contrib/python/ipython/py2/IPython/utils/sysinfo.py
+++ b/contrib/python/ipython/py2/IPython/utils/sysinfo.py
@@ -1,167 +1,167 @@
-# encoding: utf-8
-"""
-Utilities for getting information about IPython and the system it's running in.
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import os
-import platform
-import pprint
-import sys
-import subprocess
-
-from IPython.core import release
-from IPython.utils import py3compat, _sysinfo, encoding
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-def pkg_commit_hash(pkg_path):
- """Get short form of commit hash given directory `pkg_path`
-
- We get the commit hash from (in order of preference):
-
- * IPython.utils._sysinfo.commit
- * git output, if we are in a git repository
-
- If these fail, we return a not-found placeholder tuple
-
- Parameters
- ----------
- pkg_path : str
- directory containing package
- only used for getting commit from active repo
-
- Returns
- -------
- hash_from : str
- Where we got the hash from - description
- hash_str : str
- short form of hash
- """
- # Try and get commit from written commit text file
- if _sysinfo.commit:
- return "installation", _sysinfo.commit
-
- # maybe we are in a repository
- proc = subprocess.Popen('git rev-parse --short HEAD',
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- cwd=pkg_path, shell=True)
- repo_commit, _ = proc.communicate()
- if repo_commit:
- return 'repository', repo_commit.strip().decode('ascii')
- return '(none found)', u'<not found>'
-
-
-def pkg_info(pkg_path):
- """Return dict describing the context of this package
-
- Parameters
- ----------
- pkg_path : str
- path containing __init__.py for package
-
- Returns
- -------
- context : dict
- with named parameters of interest
- """
- src, hsh = pkg_commit_hash(pkg_path)
- return dict(
- ipython_version=release.version,
- ipython_path=pkg_path,
- commit_source=src,
- commit_hash=hsh,
- sys_version=sys.version,
- sys_executable=sys.executable,
- sys_platform=sys.platform,
- platform=platform.platform(),
- os_name=os.name,
- default_encoding=encoding.DEFAULT_ENCODING,
- )
-
-def get_sys_info():
- """Return useful information about IPython and the system, as a dict."""
- p = os.path
- path = p.realpath(p.dirname(p.abspath(p.join(__file__, '..'))))
- return pkg_info(path)
-
-@py3compat.doctest_refactor_print
-def sys_info():
- """Return useful information about IPython and the system, as a string.
-
- Examples
- --------
- ::
-
- In [2]: print sys_info()
- {'commit_hash': '144fdae', # random
- 'commit_source': 'repository',
- 'ipython_path': '/home/fperez/usr/lib/python2.6/site-packages/IPython',
- 'ipython_version': '0.11.dev',
- 'os_name': 'posix',
- 'platform': 'Linux-2.6.35-22-generic-i686-with-Ubuntu-10.10-maverick',
- 'sys_executable': '/usr/bin/python',
- 'sys_platform': 'linux2',
- 'sys_version': '2.6.6 (r266:84292, Sep 15 2010, 15:52:39) \\n[GCC 4.4.5]'}
- """
- return pprint.pformat(get_sys_info())
-
-def _num_cpus_unix():
- """Return the number of active CPUs on a Unix system."""
- return os.sysconf("SC_NPROCESSORS_ONLN")
-
-
-def _num_cpus_darwin():
- """Return the number of active CPUs on a Darwin system."""
- p = subprocess.Popen(['sysctl','-n','hw.ncpu'],stdout=subprocess.PIPE)
- return p.stdout.read()
-
-
-def _num_cpus_windows():
- """Return the number of active CPUs on a Windows system."""
- return os.environ.get("NUMBER_OF_PROCESSORS")
-
-
-def num_cpus():
- """Return the effective number of CPUs in the system as an integer.
-
- This cross-platform function makes an attempt at finding the total number of
- available CPUs in the system, as returned by various underlying system and
- python calls.
-
- If it can't find a sensible answer, it returns 1 (though an error *may* make
- it return a large positive number that's actually incorrect).
- """
-
- # Many thanks to the Parallel Python project (http://www.parallelpython.com)
- # for the names of the keys we needed to look up for this function. This
- # code was inspired by their equivalent function.
-
- ncpufuncs = {'Linux':_num_cpus_unix,
- 'Darwin':_num_cpus_darwin,
- 'Windows':_num_cpus_windows
- }
-
- ncpufunc = ncpufuncs.get(platform.system(),
- # default to unix version (Solaris, AIX, etc)
- _num_cpus_unix)
-
- try:
- ncpus = max(1,int(ncpufunc()))
- except:
- ncpus = 1
- return ncpus
-
+# encoding: utf-8
+"""
+Utilities for getting information about IPython and the system it's running in.
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import os
+import platform
+import pprint
+import sys
+import subprocess
+
+from IPython.core import release
+from IPython.utils import py3compat, _sysinfo, encoding
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+def pkg_commit_hash(pkg_path):
+ """Get short form of commit hash given directory `pkg_path`
+
+ We get the commit hash from (in order of preference):
+
+ * IPython.utils._sysinfo.commit
+ * git output, if we are in a git repository
+
+ If these fail, we return a not-found placeholder tuple
+
+ Parameters
+ ----------
+ pkg_path : str
+ directory containing package
+ only used for getting commit from active repo
+
+ Returns
+ -------
+ hash_from : str
+ Where we got the hash from - description
+ hash_str : str
+ short form of hash
+ """
+ # Try and get commit from written commit text file
+ if _sysinfo.commit:
+ return "installation", _sysinfo.commit
+
+ # maybe we are in a repository
+ proc = subprocess.Popen('git rev-parse --short HEAD',
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=pkg_path, shell=True)
+ repo_commit, _ = proc.communicate()
+ if repo_commit:
+ return 'repository', repo_commit.strip().decode('ascii')
+ return '(none found)', u'<not found>'
+
+
+def pkg_info(pkg_path):
+ """Return dict describing the context of this package
+
+ Parameters
+ ----------
+ pkg_path : str
+ path containing __init__.py for package
+
+ Returns
+ -------
+ context : dict
+ with named parameters of interest
+ """
+ src, hsh = pkg_commit_hash(pkg_path)
+ return dict(
+ ipython_version=release.version,
+ ipython_path=pkg_path,
+ commit_source=src,
+ commit_hash=hsh,
+ sys_version=sys.version,
+ sys_executable=sys.executable,
+ sys_platform=sys.platform,
+ platform=platform.platform(),
+ os_name=os.name,
+ default_encoding=encoding.DEFAULT_ENCODING,
+ )
+
+def get_sys_info():
+ """Return useful information about IPython and the system, as a dict."""
+ p = os.path
+ path = p.realpath(p.dirname(p.abspath(p.join(__file__, '..'))))
+ return pkg_info(path)
+
+@py3compat.doctest_refactor_print
+def sys_info():
+ """Return useful information about IPython and the system, as a string.
+
+ Examples
+ --------
+ ::
+
+ In [2]: print sys_info()
+ {'commit_hash': '144fdae', # random
+ 'commit_source': 'repository',
+ 'ipython_path': '/home/fperez/usr/lib/python2.6/site-packages/IPython',
+ 'ipython_version': '0.11.dev',
+ 'os_name': 'posix',
+ 'platform': 'Linux-2.6.35-22-generic-i686-with-Ubuntu-10.10-maverick',
+ 'sys_executable': '/usr/bin/python',
+ 'sys_platform': 'linux2',
+ 'sys_version': '2.6.6 (r266:84292, Sep 15 2010, 15:52:39) \\n[GCC 4.4.5]'}
+ """
+ return pprint.pformat(get_sys_info())
+
+def _num_cpus_unix():
+ """Return the number of active CPUs on a Unix system."""
+ return os.sysconf("SC_NPROCESSORS_ONLN")
+
+
+def _num_cpus_darwin():
+ """Return the number of active CPUs on a Darwin system."""
+ p = subprocess.Popen(['sysctl','-n','hw.ncpu'],stdout=subprocess.PIPE)
+ return p.stdout.read()
+
+
+def _num_cpus_windows():
+ """Return the number of active CPUs on a Windows system."""
+ return os.environ.get("NUMBER_OF_PROCESSORS")
+
+
+def num_cpus():
+ """Return the effective number of CPUs in the system as an integer.
+
+ This cross-platform function makes an attempt at finding the total number of
+ available CPUs in the system, as returned by various underlying system and
+ python calls.
+
+ If it can't find a sensible answer, it returns 1 (though an error *may* make
+ it return a large positive number that's actually incorrect).
+ """
+
+ # Many thanks to the Parallel Python project (http://www.parallelpython.com)
+ # for the names of the keys we needed to look up for this function. This
+ # code was inspired by their equivalent function.
+
+ ncpufuncs = {'Linux':_num_cpus_unix,
+ 'Darwin':_num_cpus_darwin,
+ 'Windows':_num_cpus_windows
+ }
+
+ ncpufunc = ncpufuncs.get(platform.system(),
+ # default to unix version (Solaris, AIX, etc)
+ _num_cpus_unix)
+
+ try:
+ ncpus = max(1,int(ncpufunc()))
+ except:
+ ncpus = 1
+ return ncpus
+
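A quick usage sketch for the helpers above (the output noted in comments is illustrative):

    from IPython.utils.sysinfo import sys_info, num_cpus

    print(sys_info())    # pretty-printed dict: commit hash/source, IPython path,
                         # Python version, platform, default encoding, ...
    print(num_cpus())    # e.g. 8; falls back to 1 if detection fails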
diff --git a/contrib/python/ipython/py2/IPython/utils/syspathcontext.py b/contrib/python/ipython/py2/IPython/utils/syspathcontext.py
index fdcfbbee35..89612038ff 100644
--- a/contrib/python/ipython/py2/IPython/utils/syspathcontext.py
+++ b/contrib/python/ipython/py2/IPython/utils/syspathcontext.py
@@ -1,71 +1,71 @@
-# encoding: utf-8
-"""
-Context managers for adding things to sys.path temporarily.
-
-Authors:
-
-* Brian Granger
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import sys
-
-from IPython.utils.py3compat import cast_bytes_py2
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-class appended_to_syspath(object):
- """A context for appending a directory to sys.path for a second."""
-
- def __init__(self, dir):
- self.dir = cast_bytes_py2(dir, sys.getdefaultencoding())
-
- def __enter__(self):
- if self.dir not in sys.path:
- sys.path.append(self.dir)
- self.added = True
- else:
- self.added = False
-
- def __exit__(self, type, value, traceback):
- if self.added:
- try:
- sys.path.remove(self.dir)
- except ValueError:
- pass
- # Returning False causes any exceptions to be re-raised.
- return False
-
-class prepended_to_syspath(object):
- """A context for prepending a directory to sys.path for a second."""
-
- def __init__(self, dir):
- self.dir = cast_bytes_py2(dir, sys.getdefaultencoding())
-
- def __enter__(self):
- if self.dir not in sys.path:
- sys.path.insert(0,self.dir)
- self.added = True
- else:
- self.added = False
-
- def __exit__(self, type, value, traceback):
- if self.added:
- try:
- sys.path.remove(self.dir)
- except ValueError:
- pass
- # Returning False causes any exceptions to be re-raised.
- return False
+# encoding: utf-8
+"""
+Context managers for adding things to sys.path temporarily.
+
+Authors:
+
+* Brian Granger
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import sys
+
+from IPython.utils.py3compat import cast_bytes_py2
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+class appended_to_syspath(object):
+ """A context for appending a directory to sys.path for a second."""
+
+ def __init__(self, dir):
+ self.dir = cast_bytes_py2(dir, sys.getdefaultencoding())
+
+ def __enter__(self):
+ if self.dir not in sys.path:
+ sys.path.append(self.dir)
+ self.added = True
+ else:
+ self.added = False
+
+ def __exit__(self, type, value, traceback):
+ if self.added:
+ try:
+ sys.path.remove(self.dir)
+ except ValueError:
+ pass
+ # Returning False causes any exceptions to be re-raised.
+ return False
+
+class prepended_to_syspath(object):
+ """A context for prepending a directory to sys.path for a second."""
+
+ def __init__(self, dir):
+ self.dir = cast_bytes_py2(dir, sys.getdefaultencoding())
+
+ def __enter__(self):
+ if self.dir not in sys.path:
+ sys.path.insert(0,self.dir)
+ self.added = True
+ else:
+ self.added = False
+
+ def __exit__(self, type, value, traceback):
+ if self.added:
+ try:
+ sys.path.remove(self.dir)
+ except ValueError:
+ pass
+ # Returning False causes any exceptions to be re-raised.
+ return False
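Typical use of the context managers above: a directory sits on sys.path only for the duration of the block ('/tmp/plugins' and my_plugin are placeholders).

    from IPython.utils.syspathcontext import prepended_to_syspath

    with prepended_to_syspath('/tmp/plugins'):
        import my_plugin    # hypothetical module living in /tmp/plugins
    # sys.path is restored here; exceptions raised inside the block still propagate.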
diff --git a/contrib/python/ipython/py2/IPython/utils/tempdir.py b/contrib/python/ipython/py2/IPython/utils/tempdir.py
index 909205e192..951abd65c9 100644
--- a/contrib/python/ipython/py2/IPython/utils/tempdir.py
+++ b/contrib/python/ipython/py2/IPython/utils/tempdir.py
@@ -1,145 +1,145 @@
-"""TemporaryDirectory class, copied from Python 3.2.
-
-This is copied from the stdlib and will be standard in Python 3.2 and onwards.
-"""
-from __future__ import print_function
-
-import os as _os
-import warnings as _warnings
-import sys as _sys
-
-# This code should only be used in Python versions < 3.2, since after that we
-# can rely on the stdlib itself.
-try:
- from tempfile import TemporaryDirectory
-
-except ImportError:
- from tempfile import mkdtemp, template
-
- class TemporaryDirectory(object):
- """Create and return a temporary directory. This has the same
- behavior as mkdtemp but can be used as a context manager. For
- example:
-
- with TemporaryDirectory() as tmpdir:
- ...
-
-        Upon exiting the context, the directory and everything contained
- in it are removed.
- """
-
- def __init__(self, suffix="", prefix=template, dir=None):
- self.name = mkdtemp(suffix, prefix, dir)
- self._closed = False
-
- def __enter__(self):
- return self.name
-
- def cleanup(self, _warn=False):
- if self.name and not self._closed:
- try:
- self._rmtree(self.name)
- except (TypeError, AttributeError) as ex:
- # Issue #10188: Emit a warning on stderr
- # if the directory could not be cleaned
- # up due to missing globals
- if "None" not in str(ex):
- raise
- print("ERROR: {!r} while cleaning up {!r}".format(ex, self,),
- file=_sys.stderr)
- return
- self._closed = True
- if _warn:
- self._warn("Implicitly cleaning up {!r}".format(self),
- Warning)
-
- def __exit__(self, exc, value, tb):
- self.cleanup()
-
- def __del__(self):
- # Issue a ResourceWarning if implicit cleanup needed
- self.cleanup(_warn=True)
-
-
- # XXX (ncoghlan): The following code attempts to make
- # this class tolerant of the module nulling out process
- # that happens during CPython interpreter shutdown
- # Alas, it doesn't actually manage it. See issue #10188
- _listdir = staticmethod(_os.listdir)
- _path_join = staticmethod(_os.path.join)
- _isdir = staticmethod(_os.path.isdir)
- _remove = staticmethod(_os.remove)
- _rmdir = staticmethod(_os.rmdir)
- _os_error = _os.error
- _warn = _warnings.warn
-
- def _rmtree(self, path):
- # Essentially a stripped down version of shutil.rmtree. We can't
- # use globals because they may be None'ed out at shutdown.
- for name in self._listdir(path):
- fullname = self._path_join(path, name)
- try:
- isdir = self._isdir(fullname)
- except self._os_error:
- isdir = False
- if isdir:
- self._rmtree(fullname)
- else:
- try:
- self._remove(fullname)
- except self._os_error:
- pass
- try:
- self._rmdir(path)
- except self._os_error:
- pass
-
-
-class NamedFileInTemporaryDirectory(object):
-
- def __init__(self, filename, mode='w+b', bufsize=-1, **kwds):
- """
- Open a file named `filename` in a temporary directory.
-
- This context manager is preferred over `NamedTemporaryFile` in
- stdlib `tempfile` when one needs to reopen the file.
-
- Arguments `mode` and `bufsize` are passed to `open`.
-        The rest of the arguments are passed to `TemporaryDirectory`.
-
- """
- self._tmpdir = TemporaryDirectory(**kwds)
- path = _os.path.join(self._tmpdir.name, filename)
- self.file = open(path, mode, bufsize)
-
- def cleanup(self):
- self.file.close()
- self._tmpdir.cleanup()
-
- __del__ = cleanup
-
- def __enter__(self):
- return self.file
-
- def __exit__(self, type, value, traceback):
- self.cleanup()
-
-
-class TemporaryWorkingDirectory(TemporaryDirectory):
- """
- Creates a temporary directory and sets the cwd to that directory.
- Automatically reverts to previous cwd upon cleanup.
- Usage example:
-
- with TemporaryWorkingDirectory() as tmpdir:
- ...
- """
- def __enter__(self):
- self.old_wd = _os.getcwd()
- _os.chdir(self.name)
- return super(TemporaryWorkingDirectory, self).__enter__()
-
- def __exit__(self, exc, value, tb):
- _os.chdir(self.old_wd)
- return super(TemporaryWorkingDirectory, self).__exit__(exc, value, tb)
-
+"""TemporaryDirectory class, copied from Python 3.2.
+
+This is copied from the stdlib and will be standard in Python 3.2 and onwards.
+"""
+from __future__ import print_function
+
+import os as _os
+import warnings as _warnings
+import sys as _sys
+
+# This code should only be used in Python versions < 3.2, since after that we
+# can rely on the stdlib itself.
+try:
+ from tempfile import TemporaryDirectory
+
+except ImportError:
+ from tempfile import mkdtemp, template
+
+ class TemporaryDirectory(object):
+ """Create and return a temporary directory. This has the same
+ behavior as mkdtemp but can be used as a context manager. For
+ example:
+
+ with TemporaryDirectory() as tmpdir:
+ ...
+
+        Upon exiting the context, the directory and everything contained
+ in it are removed.
+ """
+
+ def __init__(self, suffix="", prefix=template, dir=None):
+ self.name = mkdtemp(suffix, prefix, dir)
+ self._closed = False
+
+ def __enter__(self):
+ return self.name
+
+ def cleanup(self, _warn=False):
+ if self.name and not self._closed:
+ try:
+ self._rmtree(self.name)
+ except (TypeError, AttributeError) as ex:
+ # Issue #10188: Emit a warning on stderr
+ # if the directory could not be cleaned
+ # up due to missing globals
+ if "None" not in str(ex):
+ raise
+ print("ERROR: {!r} while cleaning up {!r}".format(ex, self,),
+ file=_sys.stderr)
+ return
+ self._closed = True
+ if _warn:
+ self._warn("Implicitly cleaning up {!r}".format(self),
+ Warning)
+
+ def __exit__(self, exc, value, tb):
+ self.cleanup()
+
+ def __del__(self):
+ # Issue a ResourceWarning if implicit cleanup needed
+ self.cleanup(_warn=True)
+
+
+ # XXX (ncoghlan): The following code attempts to make
+ # this class tolerant of the module nulling out process
+ # that happens during CPython interpreter shutdown
+ # Alas, it doesn't actually manage it. See issue #10188
+ _listdir = staticmethod(_os.listdir)
+ _path_join = staticmethod(_os.path.join)
+ _isdir = staticmethod(_os.path.isdir)
+ _remove = staticmethod(_os.remove)
+ _rmdir = staticmethod(_os.rmdir)
+ _os_error = _os.error
+ _warn = _warnings.warn
+
+ def _rmtree(self, path):
+ # Essentially a stripped down version of shutil.rmtree. We can't
+ # use globals because they may be None'ed out at shutdown.
+ for name in self._listdir(path):
+ fullname = self._path_join(path, name)
+ try:
+ isdir = self._isdir(fullname)
+ except self._os_error:
+ isdir = False
+ if isdir:
+ self._rmtree(fullname)
+ else:
+ try:
+ self._remove(fullname)
+ except self._os_error:
+ pass
+ try:
+ self._rmdir(path)
+ except self._os_error:
+ pass
+
+
+class NamedFileInTemporaryDirectory(object):
+
+ def __init__(self, filename, mode='w+b', bufsize=-1, **kwds):
+ """
+ Open a file named `filename` in a temporary directory.
+
+ This context manager is preferred over `NamedTemporaryFile` in
+ stdlib `tempfile` when one needs to reopen the file.
+
+ Arguments `mode` and `bufsize` are passed to `open`.
+        The rest of the arguments are passed to `TemporaryDirectory`.
+
+ """
+ self._tmpdir = TemporaryDirectory(**kwds)
+ path = _os.path.join(self._tmpdir.name, filename)
+ self.file = open(path, mode, bufsize)
+
+ def cleanup(self):
+ self.file.close()
+ self._tmpdir.cleanup()
+
+ __del__ = cleanup
+
+ def __enter__(self):
+ return self.file
+
+ def __exit__(self, type, value, traceback):
+ self.cleanup()
+
+
+class TemporaryWorkingDirectory(TemporaryDirectory):
+ """
+ Creates a temporary directory and sets the cwd to that directory.
+ Automatically reverts to previous cwd upon cleanup.
+ Usage example:
+
+ with TemporaryWorkingDirectory() as tmpdir:
+ ...
+ """
+ def __enter__(self):
+ self.old_wd = _os.getcwd()
+ _os.chdir(self.name)
+ return super(TemporaryWorkingDirectory, self).__enter__()
+
+ def __exit__(self, exc, value, tb):
+ _os.chdir(self.old_wd)
+ return super(TemporaryWorkingDirectory, self).__exit__(exc, value, tb)
+
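A usage sketch for the two higher-level context managers defined above:

    from IPython.utils.tempdir import (NamedFileInTemporaryDirectory,
                                       TemporaryWorkingDirectory)

    with TemporaryWorkingDirectory() as tmpdir:
        # cwd is now tmpdir; the directory and the cwd change are undone on exit
        with open('scratch.txt', 'w') as f:
            f.write('hello')

    with NamedFileInTemporaryDirectory('notebook.ipynb', mode='w') as f:
        f.write('{}')       # f.name can be reopened while the context is active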
diff --git a/contrib/python/ipython/py2/IPython/utils/terminal.py b/contrib/python/ipython/py2/IPython/utils/terminal.py
index 833debce41..e92c410c79 100644
--- a/contrib/python/ipython/py2/IPython/utils/terminal.py
+++ b/contrib/python/ipython/py2/IPython/utils/terminal.py
@@ -1,22 +1,22 @@
-# encoding: utf-8
-"""
-Utilities for working with terminals.
-
-Authors:
-
-* Brian E. Granger
-* Fernando Perez
-* Alexander Belchenko (e-mail: bialix AT ukr.net)
-"""
-
+# encoding: utf-8
+"""
+Utilities for working with terminals.
+
+Authors:
+
+* Brian E. Granger
+* Fernando Perez
+* Alexander Belchenko (e-mail: bialix AT ukr.net)
+"""
+
from __future__ import absolute_import
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
-
-import os
-import sys
-import warnings
+
+import os
+import sys
+import warnings
try:
from shutil import get_terminal_size as _get_terminal_size
except ImportError:
@@ -25,101 +25,101 @@ except ImportError:
from backports.shutil_get_terminal_size import get_terminal_size as _get_terminal_size
except ImportError:
from ._get_terminal_size import get_terminal_size as _get_terminal_size
-
-from . import py3compat
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-# This variable is part of the expected API of the module:
-ignore_termtitle = True
-
-
-
-if os.name == 'posix':
- def _term_clear():
- os.system('clear')
-elif sys.platform == 'win32':
- def _term_clear():
- os.system('cls')
-else:
- def _term_clear():
- pass
-
-
-
-def toggle_set_term_title(val):
- """Control whether set_term_title is active or not.
-
- set_term_title() allows writing to the console titlebar. In embedded
- widgets this can cause problems, so this call can be used to toggle it on
- or off as needed.
-
- The default state of the module is for the function to be disabled.
-
- Parameters
- ----------
- val : bool
- If True, set_term_title() actually writes to the terminal (using the
- appropriate platform-specific module). If False, it is a no-op.
- """
- global ignore_termtitle
- ignore_termtitle = not(val)
-
-
-def _set_term_title(*args,**kw):
- """Dummy no-op."""
- pass
-
-
-def _set_term_title_xterm(title):
- """ Change virtual terminal title in xterm-workalikes """
- sys.stdout.write('\033]0;%s\007' % title)
-
-if os.name == 'posix':
- TERM = os.environ.get('TERM','')
- if TERM.startswith('xterm'):
- _set_term_title = _set_term_title_xterm
-elif sys.platform == 'win32':
- try:
- import ctypes
-
- SetConsoleTitleW = ctypes.windll.kernel32.SetConsoleTitleW
- SetConsoleTitleW.argtypes = [ctypes.c_wchar_p]
-
- def _set_term_title(title):
- """Set terminal title using ctypes to access the Win32 APIs."""
- SetConsoleTitleW(title)
- except ImportError:
- def _set_term_title(title):
- """Set terminal title using the 'title' command."""
- global ignore_termtitle
-
- try:
- # Cannot be on network share when issuing system commands
- curr = py3compat.getcwd()
- os.chdir("C:")
- ret = os.system("title " + title)
- finally:
- os.chdir(curr)
- if ret:
- # non-zero return code signals error, don't try again
- ignore_termtitle = True
-
-
-def set_term_title(title):
- """Set terminal title using the necessary platform-dependent calls."""
- if ignore_termtitle:
- return
- _set_term_title(title)
-
-
-def freeze_term_title():
- warnings.warn("This function is deprecated, use toggle_set_term_title()")
- global ignore_termtitle
- ignore_termtitle = True
-
-
+
+from . import py3compat
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+# This variable is part of the expected API of the module:
+ignore_termtitle = True
+
+
+
+if os.name == 'posix':
+ def _term_clear():
+ os.system('clear')
+elif sys.platform == 'win32':
+ def _term_clear():
+ os.system('cls')
+else:
+ def _term_clear():
+ pass
+
+
+
+def toggle_set_term_title(val):
+ """Control whether set_term_title is active or not.
+
+ set_term_title() allows writing to the console titlebar. In embedded
+ widgets this can cause problems, so this call can be used to toggle it on
+ or off as needed.
+
+ The default state of the module is for the function to be disabled.
+
+ Parameters
+ ----------
+ val : bool
+ If True, set_term_title() actually writes to the terminal (using the
+ appropriate platform-specific module). If False, it is a no-op.
+ """
+ global ignore_termtitle
+ ignore_termtitle = not(val)
+
+
+def _set_term_title(*args,**kw):
+ """Dummy no-op."""
+ pass
+
+
+def _set_term_title_xterm(title):
+ """ Change virtual terminal title in xterm-workalikes """
+ sys.stdout.write('\033]0;%s\007' % title)
+
+if os.name == 'posix':
+ TERM = os.environ.get('TERM','')
+ if TERM.startswith('xterm'):
+ _set_term_title = _set_term_title_xterm
+elif sys.platform == 'win32':
+ try:
+ import ctypes
+
+ SetConsoleTitleW = ctypes.windll.kernel32.SetConsoleTitleW
+ SetConsoleTitleW.argtypes = [ctypes.c_wchar_p]
+
+ def _set_term_title(title):
+ """Set terminal title using ctypes to access the Win32 APIs."""
+ SetConsoleTitleW(title)
+ except ImportError:
+ def _set_term_title(title):
+ """Set terminal title using the 'title' command."""
+ global ignore_termtitle
+
+ try:
+ # Cannot be on network share when issuing system commands
+ curr = py3compat.getcwd()
+ os.chdir("C:")
+ ret = os.system("title " + title)
+ finally:
+ os.chdir(curr)
+ if ret:
+ # non-zero return code signals error, don't try again
+ ignore_termtitle = True
+
+
+def set_term_title(title):
+ """Set terminal title using the necessary platform-dependent calls."""
+ if ignore_termtitle:
+ return
+ _set_term_title(title)
+
+
+def freeze_term_title():
+ warnings.warn("This function is deprecated, use toggle_set_term_title()")
+ global ignore_termtitle
+ ignore_termtitle = True
+
+
def get_terminal_size(defaultx=80, defaulty=25):
return _get_terminal_size((defaultx, defaulty))
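For reference, the intended calling pattern for the title helpers above: titles are ignored until explicitly enabled.

    from IPython.utils.terminal import (get_terminal_size, set_term_title,
                                        toggle_set_term_title)

    toggle_set_term_title(True)        # opt in; the module default is disabled
    set_term_title('IPython session')  # silently does nothing on unsupported terminals
    print(get_terminal_size())         # (columns, lines), defaulting to (80, 25)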
diff --git a/contrib/python/ipython/py2/IPython/utils/text.py b/contrib/python/ipython/py2/IPython/utils/text.py
index 50ff04e1fc..5ed1a845e3 100644
--- a/contrib/python/ipython/py2/IPython/utils/text.py
+++ b/contrib/python/ipython/py2/IPython/utils/text.py
@@ -1,783 +1,783 @@
-# encoding: utf-8
-"""
-Utilities for working with strings and text.
-
-Inheritance diagram:
-
-.. inheritance-diagram:: IPython.utils.text
- :parts: 3
-"""
-from __future__ import absolute_import
-
-import os
-import re
-import sys
-import textwrap
-from string import Formatter
+# encoding: utf-8
+"""
+Utilities for working with strings and text.
+
+Inheritance diagram:
+
+.. inheritance-diagram:: IPython.utils.text
+ :parts: 3
+"""
+from __future__ import absolute_import
+
+import os
+import re
+import sys
+import textwrap
+from string import Formatter
try:
from pathlib import Path
except ImportError:
# Python 2 backport
from pathlib2 import Path
-
-from IPython.testing.skipdoctest import skip_doctest_py3, skip_doctest
-from IPython.utils import py3compat
-
-# datetime.strftime date format for ipython
-if sys.platform == 'win32':
- date_format = "%B %d, %Y"
-else:
- date_format = "%B %-d, %Y"
-
-class LSString(str):
-    """String derivative with special access attributes.
-
- These are normal strings, but with the special attributes:
-
- .l (or .list) : value as list (split on newlines).
- .n (or .nlstr): original value (the string itself).
- .s (or .spstr): value as whitespace-separated string.
- .p (or .paths): list of path objects (requires path.py package)
-
- Any values which require transformations are computed only once and
- cached.
-
- Such strings are very useful to efficiently interact with the shell, which
- typically only understands whitespace-separated options for commands."""
-
- def get_list(self):
- try:
- return self.__list
- except AttributeError:
- self.__list = self.split('\n')
- return self.__list
-
- l = list = property(get_list)
-
- def get_spstr(self):
- try:
- return self.__spstr
- except AttributeError:
- self.__spstr = self.replace('\n',' ')
- return self.__spstr
-
- s = spstr = property(get_spstr)
-
- def get_nlstr(self):
- return self
-
- n = nlstr = property(get_nlstr)
-
- def get_paths(self):
- try:
- return self.__paths
- except AttributeError:
+
+from IPython.testing.skipdoctest import skip_doctest_py3, skip_doctest
+from IPython.utils import py3compat
+
+# datetime.strftime date format for ipython
+if sys.platform == 'win32':
+ date_format = "%B %d, %Y"
+else:
+ date_format = "%B %-d, %Y"
+
+class LSString(str):
+    """String derivative with special access attributes.
+
+ These are normal strings, but with the special attributes:
+
+ .l (or .list) : value as list (split on newlines).
+ .n (or .nlstr): original value (the string itself).
+ .s (or .spstr): value as whitespace-separated string.
+ .p (or .paths): list of path objects (requires path.py package)
+
+ Any values which require transformations are computed only once and
+ cached.
+
+ Such strings are very useful to efficiently interact with the shell, which
+ typically only understands whitespace-separated options for commands."""
+
+ def get_list(self):
+ try:
+ return self.__list
+ except AttributeError:
+ self.__list = self.split('\n')
+ return self.__list
+
+ l = list = property(get_list)
+
+ def get_spstr(self):
+ try:
+ return self.__spstr
+ except AttributeError:
+ self.__spstr = self.replace('\n',' ')
+ return self.__spstr
+
+ s = spstr = property(get_spstr)
+
+ def get_nlstr(self):
+ return self
+
+ n = nlstr = property(get_nlstr)
+
+ def get_paths(self):
+ try:
+ return self.__paths
+ except AttributeError:
self.__paths = [Path(p) for p in self.split('\n') if os.path.exists(p)]
- return self.__paths
-
- p = paths = property(get_paths)
-
-# FIXME: We need to reimplement type specific displayhook and then add this
-# back as a custom printer. This should also be moved outside utils into the
-# core.
-
-# def print_lsstring(arg):
-# """ Prettier (non-repr-like) and more informative printer for LSString """
-# print "LSString (.p, .n, .l, .s available). Value:"
-# print arg
-#
-#
-# print_lsstring = result_display.when_type(LSString)(print_lsstring)
-
-
-class SList(list):
-    """List derivative with special access attributes.
-
- These are normal lists, but with the special attributes:
-
- * .l (or .list) : value as list (the list itself).
- * .n (or .nlstr): value as a string, joined on newlines.
- * .s (or .spstr): value as a string, joined on spaces.
- * .p (or .paths): list of path objects (requires path.py package)
-
- Any values which require transformations are computed only once and
- cached."""
-
- def get_list(self):
- return self
-
- l = list = property(get_list)
-
- def get_spstr(self):
- try:
- return self.__spstr
- except AttributeError:
- self.__spstr = ' '.join(self)
- return self.__spstr
-
- s = spstr = property(get_spstr)
-
- def get_nlstr(self):
- try:
- return self.__nlstr
- except AttributeError:
- self.__nlstr = '\n'.join(self)
- return self.__nlstr
-
- n = nlstr = property(get_nlstr)
-
- def get_paths(self):
- try:
- return self.__paths
- except AttributeError:
+ return self.__paths
+
+ p = paths = property(get_paths)
+
+# FIXME: We need to reimplement type specific displayhook and then add this
+# back as a custom printer. This should also be moved outside utils into the
+# core.
+
+# def print_lsstring(arg):
+# """ Prettier (non-repr-like) and more informative printer for LSString """
+# print "LSString (.p, .n, .l, .s available). Value:"
+# print arg
+#
+#
+# print_lsstring = result_display.when_type(LSString)(print_lsstring)
+
+
+class SList(list):
+    """List derivative with special access attributes.
+
+ These are normal lists, but with the special attributes:
+
+ * .l (or .list) : value as list (the list itself).
+ * .n (or .nlstr): value as a string, joined on newlines.
+ * .s (or .spstr): value as a string, joined on spaces.
+ * .p (or .paths): list of path objects (requires path.py package)
+
+ Any values which require transformations are computed only once and
+ cached."""
+
+ def get_list(self):
+ return self
+
+ l = list = property(get_list)
+
+ def get_spstr(self):
+ try:
+ return self.__spstr
+ except AttributeError:
+ self.__spstr = ' '.join(self)
+ return self.__spstr
+
+ s = spstr = property(get_spstr)
+
+ def get_nlstr(self):
+ try:
+ return self.__nlstr
+ except AttributeError:
+ self.__nlstr = '\n'.join(self)
+ return self.__nlstr
+
+ n = nlstr = property(get_nlstr)
+
+ def get_paths(self):
+ try:
+ return self.__paths
+ except AttributeError:
self.__paths = [Path(p) for p in self if os.path.exists(p)]
- return self.__paths
-
- p = paths = property(get_paths)
-
- def grep(self, pattern, prune = False, field = None):
- """ Return all strings matching 'pattern' (a regex or callable)
-
- This is case-insensitive. If prune is true, return all items
- NOT matching the pattern.
-
- If field is specified, the match must occur in the specified
- whitespace-separated field.
-
- Examples::
-
- a.grep( lambda x: x.startswith('C') )
- a.grep('Cha.*log', prune=1)
- a.grep('chm', field=-1)
- """
-
- def match_target(s):
- if field is None:
- return s
- parts = s.split()
- try:
- tgt = parts[field]
- return tgt
- except IndexError:
- return ""
-
- if isinstance(pattern, py3compat.string_types):
- pred = lambda x : re.search(pattern, x, re.IGNORECASE)
- else:
- pred = pattern
- if not prune:
- return SList([el for el in self if pred(match_target(el))])
- else:
- return SList([el for el in self if not pred(match_target(el))])
-
- def fields(self, *fields):
- """ Collect whitespace-separated fields from string list
-
- Allows quick awk-like usage of string lists.
-
- Example data (in var a, created by 'a = !ls -l')::
-
- -rwxrwxrwx 1 ville None 18 Dec 14 2006 ChangeLog
- drwxrwxrwx+ 6 ville None 0 Oct 24 18:05 IPython
-
- * ``a.fields(0)`` is ``['-rwxrwxrwx', 'drwxrwxrwx+']``
- * ``a.fields(1,0)`` is ``['1 -rwxrwxrwx', '6 drwxrwxrwx+']``
- (note the joining by space).
- * ``a.fields(-1)`` is ``['ChangeLog', 'IPython']``
-
- IndexErrors are ignored.
-
- Without args, fields() just split()'s the strings.
- """
- if len(fields) == 0:
- return [el.split() for el in self]
-
- res = SList()
- for el in [f.split() for f in self]:
- lineparts = []
-
- for fd in fields:
- try:
- lineparts.append(el[fd])
- except IndexError:
- pass
- if lineparts:
- res.append(" ".join(lineparts))
-
- return res
-
- def sort(self,field= None, nums = False):
- """ sort by specified fields (see fields())
-
- Example::
-
- a.sort(1, nums = True)
-
- Sorts a by second field, in numerical order (so that 21 > 3)
-
- """
-
- #decorate, sort, undecorate
- if field is not None:
- dsu = [[SList([line]).fields(field), line] for line in self]
- else:
- dsu = [[line, line] for line in self]
- if nums:
- for i in range(len(dsu)):
- numstr = "".join([ch for ch in dsu[i][0] if ch.isdigit()])
- try:
- n = int(numstr)
- except ValueError:
- n = 0
- dsu[i][0] = n
-
-
- dsu.sort()
- return SList([t[1] for t in dsu])
-
-
-# FIXME: We need to reimplement type specific displayhook and then add this
-# back as a custom printer. This should also be moved outside utils into the
-# core.
-
-# def print_slist(arg):
-# """ Prettier (non-repr-like) and more informative printer for SList """
-# print "SList (.p, .n, .l, .s, .grep(), .fields(), sort() available):"
-# if hasattr(arg, 'hideonce') and arg.hideonce:
-# arg.hideonce = False
-# return
-#
-# nlprint(arg) # This was a nested list printer, now removed.
-#
-# print_slist = result_display.when_type(SList)(print_slist)
-
-
-def indent(instr,nspaces=4, ntabs=0, flatten=False):
- """Indent a string a given number of spaces or tabstops.
-
- indent(str,nspaces=4,ntabs=0) -> indent str by ntabs+nspaces.
-
- Parameters
- ----------
-
- instr : basestring
- The string to be indented.
- nspaces : int (default: 4)
- The number of spaces to be indented.
- ntabs : int (default: 0)
- The number of tabs to be indented.
- flatten : bool (default: False)
- Whether to scrub existing indentation. If True, all lines will be
- aligned to the same indentation. If False, existing indentation will
- be strictly increased.
-
- Returns
- -------
-
- str|unicode : string indented by ntabs and nspaces.
-
- """
- if instr is None:
- return
- ind = '\t'*ntabs+' '*nspaces
- if flatten:
- pat = re.compile(r'^\s*', re.MULTILINE)
- else:
- pat = re.compile(r'^', re.MULTILINE)
- outstr = re.sub(pat, ind, instr)
- if outstr.endswith(os.linesep+ind):
- return outstr[:-len(ind)]
- else:
- return outstr
-
-
-def list_strings(arg):
- """Always return a list of strings, given a string or list of strings
- as input.
-
- Examples
- --------
- ::
-
- In [7]: list_strings('A single string')
- Out[7]: ['A single string']
-
- In [8]: list_strings(['A single string in a list'])
- Out[8]: ['A single string in a list']
-
- In [9]: list_strings(['A','list','of','strings'])
- Out[9]: ['A', 'list', 'of', 'strings']
- """
-
- if isinstance(arg, py3compat.string_types): return [arg]
- else: return arg
-
-
-def marquee(txt='',width=78,mark='*'):
- """Return the input string centered in a 'marquee'.
-
- Examples
- --------
- ::
-
- In [16]: marquee('A test',40)
- Out[16]: '**************** A test ****************'
-
- In [17]: marquee('A test',40,'-')
- Out[17]: '---------------- A test ----------------'
-
- In [18]: marquee('A test',40,' ')
- Out[18]: ' A test '
-
- """
- if not txt:
- return (mark*width)[:width]
- nmark = (width-len(txt)-2)//len(mark)//2
- if nmark < 0: nmark =0
- marks = mark*nmark
- return '%s %s %s' % (marks,txt,marks)
-
-
-ini_spaces_re = re.compile(r'^(\s+)')
-
-def num_ini_spaces(strng):
- """Return the number of initial spaces in a string"""
-
- ini_spaces = ini_spaces_re.match(strng)
- if ini_spaces:
- return ini_spaces.end()
- else:
- return 0
-
-
-def format_screen(strng):
- """Format a string for screen printing.
-
- This removes some latex-type format codes."""
- # Paragraph continue
- par_re = re.compile(r'\\$',re.MULTILINE)
- strng = par_re.sub('',strng)
- return strng
-
-
-def dedent(text):
- """Equivalent of textwrap.dedent that ignores unindented first line.
-
- This means it will still dedent strings like:
- '''foo
- is a bar
- '''
-
- For use in wrap_paragraphs.
- """
-
- if text.startswith('\n'):
- # text starts with blank line, don't ignore the first line
- return textwrap.dedent(text)
-
- # split first line
- splits = text.split('\n',1)
- if len(splits) == 1:
- # only one line
- return textwrap.dedent(text)
-
- first, rest = splits
- # dedent everything but the first line
- rest = textwrap.dedent(rest)
- return '\n'.join([first, rest])
-
-
-def wrap_paragraphs(text, ncols=80):
- """Wrap multiple paragraphs to fit a specified width.
-
- This is equivalent to textwrap.wrap, but with support for multiple
- paragraphs, as separated by empty lines.
-
- Returns
- -------
-
- list of complete paragraphs, wrapped to fill `ncols` columns.
- """
- paragraph_re = re.compile(r'\n(\s*\n)+', re.MULTILINE)
- text = dedent(text).strip()
- paragraphs = paragraph_re.split(text)[::2] # every other entry is space
- out_ps = []
- indent_re = re.compile(r'\n\s+', re.MULTILINE)
- for p in paragraphs:
- # presume indentation that survives dedent is meaningful formatting,
- # so don't fill unless text is flush.
- if indent_re.search(p) is None:
- # wrap paragraph
- p = textwrap.fill(p, ncols)
- out_ps.append(p)
- return out_ps
-
-
-def long_substr(data):
- """Return the longest common substring in a list of strings.
-
- Credit: http://stackoverflow.com/questions/2892931/longest-common-substring-from-more-than-two-strings-python
- """
- substr = ''
- if len(data) > 1 and len(data[0]) > 0:
- for i in range(len(data[0])):
- for j in range(len(data[0])-i+1):
- if j > len(substr) and all(data[0][i:i+j] in x for x in data):
- substr = data[0][i:i+j]
- elif len(data) == 1:
- substr = data[0]
- return substr
-
-
-def strip_email_quotes(text):
- """Strip leading email quotation characters ('>').
-
- Removes any combination of leading '>' interspersed with whitespace that
- appears *identically* in all lines of the input text.
-
- Parameters
- ----------
- text : str
-
- Examples
- --------
-
- Simple uses::
-
- In [2]: strip_email_quotes('> > text')
- Out[2]: 'text'
-
- In [3]: strip_email_quotes('> > text\\n> > more')
- Out[3]: 'text\\nmore'
-
- Note how only the common prefix that appears in all lines is stripped::
-
- In [4]: strip_email_quotes('> > text\\n> > more\\n> more...')
- Out[4]: '> text\\n> more\\nmore...'
-
- So if any line has no quote marks ('>') , then none are stripped from any
- of them ::
-
- In [5]: strip_email_quotes('> > text\\n> > more\\nlast different')
- Out[5]: '> > text\\n> > more\\nlast different'
- """
- lines = text.splitlines()
- matches = set()
- for line in lines:
- prefix = re.match(r'^(\s*>[ >]*)', line)
- if prefix:
- matches.add(prefix.group(1))
- else:
- break
- else:
- prefix = long_substr(list(matches))
- if prefix:
- strip = len(prefix)
- text = '\n'.join([ ln[strip:] for ln in lines])
- return text
-
-def strip_ansi(source):
- """
- Remove ansi escape codes from text.
-
- Parameters
- ----------
- source : str
- Source to remove the ansi from
- """
- return re.sub(r'\033\[(\d|;)+?m', '', source)
-
-
-class EvalFormatter(Formatter):
- """A String Formatter that allows evaluation of simple expressions.
-
- Note that this version interprets a : as specifying a format string (as per
- standard string formatting), so if slicing is required, you must explicitly
- create a slice.
-
- This is to be used in templating cases, such as the parallel batch
- script templates, where simple arithmetic on arguments is useful.
-
- Examples
- --------
- ::
-
- In [1]: f = EvalFormatter()
- In [2]: f.format('{n//4}', n=8)
- Out[2]: '2'
-
- In [3]: f.format("{greeting[slice(2,4)]}", greeting="Hello")
- Out[3]: 'll'
- """
- def get_field(self, name, args, kwargs):
- v = eval(name, kwargs)
- return v, name
-
-#XXX: As of Python 3.4, the format string parsing no longer splits on a colon
-# inside [], so EvalFormatter can handle slicing. Once we only support 3.4 and
-# above, it should be possible to remove FullEvalFormatter.
-
-@skip_doctest_py3
-class FullEvalFormatter(Formatter):
- """A String Formatter that allows evaluation of simple expressions.
-
- Any time a format key is not found in the kwargs,
- it will be tried as an expression in the kwargs namespace.
-
- Note that this version allows slicing using [1:2], so you cannot specify
- a format string. Use :class:`EvalFormatter` to permit format strings.
-
- Examples
- --------
- ::
-
- In [1]: f = FullEvalFormatter()
- In [2]: f.format('{n//4}', n=8)
- Out[2]: u'2'
-
- In [3]: f.format('{list(range(5))[2:4]}')
- Out[3]: u'[2, 3]'
-
- In [4]: f.format('{3*2}')
- Out[4]: u'6'
- """
- # copied from Formatter._vformat with minor changes to allow eval
- # and replace the format_spec code with slicing
- def vformat(self, format_string, args, kwargs):
- result = []
- for literal_text, field_name, format_spec, conversion in \
- self.parse(format_string):
-
- # output the literal text
- if literal_text:
- result.append(literal_text)
-
- # if there's a field, output it
- if field_name is not None:
- # this is some markup, find the object and do
- # the formatting
-
- if format_spec:
- # override format spec, to allow slicing:
- field_name = ':'.join([field_name, format_spec])
-
- # eval the contents of the field for the object
- # to be formatted
- obj = eval(field_name, kwargs)
-
- # do any conversion on the resulting object
- obj = self.convert_field(obj, conversion)
-
- # format the object and append to the result
- result.append(self.format_field(obj, ''))
-
- return u''.join(py3compat.cast_unicode(s) for s in result)
-
-
-@skip_doctest_py3
-class DollarFormatter(FullEvalFormatter):
- """Formatter allowing Itpl style $foo replacement, for names and attribute
- access only. Standard {foo} replacement also works, and allows full
- evaluation of its arguments.
-
- Examples
- --------
- ::
-
- In [1]: f = DollarFormatter()
- In [2]: f.format('{n//4}', n=8)
- Out[2]: u'2'
-
- In [3]: f.format('23 * 76 is $result', result=23*76)
- Out[3]: u'23 * 76 is 1748'
-
- In [4]: f.format('$a or {b}', a=1, b=2)
- Out[4]: u'1 or 2'
- """
- _dollar_pattern = re.compile("(.*?)\$(\$?[\w\.]+)")
- def parse(self, fmt_string):
- for literal_txt, field_name, format_spec, conversion \
- in Formatter.parse(self, fmt_string):
-
- # Find $foo patterns in the literal text.
- continue_from = 0
- txt = ""
- for m in self._dollar_pattern.finditer(literal_txt):
- new_txt, new_field = m.group(1,2)
- # $$foo --> $foo
- if new_field.startswith("$"):
- txt += new_txt + new_field
- else:
- yield (txt + new_txt, new_field, "", None)
- txt = ""
- continue_from = m.end()
-
- # Re-yield the {foo} style pattern
- yield (txt + literal_txt[continue_from:], field_name, format_spec, conversion)
-
-#-----------------------------------------------------------------------------
-# Utils to columnize a list of strings
-#-----------------------------------------------------------------------------
-
-def _col_chunks(l, max_rows, row_first=False):
- """Yield successive max_rows-sized column chunks from l."""
- if row_first:
- ncols = (len(l) // max_rows) + (len(l) % max_rows > 0)
- for i in py3compat.xrange(ncols):
- yield [l[j] for j in py3compat.xrange(i, len(l), ncols)]
- else:
- for i in py3compat.xrange(0, len(l), max_rows):
- yield l[i:(i + max_rows)]
-
-
-def _find_optimal(rlist, row_first=False, separator_size=2, displaywidth=80):
-    """Calculate optimal info to columnize a list of strings"""
- for max_rows in range(1, len(rlist) + 1):
- col_widths = list(map(max, _col_chunks(rlist, max_rows, row_first)))
- sumlength = sum(col_widths)
- ncols = len(col_widths)
- if sumlength + separator_size * (ncols - 1) <= displaywidth:
- break
- return {'num_columns': ncols,
- 'optimal_separator_width': (displaywidth - sumlength) / (ncols - 1) if (ncols - 1) else 0,
- 'max_rows': max_rows,
- 'column_widths': col_widths
- }
-
-
-def _get_or_default(mylist, i, default=None):
-    """Return item `i` of the list, or `default` if it doesn't exist"""
- if i >= len(mylist):
- return default
- else :
- return mylist[i]
-
-
-def compute_item_matrix(items, row_first=False, empty=None, *args, **kwargs) :
- """Returns a nested list, and info to columnize items
-
- Parameters
- ----------
-
- items
-        list of strings to columnize
- row_first : (default False)
- Whether to compute columns for a row-first matrix instead of
- column-first (default).
- empty : (default None)
- default value to fill list if needed
- separator_size : int (default=2)
-        How many characters will be used as a separator between columns.
- displaywidth : int (default=80)
-        The width of the area into which the columns should fit.
-
- Returns
- -------
-
- strings_matrix
-
-        nested list of strings; the outermost list contains as many lists as
-        there are rows, and each inner list has as many elements as there are
-        columns. If the total number of elements in `items` does not equal the
-        product of rows*columns, the last elements of some lists are filled
-        with `None`.
-
- dict_info
- some info to make columnize easier:
-
- num_columns
- number of columns
- max_rows
- maximum number of rows (final number may be less)
- column_widths
-            list of widths, one per column
- optimal_separator_width
- best separator width between columns
-
- Examples
- --------
- ::
-
- In [1]: l = ['aaa','b','cc','d','eeeee','f','g','h','i','j','k','l']
- ...: compute_item_matrix(l, displaywidth=12)
- Out[1]:
- ([['aaa', 'f', 'k'],
- ['b', 'g', 'l'],
- ['cc', 'h', None],
- ['d', 'i', None],
- ['eeeee', 'j', None]],
- {'num_columns': 3,
- 'column_widths': [5, 1, 1],
- 'optimal_separator_width': 2,
- 'max_rows': 5})
- """
- info = _find_optimal(list(map(len, items)), row_first, *args, **kwargs)
- nrow, ncol = info['max_rows'], info['num_columns']
- if row_first:
- return ([[_get_or_default(items, r * ncol + c, default=empty) for c in range(ncol)] for r in range(nrow)], info)
- else:
- return ([[_get_or_default(items, c * nrow + r, default=empty) for c in range(ncol)] for r in range(nrow)], info)
-
-
-def columnize(items, row_first=False, separator=' ', displaywidth=80, spread=False):
- """ Transform a list of strings into a single string with columns.
-
- Parameters
- ----------
- items : sequence of strings
- The strings to process.
-
- row_first : (default False)
- Whether to compute columns for a row-first matrix instead of
- column-first (default).
-
- separator : str, optional [default is two spaces]
- The string that separates columns.
-
- displaywidth : int, optional [default is 80]
- Width of the display in number of characters.
-
- Returns
- -------
- The formatted string.
- """
- if not items:
- return '\n'
- matrix, info = compute_item_matrix(items, row_first=row_first, separator_size=len(separator), displaywidth=displaywidth)
- if spread:
- separator = separator.ljust(int(info['optimal_separator_width']))
- fmatrix = [filter(None, x) for x in matrix]
- sjoin = lambda x : separator.join([ y.ljust(w, ' ') for y, w in zip(x, info['column_widths'])])
- return '\n'.join(map(sjoin, fmatrix))+'\n'
-
-
-def get_text_list(list_, last_sep=' and ', sep=", ", wrap_item_with=""):
- """
- Return a string with a natural enumeration of items
-
- >>> get_text_list(['a', 'b', 'c', 'd'])
- 'a, b, c and d'
- >>> get_text_list(['a', 'b', 'c'], ' or ')
- 'a, b or c'
- >>> get_text_list(['a', 'b', 'c'], ', ')
- 'a, b, c'
- >>> get_text_list(['a', 'b'], ' or ')
- 'a or b'
- >>> get_text_list(['a'])
- 'a'
- >>> get_text_list([])
- ''
- >>> get_text_list(['a', 'b'], wrap_item_with="`")
- '`a` and `b`'
- >>> get_text_list(['a', 'b', 'c', 'd'], " = ", sep=" + ")
- 'a + b + c = d'
- """
- if len(list_) == 0:
- return ''
- if wrap_item_with:
- list_ = ['%s%s%s' % (wrap_item_with, item, wrap_item_with) for
- item in list_]
- if len(list_) == 1:
- return list_[0]
- return '%s%s%s' % (
- sep.join(i for i in list_[:-1]),
- last_sep, list_[-1])
+ return self.__paths
+
+ p = paths = property(get_paths)
+
+ def grep(self, pattern, prune = False, field = None):
+ """ Return all strings matching 'pattern' (a regex or callable)
+
+ This is case-insensitive. If prune is true, return all items
+ NOT matching the pattern.
+
+ If field is specified, the match must occur in the specified
+ whitespace-separated field.
+
+ Examples::
+
+ a.grep( lambda x: x.startswith('C') )
+ a.grep('Cha.*log', prune=1)
+ a.grep('chm', field=-1)
+ """
+
+ def match_target(s):
+ if field is None:
+ return s
+ parts = s.split()
+ try:
+ tgt = parts[field]
+ return tgt
+ except IndexError:
+ return ""
+
+ if isinstance(pattern, py3compat.string_types):
+ pred = lambda x : re.search(pattern, x, re.IGNORECASE)
+ else:
+ pred = pattern
+ if not prune:
+ return SList([el for el in self if pred(match_target(el))])
+ else:
+ return SList([el for el in self if not pred(match_target(el))])
+
+ def fields(self, *fields):
+ """ Collect whitespace-separated fields from string list
+
+ Allows quick awk-like usage of string lists.
+
+ Example data (in var a, created by 'a = !ls -l')::
+
+ -rwxrwxrwx 1 ville None 18 Dec 14 2006 ChangeLog
+ drwxrwxrwx+ 6 ville None 0 Oct 24 18:05 IPython
+
+ * ``a.fields(0)`` is ``['-rwxrwxrwx', 'drwxrwxrwx+']``
+ * ``a.fields(1,0)`` is ``['1 -rwxrwxrwx', '6 drwxrwxrwx+']``
+ (note the joining by space).
+ * ``a.fields(-1)`` is ``['ChangeLog', 'IPython']``
+
+ IndexErrors are ignored.
+
+ Without args, fields() just split()'s the strings.
+ """
+ if len(fields) == 0:
+ return [el.split() for el in self]
+
+ res = SList()
+ for el in [f.split() for f in self]:
+ lineparts = []
+
+ for fd in fields:
+ try:
+ lineparts.append(el[fd])
+ except IndexError:
+ pass
+ if lineparts:
+ res.append(" ".join(lineparts))
+
+ return res
+
+ def sort(self,field= None, nums = False):
+ """ sort by specified fields (see fields())
+
+ Example::
+
+ a.sort(1, nums = True)
+
+ Sorts a by second field, in numerical order (so that 21 > 3)
+
+ """
+
+ #decorate, sort, undecorate
+ if field is not None:
+ dsu = [[SList([line]).fields(field), line] for line in self]
+ else:
+ dsu = [[line, line] for line in self]
+ if nums:
+ for i in range(len(dsu)):
+ numstr = "".join([ch for ch in dsu[i][0] if ch.isdigit()])
+ try:
+ n = int(numstr)
+ except ValueError:
+ n = 0
+ dsu[i][0] = n
+
+
+ dsu.sort()
+ return SList([t[1] for t in dsu])
+
+
+# FIXME: We need to reimplement type specific displayhook and then add this
+# back as a custom printer. This should also be moved outside utils into the
+# core.
+
+# def print_slist(arg):
+# """ Prettier (non-repr-like) and more informative printer for SList """
+# print "SList (.p, .n, .l, .s, .grep(), .fields(), sort() available):"
+# if hasattr(arg, 'hideonce') and arg.hideonce:
+# arg.hideonce = False
+# return
+#
+# nlprint(arg) # This was a nested list printer, now removed.
+#
+# print_slist = result_display.when_type(SList)(print_slist)
+
+
+def indent(instr,nspaces=4, ntabs=0, flatten=False):
+ """Indent a string a given number of spaces or tabstops.
+
+ indent(str,nspaces=4,ntabs=0) -> indent str by ntabs+nspaces.
+
+ Parameters
+ ----------
+
+ instr : basestring
+ The string to be indented.
+ nspaces : int (default: 4)
+ The number of spaces to be indented.
+ ntabs : int (default: 0)
+ The number of tabs to be indented.
+ flatten : bool (default: False)
+ Whether to scrub existing indentation. If True, all lines will be
+ aligned to the same indentation. If False, existing indentation will
+ be strictly increased.
+
+ Returns
+ -------
+
+ str|unicode : string indented by ntabs and nspaces.
+
+ """
+ if instr is None:
+ return
+ ind = '\t'*ntabs+' '*nspaces
+ if flatten:
+ pat = re.compile(r'^\s*', re.MULTILINE)
+ else:
+ pat = re.compile(r'^', re.MULTILINE)
+ outstr = re.sub(pat, ind, instr)
+ if outstr.endswith(os.linesep+ind):
+ return outstr[:-len(ind)]
+ else:
+ return outstr
+
+
+def list_strings(arg):
+ """Always return a list of strings, given a string or list of strings
+ as input.
+
+ Examples
+ --------
+ ::
+
+ In [7]: list_strings('A single string')
+ Out[7]: ['A single string']
+
+ In [8]: list_strings(['A single string in a list'])
+ Out[8]: ['A single string in a list']
+
+ In [9]: list_strings(['A','list','of','strings'])
+ Out[9]: ['A', 'list', 'of', 'strings']
+ """
+
+ if isinstance(arg, py3compat.string_types): return [arg]
+ else: return arg
+
+
+def marquee(txt='',width=78,mark='*'):
+ """Return the input string centered in a 'marquee'.
+
+ Examples
+ --------
+ ::
+
+ In [16]: marquee('A test',40)
+ Out[16]: '**************** A test ****************'
+
+ In [17]: marquee('A test',40,'-')
+ Out[17]: '---------------- A test ----------------'
+
+ In [18]: marquee('A test',40,' ')
+ Out[18]: ' A test '
+
+ """
+ if not txt:
+ return (mark*width)[:width]
+ nmark = (width-len(txt)-2)//len(mark)//2
+ if nmark < 0: nmark =0
+ marks = mark*nmark
+ return '%s %s %s' % (marks,txt,marks)
+
+
+ini_spaces_re = re.compile(r'^(\s+)')
+
+def num_ini_spaces(strng):
+ """Return the number of initial spaces in a string"""
+
+ ini_spaces = ini_spaces_re.match(strng)
+ if ini_spaces:
+ return ini_spaces.end()
+ else:
+ return 0
+
+
+def format_screen(strng):
+ """Format a string for screen printing.
+
+ This removes some latex-type format codes."""
+ # Paragraph continue
+ par_re = re.compile(r'\\$',re.MULTILINE)
+ strng = par_re.sub('',strng)
+ return strng
+
+
+def dedent(text):
+ """Equivalent of textwrap.dedent that ignores unindented first line.
+
+ This means it will still dedent strings like:
+ '''foo
+ is a bar
+ '''
+
+ For use in wrap_paragraphs.
+ """
+
+ if text.startswith('\n'):
+ # text starts with blank line, don't ignore the first line
+ return textwrap.dedent(text)
+
+ # split first line
+ splits = text.split('\n',1)
+ if len(splits) == 1:
+ # only one line
+ return textwrap.dedent(text)
+
+ first, rest = splits
+ # dedent everything but the first line
+ rest = textwrap.dedent(rest)
+ return '\n'.join([first, rest])
+
+
+def wrap_paragraphs(text, ncols=80):
+ """Wrap multiple paragraphs to fit a specified width.
+
+ This is equivalent to textwrap.wrap, but with support for multiple
+ paragraphs, as separated by empty lines.
+
+ Returns
+ -------
+
+ list of complete paragraphs, wrapped to fill `ncols` columns.
+ """
+ paragraph_re = re.compile(r'\n(\s*\n)+', re.MULTILINE)
+ text = dedent(text).strip()
+ paragraphs = paragraph_re.split(text)[::2] # every other entry is space
+ out_ps = []
+ indent_re = re.compile(r'\n\s+', re.MULTILINE)
+ for p in paragraphs:
+ # presume indentation that survives dedent is meaningful formatting,
+ # so don't fill unless text is flush.
+ if indent_re.search(p) is None:
+ # wrap paragraph
+ p = textwrap.fill(p, ncols)
+ out_ps.append(p)
+ return out_ps
+
+
+def long_substr(data):
+ """Return the longest common substring in a list of strings.
+
+ Credit: http://stackoverflow.com/questions/2892931/longest-common-substring-from-more-than-two-strings-python
+ """
+ substr = ''
+ if len(data) > 1 and len(data[0]) > 0:
+ for i in range(len(data[0])):
+ for j in range(len(data[0])-i+1):
+ if j > len(substr) and all(data[0][i:i+j] in x for x in data):
+ substr = data[0][i:i+j]
+ elif len(data) == 1:
+ substr = data[0]
+ return substr
+
+
+def strip_email_quotes(text):
+ """Strip leading email quotation characters ('>').
+
+ Removes any combination of leading '>' interspersed with whitespace that
+ appears *identically* in all lines of the input text.
+
+ Parameters
+ ----------
+ text : str
+
+ Examples
+ --------
+
+ Simple uses::
+
+ In [2]: strip_email_quotes('> > text')
+ Out[2]: 'text'
+
+ In [3]: strip_email_quotes('> > text\\n> > more')
+ Out[3]: 'text\\nmore'
+
+ Note how only the common prefix that appears in all lines is stripped::
+
+ In [4]: strip_email_quotes('> > text\\n> > more\\n> more...')
+ Out[4]: '> text\\n> more\\nmore...'
+
+    So if any line has no quote marks ('>'), then none are stripped from any
+    of them::
+
+ In [5]: strip_email_quotes('> > text\\n> > more\\nlast different')
+ Out[5]: '> > text\\n> > more\\nlast different'
+ """
+ lines = text.splitlines()
+ matches = set()
+ for line in lines:
+ prefix = re.match(r'^(\s*>[ >]*)', line)
+ if prefix:
+ matches.add(prefix.group(1))
+ else:
+ break
+ else:
+ prefix = long_substr(list(matches))
+ if prefix:
+ strip = len(prefix)
+ text = '\n'.join([ ln[strip:] for ln in lines])
+ return text
+
+def strip_ansi(source):
+ """
+ Remove ansi escape codes from text.
+
+ Parameters
+ ----------
+ source : str
+ Source to remove the ansi from
+ """
+ return re.sub(r'\033\[(\d|;)+?m', '', source)
+
+
+class EvalFormatter(Formatter):
+ """A String Formatter that allows evaluation of simple expressions.
+
+ Note that this version interprets a : as specifying a format string (as per
+ standard string formatting), so if slicing is required, you must explicitly
+ create a slice.
+
+ This is to be used in templating cases, such as the parallel batch
+ script templates, where simple arithmetic on arguments is useful.
+
+ Examples
+ --------
+ ::
+
+ In [1]: f = EvalFormatter()
+ In [2]: f.format('{n//4}', n=8)
+ Out[2]: '2'
+
+ In [3]: f.format("{greeting[slice(2,4)]}", greeting="Hello")
+ Out[3]: 'll'
+ """
+ def get_field(self, name, args, kwargs):
+ v = eval(name, kwargs)
+ return v, name
+
+#XXX: As of Python 3.4, the format string parsing no longer splits on a colon
+# inside [], so EvalFormatter can handle slicing. Once we only support 3.4 and
+# above, it should be possible to remove FullEvalFormatter.
+
+@skip_doctest_py3
+class FullEvalFormatter(Formatter):
+ """A String Formatter that allows evaluation of simple expressions.
+
+ Any time a format key is not found in the kwargs,
+ it will be tried as an expression in the kwargs namespace.
+
+ Note that this version allows slicing using [1:2], so you cannot specify
+ a format string. Use :class:`EvalFormatter` to permit format strings.
+
+ Examples
+ --------
+ ::
+
+ In [1]: f = FullEvalFormatter()
+ In [2]: f.format('{n//4}', n=8)
+ Out[2]: u'2'
+
+ In [3]: f.format('{list(range(5))[2:4]}')
+ Out[3]: u'[2, 3]'
+
+ In [4]: f.format('{3*2}')
+ Out[4]: u'6'
+ """
+ # copied from Formatter._vformat with minor changes to allow eval
+ # and replace the format_spec code with slicing
+ def vformat(self, format_string, args, kwargs):
+ result = []
+ for literal_text, field_name, format_spec, conversion in \
+ self.parse(format_string):
+
+ # output the literal text
+ if literal_text:
+ result.append(literal_text)
+
+ # if there's a field, output it
+ if field_name is not None:
+ # this is some markup, find the object and do
+ # the formatting
+
+ if format_spec:
+ # override format spec, to allow slicing:
+ field_name = ':'.join([field_name, format_spec])
+
+ # eval the contents of the field for the object
+ # to be formatted
+ obj = eval(field_name, kwargs)
+
+ # do any conversion on the resulting object
+ obj = self.convert_field(obj, conversion)
+
+ # format the object and append to the result
+ result.append(self.format_field(obj, ''))
+
+ return u''.join(py3compat.cast_unicode(s) for s in result)
+
+
+@skip_doctest_py3
+class DollarFormatter(FullEvalFormatter):
+ """Formatter allowing Itpl style $foo replacement, for names and attribute
+ access only. Standard {foo} replacement also works, and allows full
+ evaluation of its arguments.
+
+ Examples
+ --------
+ ::
+
+ In [1]: f = DollarFormatter()
+ In [2]: f.format('{n//4}', n=8)
+ Out[2]: u'2'
+
+ In [3]: f.format('23 * 76 is $result', result=23*76)
+ Out[3]: u'23 * 76 is 1748'
+
+ In [4]: f.format('$a or {b}', a=1, b=2)
+ Out[4]: u'1 or 2'
+ """
+ _dollar_pattern = re.compile("(.*?)\$(\$?[\w\.]+)")
+ def parse(self, fmt_string):
+ for literal_txt, field_name, format_spec, conversion \
+ in Formatter.parse(self, fmt_string):
+
+ # Find $foo patterns in the literal text.
+ continue_from = 0
+ txt = ""
+ for m in self._dollar_pattern.finditer(literal_txt):
+ new_txt, new_field = m.group(1,2)
+ # $$foo --> $foo
+ if new_field.startswith("$"):
+ txt += new_txt + new_field
+ else:
+ yield (txt + new_txt, new_field, "", None)
+ txt = ""
+ continue_from = m.end()
+
+ # Re-yield the {foo} style pattern
+ yield (txt + literal_txt[continue_from:], field_name, format_spec, conversion)
+
+#-----------------------------------------------------------------------------
+# Utils to columnize a list of strings
+#-----------------------------------------------------------------------------
+
+def _col_chunks(l, max_rows, row_first=False):
+ """Yield successive max_rows-sized column chunks from l."""
+ if row_first:
+ ncols = (len(l) // max_rows) + (len(l) % max_rows > 0)
+ for i in py3compat.xrange(ncols):
+ yield [l[j] for j in py3compat.xrange(i, len(l), ncols)]
+ else:
+ for i in py3compat.xrange(0, len(l), max_rows):
+ yield l[i:(i + max_rows)]
+
+
+def _find_optimal(rlist, row_first=False, separator_size=2, displaywidth=80):
+    """Calculate optimal info to columnize a list of strings"""
+ for max_rows in range(1, len(rlist) + 1):
+ col_widths = list(map(max, _col_chunks(rlist, max_rows, row_first)))
+ sumlength = sum(col_widths)
+ ncols = len(col_widths)
+ if sumlength + separator_size * (ncols - 1) <= displaywidth:
+ break
+ return {'num_columns': ncols,
+ 'optimal_separator_width': (displaywidth - sumlength) / (ncols - 1) if (ncols - 1) else 0,
+ 'max_rows': max_rows,
+ 'column_widths': col_widths
+ }
+
+
+def _get_or_default(mylist, i, default=None):
+    """Return item `i` of the list, or `default` if it doesn't exist"""
+ if i >= len(mylist):
+ return default
+ else :
+ return mylist[i]
+
+
+def compute_item_matrix(items, row_first=False, empty=None, *args, **kwargs) :
+ """Returns a nested list, and info to columnize items
+
+ Parameters
+ ----------
+
+ items
+        list of strings to columnize
+ row_first : (default False)
+ Whether to compute columns for a row-first matrix instead of
+ column-first (default).
+ empty : (default None)
+ default value to fill list if needed
+ separator_size : int (default=2)
+        How many characters will be used as a separator between columns.
+ displaywidth : int (default=80)
+        The width of the area into which the columns should fit.
+
+ Returns
+ -------
+
+ strings_matrix
+
+        nested list of strings; the outermost list contains as many lists as
+        there are rows, and each inner list has as many elements as there are
+        columns. If the total number of elements in `items` does not equal the
+        product of rows*columns, the last elements of some lists are filled
+        with `None`.
+
+ dict_info
+ some info to make columnize easier:
+
+ num_columns
+ number of columns
+ max_rows
+ maximum number of rows (final number may be less)
+ column_widths
+            list of widths, one per column
+ optimal_separator_width
+ best separator width between columns
+
+ Examples
+ --------
+ ::
+
+ In [1]: l = ['aaa','b','cc','d','eeeee','f','g','h','i','j','k','l']
+ ...: compute_item_matrix(l, displaywidth=12)
+ Out[1]:
+ ([['aaa', 'f', 'k'],
+ ['b', 'g', 'l'],
+ ['cc', 'h', None],
+ ['d', 'i', None],
+ ['eeeee', 'j', None]],
+ {'num_columns': 3,
+ 'column_widths': [5, 1, 1],
+ 'optimal_separator_width': 2,
+ 'max_rows': 5})
+ """
+ info = _find_optimal(list(map(len, items)), row_first, *args, **kwargs)
+ nrow, ncol = info['max_rows'], info['num_columns']
+ if row_first:
+ return ([[_get_or_default(items, r * ncol + c, default=empty) for c in range(ncol)] for r in range(nrow)], info)
+ else:
+ return ([[_get_or_default(items, c * nrow + r, default=empty) for c in range(ncol)] for r in range(nrow)], info)
+
+
+def columnize(items, row_first=False, separator=' ', displaywidth=80, spread=False):
+ """ Transform a list of strings into a single string with columns.
+
+ Parameters
+ ----------
+ items : sequence of strings
+ The strings to process.
+
+ row_first : (default False)
+ Whether to compute columns for a row-first matrix instead of
+ column-first (default).
+
+ separator : str, optional [default is two spaces]
+ The string that separates columns.
+
+ displaywidth : int, optional [default is 80]
+ Width of the display in number of characters.
+
+ Returns
+ -------
+ The formatted string.
+ """
+ if not items:
+ return '\n'
+ matrix, info = compute_item_matrix(items, row_first=row_first, separator_size=len(separator), displaywidth=displaywidth)
+ if spread:
+ separator = separator.ljust(int(info['optimal_separator_width']))
+ fmatrix = [filter(None, x) for x in matrix]
+ sjoin = lambda x : separator.join([ y.ljust(w, ' ') for y, w in zip(x, info['column_widths'])])
+ return '\n'.join(map(sjoin, fmatrix))+'\n'
+
+
+def get_text_list(list_, last_sep=' and ', sep=", ", wrap_item_with=""):
+ """
+ Return a string with a natural enumeration of items
+
+ >>> get_text_list(['a', 'b', 'c', 'd'])
+ 'a, b, c and d'
+ >>> get_text_list(['a', 'b', 'c'], ' or ')
+ 'a, b or c'
+ >>> get_text_list(['a', 'b', 'c'], ', ')
+ 'a, b, c'
+ >>> get_text_list(['a', 'b'], ' or ')
+ 'a or b'
+ >>> get_text_list(['a'])
+ 'a'
+ >>> get_text_list([])
+ ''
+ >>> get_text_list(['a', 'b'], wrap_item_with="`")
+ '`a` and `b`'
+ >>> get_text_list(['a', 'b', 'c', 'd'], " = ", sep=" + ")
+ 'a + b + c = d'
+ """
+ if len(list_) == 0:
+ return ''
+ if wrap_item_with:
+ list_ = ['%s%s%s' % (wrap_item_with, item, wrap_item_with) for
+ item in list_]
+ if len(list_) == 1:
+ return list_[0]
+ return '%s%s%s' % (
+ sep.join(i for i in list_[:-1]),
+ last_sep, list_[-1])
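
The restored text.py helpers above are easiest to understand from a small usage
sketch (not part of the diff); it assumes the vendored package is importable as
IPython.utils.text.

    from IPython.utils.text import (DollarFormatter, columnize, marquee,
                                    strip_email_quotes)

    # Centered banner, as in the marquee() docstring example.
    print(marquee('A test', 40))    # '**************** A test ****************'

    # Column layout of short strings within a 12-character display.
    print(columnize(['aaa', 'b', 'cc', 'd', 'eeeee', 'f'], displaywidth=12))

    # Only the quote prefix common to *all* lines is removed.
    print(strip_email_quotes('> > text\n> > more'))   # returns 'text\nmore'

    # $name substitution alongside full {expr} evaluation.
    f = DollarFormatter()
    print(f.format('$a or {b}', a=1, b=2))            # '1 or 2'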
diff --git a/contrib/python/ipython/py2/IPython/utils/timing.py b/contrib/python/ipython/py2/IPython/utils/timing.py
index ff88bf664d..99b7bbc59a 100644
--- a/contrib/python/ipython/py2/IPython/utils/timing.py
+++ b/contrib/python/ipython/py2/IPython/utils/timing.py
@@ -1,118 +1,118 @@
-# encoding: utf-8
-"""
-Utilities for timing code execution.
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-import time
-
-from .py3compat import xrange
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-# If possible (Unix), use the resource module instead of time.clock()
-try:
- import resource
- def clocku():
- """clocku() -> floating point number
-
- Return the *USER* CPU time in seconds since the start of the process.
- This is done via a call to resource.getrusage, so it avoids the
- wraparound problems in time.clock()."""
-
- return resource.getrusage(resource.RUSAGE_SELF)[0]
-
- def clocks():
- """clocks() -> floating point number
-
- Return the *SYSTEM* CPU time in seconds since the start of the process.
- This is done via a call to resource.getrusage, so it avoids the
- wraparound problems in time.clock()."""
-
- return resource.getrusage(resource.RUSAGE_SELF)[1]
-
- def clock():
- """clock() -> floating point number
-
- Return the *TOTAL USER+SYSTEM* CPU time in seconds since the start of
- the process. This is done via a call to resource.getrusage, so it
- avoids the wraparound problems in time.clock()."""
-
- u,s = resource.getrusage(resource.RUSAGE_SELF)[:2]
- return u+s
-
- def clock2():
- """clock2() -> (t_user,t_system)
-
- Similar to clock(), but return a tuple of user/system times."""
- return resource.getrusage(resource.RUSAGE_SELF)[:2]
-except ImportError:
- # There is no distinction of user/system time under windows, so we just use
- # time.clock() for everything...
- clocku = clocks = clock = time.clock
- def clock2():
- """Under windows, system CPU time can't be measured.
-
- This just returns clock() and zero."""
- return time.clock(),0.0
-
-
-def timings_out(reps,func,*args,**kw):
- """timings_out(reps,func,*args,**kw) -> (t_total,t_per_call,output)
-
- Execute a function reps times, return a tuple with the elapsed total
- CPU time in seconds, the time per call and the function's output.
-
- Under Unix, the return value is the sum of user+system time consumed by
- the process, computed via the resource module. This prevents problems
- related to the wraparound effect which the time.clock() function has.
-
- Under Windows the return value is in wall clock seconds. See the
- documentation for the time module for more details."""
-
- reps = int(reps)
- assert reps >=1, 'reps must be >= 1'
- if reps==1:
- start = clock()
- out = func(*args,**kw)
- tot_time = clock()-start
- else:
- rng = xrange(reps-1) # the last time is executed separately to store output
- start = clock()
- for dummy in rng: func(*args,**kw)
- out = func(*args,**kw) # one last time
- tot_time = clock()-start
- av_time = tot_time / reps
- return tot_time,av_time,out
-
-
-def timings(reps,func,*args,**kw):
- """timings(reps,func,*args,**kw) -> (t_total,t_per_call)
-
- Execute a function reps times, return a tuple with the elapsed total CPU
- time in seconds and the time per call. These are just the first two values
- in timings_out()."""
-
- return timings_out(reps,func,*args,**kw)[0:2]
-
-
-def timing(func,*args,**kw):
- """timing(func,*args,**kw) -> t_total
-
- Execute a function once, return the elapsed total CPU time in
- seconds. This is just the first value in timings_out()."""
-
- return timings_out(1,func,*args,**kw)[0]
-
+# encoding: utf-8
+"""
+Utilities for timing code execution.
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2008-2011 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+import time
+
+from .py3compat import xrange
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+# If possible (Unix), use the resource module instead of time.clock()
+try:
+ import resource
+ def clocku():
+ """clocku() -> floating point number
+
+ Return the *USER* CPU time in seconds since the start of the process.
+ This is done via a call to resource.getrusage, so it avoids the
+ wraparound problems in time.clock()."""
+
+ return resource.getrusage(resource.RUSAGE_SELF)[0]
+
+ def clocks():
+ """clocks() -> floating point number
+
+ Return the *SYSTEM* CPU time in seconds since the start of the process.
+ This is done via a call to resource.getrusage, so it avoids the
+ wraparound problems in time.clock()."""
+
+ return resource.getrusage(resource.RUSAGE_SELF)[1]
+
+ def clock():
+ """clock() -> floating point number
+
+ Return the *TOTAL USER+SYSTEM* CPU time in seconds since the start of
+ the process. This is done via a call to resource.getrusage, so it
+ avoids the wraparound problems in time.clock()."""
+
+ u,s = resource.getrusage(resource.RUSAGE_SELF)[:2]
+ return u+s
+
+ def clock2():
+ """clock2() -> (t_user,t_system)
+
+ Similar to clock(), but return a tuple of user/system times."""
+ return resource.getrusage(resource.RUSAGE_SELF)[:2]
+except ImportError:
+ # There is no distinction of user/system time under windows, so we just use
+ # time.clock() for everything...
+ clocku = clocks = clock = time.clock
+ def clock2():
+ """Under windows, system CPU time can't be measured.
+
+ This just returns clock() and zero."""
+ return time.clock(),0.0
+
+
+def timings_out(reps,func,*args,**kw):
+ """timings_out(reps,func,*args,**kw) -> (t_total,t_per_call,output)
+
+ Execute a function reps times, return a tuple with the elapsed total
+ CPU time in seconds, the time per call and the function's output.
+
+ Under Unix, the return value is the sum of user+system time consumed by
+ the process, computed via the resource module. This prevents problems
+ related to the wraparound effect which the time.clock() function has.
+
+ Under Windows the return value is in wall clock seconds. See the
+ documentation for the time module for more details."""
+
+ reps = int(reps)
+ assert reps >=1, 'reps must be >= 1'
+ if reps==1:
+ start = clock()
+ out = func(*args,**kw)
+ tot_time = clock()-start
+ else:
+ rng = xrange(reps-1) # the last time is executed separately to store output
+ start = clock()
+ for dummy in rng: func(*args,**kw)
+ out = func(*args,**kw) # one last time
+ tot_time = clock()-start
+ av_time = tot_time / reps
+ return tot_time,av_time,out
+
+
+def timings(reps,func,*args,**kw):
+ """timings(reps,func,*args,**kw) -> (t_total,t_per_call)
+
+ Execute a function reps times, return a tuple with the elapsed total CPU
+ time in seconds and the time per call. These are just the first two values
+ in timings_out()."""
+
+ return timings_out(reps,func,*args,**kw)[0:2]
+
+
+def timing(func,*args,**kw):
+ """timing(func,*args,**kw) -> t_total
+
+ Execute a function once, return the elapsed total CPU time in
+ seconds. This is just the first value in timings_out()."""
+
+ return timings_out(1,func,*args,**kw)[0]
+
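
A brief usage sketch (not part of the diff) for the timing helpers above; per
the module docstrings, the figures come from resource.getrusage on Unix and
from time.clock on Windows.

    from IPython.utils.timing import clock2, timings, timings_out

    def work():
        return sum(i * i for i in range(100000))

    t_user, t_sys = clock2()              # user/system CPU time consumed so far
    total, per_call = timings(20, work)   # total CPU time for 20 calls, and per call
    total, per_call, result = timings_out(20, work)  # same, plus the last return value
    print(total, per_call, result)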
diff --git a/contrib/python/ipython/py2/IPython/utils/tokenize2.py b/contrib/python/ipython/py2/IPython/utils/tokenize2.py
index 7e60a8a629..cbb5292e5a 100644
--- a/contrib/python/ipython/py2/IPython/utils/tokenize2.py
+++ b/contrib/python/ipython/py2/IPython/utils/tokenize2.py
@@ -1,9 +1,9 @@
-"""Load our patched versions of tokenize.
-"""
-
-import sys
-
-if sys.version_info[0] >= 3:
- from ._tokenize_py3 import *
-else:
- from ._tokenize_py2 import *
+"""Load our patched versions of tokenize.
+"""
+
+import sys
+
+if sys.version_info[0] >= 3:
+ from ._tokenize_py3 import *
+else:
+ from ._tokenize_py2 import *
diff --git a/contrib/python/ipython/py2/IPython/utils/tokenutil.py b/contrib/python/ipython/py2/IPython/utils/tokenutil.py
index 940da98d3d..f52d3b7658 100644
--- a/contrib/python/ipython/py2/IPython/utils/tokenutil.py
+++ b/contrib/python/ipython/py2/IPython/utils/tokenutil.py
@@ -1,128 +1,128 @@
-"""Token-related utilities"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import absolute_import, print_function
-
-from collections import namedtuple
-from io import StringIO
-from keyword import iskeyword
-
-from . import tokenize2
-from .py3compat import cast_unicode_py2
-
-Token = namedtuple('Token', ['token', 'text', 'start', 'end', 'line'])
-
-def generate_tokens(readline):
- """wrap generate_tokens to catch EOF errors"""
- try:
- for token in tokenize2.generate_tokens(readline):
- yield token
- except tokenize2.TokenError:
- # catch EOF error
- return
-
-def line_at_cursor(cell, cursor_pos=0):
- """Return the line in a cell at a given cursor position
-
- Used for calling line-based APIs that don't support multi-line input, yet.
-
- Parameters
- ----------
-
+"""Token-related utilities"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import absolute_import, print_function
+
+from collections import namedtuple
+from io import StringIO
+from keyword import iskeyword
+
+from . import tokenize2
+from .py3compat import cast_unicode_py2
+
+Token = namedtuple('Token', ['token', 'text', 'start', 'end', 'line'])
+
+def generate_tokens(readline):
+ """wrap generate_tokens to catch EOF errors"""
+ try:
+ for token in tokenize2.generate_tokens(readline):
+ yield token
+ except tokenize2.TokenError:
+ # catch EOF error
+ return
+
+def line_at_cursor(cell, cursor_pos=0):
+ """Return the line in a cell at a given cursor position
+
+ Used for calling line-based APIs that don't support multi-line input, yet.
+
+ Parameters
+ ----------
+
cell: str
- multiline block of text
- cursor_pos: integer
- the cursor position
-
- Returns
- -------
-
- (line, offset): (text, integer)
- The line with the current cursor, and the character offset of the start of the line.
- """
- offset = 0
- lines = cell.splitlines(True)
- for line in lines:
- next_offset = offset + len(line)
- if next_offset >= cursor_pos:
- break
- offset = next_offset
- else:
- line = ""
- return (line, offset)
-
-def token_at_cursor(cell, cursor_pos=0):
- """Get the token at a given cursor
-
- Used for introspection.
-
- Function calls are prioritized, so the token for the callable will be returned
- if the cursor is anywhere inside the call.
-
- Parameters
- ----------
-
- cell : unicode
- A block of Python code
- cursor_pos : int
- The location of the cursor in the block where the token should be found
- """
- cell = cast_unicode_py2(cell)
- names = []
- tokens = []
- call_names = []
-
- offsets = {1: 0} # lines start at 1
- for tup in generate_tokens(StringIO(cell).readline):
-
- tok = Token(*tup)
-
- # token, text, start, end, line = tup
- start_line, start_col = tok.start
- end_line, end_col = tok.end
- if end_line + 1 not in offsets:
- # keep track of offsets for each line
- lines = tok.line.splitlines(True)
- for lineno, line in zip(range(start_line + 1, end_line + 2), lines):
- if lineno not in offsets:
- offsets[lineno] = offsets[lineno-1] + len(line)
-
- offset = offsets[start_line]
- # allow '|foo' to find 'foo' at the beginning of a line
- boundary = cursor_pos + 1 if start_col == 0 else cursor_pos
- if offset + start_col >= boundary:
- # current token starts after the cursor,
- # don't consume it
- break
-
- if tok.token == tokenize2.NAME and not iskeyword(tok.text):
- if names and tokens and tokens[-1].token == tokenize2.OP and tokens[-1].text == '.':
- names[-1] = "%s.%s" % (names[-1], tok.text)
- else:
- names.append(tok.text)
- elif tok.token == tokenize2.OP:
- if tok.text == '=' and names:
- # don't inspect the lhs of an assignment
- names.pop(-1)
- if tok.text == '(' and names:
- # if we are inside a function call, inspect the function
- call_names.append(names[-1])
- elif tok.text == ')' and call_names:
- call_names.pop(-1)
-
- tokens.append(tok)
-
- if offsets[end_line] + end_col > cursor_pos:
- # we found the cursor, stop reading
- break
-
- if call_names:
- return call_names[-1]
- elif names:
- return names[-1]
- else:
- return ''
-
-
+ multiline block of text
+ cursor_pos: integer
+ the cursor position
+
+ Returns
+ -------
+
+ (line, offset): (text, integer)
+ The line with the current cursor, and the character offset of the start of the line.
+ """
+ offset = 0
+ lines = cell.splitlines(True)
+ for line in lines:
+ next_offset = offset + len(line)
+ if next_offset >= cursor_pos:
+ break
+ offset = next_offset
+ else:
+ line = ""
+ return (line, offset)
+
+def token_at_cursor(cell, cursor_pos=0):
+ """Get the token at a given cursor
+
+ Used for introspection.
+
+ Function calls are prioritized, so the token for the callable will be returned
+ if the cursor is anywhere inside the call.
+
+ Parameters
+ ----------
+
+ cell : unicode
+ A block of Python code
+ cursor_pos : int
+ The location of the cursor in the block where the token should be found
+ """
+ cell = cast_unicode_py2(cell)
+ names = []
+ tokens = []
+ call_names = []
+
+ offsets = {1: 0} # lines start at 1
+ for tup in generate_tokens(StringIO(cell).readline):
+
+ tok = Token(*tup)
+
+ # token, text, start, end, line = tup
+ start_line, start_col = tok.start
+ end_line, end_col = tok.end
+ if end_line + 1 not in offsets:
+ # keep track of offsets for each line
+ lines = tok.line.splitlines(True)
+ for lineno, line in zip(range(start_line + 1, end_line + 2), lines):
+ if lineno not in offsets:
+ offsets[lineno] = offsets[lineno-1] + len(line)
+
+ offset = offsets[start_line]
+ # allow '|foo' to find 'foo' at the beginning of a line
+ boundary = cursor_pos + 1 if start_col == 0 else cursor_pos
+ if offset + start_col >= boundary:
+ # current token starts after the cursor,
+ # don't consume it
+ break
+
+ if tok.token == tokenize2.NAME and not iskeyword(tok.text):
+ if names and tokens and tokens[-1].token == tokenize2.OP and tokens[-1].text == '.':
+ names[-1] = "%s.%s" % (names[-1], tok.text)
+ else:
+ names.append(tok.text)
+ elif tok.token == tokenize2.OP:
+ if tok.text == '=' and names:
+ # don't inspect the lhs of an assignment
+ names.pop(-1)
+ if tok.text == '(' and names:
+ # if we are inside a function call, inspect the function
+ call_names.append(names[-1])
+ elif tok.text == ')' and call_names:
+ call_names.pop(-1)
+
+ tokens.append(tok)
+
+ if offsets[end_line] + end_col > cursor_pos:
+ # we found the cursor, stop reading
+ break
+
+ if call_names:
+ return call_names[-1]
+ elif names:
+ return names[-1]
+ else:
+ return ''
+
+
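
A usage sketch (not part of the diff) for the token utilities: when the cursor
sits anywhere inside a call, token_at_cursor returns the callable, which is what
the inspection machinery wants.

    from IPython.utils.tokenutil import line_at_cursor, token_at_cursor

    cell = u"import os\nos.path.join('a', 'b')\n"
    pos = cell.index("'a'")                # cursor placed inside the call
    print(token_at_cursor(cell, pos))      # 'os.path.join' -- the callable is prioritized
    line, offset = line_at_cursor(cell, pos)
    print(repr(line), offset)              # the second source line, starting at offset 10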
diff --git a/contrib/python/ipython/py2/IPython/utils/traitlets.py b/contrib/python/ipython/py2/IPython/utils/traitlets.py
index 0ff664fb5c..b4ff7a2689 100644
--- a/contrib/python/ipython/py2/IPython/utils/traitlets.py
+++ b/contrib/python/ipython/py2/IPython/utils/traitlets.py
@@ -1,7 +1,7 @@
-from __future__ import absolute_import
-
-from warnings import warn
-
-warn("IPython.utils.traitlets has moved to a top-level traitlets package.")
-
-from traitlets import *
+from __future__ import absolute_import
+
+from warnings import warn
+
+warn("IPython.utils.traitlets has moved to a top-level traitlets package.")
+
+from traitlets import *
diff --git a/contrib/python/ipython/py2/IPython/utils/tz.py b/contrib/python/ipython/py2/IPython/utils/tz.py
index 14172b2f4a..b315d532d1 100644
--- a/contrib/python/ipython/py2/IPython/utils/tz.py
+++ b/contrib/python/ipython/py2/IPython/utils/tz.py
@@ -1,46 +1,46 @@
-# encoding: utf-8
-"""
-Timezone utilities
-
-Just UTC-awareness right now
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2013 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-from datetime import tzinfo, timedelta, datetime
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-# constant for zero offset
-ZERO = timedelta(0)
-
-class tzUTC(tzinfo):
- """tzinfo object for UTC (zero offset)"""
-
- def utcoffset(self, d):
- return ZERO
-
- def dst(self, d):
- return ZERO
-
-UTC = tzUTC()
-
-def utc_aware(unaware):
- """decorator for adding UTC tzinfo to datetime's utcfoo methods"""
- def utc_method(*args, **kwargs):
- dt = unaware(*args, **kwargs)
- return dt.replace(tzinfo=UTC)
- return utc_method
-
-utcfromtimestamp = utc_aware(datetime.utcfromtimestamp)
-utcnow = utc_aware(datetime.utcnow)
+# encoding: utf-8
+"""
+Timezone utilities
+
+Just UTC-awareness right now
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2013 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+from datetime import tzinfo, timedelta, datetime
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+# constant for zero offset
+ZERO = timedelta(0)
+
+class tzUTC(tzinfo):
+ """tzinfo object for UTC (zero offset)"""
+
+ def utcoffset(self, d):
+ return ZERO
+
+ def dst(self, d):
+ return ZERO
+
+UTC = tzUTC()
+
+def utc_aware(unaware):
+ """decorator for adding UTC tzinfo to datetime's utcfoo methods"""
+ def utc_method(*args, **kwargs):
+ dt = unaware(*args, **kwargs)
+ return dt.replace(tzinfo=UTC)
+ return utc_method
+
+utcfromtimestamp = utc_aware(datetime.utcfromtimestamp)
+utcnow = utc_aware(datetime.utcnow)
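
A usage sketch (not part of the diff): the wrappers return timezone-aware
datetimes, so they can be compared and serialized without naive/aware mix-ups.

    from IPython.utils.tz import UTC, utcfromtimestamp, utcnow

    now = utcnow()
    print(now.tzinfo is UTC)      # True -- tzinfo attached by the utc_aware wrapper
    print(utcfromtimestamp(0))    # 1970-01-01 00:00:00+00:00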
diff --git a/contrib/python/ipython/py2/IPython/utils/ulinecache.py b/contrib/python/ipython/py2/IPython/utils/ulinecache.py
index 886454c267..f53b0dde69 100644
--- a/contrib/python/ipython/py2/IPython/utils/ulinecache.py
+++ b/contrib/python/ipython/py2/IPython/utils/ulinecache.py
@@ -1,45 +1,45 @@
-"""Wrapper around linecache which decodes files to unicode according to PEP 263.
-
-This is only needed for Python 2 - linecache in Python 3 does the same thing
-itself.
-"""
-import functools
-import linecache
-import sys
-
-from IPython.utils import py3compat
-from IPython.utils import openpy
-
-if py3compat.PY3:
- getline = linecache.getline
-
- # getlines has to be looked up at runtime, because doctests monkeypatch it.
- @functools.wraps(linecache.getlines)
- def getlines(filename, module_globals=None):
- return linecache.getlines(filename, module_globals=module_globals)
-
-else:
- def getlines(filename, module_globals=None):
- """Get the lines (as unicode) for a file from the cache.
- Update the cache if it doesn't contain an entry for this file already."""
- filename = py3compat.cast_bytes(filename, sys.getfilesystemencoding())
- lines = linecache.getlines(filename, module_globals=module_globals)
-
- # The bits we cache ourselves can be unicode.
- if (not lines) or isinstance(lines[0], py3compat.unicode_type):
- return lines
-
- readline = openpy._list_readline(lines)
- try:
- encoding, _ = openpy.detect_encoding(readline)
- except SyntaxError:
- encoding = 'ascii'
- return [l.decode(encoding, 'replace') for l in lines]
-
- # This is a straight copy of linecache.getline
- def getline(filename, lineno, module_globals=None):
- lines = getlines(filename, module_globals)
- if 1 <= lineno <= len(lines):
- return lines[lineno-1]
- else:
- return ''
+"""Wrapper around linecache which decodes files to unicode according to PEP 263.
+
+This is only needed for Python 2 - linecache in Python 3 does the same thing
+itself.
+"""
+import functools
+import linecache
+import sys
+
+from IPython.utils import py3compat
+from IPython.utils import openpy
+
+if py3compat.PY3:
+ getline = linecache.getline
+
+ # getlines has to be looked up at runtime, because doctests monkeypatch it.
+ @functools.wraps(linecache.getlines)
+ def getlines(filename, module_globals=None):
+ return linecache.getlines(filename, module_globals=module_globals)
+
+else:
+ def getlines(filename, module_globals=None):
+ """Get the lines (as unicode) for a file from the cache.
+ Update the cache if it doesn't contain an entry for this file already."""
+ filename = py3compat.cast_bytes(filename, sys.getfilesystemencoding())
+ lines = linecache.getlines(filename, module_globals=module_globals)
+
+ # The bits we cache ourselves can be unicode.
+ if (not lines) or isinstance(lines[0], py3compat.unicode_type):
+ return lines
+
+ readline = openpy._list_readline(lines)
+ try:
+ encoding, _ = openpy.detect_encoding(readline)
+ except SyntaxError:
+ encoding = 'ascii'
+ return [l.decode(encoding, 'replace') for l in lines]
+
+ # This is a straight copy of linecache.getline
+ def getline(filename, lineno, module_globals=None):
+ lines = getlines(filename, module_globals)
+ if 1 <= lineno <= len(lines):
+ return lines[lineno-1]
+ else:
+ return ''
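
A usage sketch (not part of the diff): ulinecache mirrors linecache but always
hands back unicode, decoding Python 2 source files according to their PEP 263
coding declaration. It assumes the snippet runs as a script, so __file__ is
defined.

    from IPython.utils import ulinecache

    # Read the first line of this script through the cache; the result is
    # unicode on both Python 2 and Python 3.
    first_line = ulinecache.getline(__file__, 1)
    print(repr(first_line))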
diff --git a/contrib/python/ipython/py2/IPython/utils/version.py b/contrib/python/ipython/py2/IPython/utils/version.py
index 3d1018f7bd..1de0047e6b 100644
--- a/contrib/python/ipython/py2/IPython/utils/version.py
+++ b/contrib/python/ipython/py2/IPython/utils/version.py
@@ -1,36 +1,36 @@
-# encoding: utf-8
-"""
-Utilities for version comparison
-
-It is a bit ridiculous that we need these.
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (C) 2013 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
-
-from distutils.version import LooseVersion
-
-#-----------------------------------------------------------------------------
-# Code
-#-----------------------------------------------------------------------------
-
-def check_version(v, check):
- """check version string v >= check
-
- If dev/prerelease tags result in TypeError for string-number comparison,
- it is assumed that the dependency is satisfied.
- Users on dev branches are responsible for keeping their own packages up to date.
- """
- try:
- return LooseVersion(v) >= LooseVersion(check)
- except TypeError:
- return True
-
+# encoding: utf-8
+"""
+Utilities for version comparison
+
+It is a bit ridiculous that we need these.
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2013 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+from distutils.version import LooseVersion
+
+#-----------------------------------------------------------------------------
+# Code
+#-----------------------------------------------------------------------------
+
+def check_version(v, check):
+ """check version string v >= check
+
+ If dev/prerelease tags result in TypeError for string-number comparison,
+ it is assumed that the dependency is satisfied.
+ Users on dev branches are responsible for keeping their own packages up to date.
+ """
+ try:
+ return LooseVersion(v) >= LooseVersion(check)
+ except TypeError:
+ return True
+
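
A usage sketch (not part of the diff) for check_version; comparisons that raise
TypeError because of dev/prerelease tags are treated as satisfied.

    from IPython.utils.version import check_version

    print(check_version('5.1.0', '4.2'))   # True
    print(check_version('1.0', '2.0'))     # False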
diff --git a/contrib/python/ipython/py2/IPython/utils/warn.py b/contrib/python/ipython/py2/IPython/utils/warn.py
index 831e4265ac..dd4852227b 100644
--- a/contrib/python/ipython/py2/IPython/utils/warn.py
+++ b/contrib/python/ipython/py2/IPython/utils/warn.py
@@ -1,65 +1,65 @@
-# encoding: utf-8
-"""
-Utilities for warnings. Shouldn't we just use the built-in warnings module?
-"""
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
-
-from __future__ import print_function
-
-import sys
+# encoding: utf-8
+"""
+Utilities for warnings. Shouldn't we just use the built-in warnings module?
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from __future__ import print_function
+
+import sys
import warnings
-
+
warnings.warn("The module IPython.utils.warn is deprecated since IPython 4.0, use the standard warnings module instead", DeprecationWarning)
-
-def warn(msg,level=2,exit_val=1):
+
+def warn(msg,level=2,exit_val=1):
"""Deprecated
-
+
Standard warning printer. Gives formatting consistency.
-
+
Output is sent to sys.stderr.
- Options:
-
- -level(2): allows finer control:
- 0 -> Do nothing, dummy function.
- 1 -> Print message.
- 2 -> Print 'WARNING:' + message. (Default level).
- 3 -> Print 'ERROR:' + message.
- 4 -> Print 'FATAL ERROR:' + message and trigger a sys.exit(exit_val).
-
- -exit_val (1): exit value returned by sys.exit() for a level 4
- warning. Ignored for all other levels."""
+ Options:
+
+ -level(2): allows finer control:
+ 0 -> Do nothing, dummy function.
+ 1 -> Print message.
+ 2 -> Print 'WARNING:' + message. (Default level).
+ 3 -> Print 'ERROR:' + message.
+ 4 -> Print 'FATAL ERROR:' + message and trigger a sys.exit(exit_val).
+
+ -exit_val (1): exit value returned by sys.exit() for a level 4
+ warning. Ignored for all other levels."""
warnings.warn("The module IPython.utils.warn is deprecated since IPython 4.0, use the standard warnings module instead", DeprecationWarning)
- if level>0:
- header = ['','','WARNING: ','ERROR: ','FATAL ERROR: ']
+ if level>0:
+ header = ['','','WARNING: ','ERROR: ','FATAL ERROR: ']
print(header[level], msg, sep='', file=sys.stderr)
- if level == 4:
+ if level == 4:
print('Exiting.\n', file=sys.stderr)
- sys.exit(exit_val)
-
-
-def info(msg):
+ sys.exit(exit_val)
+
+
+def info(msg):
"""Deprecated
Equivalent to warn(msg,level=1)."""
-
- warn(msg,level=1)
-
-
-def error(msg):
+
+ warn(msg,level=1)
+
+
+def error(msg):
"""Deprecated
Equivalent to warn(msg,level=3)."""
-
- warn(msg,level=3)
-
-
-def fatal(msg,exit_val=1):
+
+ warn(msg,level=3)
+
+
+def fatal(msg,exit_val=1):
"""Deprecated
Equivalent to warn(msg,exit_val=exit_val,level=4)."""
-
- warn(msg,exit_val=exit_val,level=4)
+
+ warn(msg,exit_val=exit_val,level=4)
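
A usage sketch (not part of the diff): the deprecated wrappers only prefix the
message and write it to sys.stderr (emitting a DeprecationWarning along the
way); new code should call the standard warnings module directly.

    from IPython.utils.warn import error, warn

    warn('disk almost full')    # writes 'WARNING: disk almost full' to stderr
    error('disk is full')       # writes 'ERROR: disk is full' to stderr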
diff --git a/contrib/python/ipython/py2/IPython/utils/wildcard.py b/contrib/python/ipython/py2/IPython/utils/wildcard.py
index f8e895752c..d22491bd96 100644
--- a/contrib/python/ipython/py2/IPython/utils/wildcard.py
+++ b/contrib/python/ipython/py2/IPython/utils/wildcard.py
@@ -1,112 +1,112 @@
-# -*- coding: utf-8 -*-
-"""Support for wildcard pattern matching in object inspection.
-
-Authors
--------
-- Jörgen Stenarson <jorgen.stenarson@bostream.nu>
-- Thomas Kluyver
-"""
-
-#*****************************************************************************
-# Copyright (C) 2005 Jörgen Stenarson <jorgen.stenarson@bostream.nu>
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#*****************************************************************************
-
-import re
-import types
-
-from IPython.utils.dir2 import dir2
-from .py3compat import iteritems
-
-def create_typestr2type_dicts(dont_include_in_type2typestr=["lambda"]):
- """Return dictionaries mapping lower case typename (e.g. 'tuple') to type
- objects from the types package, and vice versa."""
- typenamelist = [tname for tname in dir(types) if tname.endswith("Type")]
- typestr2type, type2typestr = {}, {}
-
- for tname in typenamelist:
- name = tname[:-4].lower() # Cut 'Type' off the end of the name
- obj = getattr(types, tname)
- typestr2type[name] = obj
- if name not in dont_include_in_type2typestr:
- type2typestr[obj] = name
- return typestr2type, type2typestr
-
-typestr2type, type2typestr = create_typestr2type_dicts()
-
-def is_type(obj, typestr_or_type):
- """is_type(obj, typestr_or_type) verifies if obj is of a certain type. It
- can take strings or actual python types for the second argument, i.e.
- 'tuple'<->TupleType. 'all' matches all types.
-
- TODO: Should be extended for choosing more than one type."""
- if typestr_or_type == "all":
- return True
- if type(typestr_or_type) == type:
- test_type = typestr_or_type
- else:
- test_type = typestr2type.get(typestr_or_type, False)
- if test_type:
- return isinstance(obj, test_type)
- return False
-
-def show_hidden(str, show_all=False):
- """Return true for strings starting with single _ if show_all is true."""
- return show_all or str.startswith("__") or not str.startswith("_")
-
-def dict_dir(obj):
- """Produce a dictionary of an object's attributes. Builds on dir2 by
- checking that a getattr() call actually succeeds."""
- ns = {}
- for key in dir2(obj):
- # This seemingly unnecessary try/except is actually needed
- # because there is code out there with metaclasses that
- # create 'write only' attributes, where a getattr() call
- # will fail even if the attribute appears listed in the
- # object's dictionary. Properties can actually do the same
- # thing. In particular, Traits use this pattern
- try:
- ns[key] = getattr(obj, key)
- except AttributeError:
- pass
- return ns
-
-def filter_ns(ns, name_pattern="*", type_pattern="all", ignore_case=True,
- show_all=True):
- """Filter a namespace dictionary by name pattern and item type."""
- pattern = name_pattern.replace("*",".*").replace("?",".")
- if ignore_case:
- reg = re.compile(pattern+"$", re.I)
- else:
- reg = re.compile(pattern+"$")
-
- # Check each one matches regex; shouldn't be hidden; of correct type.
- return dict((key,obj) for key, obj in iteritems(ns) if reg.match(key) \
- and show_hidden(key, show_all) \
- and is_type(obj, type_pattern) )
-
-def list_namespace(namespace, type_pattern, filter, ignore_case=False, show_all=False):
- """Return dictionary of all objects in a namespace dictionary that match
- type_pattern and filter."""
- pattern_list=filter.split(".")
- if len(pattern_list) == 1:
- return filter_ns(namespace, name_pattern=pattern_list[0],
- type_pattern=type_pattern,
- ignore_case=ignore_case, show_all=show_all)
- else:
- # This is where we can change if all objects should be searched or
- # only modules. Just change the type_pattern to module to search only
- # modules
- filtered = filter_ns(namespace, name_pattern=pattern_list[0],
- type_pattern="all",
- ignore_case=ignore_case, show_all=show_all)
- results = {}
- for name, obj in iteritems(filtered):
- ns = list_namespace(dict_dir(obj), type_pattern,
- ".".join(pattern_list[1:]),
- ignore_case=ignore_case, show_all=show_all)
- for inner_name, inner_obj in iteritems(ns):
- results["%s.%s"%(name,inner_name)] = inner_obj
- return results
+# -*- coding: utf-8 -*-
+"""Support for wildcard pattern matching in object inspection.
+
+Authors
+-------
+- Jörgen Stenarson <jorgen.stenarson@bostream.nu>
+- Thomas Kluyver
+"""
+
+#*****************************************************************************
+# Copyright (C) 2005 Jörgen Stenarson <jorgen.stenarson@bostream.nu>
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#*****************************************************************************
+
+import re
+import types
+
+from IPython.utils.dir2 import dir2
+from .py3compat import iteritems
+
+def create_typestr2type_dicts(dont_include_in_type2typestr=["lambda"]):
+ """Return dictionaries mapping lower case typename (e.g. 'tuple') to type
+ objects from the types package, and vice versa."""
+ typenamelist = [tname for tname in dir(types) if tname.endswith("Type")]
+ typestr2type, type2typestr = {}, {}
+
+ for tname in typenamelist:
+ name = tname[:-4].lower() # Cut 'Type' off the end of the name
+ obj = getattr(types, tname)
+ typestr2type[name] = obj
+ if name not in dont_include_in_type2typestr:
+ type2typestr[obj] = name
+ return typestr2type, type2typestr
+
+typestr2type, type2typestr = create_typestr2type_dicts()
+
+def is_type(obj, typestr_or_type):
+ """is_type(obj, typestr_or_type) verifies if obj is of a certain type. It
+ can take strings or actual python types for the second argument, i.e.
+ 'tuple'<->TupleType. 'all' matches all types.
+
+ TODO: Should be extended for choosing more than one type."""
+ if typestr_or_type == "all":
+ return True
+ if type(typestr_or_type) == type:
+ test_type = typestr_or_type
+ else:
+ test_type = typestr2type.get(typestr_or_type, False)
+ if test_type:
+ return isinstance(obj, test_type)
+ return False
+
+def show_hidden(str, show_all=False):
+ """Return true for strings starting with single _ if show_all is true."""
+ return show_all or str.startswith("__") or not str.startswith("_")
+
+def dict_dir(obj):
+ """Produce a dictionary of an object's attributes. Builds on dir2 by
+ checking that a getattr() call actually succeeds."""
+ ns = {}
+ for key in dir2(obj):
+ # This seemingly unnecessary try/except is actually needed
+ # because there is code out there with metaclasses that
+ # create 'write only' attributes, where a getattr() call
+ # will fail even if the attribute appears listed in the
+ # object's dictionary. Properties can actually do the same
+ # thing. In particular, Traits use this pattern
+ try:
+ ns[key] = getattr(obj, key)
+ except AttributeError:
+ pass
+ return ns
+
+def filter_ns(ns, name_pattern="*", type_pattern="all", ignore_case=True,
+ show_all=True):
+ """Filter a namespace dictionary by name pattern and item type."""
+ pattern = name_pattern.replace("*",".*").replace("?",".")
+ if ignore_case:
+ reg = re.compile(pattern+"$", re.I)
+ else:
+ reg = re.compile(pattern+"$")
+
+ # Check each one matches regex; shouldn't be hidden; of correct type.
+ return dict((key,obj) for key, obj in iteritems(ns) if reg.match(key) \
+ and show_hidden(key, show_all) \
+ and is_type(obj, type_pattern) )
+
+def list_namespace(namespace, type_pattern, filter, ignore_case=False, show_all=False):
+ """Return dictionary of all objects in a namespace dictionary that match
+ type_pattern and filter."""
+ pattern_list=filter.split(".")
+ if len(pattern_list) == 1:
+ return filter_ns(namespace, name_pattern=pattern_list[0],
+ type_pattern=type_pattern,
+ ignore_case=ignore_case, show_all=show_all)
+ else:
+ # This is where we can change if all objects should be searched or
+ # only modules. Just change the type_pattern to module to search only
+ # modules
+ filtered = filter_ns(namespace, name_pattern=pattern_list[0],
+ type_pattern="all",
+ ignore_case=ignore_case, show_all=show_all)
+ results = {}
+ for name, obj in iteritems(filtered):
+ ns = list_namespace(dict_dir(obj), type_pattern,
+ ".".join(pattern_list[1:]),
+ ignore_case=ignore_case, show_all=show_all)
+ for inner_name, inner_obj in iteritems(ns):
+ results["%s.%s"%(name,inner_name)] = inner_obj
+ return results
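
Finally, a usage sketch (not part of the diff) for the wildcard matcher, which
backs %psearch-style lookups; the exact hits depend on the running interpreter.

    import os

    from IPython.utils.wildcard import list_namespace

    ns = {'os': os}
    hits = list_namespace(ns, 'function', 'os.path.is*',
                          ignore_case=True, show_all=False)
    print(sorted(hits))   # e.g. ['os.path.isabs', 'os.path.isdir', 'os.path.isfile', ...]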