author     smosker <smosker@yandex-team.ru>  2022-02-10 16:48:21 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:48:21 +0300
commit     dd14d17a747a9c259858faf2fcc3ea6b92df4e15 (patch)
tree       f332cd81782832c17c48d8c3b4511924cd9e47fd
parent     b637e2fa3213638fbabe52c15dad14c8237945ac (diff)
download   ydb-dd14d17a747a9c259858faf2fcc3ea6b92df4e15.tar.gz
Restoring authorship annotation for <smosker@yandex-team.ru>. Commit 1 of 2.
-rw-r--r--  build/rules/contrib_deps.policy  2
-rw-r--r--  build/rules/contrib_python.policy  10
-rw-r--r--  build/ya.conf.json  2
-rw-r--r--  contrib/python/attrs/attr/__init__.py  22
-rw-r--r--  contrib/python/attrs/attr/_compat.py  52
-rw-r--r--  contrib/python/attrs/attr/_funcs.py  208
-rw-r--r--  contrib/python/attrs/attr/_make.py  916
-rw-r--r--  contrib/python/attrs/attr/converters.py  102
-rw-r--r--  contrib/python/attrs/attr/exceptions.py  16
-rw-r--r--  contrib/python/attrs/attr/validators.py  62
-rw-r--r--  contrib/python/ipython/py2/IPython/config.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/core/application.py  18
-rw-r--r--  contrib/python/ipython/py2/IPython/core/compilerop.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/core/completer.py  58
-rw-r--r--  contrib/python/ipython/py2/IPython/core/completerlib.py  12
-rw-r--r--  contrib/python/ipython/py2/IPython/core/crashhandler.py  20
-rw-r--r--  contrib/python/ipython/py2/IPython/core/debugger.py  50
-rw-r--r--  contrib/python/ipython/py2/IPython/core/display.py  612
-rw-r--r--  contrib/python/ipython/py2/IPython/core/displayhook.py  30
-rw-r--r--  contrib/python/ipython/py2/IPython/core/displaypub.py  56
-rw-r--r--  contrib/python/ipython/py2/IPython/core/excolors.py  18
-rw-r--r--  contrib/python/ipython/py2/IPython/core/formatters.py  204
-rw-r--r--  contrib/python/ipython/py2/IPython/core/history.py  6
-rw-r--r--  contrib/python/ipython/py2/IPython/core/hooks.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/core/inputtransformer.py  24
-rw-r--r--  contrib/python/ipython/py2/IPython/core/interactiveshell.py  68
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/basic.py  48
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/execution.py  28
-rw-r--r--  contrib/python/ipython/py2/IPython/core/magics/script.py  16
-rw-r--r--  contrib/python/ipython/py2/IPython/core/oinspect.py  14
-rw-r--r--  contrib/python/ipython/py2/IPython/core/pylabtools.py  70
-rw-r--r--  contrib/python/ipython/py2/IPython/core/release.py  4
-rw-r--r--  contrib/python/ipython/py2/IPython/core/shellapp.py  16
-rw-r--r--  contrib/python/ipython/py2/IPython/core/ultratb.py  16
-rw-r--r--  contrib/python/ipython/py2/IPython/core/usage.py  28
-rw-r--r--  contrib/python/ipython/py2/IPython/extensions/autoreload.py  4
-rw-r--r--  contrib/python/ipython/py2/IPython/external/qt_for_kernel.py  8
-rw-r--r--  contrib/python/ipython/py2/IPython/external/qt_loaders.py  188
-rw-r--r--  contrib/python/ipython/py2/IPython/frontend.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/html.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/kernel/__init__.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/deepreload.py  4
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/demo.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/display.py  8
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/editorhooks.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/guisupport.py  36
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/inputhook.py  6
-rw-r--r--  contrib/python/ipython/py2/IPython/lib/pretty.py  76
-rw-r--r--  contrib/python/ipython/py2/IPython/nbconvert.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/nbformat.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/parallel.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/qt.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/sphinxext/ipython_directive.py  32
-rw-r--r--  contrib/python/ipython/py2/IPython/terminal/console.py  2
-rw-r--r--  contrib/python/ipython/py2/IPython/terminal/debugger.py  86
-rw-r--r--  contrib/python/ipython/py2/IPython/terminal/embed.py  194
-rw-r--r--  contrib/python/ipython/py2/IPython/terminal/interactiveshell.py  180
-rwxr-xr-x  contrib/python/ipython/py2/IPython/terminal/ipapp.py  26
-rw-r--r--  contrib/python/ipython/py2/IPython/terminal/magics.py  6
-rw-r--r--  contrib/python/ipython/py2/IPython/terminal/prompts.py  20
-rw-r--r--  contrib/python/ipython/py2/IPython/terminal/pt_inputhooks/__init__.py  14
-rw-r--r--  contrib/python/ipython/py2/IPython/terminal/pt_inputhooks/qt.py  14
-rw-r--r--  contrib/python/ipython/py2/IPython/terminal/ptutils.py  24
-rw-r--r--  contrib/python/ipython/py2/IPython/terminal/shortcuts.py  38
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/decorators.py  24
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/iptest.py  20
-rw-r--r--  contrib/python/ipython/py2/IPython/testing/tools.py  60
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/_get_terminal_size.py  262
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/_signatures.py  8
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/capture.py  72
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/io.py  12
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/path.py  16
-rw-r--r--  contrib/python/ipython/py2/IPython/utils/terminal.py  12
-rw-r--r--  contrib/python/packaging/py2/packaging/_structures.py  4
-rw-r--r--  contrib/python/packaging/py2/packaging/requirements.py  12
-rw-r--r--  contrib/python/packaging/py2/packaging/specifiers.py  4
-rw-r--r--  contrib/python/packaging/py2/packaging/utils.py  68
-rw-r--r--  contrib/python/packaging/py2/packaging/version.py  142
-rw-r--r--  contrib/python/packaging/py3/packaging/_structures.py  4
-rw-r--r--  contrib/python/packaging/py3/packaging/requirements.py  10
-rw-r--r--  contrib/python/packaging/py3/packaging/specifiers.py  4
-rw-r--r--  contrib/python/packaging/py3/packaging/utils.py  48
-rw-r--r--  contrib/python/packaging/py3/packaging/version.py  106
-rw-r--r--  contrib/python/pickleshare/pickleshare.py  86
-rw-r--r--  contrib/python/pickleshare/ya.make  2
-rw-r--r--  contrib/python/pycparser/pycparser/__init__.py  10
-rw-r--r--  contrib/python/pycparser/pycparser/_ast_gen.py  134
-rw-r--r--  contrib/python/pycparser/pycparser/_build_tables.py  2
-rw-r--r--  contrib/python/pycparser/pycparser/_c_ast.cfg  2
-rw-r--r--  contrib/python/pycparser/pycparser/ast_transforms.py  2
-rw-r--r--  contrib/python/pycparser/pycparser/c_ast.py  550
-rw-r--r--  contrib/python/pycparser/pycparser/c_generator.py  94
-rw-r--r--  contrib/python/pycparser/pycparser/c_lexer.py  8
-rw-r--r--  contrib/python/pycparser/pycparser/c_parser.py  600
-rw-r--r--  contrib/python/pycparser/pycparser/lextab.py  6
-rw-r--r--  contrib/python/pycparser/pycparser/ply/__init__.py  2
-rw-r--r--  contrib/python/pycparser/pycparser/ply/cpp.py  74
-rw-r--r--  contrib/python/pycparser/pycparser/ply/lex.py  30
-rw-r--r--  contrib/python/pycparser/pycparser/ply/yacc.py  136
-rw-r--r--  contrib/python/pycparser/pycparser/plyparser.py  156
-rw-r--r--  contrib/python/pycparser/pycparser/yacctab.py  116
-rw-r--r--  contrib/python/six/six.py  166
-rw-r--r--  contrib/python/six/ya.make  2
-rw-r--r--  contrib/python/traitlets/py2/traitlets/traitlets.py  8
-rw-r--r--  contrib/python/traitlets/py3/traitlets/traitlets.py  8
-rw-r--r--  contrib/python/ya.make  28
-rw-r--r--  library/python/ya.make  52
107 files changed, 3480 insertions, 3480 deletions
diff --git a/build/rules/contrib_deps.policy b/build/rules/contrib_deps.policy
index 9af4b85cc2..0ad9674460 100644
--- a/build/rules/contrib_deps.policy
+++ b/build/rules/contrib_deps.policy
@@ -49,7 +49,7 @@ ALLOW contrib/python/horovod/horovod/common/syms -> library/python/symbols/regis
ALLOW contrib/python/opensfm -> library/cpp/vl_feat
ALLOW contrib/python/Wand/ImageMagick -> library/python/symbols/registry
ALLOW contrib/nginx/modules/nginx-rtmp-module -> library/cpp/json
-ALLOW contrib/python/gino -> library/python/pyscopg2
+ALLOW contrib/python/gino -> library/python/pyscopg2
ALLOW contrib/python/psycopg2/tests -> mail/devpack
ALLOW contrib/python/asn1crypto -> library/python/symbols/crypto
ALLOW contrib/python/win_unicode_console -> library/python/symbols/win_unicode_console
diff --git a/build/rules/contrib_python.policy b/build/rules/contrib_python.policy
index 60fd149753..5a634d9cf7 100644
--- a/build/rules/contrib_python.policy
+++ b/build/rules/contrib_python.policy
@@ -17,8 +17,8 @@ ALLOW intranet/search/core -> contrib/python/django/django-1.11
ALLOW intranet/search/settings -> contrib/python/django/django-1.11
ALLOW intranet/sync_tools/tests -> contrib/python/django/django-1.11
ALLOW intranet/wiki -> contrib/python/django/django-1.11
-ALLOW intranet/plan/src -> contrib/python/django/django-1.11
-ALLOW intranet/plan -> contrib/python/django/django-1.11
+ALLOW intranet/plan/src -> contrib/python/django/django-1.11
+ALLOW intranet/plan -> contrib/python/django/django-1.11
ALLOW library/python/django_alive -> contrib/python/django/django-1.11
ALLOW library/python/django_celery_monitoring -> contrib/python/django/django-1.11
ALLOW library/python/django_mds -> contrib/python/django/django-1.11
@@ -202,7 +202,7 @@ DENY .* -> contrib/python/pyrepl
# because match with contrib/python/pycrypto
ALLOW .* -> contrib/python/pycryptodome
# pycrypto deprecated
-ALLOW intranet/domenator/src -> contrib/python/pycrypto
+ALLOW intranet/domenator/src -> contrib/python/pycrypto
ALLOW intranet/yandex_directory/src -> contrib/python/pycrypto
ALLOW addappter/backend -> contrib/python/pycrypto
ALLOW adfox -> contrib/python/pycrypto
@@ -376,8 +376,8 @@ ALLOW health/yamd/health_import -> contrib/python/sqlalchemy/sqlalchemy-1.2
ALLOW health/yamd/libs/data_layer -> contrib/python/sqlalchemy/sqlalchemy-1.2
ALLOW health/yamd/pills_postgresql_db/data_layer -> contrib/python/sqlalchemy/sqlalchemy-1.2
ALLOW infra/cores/app -> contrib/python/sqlalchemy/sqlalchemy-1.2
-ALLOW intranet/watcher/alembic -> contrib/python/sqlalchemy/sqlalchemy-1.2
-ALLOW intranet/watcher/src -> contrib/python/sqlalchemy/sqlalchemy-1.2
+ALLOW intranet/watcher/alembic -> contrib/python/sqlalchemy/sqlalchemy-1.2
+ALLOW intranet/watcher/src -> contrib/python/sqlalchemy/sqlalchemy-1.2
ALLOW intranet/domenator/migrations -> contrib/python/sqlalchemy/sqlalchemy-1.2
ALLOW intranet/trip/alembic -> contrib/python/sqlalchemy/sqlalchemy-1.2
ALLOW intranet/trip/src -> contrib/python/sqlalchemy/sqlalchemy-1.2
diff --git a/build/ya.conf.json b/build/ya.conf.json
index 5f7cc875d6..0542aa494e 100644
--- a/build/ya.conf.json
+++ b/build/ya.conf.json
@@ -5899,7 +5899,7 @@
"releaser": {
"formula": {
"sandbox_id": [
- 1026890210
+ 1026890210
],
"match": "releaser"
},
diff --git a/contrib/python/attrs/attr/__init__.py b/contrib/python/attrs/attr/__init__.py
index b1ce7fe248..c11a0ce6ef 100644
--- a/contrib/python/attrs/attr/__init__.py
+++ b/contrib/python/attrs/attr/__init__.py
@@ -9,15 +9,15 @@ from ._cmp import cmp_using
from ._config import get_run_validators, set_run_validators
from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
from ._make import (
- NOTHING,
- Attribute,
- Factory,
- attrib,
- attrs,
- fields,
- fields_dict,
- make_class,
- validate,
+ NOTHING,
+ Attribute,
+ Factory,
+ attrib,
+ attrs,
+ fields,
+ fields_dict,
+ make_class,
+ validate,
)
from ._version_info import VersionInfo
@@ -27,8 +27,8 @@ __version_info__ = VersionInfo._from_version_string(__version__)
__title__ = "attrs"
__description__ = "Classes Without Boilerplate"
-__url__ = "https://www.attrs.org/"
-__uri__ = __url__
+__url__ = "https://www.attrs.org/"
+__uri__ = __url__
__doc__ = __description__ + " <" + __uri__ + ">"
__author__ = "Hynek Schlawack"
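
The attr/__init__.py hunk above appears to change only indentation of the re-exported names; the API surface is untouched. For orientation, a minimal sketch (illustrative, not part of the patch; the class name "Point" is invented) of those re-exports in use:

    # Illustrative only; exercises names re-exported above (make_class, fields, NOTHING).
    import attr

    Point = attr.make_class("Point", ["x", "y"])  # build an attrs class dynamically
    p = Point(1, 2)
    assert [a.name for a in attr.fields(Point)] == ["x", "y"]
    assert attr.fields(Point)[0].default is attr.NOTHING  # no default was given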
diff --git a/contrib/python/attrs/attr/_compat.py b/contrib/python/attrs/attr/_compat.py
index 6939f338da..1d95958354 100644
--- a/contrib/python/attrs/attr/_compat.py
+++ b/contrib/python/attrs/attr/_compat.py
@@ -14,7 +14,7 @@ if PYPY or sys.version_info[:2] >= (3, 6):
ordered_dict = dict
else:
from collections import OrderedDict
-
+
ordered_dict = OrderedDict
@@ -51,45 +51,45 @@ if PY2:
def __setitem__(self, key, val):
# We gently pretend we're a Python 3 mappingproxy.
- raise TypeError(
- "'mappingproxy' object does not support item assignment"
- )
+ raise TypeError(
+ "'mappingproxy' object does not support item assignment"
+ )
def update(self, _):
# We gently pretend we're a Python 3 mappingproxy.
- raise AttributeError(
- "'mappingproxy' object has no attribute 'update'"
- )
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'update'"
+ )
def __delitem__(self, _):
# We gently pretend we're a Python 3 mappingproxy.
- raise TypeError(
- "'mappingproxy' object does not support item deletion"
- )
+ raise TypeError(
+ "'mappingproxy' object does not support item deletion"
+ )
def clear(self):
# We gently pretend we're a Python 3 mappingproxy.
- raise AttributeError(
- "'mappingproxy' object has no attribute 'clear'"
- )
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'clear'"
+ )
def pop(self, key, default=None):
# We gently pretend we're a Python 3 mappingproxy.
- raise AttributeError(
- "'mappingproxy' object has no attribute 'pop'"
- )
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'pop'"
+ )
def popitem(self):
# We gently pretend we're a Python 3 mappingproxy.
- raise AttributeError(
- "'mappingproxy' object has no attribute 'popitem'"
- )
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'popitem'"
+ )
def setdefault(self, key, default=None):
# We gently pretend we're a Python 3 mappingproxy.
- raise AttributeError(
- "'mappingproxy' object has no attribute 'setdefault'"
- )
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'setdefault'"
+ )
def __repr__(self):
# Override to be identical to the Python 3 version.
@@ -105,8 +105,8 @@ if PY2:
We only warn on Python 3 because we are not aware of any concrete
consequences of not setting the cell on Python 2.
"""
-
-
+
+
else: # Python 3 and later.
from collections.abc import Mapping, Sequence # noqa
@@ -144,10 +144,10 @@ def make_set_closure_cell():
# pypy makes this easy. (It also supports the logic below, but
# why not do the easy/fast thing?)
if PYPY:
-
+
def set_closure_cell(cell, value):
cell.__setstate__((value,))
-
+
return set_closure_cell
# Otherwise gotta do it the hard way.
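
The _compat.py hunk above re-wraps the error-raising bodies of the Python 2 shim that imitates Python 3's read-only mappingproxy. For reference, a short sketch (assuming CPython 3.x) of the behaviour being imitated:

    # Python 3 behaviour that the Py2 shim above mimics: the proxy is read-only.
    import types

    proxy = types.MappingProxyType({"a": 1})
    try:
        proxy["a"] = 2
    except TypeError as exc:
        print(exc)  # 'mappingproxy' object does not support item assignment
    assert not hasattr(proxy, "update")  # no mutating methods are exposed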
diff --git a/contrib/python/attrs/attr/_funcs.py b/contrib/python/attrs/attr/_funcs.py
index fda508c5c4..0073198f0f 100644
--- a/contrib/python/attrs/attr/_funcs.py
+++ b/contrib/python/attrs/attr/_funcs.py
@@ -7,14 +7,14 @@ from ._make import NOTHING, _obj_setattr, fields
from .exceptions import AttrsAttributeNotFoundError
-def asdict(
- inst,
- recurse=True,
- filter=None,
- dict_factory=dict,
- retain_collection_types=False,
+def asdict(
+ inst,
+ recurse=True,
+ filter=None,
+ dict_factory=dict,
+ retain_collection_types=False,
value_serializer=None,
-):
+):
"""
Return the ``attrs`` attribute values of *inst* as a dict.
@@ -59,49 +59,49 @@ def asdict(
if recurse is True:
if has(v.__class__):
- rv[a.name] = asdict(
+ rv[a.name] = asdict(
v,
True,
filter,
dict_factory,
retain_collection_types,
value_serializer,
- )
+ )
elif isinstance(v, (tuple, list, set, frozenset)):
cf = v.__class__ if retain_collection_types is True else list
- rv[a.name] = cf(
- [
- _asdict_anything(
+ rv[a.name] = cf(
+ [
+ _asdict_anything(
i,
filter,
dict_factory,
retain_collection_types,
value_serializer,
- )
- for i in v
- ]
- )
+ )
+ for i in v
+ ]
+ )
elif isinstance(v, dict):
df = dict_factory
- rv[a.name] = df(
- (
- _asdict_anything(
+ rv[a.name] = df(
+ (
+ _asdict_anything(
kk,
filter,
df,
retain_collection_types,
value_serializer,
- ),
- _asdict_anything(
+ ),
+ _asdict_anything(
vv,
filter,
df,
retain_collection_types,
value_serializer,
- ),
- )
- for kk, vv in iteritems(v)
- )
+ ),
+ )
+ for kk, vv in iteritems(v)
+ )
else:
rv[a.name] = v
else:
@@ -117,10 +117,10 @@ def _asdict_anything(
value_serializer,
):
"""
- ``asdict`` only works on attrs instances, this works on anything.
- """
- if getattr(val.__class__, "__attrs_attrs__", None) is not None:
- # Attrs class.
+ ``asdict`` only works on attrs instances, this works on anything.
+ """
+ if getattr(val.__class__, "__attrs_attrs__", None) is not None:
+ # Attrs class.
rv = asdict(
val,
True,
@@ -130,48 +130,48 @@ def _asdict_anything(
value_serializer,
)
elif isinstance(val, (tuple, list, set, frozenset)):
- cf = val.__class__ if retain_collection_types is True else list
- rv = cf(
- [
- _asdict_anything(
+ cf = val.__class__ if retain_collection_types is True else list
+ rv = cf(
+ [
+ _asdict_anything(
i,
filter,
dict_factory,
retain_collection_types,
value_serializer,
- )
- for i in val
- ]
- )
- elif isinstance(val, dict):
- df = dict_factory
- rv = df(
- (
+ )
+ for i in val
+ ]
+ )
+ elif isinstance(val, dict):
+ df = dict_factory
+ rv = df(
+ (
_asdict_anything(
kk, filter, df, retain_collection_types, value_serializer
),
_asdict_anything(
vv, filter, df, retain_collection_types, value_serializer
),
- )
- for kk, vv in iteritems(val)
- )
- else:
- rv = val
+ )
+ for kk, vv in iteritems(val)
+ )
+ else:
+ rv = val
if value_serializer is not None:
rv = value_serializer(None, None, rv)
- return rv
-
-
-def astuple(
- inst,
- recurse=True,
- filter=None,
- tuple_factory=tuple,
- retain_collection_types=False,
-):
- """
+ return rv
+
+
+def astuple(
+ inst,
+ recurse=True,
+ filter=None,
+ tuple_factory=tuple,
+ retain_collection_types=False,
+):
+ """
Return the ``attrs`` attribute values of *inst* as a tuple.
Optionally recurse into other ``attrs``-decorated classes.
@@ -206,56 +206,56 @@ def astuple(
continue
if recurse is True:
if has(v.__class__):
- rv.append(
- astuple(
- v,
- recurse=True,
- filter=filter,
- tuple_factory=tuple_factory,
- retain_collection_types=retain,
- )
- )
+ rv.append(
+ astuple(
+ v,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ )
elif isinstance(v, (tuple, list, set, frozenset)):
cf = v.__class__ if retain is True else list
- rv.append(
- cf(
- [
- astuple(
- j,
- recurse=True,
- filter=filter,
- tuple_factory=tuple_factory,
- retain_collection_types=retain,
- )
- if has(j.__class__)
- else j
- for j in v
- ]
- )
- )
+ rv.append(
+ cf(
+ [
+ astuple(
+ j,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(j.__class__)
+ else j
+ for j in v
+ ]
+ )
+ )
elif isinstance(v, dict):
df = v.__class__ if retain is True else dict
- rv.append(
- df(
+ rv.append(
+ df(
(
astuple(
kk,
tuple_factory=tuple_factory,
- retain_collection_types=retain,
- )
- if has(kk.__class__)
- else kk,
+ retain_collection_types=retain,
+ )
+ if has(kk.__class__)
+ else kk,
astuple(
vv,
tuple_factory=tuple_factory,
- retain_collection_types=retain,
- )
- if has(vv.__class__)
- else vv,
+ retain_collection_types=retain,
+ )
+ if has(vv.__class__)
+ else vv,
)
- for kk, vv in iteritems(v)
- )
- )
+ for kk, vv in iteritems(v)
+ )
+ )
else:
rv.append(v)
else:
@@ -294,21 +294,21 @@ def assoc(inst, **changes):
Use `evolve` instead.
"""
import warnings
-
- warnings.warn(
- "assoc is deprecated and will be removed after 2018/01.",
- DeprecationWarning,
- stacklevel=2,
- )
+
+ warnings.warn(
+ "assoc is deprecated and will be removed after 2018/01.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
new = copy.copy(inst)
attrs = fields(inst.__class__)
for k, v in iteritems(changes):
a = getattr(attrs, k, NOTHING)
if a is NOTHING:
raise AttrsAttributeNotFoundError(
- "{k} is not an attrs attribute on {cl}.".format(
- k=k, cl=new.__class__
- )
+ "{k} is not an attrs attribute on {cl}.".format(
+ k=k, cl=new.__class__
+ )
)
_obj_setattr(new, k, v)
return new
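
The _funcs.py hunk above is whitespace-only around asdict, astuple and the deprecated assoc. A brief sketch (illustrative, not part of the patch; "Parent" and "Child" are invented names) of the recursion those helpers perform:

    import attr

    @attr.s
    class Child(object):
        value = attr.ib()

    @attr.s
    class Parent(object):
        children = attr.ib()

    p = Parent(children=[Child(1), Child(2)])
    # recurse=True (the default) descends into nested attrs classes and containers;
    # containers are rebuilt as plain list/dict unless retain_collection_types=True.
    assert attr.asdict(p) == {"children": [{"value": 1}, {"value": 2}]}
    assert attr.astuple(p) == ([(1,), (2,)],)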
diff --git a/contrib/python/attrs/attr/_make.py b/contrib/python/attrs/attr/_make.py
index a1912b1233..a29ab0a9fb 100644
--- a/contrib/python/attrs/attr/_make.py
+++ b/contrib/python/attrs/attr/_make.py
@@ -1,6 +1,6 @@
from __future__ import absolute_import, division, print_function
-import copy
+import copy
import inspect
import linecache
import sys
@@ -12,21 +12,21 @@ from operator import itemgetter
from . import _config, setters
from ._compat import (
- PY2,
+ PY2,
PYPY,
- isclass,
- iteritems,
- metadata_proxy,
+ isclass,
+ iteritems,
+ metadata_proxy,
new_class,
- ordered_dict,
- set_closure_cell,
+ ordered_dict,
+ set_closure_cell,
)
from .exceptions import (
- DefaultAlreadySetError,
- FrozenInstanceError,
- NotAnAttrsClassError,
- PythonTooOldError,
- UnannotatedAttributeError,
+ DefaultAlreadySetError,
+ FrozenInstanceError,
+ NotAnAttrsClassError,
+ PythonTooOldError,
+ UnannotatedAttributeError,
)
@@ -38,19 +38,19 @@ if not PY2:
_obj_setattr = object.__setattr__
_init_converter_pat = "__attr_converter_%s"
_init_factory_pat = "__attr_factory_{}"
-_tuple_property_pat = (
- " {attr_name} = _attrs_property(_attrs_itemgetter({index}))"
-)
+_tuple_property_pat = (
+ " {attr_name} = _attrs_property(_attrs_itemgetter({index}))"
+)
_classvar_prefixes = (
"typing.ClassVar",
"t.ClassVar",
"ClassVar",
"typing_extensions.ClassVar",
)
-# we don't use a double-underscore prefix because that triggers
-# name mangling when trying to create a slot for the field
-# (when slots=True)
-_hash_cache_field = "_attrs_cached_hash"
+# we don't use a double-underscore prefix because that triggers
+# name mangling when trying to create a slot for the field
+# (when slots=True)
+_hash_cache_field = "_attrs_cached_hash"
_empty_metadata_singleton = metadata_proxy({})
@@ -62,17 +62,17 @@ class _Nothing(object):
"""
Sentinel class to indicate the lack of a value when ``None`` is ambiguous.
- ``_Nothing`` is a singleton. There is only ever one of it.
+ ``_Nothing`` is a singleton. There is only ever one of it.
.. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
"""
- _singleton = None
+ _singleton = None
- def __new__(cls):
- if _Nothing._singleton is None:
- _Nothing._singleton = super(_Nothing, cls).__new__(cls)
- return _Nothing._singleton
+ def __new__(cls):
+ if _Nothing._singleton is None:
+ _Nothing._singleton = super(_Nothing, cls).__new__(cls)
+ return _Nothing._singleton
def __repr__(self):
return "NOTHING"
@@ -115,22 +115,22 @@ class _CacheHashWrapper(int):
return _none_constructor, _args
-def attrib(
- default=NOTHING,
- validator=None,
- repr=True,
+def attrib(
+ default=NOTHING,
+ validator=None,
+ repr=True,
cmp=None,
- hash=None,
- init=True,
- metadata=None,
- type=None,
- converter=None,
- factory=None,
- kw_only=False,
+ hash=None,
+ init=True,
+ metadata=None,
+ type=None,
+ converter=None,
+ factory=None,
+ kw_only=False,
eq=None,
order=None,
on_setattr=None,
-):
+):
"""
Create a new attribute on a class.
@@ -225,19 +225,19 @@ def attrib(
Regardless of the approach used, the type will be stored on
``Attribute.type``.
- Please note that ``attrs`` doesn't do anything with this metadata by
- itself. You can use it as part of your own code or for
+ Please note that ``attrs`` doesn't do anything with this metadata by
+ itself. You can use it as part of your own code or for
`static type checking <types>`.
- :param kw_only: Make this attribute keyword-only (Python 3+)
- in the generated ``__init__`` (if ``init`` is ``False``, this
- parameter is ignored).
+ :param kw_only: Make this attribute keyword-only (Python 3+)
+ in the generated ``__init__`` (if ``init`` is ``False``, this
+ parameter is ignored).
:param on_setattr: Allows to overwrite the *on_setattr* setting from
`attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used.
Set to `attr.setters.NO_OP` to run **no** `setattr` hooks for this
attribute -- regardless of the setting in `attr.s`.
:type on_setattr: `callable`, or a list of callables, or `None`, or
`attr.setters.NO_OP`
-
+
.. versionadded:: 15.2.0 *convert*
.. versionadded:: 16.3.0 *metadata*
.. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
@@ -249,7 +249,7 @@ def attrib(
*convert* to achieve consistency with other noun-based arguments.
.. versionadded:: 18.1.0
``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
- .. versionadded:: 18.2.0 *kw_only*
+ .. versionadded:: 18.2.0 *kw_only*
.. versionchanged:: 19.2.0 *convert* keyword argument removed.
.. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
.. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
@@ -276,7 +276,7 @@ def attrib(
"exclusive."
)
if not callable(factory):
- raise ValueError("The `factory` argument must be a callable.")
+ raise ValueError("The `factory` argument must be a callable.")
default = Factory(factory)
if metadata is None:
@@ -302,7 +302,7 @@ def attrib(
converter=converter,
metadata=metadata,
type=type,
- kw_only=kw_only,
+ kw_only=kw_only,
eq=eq,
eq_key=eq_key,
order=order,
@@ -358,29 +358,29 @@ def _make_attr_tuple_class(cls_name, attr_names):
]
if attr_names:
for i, attr_name in enumerate(attr_names):
- attr_class_template.append(
- _tuple_property_pat.format(index=i, attr_name=attr_name)
- )
+ attr_class_template.append(
+ _tuple_property_pat.format(index=i, attr_name=attr_name)
+ )
else:
attr_class_template.append(" pass")
- globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
+ globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
_compile_and_eval("\n".join(attr_class_template), globs)
return globs[attr_class_name]
# Tuple class for extracted attributes from a class definition.
-# `base_attrs` is a subset of `attrs`.
-_Attributes = _make_attr_tuple_class(
- "_Attributes",
- [
- # all attributes to build dunder methods for
- "attrs",
- # attributes that have been inherited
- "base_attrs",
- # map inherited attributes to their originating classes
- "base_attrs_map",
- ],
-)
+# `base_attrs` is a subset of `attrs`.
+_Attributes = _make_attr_tuple_class(
+ "_Attributes",
+ [
+ # all attributes to build dunder methods for
+ "attrs",
+ # attributes that have been inherited
+ "base_attrs",
+ # map inherited attributes to their originating classes
+ "base_attrs_map",
+ ],
+)
def _is_class_var(annot):
@@ -511,14 +511,14 @@ def _transform_attrs(
anns = _get_annotations(cls)
if these is not None:
- ca_list = [(name, ca) for name, ca in iteritems(these)]
+ ca_list = [(name, ca) for name, ca in iteritems(these)]
if not isinstance(these, ordered_dict):
ca_list.sort(key=_counter_getter)
elif auto_attribs is True:
ca_names = {
name
- for name, attr in cd.items()
+ for name, attr in cd.items()
if isinstance(attr, _CountingAttr)
}
ca_list = []
@@ -539,27 +539,27 @@ def _transform_attrs(
unannotated = ca_names - annot_names
if len(unannotated) > 0:
raise UnannotatedAttributeError(
- "The following `attr.ib`s lack a type annotation: "
- + ", ".join(
- sorted(unannotated, key=lambda n: cd.get(n).counter)
- )
- + "."
+ "The following `attr.ib`s lack a type annotation: "
+ + ", ".join(
+ sorted(unannotated, key=lambda n: cd.get(n).counter)
+ )
+ + "."
)
else:
- ca_list = sorted(
- (
- (name, attr)
- for name, attr in cd.items()
- if isinstance(attr, _CountingAttr)
- ),
- key=lambda e: e[1].counter,
- )
+ ca_list = sorted(
+ (
+ (name, attr)
+ for name, attr in cd.items()
+ if isinstance(attr, _CountingAttr)
+ ),
+ key=lambda e: e[1].counter,
+ )
own_attrs = [
Attribute.from_counting_attr(
- name=attr_name, ca=ca, type=anns.get(attr_name)
+ name=attr_name, ca=ca, type=anns.get(attr_name)
)
- for attr_name, ca in ca_list
+ for attr_name, ca in ca_list
]
if collect_by_mro:
@@ -571,16 +571,16 @@ def _transform_attrs(
cls, {a.name for a in own_attrs}
)
- attr_names = [a.name for a in base_attrs + own_attrs]
+ attr_names = [a.name for a in base_attrs + own_attrs]
AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
- if kw_only:
+ if kw_only:
own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
base_attrs = [a.evolve(kw_only=True) for a in base_attrs]
- attrs = AttrsClass(base_attrs + own_attrs)
-
+ attrs = AttrsClass(base_attrs + own_attrs)
+
# Mandatory vs non-mandatory attr order only matters when they are part of
# the __init__ signature and when they aren't kw_only (which are moved to
# the end and can be mandatory or non-mandatory in any order, as they will
@@ -590,7 +590,7 @@ def _transform_attrs(
if had_default is True and a.default is NOTHING:
raise ValueError(
"No mandatory attributes allowed after an attribute with a "
- "default value or factory. Attribute in question: %r" % (a,)
+ "default value or factory. Attribute in question: %r" % (a,)
)
if had_default is False and a.default is not NOTHING:
@@ -598,7 +598,7 @@ def _transform_attrs(
if field_transformer is not None:
attrs = field_transformer(cls, attrs)
- return _Attributes((attrs, base_attrs, base_attr_map))
+ return _Attributes((attrs, base_attrs, base_attr_map))
if PYPY:
@@ -637,19 +637,19 @@ class _ClassBuilder(object):
"""
Iteratively build *one* class.
"""
-
+
__slots__ = (
"_attr_names",
"_attrs",
"_base_attr_map",
"_base_names",
"_cache_hash",
- "_cls",
- "_cls_dict",
+ "_cls",
+ "_cls_dict",
"_delete_attribs",
- "_frozen",
+ "_frozen",
"_has_pre_init",
- "_has_post_init",
+ "_has_post_init",
"_is_exc",
"_on_setattr",
"_slots",
@@ -658,24 +658,24 @@ class _ClassBuilder(object):
"_has_custom_setattr",
)
- def __init__(
- self,
- cls,
- these,
- slots,
- frozen,
- weakref_slot,
+ def __init__(
+ self,
+ cls,
+ these,
+ slots,
+ frozen,
+ weakref_slot,
getstate_setstate,
- auto_attribs,
- kw_only,
- cache_hash,
+ auto_attribs,
+ kw_only,
+ cache_hash,
is_exc,
collect_by_mro,
on_setattr,
has_custom_setattr,
field_transformer,
- ):
- attrs, base_attrs, base_map = _transform_attrs(
+ ):
+ attrs, base_attrs, base_map = _transform_attrs(
cls,
these,
auto_attribs,
@@ -687,13 +687,13 @@ class _ClassBuilder(object):
self._cls = cls
self._cls_dict = dict(cls.__dict__) if slots else {}
self._attrs = attrs
- self._base_names = set(a.name for a in base_attrs)
- self._base_attr_map = base_map
+ self._base_names = set(a.name for a in base_attrs)
+ self._base_attr_map = base_map
self._attr_names = tuple(a.name for a in attrs)
self._slots = slots
self._frozen = frozen
- self._weakref_slot = weakref_slot
- self._cache_hash = cache_hash
+ self._weakref_slot = weakref_slot
+ self._cache_hash = cache_hash
self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
self._delete_attribs = not bool(these)
@@ -724,7 +724,7 @@ class _ClassBuilder(object):
"""
Finalize class based on the accumulated configuration.
- Builder cannot be used after calling this method.
+ Builder cannot be used after calling this method.
"""
if self._slots is True:
return self._create_slots_class()
@@ -736,22 +736,22 @@ class _ClassBuilder(object):
Apply accumulated methods and return the class.
"""
cls = self._cls
- base_names = self._base_names
+ base_names = self._base_names
# Clean class of attribute definitions (`attr.ib()`s).
if self._delete_attribs:
for name in self._attr_names:
- if (
- name not in base_names
+ if (
+ name not in base_names
and getattr(cls, name, _sentinel) is not _sentinel
- ):
- try:
- delattr(cls, name)
- except AttributeError:
- # This can happen if a base class defines a class
- # variable and we want to set an attribute with the
- # same name by using only a type annotation.
- pass
+ ):
+ try:
+ delattr(cls, name)
+ except AttributeError:
+ # This can happen if a base class defines a class
+ # variable and we want to set an attribute with the
+ # same name by using only a type annotation.
+ pass
# Attach our dunder methods.
for name, value in self._cls_dict.items():
@@ -776,7 +776,7 @@ class _ClassBuilder(object):
cd = {
k: v
for k, v in iteritems(self._cls_dict)
- if k not in tuple(self._attr_names) + ("__dict__", "__weakref__")
+ if k not in tuple(self._attr_names) + ("__dict__", "__weakref__")
}
# If our class doesn't have its own implementation of __setattr__
@@ -789,7 +789,7 @@ class _ClassBuilder(object):
# XXX: OK with us.
if not self._has_own_setattr:
cd["__attrs_own_setattr__"] = False
-
+
if not self._has_custom_setattr:
for base_cls in self._cls.__bases__:
if base_cls.__dict__.get("__attrs_own_setattr__", False):
@@ -800,30 +800,30 @@ class _ClassBuilder(object):
# and check for an existing __weakref__.
existing_slots = dict()
weakref_inherited = False
- for base_cls in self._cls.__mro__[1:-1]:
+ for base_cls in self._cls.__mro__[1:-1]:
if base_cls.__dict__.get("__weakref__", None) is not None:
- weakref_inherited = True
+ weakref_inherited = True
existing_slots.update(
{
name: getattr(base_cls, name)
for name in getattr(base_cls, "__slots__", [])
}
)
-
+
base_names = set(self._base_names)
- names = self._attr_names
- if (
- self._weakref_slot
- and "__weakref__" not in getattr(self._cls, "__slots__", ())
- and "__weakref__" not in names
- and not weakref_inherited
- ):
- names += ("__weakref__",)
-
+ names = self._attr_names
+ if (
+ self._weakref_slot
+ and "__weakref__" not in getattr(self._cls, "__slots__", ())
+ and "__weakref__" not in names
+ and not weakref_inherited
+ ):
+ names += ("__weakref__",)
+
# We only add the names of attributes that aren't inherited.
# Setting __slots__ to inherited attributes wastes memory.
- slot_names = [name for name in names if name not in base_names]
+ slot_names = [name for name in names if name not in base_names]
# There are slots for attributes from current class
# that are defined in parent classes.
# As their descriptors may be overriden by a child class,
@@ -835,16 +835,16 @@ class _ClassBuilder(object):
}
slot_names = [name for name in slot_names if name not in reused_slots]
cd.update(reused_slots)
- if self._cache_hash:
- slot_names.append(_hash_cache_field)
- cd["__slots__"] = tuple(slot_names)
+ if self._cache_hash:
+ slot_names.append(_hash_cache_field)
+ cd["__slots__"] = tuple(slot_names)
qualname = getattr(self._cls, "__qualname__", None)
if qualname is not None:
cd["__qualname__"] = qualname
# Create new class based on old class and our methods.
- cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
+ cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
# The following is a fix for
# https://github.com/python-attrs/attrs/issues/102. On Python 3,
@@ -936,12 +936,12 @@ class _ClassBuilder(object):
def add_hash(self):
self._cls_dict["__hash__"] = self._add_method_dunders(
- _make_hash(
+ _make_hash(
self._cls,
self._attrs,
frozen=self._frozen,
cache_hash=self._cache_hash,
- )
+ )
)
return self
@@ -955,8 +955,8 @@ class _ClassBuilder(object):
self._has_post_init,
self._frozen,
self._slots,
- self._cache_hash,
- self._base_attr_map,
+ self._cache_hash,
+ self._base_attr_map,
self._is_exc,
self._on_setattr is not None
and self._on_setattr is not setters.NO_OP,
@@ -991,7 +991,7 @@ class _ClassBuilder(object):
cd["__eq__"] = self._add_method_dunders(
_make_eq(self._cls, self._attrs)
- )
+ )
cd["__ne__"] = self._add_method_dunders(_make_ne())
return self
@@ -1053,7 +1053,7 @@ class _ClassBuilder(object):
try:
method.__qualname__ = ".".join(
- (self._cls.__qualname__, method.__name__)
+ (self._cls.__qualname__, method.__name__)
)
except AttributeError:
pass
@@ -1169,21 +1169,21 @@ def _determine_whether_to_implement(
return default
-def attrs(
- maybe_cls=None,
- these=None,
- repr_ns=None,
+def attrs(
+ maybe_cls=None,
+ these=None,
+ repr_ns=None,
repr=None,
cmp=None,
- hash=None,
+ hash=None,
init=None,
- slots=False,
- frozen=False,
- weakref_slot=True,
- str=False,
- auto_attribs=False,
- kw_only=False,
- cache_hash=False,
+ slots=False,
+ frozen=False,
+ weakref_slot=True,
+ str=False,
+ auto_attribs=False,
+ kw_only=False,
+ cache_hash=False,
auto_exc=False,
eq=None,
order=None,
@@ -1192,7 +1192,7 @@ def attrs(
getstate_setstate=None,
on_setattr=None,
field_transformer=None,
-):
+):
r"""
A class decorator that adds `dunder
<https://wiki.python.org/moin/DunderAlias>`_\ -methods according to the
@@ -1265,7 +1265,7 @@ def attrs(
2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to
None, marking it unhashable (which it is).
3. If *eq* is False, ``__hash__`` will be left untouched meaning the
- ``__hash__`` method of the base class will be used (if base class is
+ ``__hash__`` method of the base class will be used (if base class is
``object``, this means it will fall back to id-based hashing.).
Although not recommended, you can decide for yourself and force
@@ -1314,8 +1314,8 @@ def attrs(
5. Subclasses of a frozen class are frozen too.
- :param bool weakref_slot: Make instances weak-referenceable. This has no
- effect unless ``slots`` is also enabled.
+ :param bool weakref_slot: Make instances weak-referenceable. This has no
+ effect unless ``slots`` is also enabled.
:param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated
attributes (Python 3.6 and later only) from the class body.
@@ -1343,12 +1343,12 @@ def attrs(
report on our bug tracker.
.. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/
- :param bool kw_only: Make all attributes keyword-only (Python 3+)
- in the generated ``__init__`` (if ``init`` is ``False``, this
- parameter is ignored).
- :param bool cache_hash: Ensure that the object's hash code is computed
- only once and stored on the object. If this is set to ``True``,
- hashing must be either explicitly or implicitly enabled for this
+ :param bool kw_only: Make all attributes keyword-only (Python 3+)
+ in the generated ``__init__`` (if ``init`` is ``False``, this
+ parameter is ignored).
+ :param bool cache_hash: Ensure that the object's hash code is computed
+ only once and stored on the object. If this is set to ``True``,
+ hashing must be either explicitly or implicitly enabled for this
class. If the hash code is cached, avoid any reassignments of
fields involved in hash code computation or mutations of the objects
those fields point to after object creation. If such changes occur,
@@ -1370,7 +1370,7 @@ def attrs(
collects attributes from base classes. The default behavior is
incorrect in certain cases of multiple inheritance. It should be on by
default but is kept off for backward-compatability.
-
+
See issue `#428 <https://github.com/python-attrs/attrs/issues/428>`_ for
more details.
@@ -1417,17 +1417,17 @@ def attrs(
.. versionchanged:: 18.1.0
If *these* is passed, no attributes are deleted from the class body.
.. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
- .. versionadded:: 18.2.0 *weakref_slot*
- .. deprecated:: 18.2.0
- ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
+ .. versionadded:: 18.2.0 *weakref_slot*
+ .. deprecated:: 18.2.0
+ ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
`DeprecationWarning` if the classes compared are subclasses of
- each other. ``__eq`` and ``__ne__`` never tried to compared subclasses
- to each other.
+ each other. ``__eq`` and ``__ne__`` never tried to compared subclasses
+ to each other.
.. versionchanged:: 19.2.0
``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
subclasses comparable anymore.
- .. versionadded:: 18.2.0 *kw_only*
- .. versionadded:: 18.2.0 *cache_hash*
+ .. versionadded:: 18.2.0 *kw_only*
+ .. versionadded:: 18.2.0 *cache_hash*
.. versionadded:: 19.1.0 *auto_exc*
.. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
.. versionadded:: 19.2.0 *eq* and *order*
@@ -1445,7 +1445,7 @@ def attrs(
raise PythonTooOldError(
"auto_detect only works on Python 3 and later."
)
-
+
eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)
hash_ = hash # work around the lack of nonlocal
@@ -1466,12 +1466,12 @@ def attrs(
if has_own_setattr and is_frozen:
raise ValueError("Can't freeze a class with a custom __setattr__.")
- builder = _ClassBuilder(
- cls,
- these,
- slots,
+ builder = _ClassBuilder(
+ cls,
+ these,
+ slots,
is_frozen,
- weakref_slot,
+ weakref_slot,
_determine_whether_to_implement(
cls,
getstate_setstate,
@@ -1479,15 +1479,15 @@ def attrs(
("__getstate__", "__setstate__"),
default=slots,
),
- auto_attribs,
- kw_only,
- cache_hash,
+ auto_attribs,
+ kw_only,
+ cache_hash,
is_exc,
collect_by_mro,
on_setattr,
has_own_setattr,
field_transformer,
- )
+ )
if _determine_whether_to_implement(
cls, repr, auto_detect, ("__repr__",)
):
@@ -1523,12 +1523,12 @@ def attrs(
elif hash is False or (hash is None and eq is False) or is_exc:
# Don't do anything. Should fall back to __object__'s __hash__
# which is by id.
- if cache_hash:
- raise TypeError(
- "Invalid value for cache_hash. To use hash caching,"
- " hashing must be either explicitly or implicitly "
- "enabled."
- )
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " hashing must be either explicitly or implicitly "
+ "enabled."
+ )
elif hash is True or (
hash is None and eq is True and is_frozen is True
):
@@ -1536,25 +1536,25 @@ def attrs(
builder.add_hash()
else:
# Raise TypeError on attempts to hash.
- if cache_hash:
- raise TypeError(
- "Invalid value for cache_hash. To use hash caching,"
- " hashing must be either explicitly or implicitly "
- "enabled."
- )
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " hashing must be either explicitly or implicitly "
+ "enabled."
+ )
builder.make_unhashable()
if _determine_whether_to_implement(
cls, init, auto_detect, ("__init__",)
):
builder.add_init()
- else:
+ else:
builder.add_attrs_init()
- if cache_hash:
- raise TypeError(
- "Invalid value for cache_hash. To use hash caching,"
- " init must be True."
- )
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " init must be True."
+ )
return builder.build_class()
@@ -1574,22 +1574,22 @@ Internal alias so we can use it in functions that take an argument called
if PY2:
-
- def _has_frozen_base_class(cls):
+
+ def _has_frozen_base_class(cls):
"""
Check whether *cls* has a frozen ancestor by looking at its
__setattr__.
"""
return (
- getattr(cls.__setattr__, "__module__", None)
- == _frozen_setattrs.__module__
- and cls.__setattr__.__name__ == _frozen_setattrs.__name__
+ getattr(cls.__setattr__, "__module__", None)
+ == _frozen_setattrs.__module__
+ and cls.__setattr__.__name__ == _frozen_setattrs.__name__
)
-
-
+
+
else:
-
- def _has_frozen_base_class(cls):
+
+ def _has_frozen_base_class(cls):
"""
Check whether *cls* has a frozen ancestor by looking at its
__setattr__.
@@ -1632,8 +1632,8 @@ def _make_hash(cls, attrs, frozen, cache_hash):
a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
)
- tab = " "
-
+ tab = " "
+
unique_filename = _generate_unique_filename(cls, "hash")
type_hash = hash(unique_filename)
@@ -1655,41 +1655,41 @@ def _make_hash(cls, attrs, frozen, cache_hash):
method_lines = [hash_def]
- def append_hash_computation_lines(prefix, indent):
- """
- Generate the code for actually computing the hash code.
- Below this will either be returned directly or used to compute
- a value which is then cached, depending on the value of cache_hash
- """
+ def append_hash_computation_lines(prefix, indent):
+ """
+ Generate the code for actually computing the hash code.
+ Below this will either be returned directly or used to compute
+ a value which is then cached, depending on the value of cache_hash
+ """
- method_lines.extend(
+ method_lines.extend(
[
indent + prefix + hash_func,
indent + " %d," % (type_hash,),
]
- )
-
- for a in attrs:
- method_lines.append(indent + " self.%s," % a.name)
-
+ )
+
+ for a in attrs:
+ method_lines.append(indent + " self.%s," % a.name)
+
method_lines.append(indent + " " + closing_braces)
-
- if cache_hash:
- method_lines.append(tab + "if self.%s is None:" % _hash_cache_field)
- if frozen:
- append_hash_computation_lines(
- "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2
- )
- method_lines.append(tab * 2 + ")") # close __setattr__
- else:
- append_hash_computation_lines(
- "self.%s = " % _hash_cache_field, tab * 2
- )
- method_lines.append(tab + "return self.%s" % _hash_cache_field)
- else:
- append_hash_computation_lines("return ", tab)
-
- script = "\n".join(method_lines)
+
+ if cache_hash:
+ method_lines.append(tab + "if self.%s is None:" % _hash_cache_field)
+ if frozen:
+ append_hash_computation_lines(
+ "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2
+ )
+ method_lines.append(tab * 2 + ")") # close __setattr__
+ else:
+ append_hash_computation_lines(
+ "self.%s = " % _hash_cache_field, tab * 2
+ )
+ method_lines.append(tab + "return self.%s" % _hash_cache_field)
+ else:
+ append_hash_computation_lines("return ", tab)
+
+ script = "\n".join(method_lines)
return _make_method("__hash__", script, unique_filename)
@@ -1725,7 +1725,7 @@ def _make_eq(cls, attrs):
Create __eq__ method for *cls* with *attrs*.
"""
attrs = [a for a in attrs if a.eq]
-
+
unique_filename = _generate_unique_filename(cls, "eq")
lines = [
"def __eq__(self, other):",
@@ -1738,7 +1738,7 @@ def _make_eq(cls, attrs):
globs = {}
if attrs:
lines.append(" return (")
- others = [" ) == ("]
+ others = [" ) == ("]
for a in attrs:
if a.eq_key:
cmp_name = "_%s_key" % (a.name,)
@@ -1899,7 +1899,7 @@ def _make_repr(attrs, ns):
return "".join(result) + ")"
finally:
working_set.remove(id(self))
-
+
return __repr__
@@ -1991,11 +1991,11 @@ def _is_slot_cls(cls):
return "__slots__" in cls.__dict__
-def _is_slot_attr(a_name, base_attr_map):
+def _is_slot_attr(a_name, base_attr_map):
"""
Check if the attribute name comes from a slot class.
"""
- return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name])
+ return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name])
def _make_init(
@@ -2173,7 +2173,7 @@ if _kw_only:
return lines
-def _attrs_to_init_script(
+def _attrs_to_init_script(
attrs,
frozen,
slots,
@@ -2185,7 +2185,7 @@ def _attrs_to_init_script(
needs_cached_setattr,
has_global_on_setattr,
attrs_init,
-):
+):
"""
Return a script of an initializer for *attrs* and a dict of globals.
@@ -2214,15 +2214,15 @@ def _attrs_to_init_script(
# Dict frozen classes assign directly to __dict__.
# But only if the attribute doesn't come from an ancestor slot
# class.
- # Note _inst_dict will be used again below if cache_hash is True
- lines.append("_inst_dict = self.__dict__")
+ # Note _inst_dict will be used again below if cache_hash is True
+ lines.append("_inst_dict = self.__dict__")
def fmt_setter(attr_name, value_var, has_on_setattr):
- if _is_slot_attr(attr_name, base_attr_map):
+ if _is_slot_attr(attr_name, base_attr_map):
return _setattr(attr_name, value_var, has_on_setattr)
return "_inst_dict['%s'] = %s" % (attr_name, value_var)
-
+
def fmt_setter_with_converter(
attr_name, value_var, has_on_setattr
):
@@ -2243,13 +2243,13 @@ def _attrs_to_init_script(
fmt_setter_with_converter = _assign_with_converter
args = []
- kw_only_args = []
+ kw_only_args = []
attrs_to_validate = []
# This is a dictionary of names to validator and converter callables.
# Injecting this into __init__ globals lets us avoid lookups.
names_for_globals = {}
- annotations = {"return": None}
+ annotations = {"return": None}
for a in attrs:
if a.validator:
@@ -2271,49 +2271,49 @@ def _attrs_to_init_script(
if has_factory:
init_factory_name = _init_factory_pat.format(a.name)
if a.converter is not None:
- lines.append(
- fmt_setter_with_converter(
- attr_name,
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
init_factory_name + "(%s)" % (maybe_self,),
has_on_setattr,
- )
- )
+ )
+ )
conv_name = _init_converter_pat % (a.name,)
names_for_globals[conv_name] = a.converter
else:
- lines.append(
- fmt_setter(
- attr_name,
+ lines.append(
+ fmt_setter(
+ attr_name,
init_factory_name + "(%s)" % (maybe_self,),
has_on_setattr,
- )
- )
+ )
+ )
names_for_globals[init_factory_name] = a.default.factory
else:
if a.converter is not None:
- lines.append(
- fmt_setter_with_converter(
- attr_name,
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
"attr_dict['%s'].default" % (attr_name,),
has_on_setattr,
- )
- )
+ )
+ )
conv_name = _init_converter_pat % (a.name,)
names_for_globals[conv_name] = a.converter
else:
- lines.append(
- fmt_setter(
- attr_name,
+ lines.append(
+ fmt_setter(
+ attr_name,
"attr_dict['%s'].default" % (attr_name,),
has_on_setattr,
- )
- )
+ )
+ )
elif a.default is not NOTHING and not has_factory:
arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name)
- if a.kw_only:
- kw_only_args.append(arg)
- else:
- args.append(arg)
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
if a.converter is not None:
lines.append(
@@ -2321,59 +2321,59 @@ def _attrs_to_init_script(
attr_name, arg_name, has_on_setattr
)
)
- names_for_globals[
+ names_for_globals[
_init_converter_pat % (a.name,)
- ] = a.converter
+ ] = a.converter
else:
lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
elif has_factory:
arg = "%s=NOTHING" % (arg_name,)
- if a.kw_only:
- kw_only_args.append(arg)
- else:
- args.append(arg)
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
lines.append("if %s is not NOTHING:" % (arg_name,))
init_factory_name = _init_factory_pat.format(a.name)
if a.converter is not None:
- lines.append(
+ lines.append(
" "
+ fmt_setter_with_converter(
attr_name, arg_name, has_on_setattr
)
- )
+ )
lines.append("else:")
- lines.append(
- " "
- + fmt_setter_with_converter(
- attr_name,
+ lines.append(
+ " "
+ + fmt_setter_with_converter(
+ attr_name,
init_factory_name + "(" + maybe_self + ")",
has_on_setattr,
- )
+ )
)
- names_for_globals[
+ names_for_globals[
_init_converter_pat % (a.name,)
- ] = a.converter
+ ] = a.converter
else:
lines.append(
" " + fmt_setter(attr_name, arg_name, has_on_setattr)
)
lines.append("else:")
- lines.append(
- " "
- + fmt_setter(
- attr_name,
+ lines.append(
+ " "
+ + fmt_setter(
+ attr_name,
init_factory_name + "(" + maybe_self + ")",
has_on_setattr,
- )
- )
+ )
+ )
names_for_globals[init_factory_name] = a.default.factory
else:
- if a.kw_only:
- kw_only_args.append(arg_name)
- else:
- args.append(arg_name)
+ if a.kw_only:
+ kw_only_args.append(arg_name)
+ else:
+ args.append(arg_name)
if a.converter is not None:
lines.append(
@@ -2381,9 +2381,9 @@ def _attrs_to_init_script(
attr_name, arg_name, has_on_setattr
)
)
- names_for_globals[
+ names_for_globals[
_init_converter_pat % (a.name,)
- ] = a.converter
+ ] = a.converter
else:
lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
@@ -2412,32 +2412,32 @@ def _attrs_to_init_script(
for a in attrs_to_validate:
val_name = "__attr_validator_" + a.name
attr_name = "__attr_" + a.name
- lines.append(
+ lines.append(
" %s(self, %s, self.%s)" % (val_name, attr_name, a.name)
- )
+ )
names_for_globals[val_name] = a.validator
names_for_globals[attr_name] = a
if post_init:
lines.append("self.__attrs_post_init__()")
- # because this is set only after __attrs_post_init is called, a crash
- # will result if post-init tries to access the hash code. This seemed
- # preferable to setting this beforehand, in which case alteration to
- # field values during post-init combined with post-init accessing the
- # hash code would result in silent bugs.
- if cache_hash:
- if frozen:
- if slots:
- # if frozen and slots, then _setattr defined above
- init_hash_cache = "_setattr('%s', %s)"
- else:
- # if frozen and not slots, then _inst_dict defined above
- init_hash_cache = "_inst_dict['%s'] = %s"
- else:
- init_hash_cache = "self.%s = %s"
- lines.append(init_hash_cache % (_hash_cache_field, "None"))
-
+ # because this is set only after __attrs_post_init is called, a crash
+ # will result if post-init tries to access the hash code. This seemed
+ # preferable to setting this beforehand, in which case alteration to
+ # field values during post-init combined with post-init accessing the
+ # hash code would result in silent bugs.
+ if cache_hash:
+ if frozen:
+ if slots:
+ # if frozen and slots, then _setattr defined above
+ init_hash_cache = "_setattr('%s', %s)"
+ else:
+ # if frozen and not slots, then _inst_dict defined above
+ init_hash_cache = "_inst_dict['%s'] = %s"
+ else:
+ init_hash_cache = "self.%s = %s"
+ lines.append(init_hash_cache % (_hash_cache_field, "None"))
+
# For exceptions we rely on BaseException.__init__ for proper
# initialization.
if is_exc:
@@ -2445,9 +2445,9 @@ def _attrs_to_init_script(
lines.append("BaseException.__init__(self, %s)" % (vals,))
- args = ", ".join(args)
- if kw_only_args:
- if PY2:
+ args = ", ".join(args)
+ if kw_only_args:
+ if PY2:
lines = _unpack_kw_only_lines_py2(kw_only_args) + lines
args += "%s**_kw_only" % (", " if args else "",) # leading comma
@@ -2455,19 +2455,19 @@ def _attrs_to_init_script(
args += "%s*, %s" % (
", " if args else "", # leading comma
", ".join(kw_only_args), # kw_only args
- )
- return (
- """\
+ )
+ return (
+ """\
def {init_name}(self, {args}):
{lines}
""".format(
init_name=("__attrs_init__" if attrs_init else "__init__"),
args=args,
lines="\n ".join(lines) if lines else "pass",
- ),
- names_for_globals,
- annotations,
- )
+ ),
+ names_for_globals,
+ annotations,
+ )
class Attribute(object):
@@ -2496,46 +2496,46 @@ class Attribute(object):
For the full version history of the fields, see `attr.ib`.
"""
-
+
__slots__ = (
- "name",
- "default",
- "validator",
- "repr",
+ "name",
+ "default",
+ "validator",
+ "repr",
"eq",
"eq_key",
"order",
"order_key",
- "hash",
- "init",
- "metadata",
- "type",
- "converter",
- "kw_only",
+ "hash",
+ "init",
+ "metadata",
+ "type",
+ "converter",
+ "kw_only",
"inherited",
"on_setattr",
)
- def __init__(
- self,
- name,
- default,
- validator,
- repr,
+ def __init__(
+ self,
+ name,
+ default,
+ validator,
+ repr,
cmp, # XXX: unused, remove along with other cmp code.
- hash,
- init,
+ hash,
+ init,
inherited,
- metadata=None,
- type=None,
- converter=None,
- kw_only=False,
+ metadata=None,
+ type=None,
+ converter=None,
+ kw_only=False,
eq=None,
eq_key=None,
order=None,
order_key=None,
on_setattr=None,
- ):
+ ):
eq, eq_key, order, order_key = _determine_attrib_eq_order(
cmp, eq_key or eq, order_key or order, True
)
@@ -2556,16 +2556,16 @@ class Attribute(object):
bound_setattr("hash", hash)
bound_setattr("init", init)
bound_setattr("converter", converter)
- bound_setattr(
- "metadata",
- (
- metadata_proxy(metadata)
- if metadata
- else _empty_metadata_singleton
- ),
- )
+ bound_setattr(
+ "metadata",
+ (
+ metadata_proxy(metadata)
+ if metadata
+ else _empty_metadata_singleton
+ ),
+ )
bound_setattr("type", type)
- bound_setattr("kw_only", kw_only)
+ bound_setattr("kw_only", kw_only)
bound_setattr("inherited", inherited)
bound_setattr("on_setattr", on_setattr)
@@ -2583,21 +2583,21 @@ class Attribute(object):
)
inst_dict = {
k: getattr(ca, k)
- for k in Attribute.__slots__
- if k
- not in (
- "name",
- "validator",
- "default",
- "type",
+ for k in Attribute.__slots__
+ if k
+ not in (
+ "name",
+ "validator",
+ "default",
+ "type",
"inherited",
) # exclude methods and deprecated alias
}
return cls(
- name=name,
- validator=ca._validator,
- default=ca._default,
- type=type,
+ name=name,
+ validator=ca._validator,
+ default=ca._default,
+ type=type,
cmp=None,
inherited=False,
**inst_dict
@@ -2614,8 +2614,8 @@ class Attribute(object):
# Don't use attr.evolve since fields(Attribute) doesn't work
def evolve(self, **changes):
- """
- Copy *self* and apply *changes*.
+ """
+ Copy *self* and apply *changes*.
This works similarly to `attr.evolve` but that function does not work
with ``Attribute``.
@@ -2623,56 +2623,56 @@ class Attribute(object):
It is mainly meant to be used for `transform-fields`.
.. versionadded:: 20.3.0
- """
- new = copy.copy(self)
-
- new._setattrs(changes.items())
-
- return new
-
+ """
+ new = copy.copy(self)
+
+ new._setattrs(changes.items())
+
+ return new
+
# Don't use _add_pickle since fields(Attribute) doesn't work
def __getstate__(self):
"""
Play nice with pickle.
"""
- return tuple(
- getattr(self, name) if name != "metadata" else dict(self.metadata)
- for name in self.__slots__
- )
+ return tuple(
+ getattr(self, name) if name != "metadata" else dict(self.metadata)
+ for name in self.__slots__
+ )
def __setstate__(self, state):
"""
Play nice with pickle.
"""
- self._setattrs(zip(self.__slots__, state))
-
- def _setattrs(self, name_values_pairs):
+ self._setattrs(zip(self.__slots__, state))
+
+ def _setattrs(self, name_values_pairs):
bound_setattr = _obj_setattr.__get__(self, Attribute)
- for name, value in name_values_pairs:
+ for name, value in name_values_pairs:
if name != "metadata":
bound_setattr(name, value)
else:
- bound_setattr(
- name,
- metadata_proxy(value)
- if value
- else _empty_metadata_singleton,
- )
+ bound_setattr(
+ name,
+ metadata_proxy(value)
+ if value
+ else _empty_metadata_singleton,
+ )
_a = [
- Attribute(
- name=name,
- default=NOTHING,
- validator=None,
- repr=True,
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
cmp=None,
eq=True,
order=False,
- hash=(name != "metadata"),
- init=True,
+ hash=(name != "metadata"),
+ init=True,
inherited=False,
- )
+ )
for name in Attribute.__slots__
]
@@ -2693,41 +2693,41 @@ class _CountingAttr(object):
*Internal* data structure of the attrs library. Running into is most
likely the result of a bug like a forgotten `@attr.s` decorator.
"""
-
- __slots__ = (
- "counter",
- "_default",
- "repr",
+
+ __slots__ = (
+ "counter",
+ "_default",
+ "repr",
"eq",
"eq_key",
"order",
"order_key",
- "hash",
- "init",
- "metadata",
- "_validator",
- "converter",
- "type",
- "kw_only",
+ "hash",
+ "init",
+ "metadata",
+ "_validator",
+ "converter",
+ "type",
+ "kw_only",
"on_setattr",
- )
+ )
__attrs_attrs__ = tuple(
- Attribute(
- name=name,
- default=NOTHING,
- validator=None,
- repr=True,
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
cmp=None,
- hash=True,
- init=True,
- kw_only=False,
+ hash=True,
+ init=True,
+ kw_only=False,
eq=True,
eq_key=None,
order=False,
order_key=None,
inherited=False,
on_setattr=None,
- )
+ )
for name in (
"counter",
"_default",
@@ -2739,43 +2739,43 @@ class _CountingAttr(object):
"on_setattr",
)
) + (
- Attribute(
- name="metadata",
- default=None,
- validator=None,
- repr=True,
+ Attribute(
+ name="metadata",
+ default=None,
+ validator=None,
+ repr=True,
cmp=None,
- hash=False,
- init=True,
- kw_only=False,
+ hash=False,
+ init=True,
+ kw_only=False,
eq=True,
eq_key=None,
order=False,
order_key=None,
inherited=False,
on_setattr=None,
- ),
+ ),
)
cls_counter = 0
- def __init__(
- self,
- default,
- validator,
- repr,
+ def __init__(
+ self,
+ default,
+ validator,
+ repr,
cmp,
- hash,
- init,
- converter,
- metadata,
- type,
- kw_only,
+ hash,
+ init,
+ converter,
+ metadata,
+ type,
+ kw_only,
eq,
eq_key,
order,
order_key,
on_setattr,
- ):
+ ):
_CountingAttr.cls_counter += 1
self.counter = _CountingAttr.cls_counter
self._default = default
@@ -2790,7 +2790,7 @@ class _CountingAttr(object):
self.init = init
self.metadata = metadata
self.type = type
- self.kw_only = kw_only
+ self.kw_only = kw_only
self.on_setattr = on_setattr
def validator(self, meth):
@@ -2842,7 +2842,7 @@ class Factory(object):
.. versionadded:: 17.1.0 *takes_self*
"""
-
+
__slots__ = ("factory", "takes_self")
def __init__(self, factory, takes_self=False):
@@ -2938,7 +2938,7 @@ def make_class(name, attrs, bases=(object,), **attributes_arguments):
# defined for arguments greater than 0 (IronPython).
try:
type_.__module__ = sys._getframe(1).f_globals.get(
- "__name__", "__main__"
+ "__name__", "__main__"
)
except (AttributeError, ValueError):
pass
@@ -2967,7 +2967,7 @@ class _AndValidator(object):
"""
Compose many validators to a single one.
"""
-
+
_validators = attrib()
def __call__(self, inst, attr, value):
@@ -2988,8 +2988,8 @@ def and_(*validators):
vals = []
for validator in validators:
vals.extend(
- validator._validators
- if isinstance(validator, _AndValidator)
+ validator._validators
+ if isinstance(validator, _AndValidator)
else [validator]
)
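
Editor's note: the hunk above restores the `and_()` helper that flattens nested `_AndValidator` instances into a single flat validator list. A minimal usage sketch (not part of the diff; `Quantity` and `_positive` are illustrative names):

    import attr
    from attr.validators import and_, instance_of

    def _positive(inst, attribute, value):
        # plain callable validators just raise on bad values
        if value <= 0:
            raise ValueError("'%s' must be positive" % attribute.name)

    @attr.s
    class Quantity(object):
        # nested and_() calls are flattened into one validator chain, as above
        amount = attr.ib(validator=and_(and_(instance_of(int)), _positive))

    Quantity(amount=3)       # passes both validators
    # Quantity(amount=-1)    # would raise ValueError from _positive
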
diff --git a/contrib/python/attrs/attr/converters.py b/contrib/python/attrs/attr/converters.py
index 2777db6d0a..3373374b56 100644
--- a/contrib/python/attrs/attr/converters.py
+++ b/contrib/python/attrs/attr/converters.py
@@ -7,7 +7,7 @@ from __future__ import absolute_import, division, print_function
from ._compat import PY2
from ._make import NOTHING, Factory, pipe
-
+
if not PY2:
import inspect
import typing
@@ -31,7 +31,7 @@ def optional(converter):
:param callable converter: the converter that is used for non-``None``
values.
- .. versionadded:: 17.1.0
+ .. versionadded:: 17.1.0
"""
def optional_converter(val):
@@ -57,55 +57,55 @@ def optional(converter):
]
return optional_converter
-
-
-def default_if_none(default=NOTHING, factory=None):
- """
- A converter that allows to replace ``None`` values by *default* or the
- result of *factory*.
-
- :param default: Value to be used if ``None`` is passed. Passing an instance
+
+
+def default_if_none(default=NOTHING, factory=None):
+ """
+ A converter that allows to replace ``None`` values by *default* or the
+ result of *factory*.
+
+ :param default: Value to be used if ``None`` is passed. Passing an instance
of `attr.Factory` is supported, however the ``takes_self`` option
- is *not*.
+ is *not*.
:param callable factory: A callable that takes no parameters whose result
- is used if ``None`` is passed.
-
- :raises TypeError: If **neither** *default* or *factory* is passed.
- :raises TypeError: If **both** *default* and *factory* are passed.
+ is used if ``None`` is passed.
+
+ :raises TypeError: If **neither** *default* or *factory* is passed.
+ :raises TypeError: If **both** *default* and *factory* are passed.
:raises ValueError: If an instance of `attr.Factory` is passed with
- ``takes_self=True``.
-
- .. versionadded:: 18.2.0
- """
- if default is NOTHING and factory is None:
- raise TypeError("Must pass either `default` or `factory`.")
-
- if default is not NOTHING and factory is not None:
- raise TypeError(
- "Must pass either `default` or `factory` but not both."
- )
-
- if factory is not None:
- default = Factory(factory)
-
- if isinstance(default, Factory):
- if default.takes_self:
- raise ValueError(
- "`takes_self` is not supported by default_if_none."
- )
-
- def default_if_none_converter(val):
- if val is not None:
- return val
-
- return default.factory()
-
- else:
-
- def default_if_none_converter(val):
- if val is not None:
- return val
-
- return default
-
- return default_if_none_converter
+ ``takes_self=True``.
+
+ .. versionadded:: 18.2.0
+ """
+ if default is NOTHING and factory is None:
+ raise TypeError("Must pass either `default` or `factory`.")
+
+ if default is not NOTHING and factory is not None:
+ raise TypeError(
+ "Must pass either `default` or `factory` but not both."
+ )
+
+ if factory is not None:
+ default = Factory(factory)
+
+ if isinstance(default, Factory):
+ if default.takes_self:
+ raise ValueError(
+ "`takes_self` is not supported by default_if_none."
+ )
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default.factory()
+
+ else:
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default
+
+ return default_if_none_converter
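
Editor's note: `default_if_none()` restored above replaces ``None`` with either a plain default or a factory result. A hedged usage sketch (class and field names are illustrative):

    import attr
    from attr.converters import default_if_none

    @attr.s
    class Config(object):
        # None is replaced by the plain default; real values pass through
        retries = attr.ib(converter=default_if_none(3), default=None)
        # factory=list gives each instance a fresh list when None is passed
        tags = attr.ib(converter=default_if_none(factory=list), default=None)

    Config().retries             # -> 3
    Config(retries=5).retries    # -> 5
    Config().tags                # -> []
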
diff --git a/contrib/python/attrs/attr/exceptions.py b/contrib/python/attrs/attr/exceptions.py
index f6f9861bea..8698f42ecf 100644
--- a/contrib/python/attrs/attr/exceptions.py
+++ b/contrib/python/attrs/attr/exceptions.py
@@ -11,7 +11,7 @@ class FrozenError(AttributeError):
.. versionadded:: 20.1.0
"""
-
+
msg = "can't set attribute"
args = [msg]
@@ -64,15 +64,15 @@ class UnannotatedAttributeError(RuntimeError):
.. versionadded:: 17.3.0
"""
-
-
-class PythonTooOldError(RuntimeError):
- """
+
+
+class PythonTooOldError(RuntimeError):
+ """
It was attempted to use an ``attrs`` feature that requires a newer Python
version.
-
- .. versionadded:: 18.2.0
- """
+
+ .. versionadded:: 18.2.0
+ """
class NotCallableError(TypeError):
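
Editor's note: `FrozenError` at the top of this file's hunk carries the "can't set attribute" message; in attrs releases that include this change it is the base of `FrozenInstanceError`, which is what assignment on a frozen class actually raises. A small sketch (assumes a frozen attrs class is available):

    import attr

    @attr.s(frozen=True)
    class Point(object):
        x = attr.ib()

    p = Point(x=1)
    try:
        p.x = 2
    except attr.exceptions.FrozenInstanceError as exc:
        print(exc.msg)    # "can't set attribute"
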
diff --git a/contrib/python/attrs/attr/validators.py b/contrib/python/attrs/attr/validators.py
index b9a73054e9..0f972dbab5 100644
--- a/contrib/python/attrs/attr/validators.py
+++ b/contrib/python/attrs/attr/validators.py
@@ -34,20 +34,20 @@ class _InstanceOfValidator(object):
if not isinstance(value, self.type):
raise TypeError(
"'{name}' must be {type!r} (got {value!r} that is a "
- "{actual!r}).".format(
- name=attr.name,
- type=self.type,
- actual=value.__class__,
- value=value,
- ),
- attr,
- self.type,
- value,
+ "{actual!r}).".format(
+ name=attr.name,
+ type=self.type,
+ actual=value.__class__,
+ value=value,
+ ),
+ attr,
+ self.type,
+ value,
)
def __repr__(self):
- return "<instance_of validator for type {type!r}>".format(
- type=self.type
+ return "<instance_of validator for type {type!r}>".format(
+ type=self.type
)
@@ -150,17 +150,17 @@ class _ProvidesValidator(object):
if not self.interface.providedBy(value):
raise TypeError(
"'{name}' must provide {interface!r} which {value!r} "
- "doesn't.".format(
- name=attr.name, interface=self.interface, value=value
- ),
- attr,
- self.interface,
- value,
+ "doesn't.".format(
+ name=attr.name, interface=self.interface, value=value
+ ),
+ attr,
+ self.interface,
+ value,
)
def __repr__(self):
- return "<provides validator for interface {interface!r}>".format(
- interface=self.interface
+ return "<provides validator for interface {interface!r}>".format(
+ interface=self.interface
)
@@ -192,8 +192,8 @@ class _OptionalValidator(object):
self.validator(inst, attr, value)
def __repr__(self):
- return "<optional validator for {what} or None>".format(
- what=repr(self.validator)
+ return "<optional validator for {what} or None>".format(
+ what=repr(self.validator)
)
@@ -220,21 +220,21 @@ class _InValidator(object):
options = attrib()
def __call__(self, inst, attr, value):
- try:
- in_options = value in self.options
+ try:
+ in_options = value in self.options
except TypeError: # e.g. `1 in "abc"`
- in_options = False
-
- if not in_options:
+ in_options = False
+
+ if not in_options:
raise ValueError(
- "'{name}' must be in {options!r} (got {value!r})".format(
- name=attr.name, options=self.options, value=value
- )
+ "'{name}' must be in {options!r} (got {value!r})".format(
+ name=attr.name, options=self.options, value=value
+ )
)
def __repr__(self):
- return "<in_ validator with options {options!r}>".format(
- options=self.options
+ return "<in_ validator with options {options!r}>".format(
+ options=self.options
)
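
Editor's note: the validators touched above (`instance_of`, `in_`, `optional`) are typically combined on `attr.ib`. A hedged sketch (the `Order` class is illustrative); note that `in_` treats an unorderable membership test (e.g. ``1 in "abc"``) as "not in", per the hunk above:

    import attr
    from attr.validators import in_, instance_of, optional

    @attr.s
    class Order(object):
        quantity = attr.ib(validator=instance_of(int))
        status = attr.ib(validator=in_(["new", "paid", "shipped"]))
        note = attr.ib(validator=optional(instance_of(str)), default=None)

    Order(quantity=2, status="new")        # ok; note defaults to None
    # Order(quantity="2", status="new")    # TypeError from instance_of
    # Order(quantity=2, status="lost")     # ValueError from in_
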
diff --git a/contrib/python/ipython/py2/IPython/config.py b/contrib/python/ipython/py2/IPython/config.py
index cf2bacafad..95aacd6b9f 100644
--- a/contrib/python/ipython/py2/IPython/config.py
+++ b/contrib/python/ipython/py2/IPython/config.py
@@ -9,7 +9,7 @@ from warnings import warn
from IPython.utils.shimmodule import ShimModule, ShimWarning
-warn("The `IPython.config` package has been deprecated since IPython 4.0. "
+warn("The `IPython.config` package has been deprecated since IPython 4.0. "
"You should import from traitlets.config instead.", ShimWarning)
diff --git a/contrib/python/ipython/py2/IPython/core/application.py b/contrib/python/ipython/py2/IPython/core/application.py
index af28133945..f6d002a06d 100644
--- a/contrib/python/ipython/py2/IPython/core/application.py
+++ b/contrib/python/ipython/py2/IPython/core/application.py
@@ -44,14 +44,14 @@ else:
"/etc/ipython",
]
-
-ENV_CONFIG_DIRS = []
-_env_config_dir = os.path.join(sys.prefix, 'etc', 'ipython')
-if _env_config_dir not in SYSTEM_CONFIG_DIRS:
- # only add ENV_CONFIG if sys.prefix is not already included
- ENV_CONFIG_DIRS.append(_env_config_dir)
-
-
+
+ENV_CONFIG_DIRS = []
+_env_config_dir = os.path.join(sys.prefix, 'etc', 'ipython')
+if _env_config_dir not in SYSTEM_CONFIG_DIRS:
+ # only add ENV_CONFIG if sys.prefix is not already included
+ ENV_CONFIG_DIRS.append(_env_config_dir)
+
+
_envvar = os.environ.get('IPYTHON_SUPPRESS_CONFIG_ERRORS')
if _envvar in {None, ''}:
IPYTHON_SUPPRESS_CONFIG_ERRORS = None
@@ -411,7 +411,7 @@ class BaseIPythonApplication(Application):
def init_config_files(self):
"""[optionally] copy default config files into profile dir."""
- self.config_file_paths.extend(ENV_CONFIG_DIRS)
+ self.config_file_paths.extend(ENV_CONFIG_DIRS)
self.config_file_paths.extend(SYSTEM_CONFIG_DIRS)
# copy config files
path = self.builtin_profile_dir
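
Editor's note: a standalone sketch of the `ENV_CONFIG_DIRS` logic restored above (the `SYSTEM_CONFIG_DIRS` values here are illustrative): ``<sys.prefix>/etc/ipython`` is only appended when it is not already a system-wide config directory, so virtualenv/conda environments get their own entry in the config search path.

    import os
    import sys

    SYSTEM_CONFIG_DIRS = ["/usr/local/etc/ipython", "/etc/ipython"]

    ENV_CONFIG_DIRS = []
    _env_config_dir = os.path.join(sys.prefix, "etc", "ipython")
    if _env_config_dir not in SYSTEM_CONFIG_DIRS:
        # only add the env dir if sys.prefix is not already covered
        ENV_CONFIG_DIRS.append(_env_config_dir)

    print(ENV_CONFIG_DIRS)
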
diff --git a/contrib/python/ipython/py2/IPython/core/compilerop.py b/contrib/python/ipython/py2/IPython/core/compilerop.py
index f529eb5224..0e8f094e31 100644
--- a/contrib/python/ipython/py2/IPython/core/compilerop.py
+++ b/contrib/python/ipython/py2/IPython/core/compilerop.py
@@ -56,7 +56,7 @@ def code_name(code, number=0):
This now expects code to be unicode.
"""
- hash_digest = hashlib.sha1(code.encode("utf-8")).hexdigest()
+ hash_digest = hashlib.sha1(code.encode("utf-8")).hexdigest()
# Include the number and 12 characters of the hash in the name. It's
# pretty much impossible that in a single session we'll have collisions
# even with truncated hashes, and the full one makes tracebacks too long
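
Editor's note: a minimal sketch of the hashing scheme in `code_name()` above. Only the truncated SHA-1 digest is taken from the diffed code; the exact ``<ipython-input-...>`` template below is an assumption for illustration.

    import hashlib

    def code_name(code, number=0):
        hash_digest = hashlib.sha1(code.encode("utf-8")).hexdigest()
        # 12 hex characters keep names short while making collisions unlikely
        return "<ipython-input-%d-%s>" % (number, hash_digest[:12])

    print(code_name("print('hi')\n", number=3))
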
diff --git a/contrib/python/ipython/py2/IPython/core/completer.py b/contrib/python/ipython/py2/IPython/core/completer.py
index b386945e54..a8aaf1080b 100644
--- a/contrib/python/ipython/py2/IPython/core/completer.py
+++ b/contrib/python/ipython/py2/IPython/core/completer.py
@@ -25,7 +25,7 @@ import re
import sys
import unicodedata
import string
-import warnings
+import warnings
from traitlets.config.configurable import Configurable
from IPython.core.error import TryNext
@@ -47,9 +47,9 @@ if sys.platform == 'win32':
else:
PROTECTABLES = ' ()[]{}?=\\|;:\'#*"^&'
-# Protect against returning an enormous number of completions which the frontend
-# may have trouble processing.
-MATCHES_LIMIT = 500
+# Protect against returning an enormous number of completions which the frontend
+# may have trouble processing.
+MATCHES_LIMIT = 500
def has_open_quotes(s):
"""Return whether a string has open quotes.
@@ -243,11 +243,11 @@ class Completer(Configurable):
"""
).tag(config=True)
- backslash_combining_completions = Bool(True,
- help="Enable unicode completions, e.g. \\alpha<tab> . "
- "Includes completion of latex commands, unicode names, and expanding "
- "unicode characters back to latex commands.").tag(config=True)
-
+ backslash_combining_completions = Bool(True,
+ help="Enable unicode completions, e.g. \\alpha<tab> . "
+ "Includes completion of latex commands, unicode names, and expanding "
+ "unicode characters back to latex commands.").tag(config=True)
+
def __init__(self, namespace=None, global_namespace=None, **kwargs):
"""Create a new completer for the command line.
@@ -549,13 +549,13 @@ class IPCompleter(Completer):
""",
).tag(config=True)
- @observe('limit_to__all__')
- def _limit_to_all_changed(self, change):
- warnings.warn('`IPython.core.IPCompleter.limit_to__all__` configuration '
- 'value has been deprecated since IPython 5.0, will be made to have '
- 'no effects and then removed in future version of IPython.',
- UserWarning)
-
+ @observe('limit_to__all__')
+ def _limit_to_all_changed(self, change):
+ warnings.warn('`IPython.core.IPCompleter.limit_to__all__` configuration '
+ 'value has been deprecated since IPython 5.0, will be made to have '
+ 'no effects and then removed in future version of IPython.',
+ UserWarning)
+
def __init__(self, shell=None, namespace=None, global_namespace=None,
use_readline=True, config=None, **kwargs):
"""IPCompleter() -> completer
@@ -1093,12 +1093,12 @@ class IPCompleter(Completer):
return [cast_unicode_py2(r) for r in res if r.lower().startswith(text_low)]
except TryNext:
pass
- except KeyboardInterrupt:
- """
- If custom completer take too long,
- let keyboard interrupt abort and return nothing.
- """
- break
+ except KeyboardInterrupt:
+ """
+ If custom completer take too long,
+ let keyboard interrupt abort and return nothing.
+ """
+ break
return None
@@ -1137,21 +1137,21 @@ class IPCompleter(Completer):
if cursor_pos is None:
cursor_pos = len(line_buffer) if text is None else len(text)
- if self.use_main_ns:
- self.namespace = __main__.__dict__
-
- if PY3 and self.backslash_combining_completions:
+ if self.use_main_ns:
+ self.namespace = __main__.__dict__
+ if PY3 and self.backslash_combining_completions:
+
base_text = text if not line_buffer else line_buffer[:cursor_pos]
latex_text, latex_matches = self.latex_matches(base_text)
if latex_matches:
- return latex_text, latex_matches
+ return latex_text, latex_matches
name_text = ''
name_matches = []
for meth in (self.unicode_name_matches, back_latex_name_matches, back_unicode_name_matches):
name_text, name_matches = meth(base_text)
if name_text:
- return name_text, name_matches[:MATCHES_LIMIT]
+ return name_text, name_matches[:MATCHES_LIMIT]
# if text is either None or an empty string, rely on the line buffer
if not text:
@@ -1192,6 +1192,6 @@ class IPCompleter(Completer):
# different types of objects. The rlcomplete() method could then
# simply collapse the dict into a list for readline, but we'd have
# richer completion semantics in other evironments.
- self.matches = sorted(set(self.matches), key=completions_sorting_key)[:MATCHES_LIMIT]
+ self.matches = sorted(set(self.matches), key=completions_sorting_key)[:MATCHES_LIMIT]
return text, self.matches
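
Editor's note: a hedged sketch (the `limit_matches` helper is hypothetical, not IPython's API) of the `MATCHES_LIMIT` behaviour added above: completions are deduplicated, sorted, and truncated so a frontend never receives more than 500 candidates.

    MATCHES_LIMIT = 500

    def limit_matches(matches, key=None):
        # deduplicate, sort with the completer's key, then cap the list size
        return sorted(set(matches), key=key)[:MATCHES_LIMIT]

    print(limit_matches(["beta", "alpha", "alpha", "gamma"]))
    # -> ['alpha', 'beta', 'gamma']
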
diff --git a/contrib/python/ipython/py2/IPython/core/completerlib.py b/contrib/python/ipython/py2/IPython/core/completerlib.py
index e736ca73d1..a41f1c4af9 100644
--- a/contrib/python/ipython/py2/IPython/core/completerlib.py
+++ b/contrib/python/ipython/py2/IPython/core/completerlib.py
@@ -164,11 +164,11 @@ def get_root_modules():
ip.db['rootmodules_cache'] maps sys.path entries to list of modules.
"""
ip = get_ipython()
- if ip is None:
- # No global shell instance to store cached list of modules.
- # Don't try to scan for modules every time.
- return list(sys.builtin_module_names)
-
+ if ip is None:
+ # No global shell instance to store cached list of modules.
+ # Don't try to scan for modules every time.
+ return list(sys.builtin_module_names)
+
rootmodules_cache = ip.db.get('rootmodules_cache', {})
rootmodules = list(sys.builtin_module_names)
start_time = time()
@@ -207,7 +207,7 @@ def is_importable(module, attr, only_modules):
return not(attr[:2] == '__' and attr[-2:] == '__')
def try_import(mod, only_modules=False):
- mod = mod.rstrip('.')
+ mod = mod.rstrip('.')
try:
m = __import__(mod)
except:
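
Editor's note: a hypothetical sketch (not IPython's API) of the cache that `get_root_modules()` keeps in ``ip.db['rootmodules_cache']``: one scan result per ``sys.path`` entry, with the builtin module names used as the shell-less fallback shown above.

    import sys

    rootmodules_cache = {}   # maps a sys.path entry to its list of module names

    def root_modules(path_entry, scan):
        # scan() is a stand-in for the expensive directory walk
        if path_entry not in rootmodules_cache:
            rootmodules_cache[path_entry] = scan(path_entry)
        return list(sys.builtin_module_names) + rootmodules_cache[path_entry]

    print(root_modules("/tmp", lambda p: []))
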
diff --git a/contrib/python/ipython/py2/IPython/core/crashhandler.py b/contrib/python/ipython/py2/IPython/core/crashhandler.py
index 22bbd7ae81..a6dededada 100644
--- a/contrib/python/ipython/py2/IPython/core/crashhandler.py
+++ b/contrib/python/ipython/py2/IPython/core/crashhandler.py
@@ -54,16 +54,16 @@ with the subject '{app_name} Crash Report'.
If you want to do it now, the following command will work (under Unix):
mail -s '{app_name} Crash Report' {contact_email} < {crash_report_fname}
-In your email, please also include information about:
-- The operating system under which the crash happened: Linux, macOS, Windows,
- other, and which exact version (for example: Ubuntu 16.04.3, macOS 10.13.2,
- Windows 10 Pro), and whether it is 32-bit or 64-bit;
-- How {app_name} was installed: using pip or conda, from GitHub, as part of
- a Docker container, or other, providing more detail if possible;
-- How to reproduce the crash: what exact sequence of instructions can one
- input to get the same crash? Ideally, find a minimal yet complete sequence
- of instructions that yields the crash.
-
+In your email, please also include information about:
+- The operating system under which the crash happened: Linux, macOS, Windows,
+ other, and which exact version (for example: Ubuntu 16.04.3, macOS 10.13.2,
+ Windows 10 Pro), and whether it is 32-bit or 64-bit;
+- How {app_name} was installed: using pip or conda, from GitHub, as part of
+ a Docker container, or other, providing more detail if possible;
+- How to reproduce the crash: what exact sequence of instructions can one
+ input to get the same crash? Ideally, find a minimal yet complete sequence
+ of instructions that yields the crash.
+
To ensure accurate tracking of this issue, please file a report about it at:
{bug_tracker}
"""
diff --git a/contrib/python/ipython/py2/IPython/core/debugger.py b/contrib/python/ipython/py2/IPython/core/debugger.py
index f08cfb1a78..a2f6200b31 100644
--- a/contrib/python/ipython/py2/IPython/core/debugger.py
+++ b/contrib/python/ipython/py2/IPython/core/debugger.py
@@ -13,8 +13,8 @@ The code in this file is mainly lifted out of cmd.py in Python 2.2, with minor
changes. Licensing should therefore be under the standard Python terms. For
details on the PSF (Python Software Foundation) standard license, see:
-https://docs.python.org/2/license.html
-"""
+https://docs.python.org/2/license.html
+"""
#*****************************************************************************
#
@@ -65,7 +65,7 @@ def BdbQuit_excepthook(et, ev, tb, excepthook=None):
parameter.
"""
warnings.warn("`BdbQuit_excepthook` is deprecated since version 5.1",
- DeprecationWarning, stacklevel=2)
+ DeprecationWarning, stacklevel=2)
if et==bdb.BdbQuit:
print('Exiting Debugger.')
elif excepthook is not None:
@@ -78,7 +78,7 @@ def BdbQuit_excepthook(et, ev, tb, excepthook=None):
def BdbQuit_IPython_excepthook(self,et,ev,tb,tb_offset=None):
warnings.warn(
"`BdbQuit_IPython_excepthook` is deprecated since version 5.1",
- DeprecationWarning, stacklevel=2)
+ DeprecationWarning, stacklevel=2)
print('Exiting Debugger.')
@@ -130,7 +130,7 @@ class Tracer(object):
"""
warnings.warn("`Tracer` is deprecated since version 5.1, directly use "
"`IPython.core.debugger.Pdb.set_trace()`",
- DeprecationWarning, stacklevel=2)
+ DeprecationWarning, stacklevel=2)
ip = get_ipython()
if ip is None:
@@ -203,7 +203,7 @@ def _file_lines(fname):
return out
-class Pdb(OldPdb):
+class Pdb(OldPdb):
"""Modified Pdb class, does not load readline.
for a standalone version that uses prompt_toolkit, see
@@ -228,14 +228,14 @@ class Pdb(OldPdb):
self.shell = get_ipython()
if self.shell is None:
- save_main = sys.modules['__main__']
+ save_main = sys.modules['__main__']
# No IPython instance running, we must create one
from IPython.terminal.interactiveshell import \
TerminalInteractiveShell
self.shell = TerminalInteractiveShell.instance()
- # needed by any code which calls __import__("__main__") after
- # the debugger was entered. See also #9941.
- sys.modules['__main__'] = save_main
+ # needed by any code which calls __import__("__main__") after
+ # the debugger was entered. See also #9941.
+ sys.modules['__main__'] = save_main
if color_scheme is not None:
warnings.warn(
@@ -485,8 +485,8 @@ class Pdb(OldPdb):
pass
def do_list(self, arg):
- """Print lines of code from the current stack frame
- """
+ """Print lines of code from the current stack frame
+ """
self.lastcmd = 'list'
last = None
if arg:
@@ -530,10 +530,10 @@ class Pdb(OldPdb):
return inspect.getblock(lines[lineno:]), lineno+1
def do_longlist(self, arg):
- """Print lines of code from the current stack frame.
-
- Shows more lines than 'list' does.
- """
+ """Print lines of code from the current stack frame.
+
+ Shows more lines than 'list' does.
+ """
self.lastcmd = 'longlist'
try:
lines, lineno = self.getsourcelines(self.curframe)
@@ -607,12 +607,12 @@ class Pdb(OldPdb):
self.print_stack_trace()
do_w = do_where
-
-
-def set_trace(frame=None):
- """
- Start debugging from `frame`.
-
- If frame is not specified, debugging starts from caller's frame.
- """
- Pdb().set_trace(frame or sys._getframe().f_back)
+
+
+def set_trace(frame=None):
+ """
+ Start debugging from `frame`.
+
+ If frame is not specified, debugging starts from caller's frame.
+ """
+ Pdb().set_trace(frame or sys._getframe().f_back)
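
Editor's note: a usage sketch for the module-level `set_trace()` helper restored above; it opens IPython's `Pdb` at the caller's frame (or at a frame you pass in). The call is left commented out so the snippet runs non-interactively.

    from IPython.core.debugger import set_trace

    def buggy(x):
        y = x * 2
        # set_trace()   # uncomment to stop here in the IPython debugger
        return y + 1

    print(buggy(3))
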
diff --git a/contrib/python/ipython/py2/IPython/core/display.py b/contrib/python/ipython/py2/IPython/core/display.py
index 5c82a57b31..c696f6c831 100644
--- a/contrib/python/ipython/py2/IPython/core/display.py
+++ b/contrib/python/ipython/py2/IPython/core/display.py
@@ -11,7 +11,7 @@ try:
except ImportError:
from base64 import encodestring as base64_encode
-from binascii import b2a_hex, hexlify
+from binascii import b2a_hex, hexlify
import json
import mimetypes
import os
@@ -26,9 +26,9 @@ from IPython.testing.skipdoctest import skip_doctest
__all__ = ['display', 'display_pretty', 'display_html', 'display_markdown',
'display_svg', 'display_png', 'display_jpeg', 'display_latex', 'display_json',
'display_javascript', 'display_pdf', 'DisplayObject', 'TextDisplayObject',
-'Pretty', 'HTML', 'Markdown', 'Math', 'Latex', 'SVG', 'ProgressBar', 'JSON', 'Javascript',
+'Pretty', 'HTML', 'Markdown', 'Math', 'Latex', 'SVG', 'ProgressBar', 'JSON', 'Javascript',
'Image', 'clear_output', 'set_matplotlib_formats', 'set_matplotlib_close',
-'publish_display_data', 'update_display', 'DisplayHandle']
+'publish_display_data', 'update_display', 'DisplayHandle']
#-----------------------------------------------------------------------------
# utility functions
@@ -79,8 +79,8 @@ def _display_mimetype(mimetype, objs, raw=False, metadata=None):
#-----------------------------------------------------------------------------
# Main functions
#-----------------------------------------------------------------------------
-# use * to indicate transient is keyword-only
-def publish_display_data(data, metadata=None, source=None, **kwargs):
+# use * to indicate transient is keyword-only
+def publish_display_data(data, metadata=None, source=None, **kwargs):
"""Publish data and metadata to all frontends.
See the ``display_data`` message in the messaging documentation for
@@ -115,38 +115,38 @@ def publish_display_data(data, metadata=None, source=None, **kwargs):
to specify metadata about particular representations.
source : str, deprecated
Unused.
- transient : dict, keyword-only
- A dictionary of transient data, such as display_id.
+ transient : dict, keyword-only
+ A dictionary of transient data, such as display_id.
"""
from IPython.core.interactiveshell import InteractiveShell
-
- display_pub = InteractiveShell.instance().display_pub
-
- # only pass transient if supplied,
- # to avoid errors with older ipykernel.
- # TODO: We could check for ipykernel version and provide a detailed upgrade message.
-
- display_pub.publish(
+
+ display_pub = InteractiveShell.instance().display_pub
+
+ # only pass transient if supplied,
+ # to avoid errors with older ipykernel.
+ # TODO: We could check for ipykernel version and provide a detailed upgrade message.
+
+ display_pub.publish(
data=data,
metadata=metadata,
- **kwargs
+ **kwargs
)
-
-def _new_id():
- """Generate a new random text id with urandom"""
- return b2a_hex(os.urandom(16)).decode('ascii')
-
-
+
+def _new_id():
+ """Generate a new random text id with urandom"""
+ return b2a_hex(os.urandom(16)).decode('ascii')
+
+
def display(*objs, **kwargs):
"""Display a Python object in all frontends.
By default all representations will be computed and sent to the frontends.
Frontends can decide which representation is used and how.
- In terminal IPython this will be similar to using :func:`print`, for use in richer
- frontends see Jupyter notebook examples with rich display logic.
-
+ In terminal IPython this will be similar to using :func:`print`, for use in richer
+ frontends see Jupyter notebook examples with rich display logic.
+
Parameters
----------
objs : tuple of objects
@@ -154,11 +154,11 @@ def display(*objs, **kwargs):
raw : bool, optional
Are the objects to be displayed already mimetype-keyed dicts of raw display data,
or Python objects that need to be formatted before display? [default: False]
- include : list, tuple or set, optional
+ include : list, tuple or set, optional
A list of format type strings (MIME types) to include in the
format data dict. If this is set *only* the format types included
in this list will be computed.
- exclude : list, tuple or set, optional
+ exclude : list, tuple or set, optional
A list of format type strings (MIME types) to exclude in the format
data dict. If this is set all format types will be computed,
except for those included in this argument.
@@ -166,147 +166,147 @@ def display(*objs, **kwargs):
A dictionary of metadata to associate with the output.
mime-type keys in this dictionary will be associated with the individual
representation formats, if they exist.
- transient : dict, optional
- A dictionary of transient data to associate with the output.
- Data in this dict should not be persisted to files (e.g. notebooks).
- display_id : str, bool optional
- Set an id for the display.
- This id can be used for updating this display area later via update_display.
- If given as `True`, generate a new `display_id`
- kwargs: additional keyword-args, optional
- Additional keyword-arguments are passed through to the display publisher.
-
- Returns
- -------
-
- handle: DisplayHandle
- Returns a handle on updatable displays for use with :func:`update_display`,
- if `display_id` is given. Returns :any:`None` if no `display_id` is given
- (default).
-
- Examples
- --------
-
- >>> class Json(object):
- ... def __init__(self, json):
- ... self.json = json
- ... def _repr_pretty_(self, pp, cycle):
- ... import json
- ... pp.text(json.dumps(self.json, indent=2))
- ... def __repr__(self):
- ... return str(self.json)
- ...
-
- >>> d = Json({1:2, 3: {4:5}})
-
- >>> print(d)
- {1: 2, 3: {4: 5}}
-
- >>> display(d)
- {
- "1": 2,
- "3": {
- "4": 5
- }
- }
-
- >>> def int_formatter(integer, pp, cycle):
- ... pp.text('I'*integer)
-
- >>> plain = get_ipython().display_formatter.formatters['text/plain']
- >>> plain.for_type(int, int_formatter)
- <function _repr_pprint at 0x...>
- >>> display(7-5)
- II
-
- >>> del plain.type_printers[int]
- >>> display(7-5)
- 2
-
- See Also
- --------
-
- :func:`update_display`
-
- Notes
- -----
-
- In Python, objects can declare their textual representation using the
- `__repr__` method. IPython expands on this idea and allows objects to declare
- other, rich representations including:
-
- - HTML
- - JSON
- - PNG
- - JPEG
- - SVG
- - LaTeX
-
- A single object can declare some or all of these representations; all are
- handled by IPython's display system.
-
- The main idea of the first approach is that you have to implement special
- display methods when you define your class, one for each representation you
- want to use. Here is a list of the names of the special methods and the
- values they must return:
-
- - `_repr_html_`: return raw HTML as a string
- - `_repr_json_`: return a JSONable dict
- - `_repr_jpeg_`: return raw JPEG data
- - `_repr_png_`: return raw PNG data
- - `_repr_svg_`: return raw SVG data as a string
- - `_repr_latex_`: return LaTeX commands in a string surrounded by "$".
- - `_repr_mimebundle_`: return a full mimebundle containing the mapping
- from all mimetypes to data
-
- When you are directly writing your own classes, you can adapt them for
- display in IPython by following the above approach. But in practice, you
- often need to work with existing classes that you can't easily modify.
-
- You can refer to the documentation on IPython display formatters in order to
- register custom formatters for already existing types.
-
- .. versionadded:: 5.4 display available without import
- .. versionadded:: 6.1 display available without import
-
- Since IPython 5.4 and 6.1 :func:`display` is automatically made available to
- the user without import. If you are using display in a document that might
- be used in a pure python context or with older version of IPython, use the
- following import at the top of your file::
-
- from IPython.display import display
-
+ transient : dict, optional
+ A dictionary of transient data to associate with the output.
+ Data in this dict should not be persisted to files (e.g. notebooks).
+ display_id : str, bool optional
+ Set an id for the display.
+ This id can be used for updating this display area later via update_display.
+ If given as `True`, generate a new `display_id`
+ kwargs: additional keyword-args, optional
+ Additional keyword-arguments are passed through to the display publisher.
+
+ Returns
+ -------
+
+ handle: DisplayHandle
+ Returns a handle on updatable displays for use with :func:`update_display`,
+ if `display_id` is given. Returns :any:`None` if no `display_id` is given
+ (default).
+
+ Examples
+ --------
+
+ >>> class Json(object):
+ ... def __init__(self, json):
+ ... self.json = json
+ ... def _repr_pretty_(self, pp, cycle):
+ ... import json
+ ... pp.text(json.dumps(self.json, indent=2))
+ ... def __repr__(self):
+ ... return str(self.json)
+ ...
+
+ >>> d = Json({1:2, 3: {4:5}})
+
+ >>> print(d)
+ {1: 2, 3: {4: 5}}
+
+ >>> display(d)
+ {
+ "1": 2,
+ "3": {
+ "4": 5
+ }
+ }
+
+ >>> def int_formatter(integer, pp, cycle):
+ ... pp.text('I'*integer)
+
+ >>> plain = get_ipython().display_formatter.formatters['text/plain']
+ >>> plain.for_type(int, int_formatter)
+ <function _repr_pprint at 0x...>
+ >>> display(7-5)
+ II
+
+ >>> del plain.type_printers[int]
+ >>> display(7-5)
+ 2
+
+ See Also
+ --------
+
+ :func:`update_display`
+
+ Notes
+ -----
+
+ In Python, objects can declare their textual representation using the
+ `__repr__` method. IPython expands on this idea and allows objects to declare
+ other, rich representations including:
+
+ - HTML
+ - JSON
+ - PNG
+ - JPEG
+ - SVG
+ - LaTeX
+
+ A single object can declare some or all of these representations; all are
+ handled by IPython's display system.
+
+ The main idea of the first approach is that you have to implement special
+ display methods when you define your class, one for each representation you
+ want to use. Here is a list of the names of the special methods and the
+ values they must return:
+
+ - `_repr_html_`: return raw HTML as a string
+ - `_repr_json_`: return a JSONable dict
+ - `_repr_jpeg_`: return raw JPEG data
+ - `_repr_png_`: return raw PNG data
+ - `_repr_svg_`: return raw SVG data as a string
+ - `_repr_latex_`: return LaTeX commands in a string surrounded by "$".
+ - `_repr_mimebundle_`: return a full mimebundle containing the mapping
+ from all mimetypes to data
+
+ When you are directly writing your own classes, you can adapt them for
+ display in IPython by following the above approach. But in practice, you
+ often need to work with existing classes that you can't easily modify.
+
+ You can refer to the documentation on IPython display formatters in order to
+ register custom formatters for already existing types.
+
+ .. versionadded:: 5.4 display available without import
+ .. versionadded:: 6.1 display available without import
+
+ Since IPython 5.4 and 6.1 :func:`display` is automatically made available to
+ the user without import. If you are using display in a document that might
+ be used in a pure python context or with older version of IPython, use the
+ following import at the top of your file::
+
+ from IPython.display import display
+
"""
from IPython.core.interactiveshell import InteractiveShell
-
- if not InteractiveShell.initialized():
- # Directly print objects.
- print(*objs)
- return
-
- raw = kwargs.pop('raw', False)
- include = kwargs.pop('include', None)
- exclude = kwargs.pop('exclude', None)
- metadata = kwargs.pop('metadata', None)
- transient = kwargs.pop('transient', None)
- display_id = kwargs.pop('display_id', None)
- if transient is None:
- transient = {}
- if display_id:
- if display_id is True:
- display_id = _new_id()
- transient['display_id'] = display_id
- if kwargs.get('update') and 'display_id' not in transient:
- raise TypeError('display_id required for update_display')
- if transient:
- kwargs['transient'] = transient
+
+ if not InteractiveShell.initialized():
+ # Directly print objects.
+ print(*objs)
+ return
+
+ raw = kwargs.pop('raw', False)
+ include = kwargs.pop('include', None)
+ exclude = kwargs.pop('exclude', None)
+ metadata = kwargs.pop('metadata', None)
+ transient = kwargs.pop('transient', None)
+ display_id = kwargs.pop('display_id', None)
+ if transient is None:
+ transient = {}
+ if display_id:
+ if display_id is True:
+ display_id = _new_id()
+ transient['display_id'] = display_id
+ if kwargs.get('update') and 'display_id' not in transient:
+ raise TypeError('display_id required for update_display')
+ if transient:
+ kwargs['transient'] = transient
if not raw:
format = InteractiveShell.instance().display_formatter.format
for obj in objs:
if raw:
- publish_display_data(data=obj, metadata=metadata, **kwargs)
+ publish_display_data(data=obj, metadata=metadata, **kwargs)
else:
format_dict, md_dict = format(obj, include=include, exclude=exclude)
if not format_dict:
@@ -315,86 +315,86 @@ def display(*objs, **kwargs):
if metadata:
# kwarg-specified metadata gets precedence
_merge(md_dict, metadata)
- publish_display_data(data=format_dict, metadata=md_dict, **kwargs)
- if display_id:
- return DisplayHandle(display_id)
-
-
-# use * for keyword-only display_id arg
-def update_display(obj, **kwargs):
- """Update an existing display by id
-
- Parameters
- ----------
-
- obj:
- The object with which to update the display
- display_id: keyword-only
- The id of the display to update
-
- See Also
- --------
-
- :func:`display`
- """
- sentinel = object()
- display_id = kwargs.pop('display_id', sentinel)
- if display_id is sentinel:
- raise TypeError("update_display() missing 1 required keyword-only argument: 'display_id'")
- kwargs['update'] = True
- display(obj, display_id=display_id, **kwargs)
-
-
-class DisplayHandle(object):
- """A handle on an updatable display
-
- Call `.update(obj)` to display a new object.
-
- Call `.display(obj`) to add a new instance of this display,
- and update existing instances.
-
- See Also
- --------
-
- :func:`display`, :func:`update_display`
-
- """
-
- def __init__(self, display_id=None):
- if display_id is None:
- display_id = _new_id()
- self.display_id = display_id
-
- def __repr__(self):
- return "<%s display_id=%s>" % (self.__class__.__name__, self.display_id)
-
- def display(self, obj, **kwargs):
- """Make a new display with my id, updating existing instances.
-
- Parameters
- ----------
-
- obj:
- object to display
- **kwargs:
- additional keyword arguments passed to display
- """
- display(obj, display_id=self.display_id, **kwargs)
-
- def update(self, obj, **kwargs):
- """Update existing displays with my id
-
- Parameters
- ----------
-
- obj:
- object to display
- **kwargs:
- additional keyword arguments passed to update_display
- """
- update_display(obj, display_id=self.display_id, **kwargs)
-
-
+ publish_display_data(data=format_dict, metadata=md_dict, **kwargs)
+ if display_id:
+ return DisplayHandle(display_id)
+
+
+# use * for keyword-only display_id arg
+def update_display(obj, **kwargs):
+ """Update an existing display by id
+
+ Parameters
+ ----------
+
+ obj:
+ The object with which to update the display
+ display_id: keyword-only
+ The id of the display to update
+
+ See Also
+ --------
+
+ :func:`display`
+ """
+ sentinel = object()
+ display_id = kwargs.pop('display_id', sentinel)
+ if display_id is sentinel:
+ raise TypeError("update_display() missing 1 required keyword-only argument: 'display_id'")
+ kwargs['update'] = True
+ display(obj, display_id=display_id, **kwargs)
+
+
+class DisplayHandle(object):
+ """A handle on an updatable display
+
+ Call `.update(obj)` to display a new object.
+
+ Call `.display(obj`) to add a new instance of this display,
+ and update existing instances.
+
+ See Also
+ --------
+
+ :func:`display`, :func:`update_display`
+
+ """
+
+ def __init__(self, display_id=None):
+ if display_id is None:
+ display_id = _new_id()
+ self.display_id = display_id
+
+ def __repr__(self):
+ return "<%s display_id=%s>" % (self.__class__.__name__, self.display_id)
+
+ def display(self, obj, **kwargs):
+ """Make a new display with my id, updating existing instances.
+
+ Parameters
+ ----------
+
+ obj:
+ object to display
+ **kwargs:
+ additional keyword arguments passed to display
+ """
+ display(obj, display_id=self.display_id, **kwargs)
+
+ def update(self, obj, **kwargs):
+ """Update existing displays with my id
+
+ Parameters
+ ----------
+
+ obj:
+ object to display
+ **kwargs:
+ additional keyword arguments passed to update_display
+ """
+ update_display(obj, display_id=self.display_id, **kwargs)
+
+
def display_pretty(*objs, **kwargs):
"""Display the pretty (default) representation of an object.
@@ -664,8 +664,8 @@ class TextDisplayObject(DisplayObject):
class Pretty(TextDisplayObject):
- def _repr_pretty_(self, pp, cycle):
- return pp.text(self.data)
+ def _repr_pretty_(self, pp, cycle):
+ return pp.text(self.data)
class HTML(TextDisplayObject):
@@ -703,7 +703,7 @@ class Latex(TextDisplayObject):
class SVG(DisplayObject):
- _read_flags = 'rb'
+ _read_flags = 'rb'
# wrap data in a property, which extracts the <svg> tag, discarding
# document headers
_data = None
@@ -735,68 +735,68 @@ class SVG(DisplayObject):
def _repr_svg_(self):
return self.data
-class ProgressBar(DisplayObject):
- """Progressbar supports displaying a progressbar like element
- """
- def __init__(self, total):
- """Creates a new progressbar
-
- Parameters
- ----------
- total : int
- maximum size of the progressbar
- """
- self.total = total
- self._progress = 0
- self.html_width = '60ex'
- self.text_width = 60
- self._display_id = hexlify(os.urandom(8)).decode('ascii')
-
- def __repr__(self):
- fraction = self.progress / self.total
- filled = '=' * int(fraction * self.text_width)
- rest = ' ' * (self.text_width - len(filled))
- return '[{}{}] {}/{}'.format(
- filled, rest,
- self.progress, self.total,
- )
-
- def _repr_html_(self):
- return "<progress style='width:{}' max='{}' value='{}'></progress>".format(
- self.html_width, self.total, self.progress)
-
- def display(self):
- display(self, display_id=self._display_id)
-
- def update(self):
- display(self, display_id=self._display_id, update=True)
-
- @property
- def progress(self):
- return self._progress
-
- @progress.setter
- def progress(self, value):
- self._progress = value
- self.update()
-
- def __iter__(self):
- self.display()
- self._progress = -1 # First iteration is 0
- return self
-
- def __next__(self):
- """Returns current value and increments display by one."""
- self.progress += 1
- if self.progress < self.total:
- return self.progress
- else:
- raise StopIteration()
-
- def next(self):
- """Python 2 compatibility"""
- return self.__next__()
-
+class ProgressBar(DisplayObject):
+ """Progressbar supports displaying a progressbar like element
+ """
+ def __init__(self, total):
+ """Creates a new progressbar
+
+ Parameters
+ ----------
+ total : int
+ maximum size of the progressbar
+ """
+ self.total = total
+ self._progress = 0
+ self.html_width = '60ex'
+ self.text_width = 60
+ self._display_id = hexlify(os.urandom(8)).decode('ascii')
+
+ def __repr__(self):
+ fraction = self.progress / self.total
+ filled = '=' * int(fraction * self.text_width)
+ rest = ' ' * (self.text_width - len(filled))
+ return '[{}{}] {}/{}'.format(
+ filled, rest,
+ self.progress, self.total,
+ )
+
+ def _repr_html_(self):
+ return "<progress style='width:{}' max='{}' value='{}'></progress>".format(
+ self.html_width, self.total, self.progress)
+
+ def display(self):
+ display(self, display_id=self._display_id)
+
+ def update(self):
+ display(self, display_id=self._display_id, update=True)
+
+ @property
+ def progress(self):
+ return self._progress
+
+ @progress.setter
+ def progress(self, value):
+ self._progress = value
+ self.update()
+
+ def __iter__(self):
+ self.display()
+ self._progress = -1 # First iteration is 0
+ return self
+
+ def __next__(self):
+ """Returns current value and increments display by one."""
+ self.progress += 1
+ if self.progress < self.total:
+ return self.progress
+ else:
+ raise StopIteration()
+
+ def next(self):
+ """Python 2 compatibility"""
+ return self.__next__()
+
class JSON(DisplayObject):
"""JSON expects a JSON-able dict or list
@@ -1012,7 +1012,7 @@ class Image(DisplayObject):
if ext is not None:
if ext == u'jpg' or ext == u'jpeg':
format = self._FMT_JPEG
- elif ext == u'png':
+ elif ext == u'png':
format = self._FMT_PNG
else:
format = ext.lower()
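
Editor's note: a hedged usage sketch for the `display_id`/`DisplayHandle` machinery and `ProgressBar` added above. Inside a Jupyter frontend the update replaces the first output in place; in plain Python, per the diffed code, `display()` falls back to `print()` and returns ``None``, hence the guard. Imports assume an IPython version that ships these names.

    from IPython.display import display, update_display, ProgressBar

    handle = display("working...", display_id=True)
    if handle is not None:                     # None when no shell is attached
        update_display("done", display_id=handle.display_id)
        # equivalently: handle.update("done")

    # ProgressBar re-renders itself through the same display_id channel,
    # either by assigning .progress or by iterating over it:
    bar = ProgressBar(5)
    for _ in bar:
        pass
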
diff --git a/contrib/python/ipython/py2/IPython/core/displayhook.py b/contrib/python/ipython/py2/IPython/core/displayhook.py
index cce7c83d16..50aaa7a50c 100644
--- a/contrib/python/ipython/py2/IPython/core/displayhook.py
+++ b/contrib/python/ipython/py2/IPython/core/displayhook.py
@@ -45,7 +45,7 @@ class DisplayHook(Configurable):
self.do_full_cache = 0
cache_size = 0
warn('caching was disabled (min value for cache size is %s).' %
- cache_size_min,stacklevel=3)
+ cache_size_min,stacklevel=3)
else:
self.do_full_cache = 1
@@ -293,17 +293,17 @@ class DisplayHook(Configurable):
# IronPython blocks here forever
if sys.platform != "cli":
gc.collect()
-
-
-class CapturingDisplayHook(object):
- def __init__(self, shell, outputs=None):
- self.shell = shell
- if outputs is None:
- outputs = []
- self.outputs = outputs
-
- def __call__(self, result=None):
- if result is None:
- return
- format_dict, md_dict = self.shell.display_formatter.format(result)
- self.outputs.append({ 'data': format_dict, 'metadata': md_dict })
+
+
+class CapturingDisplayHook(object):
+ def __init__(self, shell, outputs=None):
+ self.shell = shell
+ if outputs is None:
+ outputs = []
+ self.outputs = outputs
+
+ def __call__(self, result=None):
+ if result is None:
+ return
+ format_dict, md_dict = self.shell.display_formatter.format(result)
+ self.outputs.append({ 'data': format_dict, 'metadata': md_dict })
diff --git a/contrib/python/ipython/py2/IPython/core/displaypub.py b/contrib/python/ipython/py2/IPython/core/displaypub.py
index 82a859ae15..cb60efb52a 100644
--- a/contrib/python/ipython/py2/IPython/core/displaypub.py
+++ b/contrib/python/ipython/py2/IPython/core/displaypub.py
@@ -53,8 +53,8 @@ class DisplayPublisher(Configurable):
if not isinstance(metadata, dict):
raise TypeError('metadata must be a dict, got: %r' % data)
- # use * to indicate transient, update are keyword-only
- def publish(self, data, metadata=None, source=None, **kwargs):
+ # use * to indicate transient, update are keyword-only
+ def publish(self, data, metadata=None, source=None, **kwargs):
"""Publish data and metadata to all frontends.
See the ``display_data`` message in the messaging documentation for
@@ -90,21 +90,21 @@ class DisplayPublisher(Configurable):
the data itself.
source : str, deprecated
Unused.
- transient: dict, keyword-only
- A dictionary for transient data.
- Data in this dictionary should not be persisted as part of saving this output.
- Examples include 'display_id'.
- update: bool, keyword-only, default: False
- If True, only update existing outputs with the same display_id,
- rather than creating a new output.
+ transient: dict, keyword-only
+ A dictionary for transient data.
+ Data in this dictionary should not be persisted as part of saving this output.
+ Examples include 'display_id'.
+ update: bool, keyword-only, default: False
+ If True, only update existing outputs with the same display_id,
+ rather than creating a new output.
"""
- # These are kwargs only on Python 3, not used there.
- # For consistency and avoid code divergence we leave them here to
- # simplify potential backport
- transient = kwargs.pop('transient', None)
- update = kwargs.pop('update', False)
-
+ # These are kwargs only on Python 3, not used there.
+ # For consistency and avoid code divergence we leave them here to
+ # simplify potential backport
+ transient = kwargs.pop('transient', None)
+ update = kwargs.pop('update', False)
+
# The default is to simply write the plain text data using sys.stdout.
if 'text/plain' in data:
print(data['text/plain'])
@@ -121,19 +121,19 @@ class CapturingDisplayPublisher(DisplayPublisher):
"""A DisplayPublisher that stores"""
outputs = List()
- def publish(self, data, metadata=None, source=None, **kwargs):
-
- # These are kwargs only on Python 3, not used there.
- # For consistency and avoid code divergence we leave them here to
- # simplify potential backport
- transient = kwargs.pop('transient', None)
- update = kwargs.pop('update', False)
-
- self.outputs.append({'data':data, 'metadata':metadata,
- 'transient':transient, 'update':update})
-
+ def publish(self, data, metadata=None, source=None, **kwargs):
+
+ # These are kwargs only on Python 3, not used there.
+ # For consistency and avoid code divergence we leave them here to
+ # simplify potential backport
+ transient = kwargs.pop('transient', None)
+ update = kwargs.pop('update', False)
+
+ self.outputs.append({'data':data, 'metadata':metadata,
+ 'transient':transient, 'update':update})
+
def clear_output(self, wait=False):
super(CapturingDisplayPublisher, self).clear_output(wait)
-
+
# empty the list, *do not* reassign a new list
- self.outputs.clear()
+ self.outputs.clear()
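
Editor's note: `CapturingDisplayHook` and `CapturingDisplayPublisher` restored above back ``IPython.utils.capture``. A hedged usage sketch: display capture only engages inside a running shell, but stdout/stderr capture works anywhere.

    from IPython.utils.capture import capture_output

    with capture_output() as captured:
        print("hello")

    print(repr(captured.stdout))   # -> 'hello\n'
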
diff --git a/contrib/python/ipython/py2/IPython/core/excolors.py b/contrib/python/ipython/py2/IPython/core/excolors.py
index 487bde18c8..ed28b9564b 100644
--- a/contrib/python/ipython/py2/IPython/core/excolors.py
+++ b/contrib/python/ipython/py2/IPython/core/excolors.py
@@ -3,7 +3,7 @@
Color schemes for exception handling code in IPython.
"""
-import os
+import os
import warnings
#*****************************************************************************
@@ -156,12 +156,12 @@ def exception_colors():
Normal = C.Normal,
))
- # Hack: the 'neutral' colours are not very visible on a dark background on
- # Windows. Since Windows command prompts have a dark background by default, and
- # relatively few users are likely to alter that, we will use the 'Linux' colours,
- # designed for a dark background, as the default on Windows.
- if os.name == "nt":
- ex_colors.add_scheme(ex_colors['Linux'].copy('Neutral'))
+ # Hack: the 'neutral' colours are not very visible on a dark background on
+ # Windows. Since Windows command prompts have a dark background by default, and
+ # relatively few users are likely to alter that, we will use the 'Linux' colours,
+ # designed for a dark background, as the default on Windows.
+ if os.name == "nt":
+ ex_colors.add_scheme(ex_colors['Linux'].copy('Neutral'))
return ex_colors
@@ -172,8 +172,8 @@ class Deprec(object):
def __getattr__(self, name):
val = getattr(self.wrapped, name)
- warnings.warn("Using ExceptionColors global is deprecated and will be removed in IPython 6.0",
- DeprecationWarning, stacklevel=2)
+ warnings.warn("Using ExceptionColors global is deprecated and will be removed in IPython 6.0",
+ DeprecationWarning, stacklevel=2)
# using getattr after warnings break ipydoctest in weird way for 3.5
return val
diff --git a/contrib/python/ipython/py2/IPython/core/formatters.py b/contrib/python/ipython/py2/IPython/core/formatters.py
index d990619f27..964c0e4791 100644
--- a/contrib/python/ipython/py2/IPython/core/formatters.py
+++ b/contrib/python/ipython/py2/IPython/core/formatters.py
@@ -53,17 +53,17 @@ class DisplayFormatter(Configurable):
formatter.enabled = True
else:
formatter.enabled = False
-
+
ipython_display_formatter = ForwardDeclaredInstance('FormatterABC')
@default('ipython_display_formatter')
def _default_formatter(self):
return IPythonDisplayFormatter(parent=self)
-
- mimebundle_formatter = ForwardDeclaredInstance('FormatterABC')
- @default('mimebundle_formatter')
- def _default_mime_formatter(self):
- return MimeBundleFormatter(parent=self)
-
+
+ mimebundle_formatter = ForwardDeclaredInstance('FormatterABC')
+ @default('mimebundle_formatter')
+ def _default_mime_formatter(self):
+ return MimeBundleFormatter(parent=self)
+
# A dict of formatter whose keys are format types (MIME types) and whose
# values are subclasses of BaseFormatter.
formatters = Dict()
@@ -93,7 +93,7 @@ class DisplayFormatter(Configurable):
By default all format types will be computed.
- The following MIME types are usually implemented:
+ The following MIME types are usually implemented:
* text/plain
* text/html
@@ -110,15 +110,15 @@ class DisplayFormatter(Configurable):
----------
obj : object
The Python object whose format data will be computed.
- include : list, tuple or set; optional
+ include : list, tuple or set; optional
A list of format type strings (MIME types) to include in the
format data dict. If this is set *only* the format types included
in this list will be computed.
- exclude : list, tuple or set; optional
+ exclude : list, tuple or set; optional
A list of format type string (MIME types) to exclude in the format
data dict. If this is set all format types will be computed,
except for those included in this argument.
- Mimetypes present in exclude will take precedence over the ones in include
+ Mimetypes present in exclude will take precedence over the ones in include
Returns
-------
@@ -132,15 +132,15 @@ class DisplayFormatter(Configurable):
metadata_dict is a dictionary of metadata about each mime-type output.
Its keys will be a strict subset of the keys in format_dict.
-
- Notes
- -----
-
- If an object implement `_repr_mimebundle_` as well as various
- `_repr_*_`, the data returned by `_repr_mimebundle_` will take
- precedence and the corresponding `_repr_*_` for this mimetype will
- not be called.
-
+
+ Notes
+ -----
+
+ If an object implement `_repr_mimebundle_` as well as various
+ `_repr_*_`, the data returned by `_repr_mimebundle_` will take
+ precedence and the corresponding `_repr_*_` for this mimetype will
+ not be called.
+
"""
format_dict = {}
md_dict = {}
@@ -148,21 +148,21 @@ class DisplayFormatter(Configurable):
if self.ipython_display_formatter(obj):
# object handled itself, don't proceed
return {}, {}
-
- format_dict, md_dict = self.mimebundle_formatter(obj, include=include, exclude=exclude)
-
- if format_dict or md_dict:
- if include:
- format_dict = {k:v for k,v in format_dict.items() if k in include}
- md_dict = {k:v for k,v in md_dict.items() if k in include}
- if exclude:
- format_dict = {k:v for k,v in format_dict.items() if k not in exclude}
- md_dict = {k:v for k,v in md_dict.items() if k not in exclude}
-
+
+ format_dict, md_dict = self.mimebundle_formatter(obj, include=include, exclude=exclude)
+
+ if format_dict or md_dict:
+ if include:
+ format_dict = {k:v for k,v in format_dict.items() if k in include}
+ md_dict = {k:v for k,v in md_dict.items() if k in include}
+ if exclude:
+ format_dict = {k:v for k,v in format_dict.items() if k not in exclude}
+ md_dict = {k:v for k,v in md_dict.items() if k not in exclude}
+
for format_type, formatter in self.formatters.items():
- if format_type in format_dict:
- # already got it from mimebundle, don't render again
- continue
+ if format_type in format_dict:
+ # already got it from mimebundle, don't render again
+ continue
if include and format_type not in include:
continue
if exclude and format_type in exclude:
@@ -217,7 +217,7 @@ def catch_format_error(method, self, *args, **kwargs):
r = method(self, *args, **kwargs)
except NotImplementedError:
# don't warn on NotImplementedErrors
- return self._check_return(None, args[0])
+ return self._check_return(None, args[0])
except Exception:
exc_info = sys.exc_info()
ip = get_ipython()
@@ -225,7 +225,7 @@ def catch_format_error(method, self, *args, **kwargs):
ip.showtraceback(exc_info)
else:
traceback.print_exception(*exc_info)
- return self._check_return(None, args[0])
+ return self._check_return(None, args[0])
return self._check_return(r, args[0])
@@ -876,7 +876,7 @@ class PDFFormatter(BaseFormatter):
_return_type = (bytes, unicode_type)
class IPythonDisplayFormatter(BaseFormatter):
- """An escape-hatch Formatter for objects that know how to display themselves.
+ """An escape-hatch Formatter for objects that know how to display themselves.
To define the callables that compute the representation of your
objects, define a :meth:`_ipython_display_` method or use the :meth:`for_type`
@@ -886,13 +886,13 @@ class IPythonDisplayFormatter(BaseFormatter):
This display formatter has highest priority.
If it fires, no other display formatter will be called.
-
- Prior to IPython 6.1, `_ipython_display_` was the only way to display custom mime-types
- without registering a new Formatter.
-
- IPython 6.1 introduces `_repr_mimebundle_` for displaying custom mime-types,
- so `_ipython_display_` should only be used for objects that require unusual
- display patterns, such as multiple display calls.
+
+ Prior to IPython 6.1, `_ipython_display_` was the only way to display custom mime-types
+ without registering a new Formatter.
+
+ IPython 6.1 introduces `_repr_mimebundle_` for displaying custom mime-types,
+ so `_ipython_display_` should only be used for objects that require unusual
+ display patterns, such as multiple display calls.
"""
print_method = ObjectName('_ipython_display_')
_return_type = (type(None), bool)
@@ -916,63 +916,63 @@ class IPythonDisplayFormatter(BaseFormatter):
return True
-class MimeBundleFormatter(BaseFormatter):
- """A Formatter for arbitrary mime-types.
-
- Unlike other `_repr_<mimetype>_` methods,
- `_repr_mimebundle_` should return mime-bundle data,
- either the mime-keyed `data` dictionary or the tuple `(data, metadata)`.
- Any mime-type is valid.
-
- To define the callables that compute the mime-bundle representation of your
- objects, define a :meth:`_repr_mimebundle_` method or use the :meth:`for_type`
- or :meth:`for_type_by_name` methods to register functions that handle
- this.
-
- .. versionadded:: 6.1
- """
- print_method = ObjectName('_repr_mimebundle_')
- _return_type = dict
-
- def _check_return(self, r, obj):
- r = super(MimeBundleFormatter, self)._check_return(r, obj)
- # always return (data, metadata):
- if r is None:
- return {}, {}
- if not isinstance(r, tuple):
- return r, {}
- return r
-
- @catch_format_error
- def __call__(self, obj, include=None, exclude=None):
- """Compute the format for an object.
-
- Identical to parent's method but we pass extra parameters to the method.
-
- Unlike other _repr_*_ `_repr_mimebundle_` should allow extra kwargs, in
- particular `include` and `exclude`.
- """
- if self.enabled:
- # lookup registered printer
- try:
- printer = self.lookup(obj)
- except KeyError:
- pass
- else:
- return printer(obj)
- # Finally look for special method names
- method = get_real_method(obj, self.print_method)
-
- if method is not None:
- d = {}
- d['include'] = include
- d['exclude'] = exclude
- return method(**d)
- return None
- else:
- return None
-
-
+class MimeBundleFormatter(BaseFormatter):
+ """A Formatter for arbitrary mime-types.
+
+ Unlike other `_repr_<mimetype>_` methods,
+ `_repr_mimebundle_` should return mime-bundle data,
+ either the mime-keyed `data` dictionary or the tuple `(data, metadata)`.
+ Any mime-type is valid.
+
+ To define the callables that compute the mime-bundle representation of your
+ objects, define a :meth:`_repr_mimebundle_` method or use the :meth:`for_type`
+ or :meth:`for_type_by_name` methods to register functions that handle
+ this.
+
+ .. versionadded:: 6.1
+ """
+ print_method = ObjectName('_repr_mimebundle_')
+ _return_type = dict
+
+ def _check_return(self, r, obj):
+ r = super(MimeBundleFormatter, self)._check_return(r, obj)
+ # always return (data, metadata):
+ if r is None:
+ return {}, {}
+ if not isinstance(r, tuple):
+ return r, {}
+ return r
+
+ @catch_format_error
+ def __call__(self, obj, include=None, exclude=None):
+ """Compute the format for an object.
+
+ Identical to parent's method but we pass extra parameters to the method.
+
+ Unlike other _repr_*_ `_repr_mimebundle_` should allow extra kwargs, in
+ particular `include` and `exclude`.
+ """
+ if self.enabled:
+ # lookup registered printer
+ try:
+ printer = self.lookup(obj)
+ except KeyError:
+ pass
+ else:
+ return printer(obj)
+ # Finally look for special method names
+ method = get_real_method(obj, self.print_method)
+
+ if method is not None:
+ d = {}
+ d['include'] = include
+ d['exclude'] = exclude
+ return method(**d)
+ return None
+ else:
+ return None
+
+
FormatterABC.register(BaseFormatter)
FormatterABC.register(PlainTextFormatter)
FormatterABC.register(HTMLFormatter)
@@ -985,7 +985,7 @@ FormatterABC.register(LatexFormatter)
FormatterABC.register(JSONFormatter)
FormatterABC.register(JavascriptFormatter)
FormatterABC.register(IPythonDisplayFormatter)
-FormatterABC.register(MimeBundleFormatter)
+FormatterABC.register(MimeBundleFormatter)
def format_display_data(obj, include=None, exclude=None):
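For reference: the `_repr_mimebundle_` hook reinstated above may return either a mime-keyed `data` dict or a `(data, metadata)` tuple, and `MimeBundleFormatter._check_return` normalizes both shapes to `(data, metadata)`. A minimal sketch of an object using the hook (the class name and payload are illustrative, not part of the patch):

    class RichRecord(object):
        """Hypothetical object that publishes two mime-types at once."""
        def __init__(self, value):
            self.value = value

        def _repr_mimebundle_(self, include=None, exclude=None):
            # Returning just the data dict is fine; the formatter pads it
            # to (data, {}) when no metadata is supplied.
            return {
                'text/plain': 'RichRecord(%r)' % (self.value,),
                'text/html': '<b>RichRecord:</b> %s' % (self.value,),
            }

Per the docstrings in this file, `_ipython_display_` remains the highest-priority escape hatch, while the mime-bundle hook is the preferred way to emit custom mime-types without registering a new Formatter.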
diff --git a/contrib/python/ipython/py2/IPython/core/history.py b/contrib/python/ipython/py2/IPython/core/history.py
index 2e7fdbc845..894f927f7a 100644
--- a/contrib/python/ipython/py2/IPython/core/history.py
+++ b/contrib/python/ipython/py2/IPython/core/history.py
@@ -21,7 +21,7 @@ import threading
from traitlets.config.configurable import LoggingConfigurable
from decorator import decorator
from IPython.utils.decorators import undoc
-from IPython.paths import locate_profile
+from IPython.paths import locate_profile
from IPython.utils import py3compat
from traitlets import (
Any, Bool, Dict, Instance, Integer, List, Unicode, TraitError,
@@ -301,8 +301,8 @@ class HistoryAccessor(HistoryAccessorBase):
cur = self.db.execute("SELECT session, line, %s FROM %s " %\
(toget, sqlfrom) + sql, params)
if output: # Regroup into 3-tuples, and parse JSON
- return ((ses, lin, (py3compat.cast_unicode_py2(inp), py3compat.cast_unicode_py2(out)))
- for ses, lin, inp, out in cur)
+ return ((ses, lin, (py3compat.cast_unicode_py2(inp), py3compat.cast_unicode_py2(out)))
+ for ses, lin, inp, out in cur)
return cur
@needs_sqlite
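The regrouping restored just above reshapes flat `(session, line, input, output)` rows into `(session, line, (input, output))` 3-tuples lazily, via a generator expression. The same reshaping on plain tuples, outside sqlite (the rows are made up):

    rows = [
        (1, 1, "print('a')", 'a'),
        (1, 2, "1 + 1", '2'),
    ]
    # Each row becomes a 3-tuple whose last element pairs input with output.
    regrouped = ((ses, lin, (inp, out)) for ses, lin, inp, out in rows)
    for session, line, (source, output) in regrouped:
        print((session, line, source, output))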
diff --git a/contrib/python/ipython/py2/IPython/core/hooks.py b/contrib/python/ipython/py2/IPython/core/hooks.py
index e6fc84087f..482a44a79e 100644
--- a/contrib/python/ipython/py2/IPython/core/hooks.py
+++ b/contrib/python/ipython/py2/IPython/core/hooks.py
@@ -99,7 +99,7 @@ def fix_error_editor(self,filename,linenum,column,msg):
in future versions. It appears to be used only for automatically fixing syntax
error that has been broken for a few years and has thus been removed. If you
happened to use this function and still need it please make your voice heard on
-the mailing list ipython-dev@python.org , or on the GitHub Issue tracker:
+the mailing list ipython-dev@python.org , or on the GitHub Issue tracker:
https://github.com/ipython/ipython/issues/9649 """, UserWarning)
def vim_quickfix_file():
diff --git a/contrib/python/ipython/py2/IPython/core/inputtransformer.py b/contrib/python/ipython/py2/IPython/core/inputtransformer.py
index 3ba49b951d..b321e3c1ba 100644
--- a/contrib/python/ipython/py2/IPython/core/inputtransformer.py
+++ b/contrib/python/ipython/py2/IPython/core/inputtransformer.py
@@ -126,19 +126,19 @@ class TokenInputTransformer(InputTransformer):
"""
def __init__(self, func):
self.func = func
- self.buf = []
+ self.buf = []
self.reset_tokenizer()
-
+
def reset_tokenizer(self):
- it = iter(self.buf)
- nxt = it.__next__ if PY3 else it.next
- self.tokenizer = generate_tokens(nxt)
-
+ it = iter(self.buf)
+ nxt = it.__next__ if PY3 else it.next
+ self.tokenizer = generate_tokens(nxt)
+
def push(self, line):
- self.buf.append(line + '\n')
- if all(l.isspace() for l in self.buf):
+ self.buf.append(line + '\n')
+ if all(l.isspace() for l in self.buf):
return self.reset()
-
+
tokens = []
stop_at_NL = False
try:
@@ -158,13 +158,13 @@ class TokenInputTransformer(InputTransformer):
return self.output(tokens)
def output(self, tokens):
- self.buf[:] = []
+ self.buf[:] = []
self.reset_tokenizer()
return untokenize(self.func(tokens)).rstrip('\n')
def reset(self):
- l = ''.join(self.buf)
- self.buf[:] = []
+ l = ''.join(self.buf)
+ self.buf[:] = []
self.reset_tokenizer()
if l:
return l.rstrip('\n')
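The buffer handling restored above feeds accumulated source lines to the stdlib tokenizer through an iterator: `generate_tokens` expects a readline-style callable, and the iterator's `next`/`__next__` method serves as one, which is also why the tokenizer must be rebuilt whenever the buffer is cleared. A standalone sketch of the pattern (Python 3 names, illustrative buffer contents):

    from tokenize import generate_tokens

    buf = ["x = 1\n", "y = x + 2\n"]
    it = iter(buf)
    # The iterator's __next__ yields one buffered line per call and signals
    # end-of-input by raising StopIteration, which generate_tokens accepts.
    for tok in generate_tokens(it.__next__):
        print(tok.type, tok.string)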
diff --git a/contrib/python/ipython/py2/IPython/core/interactiveshell.py b/contrib/python/ipython/py2/IPython/core/interactiveshell.py
index ad8824b606..52d80fceeb 100644
--- a/contrib/python/ipython/py2/IPython/core/interactiveshell.py
+++ b/contrib/python/ipython/py2/IPython/core/interactiveshell.py
@@ -58,7 +58,7 @@ from IPython.core.prefilter import PrefilterManager
from IPython.core.profiledir import ProfileDir
from IPython.core.usage import default_banner
from IPython.testing.skipdoctest import skip_doctest_py2, skip_doctest
-from IPython.display import display
+from IPython.display import display
from IPython.utils import PyColorize
from IPython.utils import io
from IPython.utils import py3compat
@@ -637,7 +637,7 @@ class InteractiveShell(SingletonConfigurable):
# removing on exit or representing the existence of more than one
# IPython at a time.
builtin_mod.__dict__['__IPYTHON__'] = True
- builtin_mod.__dict__['display'] = display
+ builtin_mod.__dict__['display'] = display
self.builtin_trap = BuiltinTrap(shell=self)
@@ -2064,7 +2064,7 @@ class InteractiveShell(SingletonConfigurable):
etpl = "Line magic function `%%%s` not found%s."
extra = '' if cm is None else (' (But cell magic `%%%%%s` exists, '
'did you mean that instead?)' % magic_name )
- raise UsageError(etpl % (magic_name, extra))
+ raise UsageError(etpl % (magic_name, extra))
else:
# Note: this is the distance in the stack to the user's frame.
# This will need to be updated if the internal calling logic gets
@@ -2101,7 +2101,7 @@ class InteractiveShell(SingletonConfigurable):
etpl = "Cell magic `%%{0}` not found{1}."
extra = '' if lm is None else (' (But line magic `%{0}` exists, '
'did you mean that instead?)'.format(magic_name))
- raise UsageError(etpl.format(magic_name, extra))
+ raise UsageError(etpl.format(magic_name, extra))
elif cell == '':
message = '%%{0} is a cell magic, but the cell body is empty.'.format(magic_name)
if self.find_line_magic(magic_name) is not None:
@@ -2537,12 +2537,12 @@ class InteractiveShell(SingletonConfigurable):
"""generator for sequence of code blocks to run"""
if fname.endswith('.ipynb'):
from nbformat import read
- nb = read(fname, as_version=4)
- if not nb.cells:
- return
- for cell in nb.cells:
- if cell.cell_type == 'code':
- yield cell.source
+ nb = read(fname, as_version=4)
+ if not nb.cells:
+ return
+ for cell in nb.cells:
+ if cell.cell_type == 'code':
+ yield cell.source
else:
with open(fname) as f:
yield f.read()
@@ -2626,8 +2626,8 @@ class InteractiveShell(SingletonConfigurable):
result.execution_count = self.execution_count
def error_before_exec(value):
- if store_history:
- self.execution_count += 1
+ if store_history:
+ self.execution_count += 1
result.error_before_exec = value
self.last_execution_succeeded = False
return result
@@ -2900,32 +2900,32 @@ class InteractiveShell(SingletonConfigurable):
# For backwards compatibility
runcode = run_code
- def check_complete(self, code):
- """Return whether a block of code is ready to execute, or should be continued
-
- Parameters
- ----------
- source : string
- Python input code, which can be multiline.
-
- Returns
- -------
- status : str
- One of 'complete', 'incomplete', or 'invalid' if source is not a
- prefix of valid code.
- indent : str
- When status is 'incomplete', this is some whitespace to insert on
- the next line of the prompt.
- """
- status, nspaces = self.input_splitter.check_complete(code)
- return status, ' ' * (nspaces or 0)
-
+ def check_complete(self, code):
+ """Return whether a block of code is ready to execute, or should be continued
+
+ Parameters
+ ----------
+ source : string
+ Python input code, which can be multiline.
+
+ Returns
+ -------
+ status : str
+ One of 'complete', 'incomplete', or 'invalid' if source is not a
+ prefix of valid code.
+ indent : str
+ When status is 'incomplete', this is some whitespace to insert on
+ the next line of the prompt.
+ """
+ status, nspaces = self.input_splitter.check_complete(code)
+ return status, ' ' * (nspaces or 0)
+
#-------------------------------------------------------------------------
# Things related to GUI support and pylab
#-------------------------------------------------------------------------
- active_eventloop = None
-
+ active_eventloop = None
+
def enable_gui(self, gui=None):
raise NotImplementedError('Implement enable_gui in a subclass')
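The `check_complete` method reinstated above returns a `(status, indent)` pair: `status` is one of 'complete', 'incomplete' or 'invalid', and `indent` is the whitespace a front end should pre-fill on the next prompt line. Only the final shaping of the splitter's result is shown here, since it is small enough to stand alone (values below are illustrative):

    def shape_result(status, nspaces):
        # The splitter reports a space count (possibly None); the shell turns
        # it into an actual whitespace string, treating None as zero.
        return status, ' ' * (nspaces or 0)

    print(shape_result('incomplete', 4))   # ('incomplete', '    ')
    print(shape_result('complete', None))  # ('complete', '')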
diff --git a/contrib/python/ipython/py2/IPython/core/magics/basic.py b/contrib/python/ipython/py2/IPython/core/magics/basic.py
index ca69e2e698..304bf4228c 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/basic.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/basic.py
@@ -3,7 +3,7 @@
from __future__ import print_function
from __future__ import absolute_import
-import argparse
+import argparse
import io
import sys
from pprint import pformat
@@ -288,14 +288,14 @@ Currently the magic system has the following functions:""",
@line_magic
def profile(self, parameter_s=''):
- """DEPRECATED since IPython 2.0.
-
- Raise `UsageError`. To profile code use the :magic:`prun` magic.
-
+ """DEPRECATED since IPython 2.0.
+ Raise `UsageError`. To profile code use the :magic:`prun` magic.
+
+
See Also
--------
- prun : run code using the Python profiler (:magic:`prun`)
+ prun : run code using the Python profiler (:magic:`prun`)
"""
warn("%profile is now deprecated. Please use get_ipython().profile instead.")
from IPython.core.application import BaseIPythonApplication
@@ -551,7 +551,7 @@ Currently the magic system has the following functions:""",
@magic_arguments.magic_arguments()
@magic_arguments.argument(
'-e', '--export', action='store_true', default=False,
- help=argparse.SUPPRESS
+ help=argparse.SUPPRESS
)
@magic_arguments.argument(
'filename', type=unicode_type,
@@ -562,24 +562,24 @@ Currently the magic system has the following functions:""",
"""Export and convert IPython notebooks.
This function can export the current IPython history to a notebook file.
- For example, to export the history to "foo.ipynb" do "%notebook foo.ipynb".
-
- The -e or --export flag is deprecated in IPython 5.2, and will be
- removed in the future.
+ For example, to export the history to "foo.ipynb" do "%notebook foo.ipynb".
+
+ The -e or --export flag is deprecated in IPython 5.2, and will be
+ removed in the future.
"""
args = magic_arguments.parse_argstring(self.notebook, s)
from nbformat import write, v4
-
- cells = []
- hist = list(self.shell.history_manager.get_range())
- if(len(hist)<=1):
- raise ValueError('History is empty, cannot export')
- for session, execution_count, source in hist[:-1]:
- cells.append(v4.new_code_cell(
- execution_count=execution_count,
- source=source
- ))
- nb = v4.new_notebook(cells=cells)
- with io.open(args.filename, 'w', encoding='utf-8') as f:
- write(nb, f, version=4)
+
+ cells = []
+ hist = list(self.shell.history_manager.get_range())
+ if(len(hist)<=1):
+ raise ValueError('History is empty, cannot export')
+ for session, execution_count, source in hist[:-1]:
+ cells.append(v4.new_code_cell(
+ execution_count=execution_count,
+ source=source
+ ))
+ nb = v4.new_notebook(cells=cells)
+ with io.open(args.filename, 'w', encoding='utf-8') as f:
+ write(nb, f, version=4)
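The `%notebook` hunk above rebuilds the export path with `nbformat.v4`: every history entry becomes a code cell and the notebook is written out as UTF-8 JSON. A hedged standalone sketch of the same flow (assumes the `nbformat` package is installed; the history entries and filename are made up):

    import io
    from nbformat import write, v4

    # Stand-ins for (session, execution_count, source) history rows.
    hist = [(1, 1, 'x = 41'), (1, 2, 'x + 1')]

    cells = [
        v4.new_code_cell(execution_count=execution_count, source=source)
        for _session, execution_count, source in hist
    ]
    nb = v4.new_notebook(cells=cells)
    with io.open('exported.ipynb', 'w', encoding='utf-8') as f:
        write(nb, f, version=4)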
diff --git a/contrib/python/ipython/py2/IPython/core/magics/execution.py b/contrib/python/ipython/py2/IPython/core/magics/execution.py
index 3734b0cdae..73f7581cb6 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/execution.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/execution.py
@@ -437,7 +437,7 @@ python-profiler package from non-free.""")
def _debug_exec(self, code, breakpoint):
if breakpoint:
- (filename, bp_line) = breakpoint.rsplit(':', 1)
+ (filename, bp_line) = breakpoint.rsplit(':', 1)
bp_line = int(bp_line)
else:
(filename, bp_line) = (None, None)
@@ -806,17 +806,17 @@ python-profiler package from non-free.""")
self.shell.InteractiveTB.pdb = self.shell.InteractiveTB.debugger_cls()
deb = self.shell.InteractiveTB.pdb
- # deb.checkline() fails if deb.curframe exists but is None; it can
- # handle it not existing. https://github.com/ipython/ipython/issues/10028
- if hasattr(deb, 'curframe'):
- del deb.curframe
-
+ # deb.checkline() fails if deb.curframe exists but is None; it can
+ # handle it not existing. https://github.com/ipython/ipython/issues/10028
+ if hasattr(deb, 'curframe'):
+ del deb.curframe
+
# reset Breakpoint state, which is moronically kept
# in a class
bdb.Breakpoint.next = 1
bdb.Breakpoint.bplist = {}
bdb.Breakpoint.bpbynumber = [None]
- deb.clear_all_breaks()
+ deb.clear_all_breaks()
if bp_line is not None:
# Set an initial breakpoint to stop execution
maxtries = 10
@@ -1013,13 +1013,13 @@ python-profiler package from non-free.""")
ast_setup = self.shell.transform_ast(ast_setup)
ast_stmt = self.shell.transform_ast(ast_stmt)
- # Check that these compile to valid Python code *outside* the timer func
- # Invalid code may become valid when put inside the function & loop,
- # which messes up error messages.
- # https://github.com/ipython/ipython/issues/10636
- self.shell.compile(ast_setup, "<magic-timeit-setup>", "exec")
- self.shell.compile(ast_stmt, "<magic-timeit-stmt>", "exec")
-
+ # Check that these compile to valid Python code *outside* the timer func
+ # Invalid code may become valid when put inside the function & loop,
+ # which messes up error messages.
+ # https://github.com/ipython/ipython/issues/10636
+ self.shell.compile(ast_setup, "<magic-timeit-setup>", "exec")
+ self.shell.compile(ast_stmt, "<magic-timeit-stmt>", "exec")
+
# This codestring is taken from timeit.template - we fill it in as an
# AST, so that we can apply our AST transformations to the user code
# without affecting the timing code.
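The `%timeit` hunk above compiles the setup and statement ASTs once, outside the generated timer function, because code that is invalid on its own can become valid once wrapped in the timing function and loop, which garbles the error message. The validation step in isolation, using only the stdlib (the snippets are illustrative):

    import ast

    setup_src = "data = list(range(10))"
    stmt_src = "sum(data)"

    ast_setup = ast.parse(setup_src, mode='exec')
    ast_stmt = ast.parse(stmt_src, mode='exec')

    # Compiling the bare ASTs surfaces syntax errors before the code is
    # wrapped in the timing function and loop.
    compile(ast_setup, "<magic-timeit-setup>", "exec")
    compile(ast_stmt, "<magic-timeit-stmt>", "exec")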
diff --git a/contrib/python/ipython/py2/IPython/core/magics/script.py b/contrib/python/ipython/py2/IPython/core/magics/script.py
index 3fbddc38a8..22e6c46579 100644
--- a/contrib/python/ipython/py2/IPython/core/magics/script.py
+++ b/contrib/python/ipython/py2/IPython/core/magics/script.py
@@ -246,8 +246,8 @@ class ScriptMagics(Magics):
def kill_bg_processes(self):
"""Kill all BG processes which are still running."""
- if not self.bg_processes:
- return
+ if not self.bg_processes:
+ return
for p in self.bg_processes:
if p.poll() is None:
try:
@@ -255,9 +255,9 @@ class ScriptMagics(Magics):
except:
pass
time.sleep(0.1)
- self._gc_bg_processes()
- if not self.bg_processes:
- return
+ self._gc_bg_processes()
+ if not self.bg_processes:
+ return
for p in self.bg_processes:
if p.poll() is None:
try:
@@ -265,9 +265,9 @@ class ScriptMagics(Magics):
except:
pass
time.sleep(0.1)
- self._gc_bg_processes()
- if not self.bg_processes:
- return
+ self._gc_bg_processes()
+ if not self.bg_processes:
+ return
for p in self.bg_processes:
if p.poll() is None:
try:
diff --git a/contrib/python/ipython/py2/IPython/core/oinspect.py b/contrib/python/ipython/py2/IPython/core/oinspect.py
index 55a4efe8c0..0360a9c98f 100644
--- a/contrib/python/ipython/py2/IPython/core/oinspect.py
+++ b/contrib/python/ipython/py2/IPython/core/oinspect.py
@@ -651,7 +651,7 @@ class Inspector(Colorable):
# Functions, methods, classes
append_field(_mime, 'Signature', 'definition', code_formatter)
append_field(_mime, 'Init signature', 'init_definition', code_formatter)
- if detail_level > 0 and info['source']:
+ if detail_level > 0 and info['source']:
append_field(_mime, 'Source', 'source', code_formatter)
else:
append_field(_mime, 'Docstring', 'docstring', formatter)
@@ -662,9 +662,9 @@ class Inspector(Colorable):
else:
# General Python objects
- append_field(_mime, 'Signature', 'definition', code_formatter)
- append_field(_mime, 'Call signature', 'call_def', code_formatter)
-
+ append_field(_mime, 'Signature', 'definition', code_formatter)
+ append_field(_mime, 'Call signature', 'call_def', code_formatter)
+
append_field(_mime, 'Type', 'type_name')
# Base class for old-style instances
@@ -678,8 +678,8 @@ class Inspector(Colorable):
append_field(_mime, 'Namespace', 'namespace')
append_field(_mime, 'Length', 'length')
- append_field(_mime, 'File', 'file')
-
+ append_field(_mime, 'File', 'file')
+
# Source or docstring, depending on detail level and whether
# source found.
if detail_level > 0:
@@ -690,7 +690,7 @@ class Inspector(Colorable):
append_field(_mime, 'Class docstring', 'class_docstring', formatter)
append_field(_mime, 'Init docstring', 'init_docstring', formatter)
append_field(_mime, 'Call docstring', 'call_docstring', formatter)
-
+
return self.format_mime(_mime)
diff --git a/contrib/python/ipython/py2/IPython/core/pylabtools.py b/contrib/python/ipython/py2/IPython/core/pylabtools.py
index a1932d8c48..63c38b4386 100644
--- a/contrib/python/ipython/py2/IPython/core/pylabtools.py
+++ b/contrib/python/ipython/py2/IPython/core/pylabtools.py
@@ -19,18 +19,18 @@ backends = {'tk': 'TkAgg',
'wx': 'WXAgg',
'qt4': 'Qt4Agg',
'qt5': 'Qt5Agg',
- 'qt': 'Qt5Agg',
+ 'qt': 'Qt5Agg',
'osx': 'MacOSX',
'nbagg': 'nbAgg',
'notebook': 'nbAgg',
- 'agg': 'agg',
- 'svg': 'svg',
- 'pdf': 'pdf',
- 'ps': 'ps',
- 'inline': 'module://ipykernel.pylab.backend_inline',
- 'ipympl': 'module://ipympl.backend_nbagg',
- 'widget': 'module://ipympl.backend_nbagg',
- }
+ 'agg': 'agg',
+ 'svg': 'svg',
+ 'pdf': 'pdf',
+ 'ps': 'ps',
+ 'inline': 'module://ipykernel.pylab.backend_inline',
+ 'ipympl': 'module://ipympl.backend_nbagg',
+ 'widget': 'module://ipympl.backend_nbagg',
+ }
# We also need a reverse backends2guis mapping that will properly choose which
# GUI support to activate based on the desired matplotlib backend. For the
@@ -45,13 +45,13 @@ backend2gui['GTK'] = backend2gui['GTKCairo'] = 'gtk'
backend2gui['GTK3Cairo'] = 'gtk3'
backend2gui['WX'] = 'wx'
backend2gui['CocoaAgg'] = 'osx'
-# And some backends that don't need GUI integration
-del backend2gui['nbAgg']
-del backend2gui['agg']
-del backend2gui['svg']
-del backend2gui['pdf']
-del backend2gui['ps']
-del backend2gui['module://ipykernel.pylab.backend_inline']
+# And some backends that don't need GUI integration
+del backend2gui['nbAgg']
+del backend2gui['agg']
+del backend2gui['svg']
+del backend2gui['pdf']
+del backend2gui['ps']
+del backend2gui['module://ipykernel.pylab.backend_inline']
#-----------------------------------------------------------------------------
# Matplotlib utilities
@@ -111,7 +111,7 @@ def print_figure(fig, fmt='png', bbox_inches='tight', **kwargs):
if not fig.axes and not fig.lines:
return
- dpi = fig.dpi
+ dpi = fig.dpi
if fmt == 'retina':
dpi = dpi * 2
fmt = 'png'
@@ -171,7 +171,7 @@ def mpl_runner(safe_execfile):
properly handle interactive rendering."""
import matplotlib
- import matplotlib.pyplot as plt
+ import matplotlib.pyplot as plt
#print '*** Matplotlib runner ***' # dbg
# turn off rendering until end of script
@@ -180,18 +180,18 @@ def mpl_runner(safe_execfile):
safe_execfile(fname,*where,**kw)
matplotlib.interactive(is_interactive)
# make rendering call now, if the user tried to do it
- if plt.draw_if_interactive.called:
- plt.draw()
- plt.draw_if_interactive.called = False
-
- # re-draw everything that is stale
- try:
- da = plt.draw_all
- except AttributeError:
- pass
- else:
- da()
-
+ if plt.draw_if_interactive.called:
+ plt.draw()
+ plt.draw_if_interactive.called = False
+
+ # re-draw everything that is stale
+ try:
+ da = plt.draw_all
+ except AttributeError:
+ pass
+ else:
+ da()
+
return mpl_execfile
@@ -231,8 +231,8 @@ def select_figure_formats(shell, formats, **kwargs):
formats = set(formats)
[ f.pop(Figure, None) for f in shell.display_formatter.formatters.values() ]
- mplbackend = matplotlib.get_backend().lower()
- if mplbackend == 'nbagg' or mplbackend == 'module://ipympl.backend_nbagg':
+ mplbackend = matplotlib.get_backend().lower()
+ if mplbackend == 'nbagg' or mplbackend == 'module://ipympl.backend_nbagg':
formatter = shell.display_formatter.ipython_display_formatter
formatter.for_type(Figure, _reshow_nbagg_figure)
@@ -316,12 +316,12 @@ def activate_matplotlib(backend):
# This must be imported last in the matplotlib series, after
# backend/interactivity choices have been made
- import matplotlib.pyplot as plt
+ import matplotlib.pyplot as plt
- plt.show._needmain = False
+ plt.show._needmain = False
# We need to detect at runtime whether show() is called by the user.
# For this, we wrap it into a decorator which adds a 'called' flag.
- plt.draw_if_interactive = flag_calls(plt.draw_if_interactive)
+ plt.draw_if_interactive = flag_calls(plt.draw_if_interactive)
def import_pylab(user_ns, import_all=True):
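Two details restored above are worth spelling out: the short names accepted by `%matplotlib` map onto matplotlib backend strings (with 'qt' now meaning Qt5Agg), and 'retina' is not a real output format, it is PNG rendered at twice the figure's dpi. A hedged sketch of both lookups that does not import matplotlib (mapping entries copied from the hunk, dpi value made up):

    backends = {
        'qt': 'Qt5Agg',
        'agg': 'agg',
        'inline': 'module://ipykernel.pylab.backend_inline',
    }

    def resolve(gui, fmt, fig_dpi=100):
        # 'retina' falls back to PNG at double resolution.
        dpi = fig_dpi
        if fmt == 'retina':
            dpi, fmt = fig_dpi * 2, 'png'
        return backends.get(gui, gui), fmt, dpi

    print(resolve('qt', 'retina'))    # ('Qt5Agg', 'png', 200)
    print(resolve('inline', 'png'))   # ('module://ipykernel.pylab.backend_inline', 'png', 100)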
diff --git a/contrib/python/ipython/py2/IPython/core/release.py b/contrib/python/ipython/py2/IPython/core/release.py
index 94dea1073b..494e7e41ee 100644
--- a/contrib/python/ipython/py2/IPython/core/release.py
+++ b/contrib/python/ipython/py2/IPython/core/release.py
@@ -101,9 +101,9 @@ authors = {'Fernando' : ('Fernando Perez','fperez.net@gmail.com'),
author = 'The IPython Development Team'
-author_email = 'ipython-dev@python.org'
+author_email = 'ipython-dev@python.org'
-url = 'https://ipython.org'
+url = 'https://ipython.org'
platforms = ['Linux','Mac OSX','Windows']
diff --git a/contrib/python/ipython/py2/IPython/core/shellapp.py b/contrib/python/ipython/py2/IPython/core/shellapp.py
index 213648246e..43b900ea5a 100644
--- a/contrib/python/ipython/py2/IPython/core/shellapp.py
+++ b/contrib/python/ipython/py2/IPython/core/shellapp.py
@@ -11,14 +11,14 @@ from __future__ import absolute_import
from __future__ import print_function
import glob
-from itertools import chain
+from itertools import chain
import os
import sys
from traitlets.config.application import boolean_flag
from traitlets.config.configurable import Configurable
from traitlets.config.loader import Config
-from IPython.core.application import SYSTEM_CONFIG_DIRS, ENV_CONFIG_DIRS
+from IPython.core.application import SYSTEM_CONFIG_DIRS, ENV_CONFIG_DIRS
from IPython.core import pylabtools
from IPython.utils import py3compat
from IPython.utils.contexts import preserve_keys
@@ -333,9 +333,9 @@ class InteractiveShellApp(Configurable):
def _run_startup_files(self):
"""Run files from profile startup directory"""
- startup_dirs = [self.profile_dir.startup_dir] + [
- os.path.join(p, 'startup') for p in chain(ENV_CONFIG_DIRS, SYSTEM_CONFIG_DIRS)
- ]
+ startup_dirs = [self.profile_dir.startup_dir] + [
+ os.path.join(p, 'startup') for p in chain(ENV_CONFIG_DIRS, SYSTEM_CONFIG_DIRS)
+ ]
startup_files = []
if self.exec_PYTHONSTARTUP and os.environ.get('PYTHONSTARTUP', False) and \
@@ -347,9 +347,9 @@ class InteractiveShellApp(Configurable):
except:
self.log.warning("Unknown error in handling PYTHONSTARTUP file %s:", python_startup)
self.shell.showtraceback()
- for startup_dir in startup_dirs[::-1]:
- startup_files += glob.glob(os.path.join(startup_dir, '*.py'))
- startup_files += glob.glob(os.path.join(startup_dir, '*.ipy'))
+ for startup_dir in startup_dirs[::-1]:
+ startup_files += glob.glob(os.path.join(startup_dir, '*.py'))
+ startup_files += glob.glob(os.path.join(startup_dir, '*.ipy'))
if not startup_files:
return
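The startup hunks above widen the search beyond the profile's startup/ directory to env- and system-level config directories, globbing each for *.py and *.ipy files in reverse order so higher-priority directories contribute later entries. A hedged sketch of the collection step with stand-in paths (nothing is executed, and missing directories simply yield no matches):

    import glob
    import os
    from itertools import chain

    profile_startup = os.path.expanduser('~/.ipython/profile_default/startup')
    env_config_dirs = ['/tmp/env/etc/ipython']     # stand-in for ENV_CONFIG_DIRS
    system_config_dirs = ['/tmp/etc/ipython']      # stand-in for SYSTEM_CONFIG_DIRS

    startup_dirs = [profile_startup] + [
        os.path.join(p, 'startup')
        for p in chain(env_config_dirs, system_config_dirs)
    ]

    startup_files = []
    for startup_dir in startup_dirs[::-1]:         # lowest priority first
        startup_files += glob.glob(os.path.join(startup_dir, '*.py'))
        startup_files += glob.glob(os.path.join(startup_dir, '*.ipy'))
    print(startup_files)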
diff --git a/contrib/python/ipython/py2/IPython/core/ultratb.py b/contrib/python/ipython/py2/IPython/core/ultratb.py
index a855145825..2977e9109b 100644
--- a/contrib/python/ipython/py2/IPython/core/ultratb.py
+++ b/contrib/python/ipython/py2/IPython/core/ultratb.py
@@ -438,7 +438,7 @@ def is_recursion_error(etype, value, records):
# by stack frames in IPython itself. >500 frames probably indicates
# a recursion error.
return (etype is recursion_error_type) \
- and str("recursion") in str(value).lower() \
+ and str("recursion") in str(value).lower() \
and len(records) > 500
def find_recursion(etype, value, records):
@@ -1130,12 +1130,12 @@ class VerboseTB(TBTools):
# problems, but it generates empty tracebacks for console errors
# (5 blanks lines) where none should be returned.
return _fixed_getinnerframes(etb, number_of_lines_of_context, tb_offset)
- except UnicodeDecodeError:
- # This can occur if a file's encoding magic comment is wrong.
- # I can't see a way to recover without duplicating a bunch of code
- # from the stdlib traceback module. --TK
- error('\nUnicodeDecodeError while processing traceback.\n')
- return None
+ except UnicodeDecodeError:
+ # This can occur if a file's encoding magic comment is wrong.
+ # I can't see a way to recover without duplicating a bunch of code
+ # from the stdlib traceback module. --TK
+ error('\nUnicodeDecodeError while processing traceback.\n')
+ return None
except:
# FIXME: I've been getting many crash reports from python 2.3
# users, traceable to inspect.py. If I can find a small test-case
@@ -1227,7 +1227,7 @@ class VerboseTB(TBTools):
if force or self.call_pdb:
if self.pdb is None:
- self.pdb = self.debugger_cls()
+ self.pdb = self.debugger_cls()
# the system displayhook may have changed, restore the original
# for pdb
display_trap = DisplayTrap(hook=sys.__displayhook__)
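The recursion check touched above is a heuristic: the exception type must match the interpreter's recursion error type, the message must mention "recursion", and the traceback must be deep (more than 500 records), since IPython's own frames pad the count. The predicate on its own (Python 3 shown, where the type is RecursionError; threshold copied from the hunk):

    def is_recursion_error(etype, value, records, recursion_error_type):
        # Deep tracebacks plus a "recursion" message are treated as a
        # recursion error; shallower ones are reported normally.
        return (etype is recursion_error_type
                and "recursion" in str(value).lower()
                and len(records) > 500)

    err = RecursionError("maximum recursion depth exceeded")
    print(is_recursion_error(RecursionError, err, [None] * 501, RecursionError))  # True
    print(is_recursion_error(RecursionError, err, [None] * 10, RecursionError))   # False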
diff --git a/contrib/python/ipython/py2/IPython/core/usage.py b/contrib/python/ipython/py2/IPython/core/usage.py
index c4d3c16eca..0ad963646d 100644
--- a/contrib/python/ipython/py2/IPython/core/usage.py
+++ b/contrib/python/ipython/py2/IPython/core/usage.py
@@ -67,9 +67,9 @@ interactive_usage = """
IPython -- An enhanced Interactive Python
=========================================
-IPython offers a fully compatible replacement for the standard Python
-interpreter, with convenient shell features, special commands, command
-history mechanism and output results caching.
+IPython offers a fully compatible replacement for the standard Python
+interpreter, with convenient shell features, special commands, command
+history mechanism and output results caching.
At your system command line, type 'ipython -h' to see the command line
options available. This document only describes interactive features.
@@ -77,8 +77,8 @@ options available. This document only describes interactive features.
MAIN FEATURES
-------------
-* Access to the standard Python help with object docstrings and the Python
- manuals. Simply type 'help' (no quotes) to invoke it.
+* Access to the standard Python help with object docstrings and the Python
+ manuals. Simply type 'help' (no quotes) to invoke it.
* Magic commands: type %magic for information on the magic subsystem.
@@ -86,12 +86,12 @@ MAIN FEATURES
* Dynamic object information:
- Typing ?word or word? prints detailed information about an object. Certain
- long strings (code, etc.) get snipped in the center for brevity.
+ Typing ?word or word? prints detailed information about an object. Certain
+ long strings (code, etc.) get snipped in the center for brevity.
Typing ??word or word?? gives access to the full information without
- snipping long strings. Strings that are longer than the screen are printed
- through the less pager.
+ snipping long strings. Strings that are longer than the screen are printed
+ through the less pager.
The ?/?? system gives access to the full source code for any object (if
available), shows function prototypes and other useful information.
@@ -99,16 +99,16 @@ MAIN FEATURES
If you just want to see an object's docstring, type '%pdoc object' (without
quotes, and without % if you have automagic on).
-* Tab completion in the local namespace:
+* Tab completion in the local namespace:
At any time, hitting tab will complete any available python commands or
variable names, and show you a list of the possible completions if there's
no unambiguous one. It will also complete filenames in the current directory.
-* Search previous command history in multiple ways:
+* Search previous command history in multiple ways:
- - Start typing, and then use arrow keys up/down or (Ctrl-p/Ctrl-n) to search
- through the history items that match what you've typed so far.
+ - Start typing, and then use arrow keys up/down or (Ctrl-p/Ctrl-n) to search
+ through the history items that match what you've typed so far.
- Hit Ctrl-r: opens a search prompt. Begin typing and the system searches
your history for lines that match what you've typed so far, completing as
@@ -120,7 +120,7 @@ MAIN FEATURES
* Logging of input with the ability to save and restore a working session.
-* System shell with !. Typing !ls will run 'ls' in the current directory.
+* System shell with !. Typing !ls will run 'ls' in the current directory.
* The reload command does a 'deep' reload of a module: changes made to the
module since you imported will actually be available without having to exit.
diff --git a/contrib/python/ipython/py2/IPython/extensions/autoreload.py b/contrib/python/ipython/py2/IPython/extensions/autoreload.py
index d3e420574d..66f3f94ef1 100644
--- a/contrib/python/ipython/py2/IPython/extensions/autoreload.py
+++ b/contrib/python/ipython/py2/IPython/extensions/autoreload.py
@@ -186,8 +186,8 @@ class ModuleReloader(object):
if not hasattr(module, '__file__') or module.__file__ is None:
return None, None
- if getattr(module, '__name__', None) in [None, '__mp_main__', '__main__']:
- # we cannot reload(__main__) or reload(__mp_main__)
+ if getattr(module, '__name__', None) in [None, '__mp_main__', '__main__']:
+ # we cannot reload(__main__) or reload(__mp_main__)
return None, None
filename = module.__file__
diff --git a/contrib/python/ipython/py2/IPython/external/qt_for_kernel.py b/contrib/python/ipython/py2/IPython/external/qt_for_kernel.py
index 1a94e7e0a2..be7a4d6470 100644
--- a/contrib/python/ipython/py2/IPython/external/qt_for_kernel.py
+++ b/contrib/python/ipython/py2/IPython/external/qt_for_kernel.py
@@ -33,10 +33,10 @@ import sys
from IPython.utils.version import check_version
from IPython.external.qt_loaders import (load_qt, loaded_api, QT_API_PYSIDE,
- QT_API_PYSIDE2, QT_API_PYQT, QT_API_PYQT5,
+ QT_API_PYSIDE2, QT_API_PYQT, QT_API_PYQT5,
QT_API_PYQTv1, QT_API_PYQT_DEFAULT)
-_qt_apis = (QT_API_PYSIDE, QT_API_PYSIDE2, QT_API_PYQT, QT_API_PYQT5, QT_API_PYQTv1,
+_qt_apis = (QT_API_PYSIDE, QT_API_PYSIDE2, QT_API_PYQT, QT_API_PYQT5, QT_API_PYQTv1,
QT_API_PYQT_DEFAULT)
#Constraints placed on an imported matplotlib
@@ -83,8 +83,8 @@ def get_options():
qt_api = os.environ.get('QT_API', None)
if qt_api is None:
#no ETS variable. Ask mpl, then use default fallback path
- return matplotlib_options(mpl) or [QT_API_PYQT_DEFAULT, QT_API_PYSIDE,
- QT_API_PYQT5, QT_API_PYSIDE2]
+ return matplotlib_options(mpl) or [QT_API_PYQT_DEFAULT, QT_API_PYSIDE,
+ QT_API_PYQT5, QT_API_PYSIDE2]
elif qt_api not in _qt_apis:
raise RuntimeError("Invalid Qt API %r, valid values are: %r" %
(qt_api, ', '.join(_qt_apis)))
diff --git a/contrib/python/ipython/py2/IPython/external/qt_loaders.py b/contrib/python/ipython/py2/IPython/external/qt_loaders.py
index 3b894fb2ab..a9cdf7785c 100644
--- a/contrib/python/ipython/py2/IPython/external/qt_loaders.py
+++ b/contrib/python/ipython/py2/IPython/external/qt_loaders.py
@@ -20,17 +20,17 @@ QT_API_PYQT5 = 'pyqt5'
QT_API_PYQTv1 = 'pyqtv1' # Force version 2
QT_API_PYQT_DEFAULT = 'pyqtdefault' # use system default for version 1 vs. 2
QT_API_PYSIDE = 'pyside'
-QT_API_PYSIDE2 = 'pyside2'
-
-api_to_module = {QT_API_PYSIDE2: 'PySide2',
- QT_API_PYSIDE: 'PySide',
- QT_API_PYQT: 'PyQt4',
- QT_API_PYQTv1: 'PyQt4',
- QT_API_PYQT5: 'PyQt5',
- QT_API_PYQT_DEFAULT: 'PyQt4',
- }
+QT_API_PYSIDE2 = 'pyside2'
+api_to_module = {QT_API_PYSIDE2: 'PySide2',
+ QT_API_PYSIDE: 'PySide',
+ QT_API_PYQT: 'PyQt4',
+ QT_API_PYQTv1: 'PyQt4',
+ QT_API_PYQT5: 'PyQt5',
+ QT_API_PYQT_DEFAULT: 'PyQt4',
+ }
+
class ImportDenier(object):
"""Import Hook that will guard against bad Qt imports
once IPython commits to a specific binding
@@ -56,28 +56,28 @@ class ImportDenier(object):
""" % (fullname, loaded_api()))
ID = ImportDenier()
-sys.meta_path.insert(0, ID)
+sys.meta_path.insert(0, ID)
def commit_api(api):
"""Commit to a particular API, and trigger ImportErrors on subsequent
dangerous imports"""
- if api == QT_API_PYSIDE2:
- ID.forbid('PySide')
- ID.forbid('PyQt4')
- ID.forbid('PyQt5')
+ if api == QT_API_PYSIDE2:
+ ID.forbid('PySide')
+ ID.forbid('PyQt4')
+ ID.forbid('PyQt5')
if api == QT_API_PYSIDE:
- ID.forbid('PySide2')
+ ID.forbid('PySide2')
ID.forbid('PyQt4')
ID.forbid('PyQt5')
elif api == QT_API_PYQT5:
- ID.forbid('PySide2')
+ ID.forbid('PySide2')
ID.forbid('PySide')
ID.forbid('PyQt4')
else: # There are three other possibilities, all representing PyQt4
ID.forbid('PyQt5')
- ID.forbid('PySide2')
+ ID.forbid('PySide2')
ID.forbid('PySide')
@@ -89,7 +89,7 @@ def loaded_api():
Returns
-------
- None, 'pyside2', 'pyside', 'pyqt', 'pyqt5', or 'pyqtv1'
+ None, 'pyside2', 'pyside', 'pyqt', 'pyqt5', or 'pyqtv1'
"""
if 'PyQt4.QtCore' in sys.modules:
if qtapi_version() == 2:
@@ -98,21 +98,21 @@ def loaded_api():
return QT_API_PYQTv1
elif 'PySide.QtCore' in sys.modules:
return QT_API_PYSIDE
- elif 'PySide2.QtCore' in sys.modules:
- return QT_API_PYSIDE2
+ elif 'PySide2.QtCore' in sys.modules:
+ return QT_API_PYSIDE2
elif 'PyQt5.QtCore' in sys.modules:
return QT_API_PYQT5
return None
def has_binding(api):
- """Safely check for PyQt4/5, PySide or PySide2, without importing submodules
-
- Supports Python <= 3.3
+ """Safely check for PyQt4/5, PySide or PySide2, without importing submodules
+ Supports Python <= 3.3
+
Parameters
----------
- api : str [ 'pyqtv1' | 'pyqt' | 'pyqt5' | 'pyside' | 'pyside2' | 'pyqtdefault']
+ api : str [ 'pyqtv1' | 'pyqt' | 'pyqt5' | 'pyside' | 'pyside2' | 'pyqtdefault']
Which module to check for
Returns
@@ -122,7 +122,7 @@ def has_binding(api):
# we can't import an incomplete pyside and pyqt4
# this will cause a crash in sip (#1431)
# check for complete presence before importing
- module_name = api_to_module[api]
+ module_name = api_to_module[api]
import imp
try:
@@ -132,7 +132,7 @@ def has_binding(api):
imp.find_module('QtCore', mod.__path__)
imp.find_module('QtGui', mod.__path__)
imp.find_module('QtSvg', mod.__path__)
- if api in (QT_API_PYQT5, QT_API_PYSIDE2):
+ if api in (QT_API_PYQT5, QT_API_PYSIDE2):
# QT5 requires QtWidgets too
imp.find_module('QtWidgets', mod.__path__)
@@ -144,49 +144,49 @@ def has_binding(api):
except ImportError:
return False
-def has_binding_new(api):
- """Safely check for PyQt4/5, PySide or PySide2, without importing submodules
-
- Supports Python >= 3.4
-
- Parameters
- ----------
- api : str [ 'pyqtv1' | 'pyqt' | 'pyqt5' | 'pyside' | 'pyside2' | 'pyqtdefault']
- Which module to check for
-
- Returns
- -------
- True if the relevant module appears to be importable
- """
- module_name = api_to_module[api]
- from importlib.util import find_spec
-
- required = ['QtCore', 'QtGui', 'QtSvg']
- if api in (QT_API_PYQT5, QT_API_PYSIDE2):
- # QT5 requires QtWidgets too
- required.append('QtWidgets')
-
- for submod in required:
- try:
- spec = find_spec('%s.%s' % (module_name, submod))
- except ImportError:
- # Package (e.g. PyQt5) not found
- return False
- else:
- if spec is None:
- # Submodule (e.g. PyQt5.QtCore) not found
- return False
-
- if api == QT_API_PYSIDE:
- # We can also safely check PySide version
- import PySide
- return check_version(PySide.__version__, '1.0.3')
-
- return True
-
-if sys.version_info >= (3, 4):
- has_binding = has_binding_new
-
+def has_binding_new(api):
+ """Safely check for PyQt4/5, PySide or PySide2, without importing submodules
+
+ Supports Python >= 3.4
+
+ Parameters
+ ----------
+ api : str [ 'pyqtv1' | 'pyqt' | 'pyqt5' | 'pyside' | 'pyside2' | 'pyqtdefault']
+ Which module to check for
+
+ Returns
+ -------
+ True if the relevant module appears to be importable
+ """
+ module_name = api_to_module[api]
+ from importlib.util import find_spec
+
+ required = ['QtCore', 'QtGui', 'QtSvg']
+ if api in (QT_API_PYQT5, QT_API_PYSIDE2):
+ # QT5 requires QtWidgets too
+ required.append('QtWidgets')
+
+ for submod in required:
+ try:
+ spec = find_spec('%s.%s' % (module_name, submod))
+ except ImportError:
+ # Package (e.g. PyQt5) not found
+ return False
+ else:
+ if spec is None:
+ # Submodule (e.g. PyQt5.QtCore) not found
+ return False
+
+ if api == QT_API_PYSIDE:
+ # We can also safely check PySide version
+ import PySide
+ return check_version(PySide.__version__, '1.0.3')
+
+ return True
+
+if sys.version_info >= (3, 4):
+ has_binding = has_binding_new
+
def qtapi_version():
"""Return which QString API has been set, if any
@@ -285,23 +285,23 @@ def import_pyside():
from PySide import QtGui, QtCore, QtSvg
return QtCore, QtGui, QtSvg, QT_API_PYSIDE
-def import_pyside2():
- """
- Import PySide2
-
- ImportErrors raised within this function are non-recoverable
- """
- from PySide2 import QtGui, QtCore, QtSvg, QtWidgets, QtPrintSupport
-
- # Join QtGui and QtWidgets for Qt4 compatibility.
- QtGuiCompat = types.ModuleType('QtGuiCompat')
- QtGuiCompat.__dict__.update(QtGui.__dict__)
- QtGuiCompat.__dict__.update(QtWidgets.__dict__)
- QtGuiCompat.__dict__.update(QtPrintSupport.__dict__)
-
- return QtCore, QtGuiCompat, QtSvg, QT_API_PYSIDE2
-
-
+def import_pyside2():
+ """
+ Import PySide2
+
+ ImportErrors raised within this function are non-recoverable
+ """
+ from PySide2 import QtGui, QtCore, QtSvg, QtWidgets, QtPrintSupport
+
+ # Join QtGui and QtWidgets for Qt4 compatibility.
+ QtGuiCompat = types.ModuleType('QtGuiCompat')
+ QtGuiCompat.__dict__.update(QtGui.__dict__)
+ QtGuiCompat.__dict__.update(QtWidgets.__dict__)
+ QtGuiCompat.__dict__.update(QtPrintSupport.__dict__)
+
+ return QtCore, QtGuiCompat, QtSvg, QT_API_PYSIDE2
+
+
def load_qt(api_options):
"""
Attempt to import Qt, given a preference list
@@ -312,7 +312,7 @@ def load_qt(api_options):
Parameters
----------
api_options: List of strings
- The order of APIs to try. Valid items are 'pyside', 'pyside2',
+ The order of APIs to try. Valid items are 'pyside', 'pyside2',
'pyqt', 'pyqt5', 'pyqtv1' and 'pyqtdefault'
Returns
@@ -328,14 +328,14 @@ def load_qt(api_options):
bindings (either because they aren't installed, or because
an incompatible library has already been installed)
"""
- loaders = {
- QT_API_PYSIDE2: import_pyside2,
- QT_API_PYSIDE: import_pyside,
+ loaders = {
+ QT_API_PYSIDE2: import_pyside2,
+ QT_API_PYSIDE: import_pyside,
QT_API_PYQT: import_pyqt4,
QT_API_PYQT5: import_pyqt5,
QT_API_PYQTv1: partial(import_pyqt4, version=1),
QT_API_PYQT_DEFAULT: partial(import_pyqt4, version=None)
- }
+ }
for api in api_options:
@@ -355,18 +355,18 @@ def load_qt(api_options):
else:
raise ImportError("""
Could not load requested Qt binding. Please ensure that
- PyQt4 >= 4.7, PyQt5, PySide >= 1.0.3 or PySide2 is available,
+ PyQt4 >= 4.7, PyQt5, PySide >= 1.0.3 or PySide2 is available,
and only one is imported per session.
Currently-imported Qt library: %r
PyQt4 available (requires QtCore, QtGui, QtSvg): %s
PyQt5 available (requires QtCore, QtGui, QtSvg, QtWidgets): %s
PySide >= 1.0.3 installed: %s
- PySide2 installed: %s
+ PySide2 installed: %s
Tried to load: %r
""" % (loaded_api(),
has_binding(QT_API_PYQT),
has_binding(QT_API_PYQT5),
has_binding(QT_API_PYSIDE),
- has_binding(QT_API_PYSIDE2),
+ has_binding(QT_API_PYSIDE2),
api_options))
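`has_binding_new`, restored above, probes a Qt binding with `importlib.util.find_spec` instead of importing it, so a partially installed PyQt/PySide cannot crash sip during detection. The same probe in isolation (binding and submodule names follow the hunk; whether it prints True depends on what is installed locally):

    from importlib.util import find_spec

    def has_binding(module_name, needs_widgets=False):
        required = ['QtCore', 'QtGui', 'QtSvg']
        if needs_widgets:                # Qt5-style bindings also need QtWidgets
            required.append('QtWidgets')
        for submod in required:
            try:
                spec = find_spec('%s.%s' % (module_name, submod))
            except ImportError:
                return False             # the package itself is absent
            if spec is None:
                return False             # the submodule is absent
        return True

    print(has_binding('PyQt5', needs_widgets=True))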
diff --git a/contrib/python/ipython/py2/IPython/frontend.py b/contrib/python/ipython/py2/IPython/frontend.py
index 9cc3eaff2f..75dfcdf7fb 100644
--- a/contrib/python/ipython/py2/IPython/frontend.py
+++ b/contrib/python/ipython/py2/IPython/frontend.py
@@ -17,7 +17,7 @@ from warnings import warn
from IPython.utils.shimmodule import ShimModule, ShimWarning
-warn("The top-level `frontend` package has been deprecated since IPython 1.0. "
+warn("The top-level `frontend` package has been deprecated since IPython 1.0. "
"All its subpackages have been moved to the top `IPython` level.", ShimWarning)
# Unconditionally insert the shim into sys.modules so that further import calls
diff --git a/contrib/python/ipython/py2/IPython/html.py b/contrib/python/ipython/py2/IPython/html.py
index 050be5c599..99e1717efb 100644
--- a/contrib/python/ipython/py2/IPython/html.py
+++ b/contrib/python/ipython/py2/IPython/html.py
@@ -9,7 +9,7 @@ from warnings import warn
from IPython.utils.shimmodule import ShimModule, ShimWarning
-warn("The `IPython.html` package has been deprecated since IPython 4.0. "
+warn("The `IPython.html` package has been deprecated since IPython 4.0. "
"You should import from `notebook` instead. "
"`IPython.html.widgets` has moved to `ipywidgets`.", ShimWarning)
diff --git a/contrib/python/ipython/py2/IPython/kernel/__init__.py b/contrib/python/ipython/py2/IPython/kernel/__init__.py
index 70a05ed4aa..3c902b7ffb 100644
--- a/contrib/python/ipython/py2/IPython/kernel/__init__.py
+++ b/contrib/python/ipython/py2/IPython/kernel/__init__.py
@@ -9,7 +9,7 @@ from warnings import warn
from IPython.utils.shimmodule import ShimModule, ShimWarning
-warn("The `IPython.kernel` package has been deprecated since IPython 4.0."
+warn("The `IPython.kernel` package has been deprecated since IPython 4.0."
"You should import from ipykernel or jupyter_client instead.", ShimWarning)
diff --git a/contrib/python/ipython/py2/IPython/lib/deepreload.py b/contrib/python/ipython/py2/IPython/lib/deepreload.py
index 76b493c0bb..aa4836ba76 100644
--- a/contrib/python/ipython/py2/IPython/lib/deepreload.py
+++ b/contrib/python/ipython/py2/IPython/lib/deepreload.py
@@ -327,8 +327,8 @@ except AttributeError:
original_reload = imp.reload # Python 3
# Replacement for reload()
-def reload(module, exclude=('sys', 'os.path', builtin_mod_name, '__main__',
- 'numpy', 'numpy._globals')):
+def reload(module, exclude=('sys', 'os.path', builtin_mod_name, '__main__',
+ 'numpy', 'numpy._globals')):
"""Recursively reload all modules used in the given module. Optionally
takes a list of modules to exclude from reloading. The default exclude
list contains sys, __main__, and __builtin__, to prevent, e.g., resetting
diff --git a/contrib/python/ipython/py2/IPython/lib/demo.py b/contrib/python/ipython/py2/IPython/lib/demo.py
index b0f3503ed7..419e27a4b3 100644
--- a/contrib/python/ipython/py2/IPython/lib/demo.py
+++ b/contrib/python/ipython/py2/IPython/lib/demo.py
@@ -106,7 +106,7 @@ the execution.
This is probably best explained with the simple example file below. You can
copy this into a file named ex_demo.py, and try running it via::
- from IPython.lib.demo import Demo
+ from IPython.lib.demo import Demo
d = Demo('ex_demo.py')
d()
diff --git a/contrib/python/ipython/py2/IPython/lib/display.py b/contrib/python/ipython/py2/IPython/lib/display.py
index 9221e2e062..5d0b644c85 100644
--- a/contrib/python/ipython/py2/IPython/lib/display.py
+++ b/contrib/python/ipython/py2/IPython/lib/display.py
@@ -251,7 +251,7 @@ class YouTubeVideo(IFrame):
start=int(timedelta(hours=1, minutes=46, seconds=40).total_seconds())
Other parameters can be provided as documented at
- https://developers.google.com/youtube/player_parameters#Parameters
+ https://developers.google.com/youtube/player_parameters#Parameters
When converting the notebook using nbconvert, a jpeg representation of the video
will be inserted in the document.
@@ -324,11 +324,11 @@ class FileLink(object):
----------
path : str
path to the file or directory that should be formatted
- url_prefix : str
+ url_prefix : str
prefix to be prepended to all files to form a working link [default:
- '']
+ '']
result_html_prefix : str
- text to append to beginning to link [default: '']
+ text to append to beginning to link [default: '']
result_html_suffix : str
text to append at the end of link [default: '<br>']
"""
diff --git a/contrib/python/ipython/py2/IPython/lib/editorhooks.py b/contrib/python/ipython/py2/IPython/lib/editorhooks.py
index 392557b509..057580b365 100644
--- a/contrib/python/ipython/py2/IPython/lib/editorhooks.py
+++ b/contrib/python/ipython/py2/IPython/lib/editorhooks.py
@@ -54,7 +54,7 @@ def install_editor(template, wait=False):
if sys.platform.startswith('win'):
cmd = shlex.split(cmd)
proc = subprocess.Popen(cmd, shell=True)
- if proc.wait() != 0:
+ if proc.wait() != 0:
raise TryNext()
if wait:
py3compat.input("Press Enter when done editing:")
diff --git a/contrib/python/ipython/py2/IPython/lib/guisupport.py b/contrib/python/ipython/py2/IPython/lib/guisupport.py
index 5e13d4343c..b2cc89b57c 100644
--- a/contrib/python/ipython/py2/IPython/lib/guisupport.py
+++ b/contrib/python/ipython/py2/IPython/lib/guisupport.py
@@ -57,10 +57,10 @@ so you don't have to depend on IPython.
"""
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
-from IPython.core.getipython import get_ipython
+from IPython.core.getipython import get_ipython
#-----------------------------------------------------------------------------
# wx
@@ -78,15 +78,15 @@ def get_app_wx(*args, **kwargs):
def is_event_loop_running_wx(app=None):
"""Is the wx event loop running."""
- # New way: check attribute on shell instance
- ip = get_ipython()
- if ip is not None:
- if ip.active_eventloop and ip.active_eventloop == 'wx':
- return True
- # Fall through to checking the application, because Wx has a native way
- # to check if the event loop is running, unlike Qt.
-
- # Old way: check Wx application
+ # New way: check attribute on shell instance
+ ip = get_ipython()
+ if ip is not None:
+ if ip.active_eventloop and ip.active_eventloop == 'wx':
+ return True
+ # Fall through to checking the application, because Wx has a native way
+ # to check if the event loop is running, unlike Qt.
+
+ # Old way: check Wx application
if app is None:
app = get_app_wx()
if hasattr(app, '_in_event_loop'):
@@ -121,12 +121,12 @@ def get_app_qt4(*args, **kwargs):
def is_event_loop_running_qt4(app=None):
"""Is the qt4 event loop running."""
- # New way: check attribute on shell instance
- ip = get_ipython()
- if ip is not None:
- return ip.active_eventloop and ip.active_eventloop.startswith('qt')
-
- # Old way: check attribute on QApplication singleton
+ # New way: check attribute on shell instance
+ ip = get_ipython()
+ if ip is not None:
+ return ip.active_eventloop and ip.active_eventloop.startswith('qt')
+
+ # Old way: check attribute on QApplication singleton
if app is None:
app = get_app_qt4([''])
if hasattr(app, '_in_event_loop'):
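The guisupport hunks above prefer the shell's `active_eventloop` attribute over inspecting toolkit singletons; only wx keeps the application-level fallback because it can be queried natively. A sketch of the new-style check with a stand-in for `get_ipython()` so it runs outside IPython (the fake shell and its value are made up):

    class FakeShell(object):
        """Stand-in for what get_ipython() returns inside IPython."""
        active_eventloop = 'qt5'

    def get_ipython():
        return FakeShell()

    def is_event_loop_running_qt():
        ip = get_ipython()
        if ip is not None:
            # Set by enable_gui('qt...'); truthy only while a loop is active.
            return bool(ip.active_eventloop and ip.active_eventloop.startswith('qt'))
        return False

    print(is_event_loop_running_qt())    # True with the stand-in above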
diff --git a/contrib/python/ipython/py2/IPython/lib/inputhook.py b/contrib/python/ipython/py2/IPython/lib/inputhook.py
index e6e8f2dbbc..b82c85a480 100644
--- a/contrib/python/ipython/py2/IPython/lib/inputhook.py
+++ b/contrib/python/ipython/py2/IPython/lib/inputhook.py
@@ -658,9 +658,9 @@ guis = inputhook_manager.guihooks
def _deprecated_disable():
- warn("This function is deprecated since IPython 4.0 use disable_gui() instead",
- DeprecationWarning, stacklevel=2)
+ warn("This function is deprecated since IPython 4.0 use disable_gui() instead",
+ DeprecationWarning, stacklevel=2)
inputhook_manager.disable_gui()
-
+
disable_wx = disable_qt4 = disable_gtk = disable_gtk3 = disable_glut = \
disable_pyglet = disable_osx = _deprecated_disable
diff --git a/contrib/python/ipython/py2/IPython/lib/pretty.py b/contrib/python/ipython/py2/IPython/lib/pretty.py
index 28eee523c5..08ee1de914 100644
--- a/contrib/python/ipython/py2/IPython/lib/pretty.py
+++ b/contrib/python/ipython/py2/IPython/lib/pretty.py
@@ -96,9 +96,9 @@ __all__ = ['pretty', 'pprint', 'PrettyPrinter', 'RepresentationPrinter',
MAX_SEQ_LENGTH = 1000
-# The language spec says that dicts preserve order from 3.7, but CPython
-# does so from 3.6, so it seems likely that people will expect that.
-DICT_IS_ORDERED = sys.version_info >= (3, 6)
+# The language spec says that dicts preserve order from 3.7, but CPython
+# does so from 3.6, so it seems likely that people will expect that.
+DICT_IS_ORDERED = sys.version_info >= (3, 6)
_re_pattern_type = type(re.compile(''))
def _safe_getattr(obj, attr, default=None):
@@ -122,21 +122,21 @@ else:
cast_unicode(text, encoding=get_stream_enc(sys.stdout)))
-def _sorted_for_pprint(items):
- """
- Sort the given items for pretty printing. Since some predictable
- sorting is better than no sorting at all, we sort on the string
- representation if normal sorting fails.
- """
- items = list(items)
- try:
- return sorted(items)
- except Exception:
- try:
- return sorted(items, key=str)
- except Exception:
- return items
-
+def _sorted_for_pprint(items):
+ """
+ Sort the given items for pretty printing. Since some predictable
+ sorting is better than no sorting at all, we sort on the string
+ representation if normal sorting fails.
+ """
+ items = list(items)
+ try:
+ return sorted(items)
+ except Exception:
+ try:
+ return sorted(items, key=str)
+ except Exception:
+ return items
+
def pretty(obj, verbose=False, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH):
"""
Pretty print the object's representation.
@@ -398,10 +398,10 @@ class RepresentationPrinter(PrettyPrinter):
meth = cls._repr_pretty_
if callable(meth):
return meth(obj, self, cycle)
- if cls is not object \
- and callable(cls.__dict__.get('__repr__')):
- return _repr_pprint(obj, self, cycle)
-
+ if cls is not object \
+ and callable(cls.__dict__.get('__repr__')):
+ return _repr_pprint(obj, self, cycle)
+
return _default_pprint(obj, self, cycle)
finally:
self.end_group()
@@ -552,7 +552,7 @@ def _default_pprint(obj, p, cycle):
p.end_group(1, '>')
-def _seq_pprinter_factory(start, end):
+def _seq_pprinter_factory(start, end):
"""
Factory that returns a pprint function useful for sequences. Used by
the default pprint for tuples, dicts, and lists.
@@ -574,7 +574,7 @@ def _seq_pprinter_factory(start, end):
return inner
-def _set_pprinter_factory(start, end):
+def _set_pprinter_factory(start, end):
"""
Factory that returns a pprint function useful for sets and frozensets.
"""
@@ -583,15 +583,15 @@ def _set_pprinter_factory(start, end):
return p.text(start + '...' + end)
if len(obj) == 0:
# Special case.
- p.text(type(obj).__name__ + '()')
+ p.text(type(obj).__name__ + '()')
else:
step = len(start)
p.begin_group(step, start)
# Like dictionary keys, we will try to sort the items if there aren't too many
if not (p.max_seq_length and len(obj) >= p.max_seq_length):
- items = _sorted_for_pprint(obj)
- else:
- items = obj
+ items = _sorted_for_pprint(obj)
+ else:
+ items = obj
for idx, x in p._enumerate(items):
if idx:
p.text(',')
@@ -601,7 +601,7 @@ def _set_pprinter_factory(start, end):
return inner
-def _dict_pprinter_factory(start, end):
+def _dict_pprinter_factory(start, end):
"""
Factory that returns a pprint function used by the default pprint of
dicts and dict proxies.
@@ -613,10 +613,10 @@ def _dict_pprinter_factory(start, end):
p.begin_group(step, start)
keys = obj.keys()
# if dict isn't large enough to be truncated, sort keys before displaying
- # From Python 3.7, dicts preserve order by definition, so we don't sort.
- if not DICT_IS_ORDERED \
- and not (p.max_seq_length and len(obj) >= p.max_seq_length):
- keys = _sorted_for_pprint(keys)
+ # From Python 3.7, dicts preserve order by definition, so we don't sort.
+ if not DICT_IS_ORDERED \
+ and not (p.max_seq_length and len(obj) >= p.max_seq_length):
+ keys = _sorted_for_pprint(keys)
for idx, key in p._enumerate(keys):
if idx:
p.text(',')
@@ -743,12 +743,12 @@ _type_pprinters = {
int: _repr_pprint,
float: _repr_pprint,
str: _repr_pprint,
- tuple: _seq_pprinter_factory('(', ')'),
- list: _seq_pprinter_factory('[', ']'),
- dict: _dict_pprinter_factory('{', '}'),
+ tuple: _seq_pprinter_factory('(', ')'),
+ list: _seq_pprinter_factory('[', ']'),
+ dict: _dict_pprinter_factory('{', '}'),
- set: _set_pprinter_factory('{', '}'),
- frozenset: _set_pprinter_factory('frozenset({', '})'),
+ set: _set_pprinter_factory('{', '}'),
+ frozenset: _set_pprinter_factory('frozenset({', '})'),
super: _super_pprint,
_re_pattern_type: _re_pattern_pprint,
type: _type_pprint,
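The pretty-printer hunks above restore two related behaviours: small sets and dicts are sorted for stable output, falling back to sorting on `str()` when the elements are not mutually comparable, and dict keys are left in insertion order on Python 3.6+ where that order is meaningful. The fallback sort on its own (inputs are illustrative):

    import sys

    DICT_IS_ORDERED = sys.version_info >= (3, 6)

    def sorted_for_pprint(items):
        # Any predictable order beats none: try normal sorting, then sort on
        # the string form, and finally give up and keep the original order.
        items = list(items)
        try:
            return sorted(items)
        except Exception:
            try:
                return sorted(items, key=str)
            except Exception:
                return items

    print(sorted_for_pprint({3, 1, 2}))     # [1, 2, 3]
    print(sorted_for_pprint({'a', 1}))      # mixed types fall back to str()
    print(DICT_IS_ORDERED)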
diff --git a/contrib/python/ipython/py2/IPython/nbconvert.py b/contrib/python/ipython/py2/IPython/nbconvert.py
index 2de4ee50bc..1e708edce8 100644
--- a/contrib/python/ipython/py2/IPython/nbconvert.py
+++ b/contrib/python/ipython/py2/IPython/nbconvert.py
@@ -9,7 +9,7 @@ from warnings import warn
from IPython.utils.shimmodule import ShimModule, ShimWarning
-warn("The `IPython.nbconvert` package has been deprecated since IPython 4.0. "
+warn("The `IPython.nbconvert` package has been deprecated since IPython 4.0. "
"You should import from nbconvert instead.", ShimWarning)
# Unconditionally insert the shim into sys.modules so that further import calls
diff --git a/contrib/python/ipython/py2/IPython/nbformat.py b/contrib/python/ipython/py2/IPython/nbformat.py
index 310277de00..8b24f1d1f1 100644
--- a/contrib/python/ipython/py2/IPython/nbformat.py
+++ b/contrib/python/ipython/py2/IPython/nbformat.py
@@ -9,7 +9,7 @@ from warnings import warn
from IPython.utils.shimmodule import ShimModule, ShimWarning
-warn("The `IPython.nbformat` package has been deprecated since IPython 4.0. "
+warn("The `IPython.nbformat` package has been deprecated since IPython 4.0. "
"You should import from nbformat instead.", ShimWarning)
# Unconditionally insert the shim into sys.modules so that further import calls
diff --git a/contrib/python/ipython/py2/IPython/parallel.py b/contrib/python/ipython/py2/IPython/parallel.py
index 0f10012783..7c107b13c7 100644
--- a/contrib/python/ipython/py2/IPython/parallel.py
+++ b/contrib/python/ipython/py2/IPython/parallel.py
@@ -9,7 +9,7 @@ from warnings import warn
from IPython.utils.shimmodule import ShimModule, ShimWarning
-warn("The `IPython.parallel` package has been deprecated since IPython 4.0. "
+warn("The `IPython.parallel` package has been deprecated since IPython 4.0. "
"You should import from ipyparallel instead.", ShimWarning)
# Unconditionally insert the shim into sys.modules so that further import calls
diff --git a/contrib/python/ipython/py2/IPython/qt.py b/contrib/python/ipython/py2/IPython/qt.py
index 7557a3f329..deee85151e 100644
--- a/contrib/python/ipython/py2/IPython/qt.py
+++ b/contrib/python/ipython/py2/IPython/qt.py
@@ -9,7 +9,7 @@ from warnings import warn
from IPython.utils.shimmodule import ShimModule, ShimWarning
-warn("The `IPython.qt` package has been deprecated since IPython 4.0. "
+warn("The `IPython.qt` package has been deprecated since IPython 4.0. "
"You should import from qtconsole instead.", ShimWarning)
# Unconditionally insert the shim into sys.modules so that further import calls
diff --git a/contrib/python/ipython/py2/IPython/sphinxext/ipython_directive.py b/contrib/python/ipython/py2/IPython/sphinxext/ipython_directive.py
index 8df9ace1f3..6f550202cb 100644
--- a/contrib/python/ipython/py2/IPython/sphinxext/ipython_directive.py
+++ b/contrib/python/ipython/py2/IPython/sphinxext/ipython_directive.py
@@ -104,8 +104,8 @@ or :okwarning: options:
In [1]: 1/0
In [2]: # raise warning.
-To Do
------
+To Do
+-----
- Turn the ad-hoc test() function into a real test suite.
- Break up ipython-specific functionality from matplotlib stuff into better
@@ -127,7 +127,7 @@ from __future__ import print_function
# Stdlib
import atexit
-import errno
+import errno
import os
import re
import sys
@@ -139,7 +139,7 @@ import shutil
# Third-party
from docutils.parsers.rst import directives
-from docutils.parsers.rst import Directive
+from docutils.parsers.rst import Directive
# Our own
from traitlets.config import Config
@@ -358,9 +358,9 @@ class EmbeddedSphinxShell(object):
source_dir = self.source_dir
saveargs = decorator.split(' ')
filename = saveargs[1]
- # insert relative path to image file in source (as absolute path for Sphinx)
- outfile = '/' + os.path.relpath(os.path.join(savefig_dir,filename),
- source_dir)
+ # insert relative path to image file in source (as absolute path for Sphinx)
+ outfile = '/' + os.path.relpath(os.path.join(savefig_dir,filename),
+ source_dir)
imagerows = ['.. image:: %s'%outfile]
@@ -843,8 +843,8 @@ class IPythonDirective(Directive):
# get config variables to set figure output directory
savefig_dir = config.ipython_savefig_dir
- source_dir = self.state.document.settings.env.srcdir
- savefig_dir = os.path.join(source_dir, savefig_dir)
+ source_dir = self.state.document.settings.env.srcdir
+ savefig_dir = os.path.join(source_dir, savefig_dir)
# get regex and prompt stuff
rgxin = config.ipython_rgxin
@@ -863,12 +863,12 @@ class IPythonDirective(Directive):
(savefig_dir, source_dir, rgxin, rgxout, promptin, promptout,
mplbackend, exec_lines, hold_count) = self.get_config_options()
- try:
- os.makedirs(savefig_dir)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
+ try:
+ os.makedirs(savefig_dir)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
if self.shell is None:
# We will be here many times. However, when the
# EmbeddedSphinxShell is created, its interactive shell member
@@ -977,7 +977,7 @@ def setup(app):
setup.app = app
app.add_directive('ipython', IPythonDirective)
- app.add_config_value('ipython_savefig_dir', 'savefig', 'env')
+ app.add_config_value('ipython_savefig_dir', 'savefig', 'env')
app.add_config_value('ipython_rgxin',
re.compile('In \[(\d+)\]:\s?(.*)\s*'), 'env')
app.add_config_value('ipython_rgxout',
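
The hunks above make the ipython directive resolve ipython_savefig_dir against the Sphinx source directory and create it with os.makedirs(). A minimal conf.py sketch for that option (illustrative only, not part of the commit; the value is the default registered in setup()):

    # conf.py (sketch)
    extensions = ['IPython.sphinxext.ipython_directive']
    ipython_savefig_dir = 'savefig'   # joined onto the source dir by the directive, per the hunk above
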
diff --git a/contrib/python/ipython/py2/IPython/terminal/console.py b/contrib/python/ipython/py2/IPython/terminal/console.py
index 65571a7572..a3e4a7abff 100644
--- a/contrib/python/ipython/py2/IPython/terminal/console.py
+++ b/contrib/python/ipython/py2/IPython/terminal/console.py
@@ -9,7 +9,7 @@ from warnings import warn
from IPython.utils.shimmodule import ShimModule, ShimWarning
-warn("The `IPython.terminal.console` package has been deprecated since IPython 4.0. "
+warn("The `IPython.terminal.console` package has been deprecated since IPython 4.0. "
"You should import from jupyter_console instead.", ShimWarning)
# Unconditionally insert the shim into sys.modules so that further import calls
diff --git a/contrib/python/ipython/py2/IPython/terminal/debugger.py b/contrib/python/ipython/py2/IPython/terminal/debugger.py
index c68a3204a6..11819f71cc 100644
--- a/contrib/python/ipython/py2/IPython/terminal/debugger.py
+++ b/contrib/python/ipython/py2/IPython/terminal/debugger.py
@@ -1,24 +1,24 @@
-import signal
-import sys
-
+import signal
+import sys
+
from IPython.core.debugger import Pdb
from IPython.core.completer import IPCompleter
from .ptutils import IPythonPTCompleter
-from .shortcuts import suspend_to_bg, cursor_in_leading_ws
-
-from prompt_toolkit.enums import DEFAULT_BUFFER
-from prompt_toolkit.filters import (Condition, HasFocus, HasSelection,
- ViInsertMode, EmacsInsertMode)
-from prompt_toolkit.keys import Keys
-from prompt_toolkit.key_binding.manager import KeyBindingManager
-from prompt_toolkit.key_binding.bindings.completion import display_completions_like_readline
+from .shortcuts import suspend_to_bg, cursor_in_leading_ws
+
+from prompt_toolkit.enums import DEFAULT_BUFFER
+from prompt_toolkit.filters import (Condition, HasFocus, HasSelection,
+ ViInsertMode, EmacsInsertMode)
+from prompt_toolkit.keys import Keys
+from prompt_toolkit.key_binding.manager import KeyBindingManager
+from prompt_toolkit.key_binding.bindings.completion import display_completions_like_readline
from prompt_toolkit.token import Token
from prompt_toolkit.shortcuts import create_prompt_application
from prompt_toolkit.interface import CommandLineInterface
from prompt_toolkit.enums import EditingMode
-
+
class TerminalPdb(Pdb):
def __init__(self, *args, **kwargs):
Pdb.__init__(self, *args, **kwargs)
@@ -29,9 +29,9 @@ class TerminalPdb(Pdb):
def get_prompt_tokens(cli):
return [(Token.Prompt, self.prompt)]
- def patch_stdout(**kwargs):
- return self.pt_cli.patch_stdout_context(**kwargs)
-
+ def patch_stdout(**kwargs):
+ return self.pt_cli.patch_stdout_context(**kwargs)
+
if self._ptcomp is None:
compl = IPCompleter(shell=self.shell,
namespace={},
@@ -39,32 +39,32 @@ class TerminalPdb(Pdb):
use_readline=False,
parent=self.shell,
)
- self._ptcomp = IPythonPTCompleter(compl, patch_stdout=patch_stdout)
-
- kbmanager = KeyBindingManager.for_prompt()
- supports_suspend = Condition(lambda cli: hasattr(signal, 'SIGTSTP'))
- kbmanager.registry.add_binding(Keys.ControlZ, filter=supports_suspend
- )(suspend_to_bg)
-
- if self.shell.display_completions == 'readlinelike':
- kbmanager.registry.add_binding(Keys.ControlI,
- filter=(HasFocus(DEFAULT_BUFFER)
- & ~HasSelection()
- & ViInsertMode() | EmacsInsertMode()
- & ~cursor_in_leading_ws
- ))(display_completions_like_readline)
- multicolumn = (self.shell.display_completions == 'multicolumn')
-
+ self._ptcomp = IPythonPTCompleter(compl, patch_stdout=patch_stdout)
+
+ kbmanager = KeyBindingManager.for_prompt()
+ supports_suspend = Condition(lambda cli: hasattr(signal, 'SIGTSTP'))
+ kbmanager.registry.add_binding(Keys.ControlZ, filter=supports_suspend
+ )(suspend_to_bg)
+
+ if self.shell.display_completions == 'readlinelike':
+ kbmanager.registry.add_binding(Keys.ControlI,
+ filter=(HasFocus(DEFAULT_BUFFER)
+ & ~HasSelection()
+ & ViInsertMode() | EmacsInsertMode()
+ & ~cursor_in_leading_ws
+ ))(display_completions_like_readline)
+ multicolumn = (self.shell.display_completions == 'multicolumn')
+
self._pt_app = create_prompt_application(
editing_mode=getattr(EditingMode, self.shell.editing_mode.upper()),
- key_bindings_registry=kbmanager.registry,
+ key_bindings_registry=kbmanager.registry,
history=self.shell.debugger_history,
completer= self._ptcomp,
enable_history_search=True,
mouse_support=self.shell.mouse_support,
- get_prompt_tokens=get_prompt_tokens,
- display_completions_in_columns=multicolumn,
- style=self.shell.style
+ get_prompt_tokens=get_prompt_tokens,
+ display_completions_in_columns=multicolumn,
+ style=self.shell.style
)
self.pt_cli = CommandLineInterface(self._pt_app, eventloop=self.shell._eventloop)
@@ -104,11 +104,11 @@ class TerminalPdb(Pdb):
raise
-def set_trace(frame=None):
- """
- Start debugging from `frame`.
-
- If frame is not specified, debugging starts from caller's frame.
- """
- TerminalPdb().set_trace(frame or sys._getframe().f_back)
-
+def set_trace(frame=None):
+ """
+ Start debugging from `frame`.
+
+ If frame is not specified, debugging starts from caller's frame.
+ """
+ TerminalPdb().set_trace(frame or sys._getframe().f_back)
+
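
A minimal usage sketch for the prompt_toolkit-backed debugger restored above (illustrative only, not part of the commit; it assumes this IPython tree is importable):

    from IPython.terminal.debugger import set_trace

    def buggy(x):
        y = x * 2
        set_trace()   # opens TerminalPdb in the caller's frame, per the docstring above
        return y + 1

    buggy(3)
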
diff --git a/contrib/python/ipython/py2/IPython/terminal/embed.py b/contrib/python/ipython/py2/IPython/terminal/embed.py
index 5ad70431e4..f906091dc1 100644
--- a/contrib/python/ipython/py2/IPython/terminal/embed.py
+++ b/contrib/python/ipython/py2/IPython/terminal/embed.py
@@ -12,7 +12,7 @@ import sys
import warnings
from IPython.core import ultratb, compilerop
-from IPython.core import magic_arguments
+from IPython.core import magic_arguments
from IPython.core.magic import Magics, magics_class, line_magic
from IPython.core.interactiveshell import DummyMod, InteractiveShell
from IPython.terminal.interactiveshell import TerminalInteractiveShell
@@ -28,70 +28,70 @@ class KillEmbeded(Exception):pass
class EmbeddedMagics(Magics):
@line_magic
- @magic_arguments.magic_arguments()
- @magic_arguments.argument('-i', '--instance', action='store_true',
- help='Kill instance instead of call location')
- @magic_arguments.argument('-x', '--exit', action='store_true',
- help='Also exit the current session')
- @magic_arguments.argument('-y', '--yes', action='store_true',
- help='Do not ask confirmation')
+ @magic_arguments.magic_arguments()
+ @magic_arguments.argument('-i', '--instance', action='store_true',
+ help='Kill instance instead of call location')
+ @magic_arguments.argument('-x', '--exit', action='store_true',
+ help='Also exit the current session')
+ @magic_arguments.argument('-y', '--yes', action='store_true',
+ help='Do not ask confirmation')
def kill_embedded(self, parameter_s=''):
- """%kill_embedded : deactivate for good the current embedded IPython
+ """%kill_embedded : deactivate for good the current embedded IPython
This function (after asking for confirmation) sets an internal flag so
- that an embedded IPython will never activate again for the given call
- location. This is useful to permanently disable a shell that is being
- called inside a loop: once you've figured out what you needed from it,
- you may then kill it and the program will then continue to run without
- the interactive shell interfering again.
-
-
- Kill Instance Option
- --------------------
-
- If for some reasons you need to kill the location where the instance is
- created and not called, for example if you create a single instance in
- one place and debug in many locations, you can use the ``--instance``
- option to kill this specific instance. Like for the ``call location``
- killing an "instance" should work even if it is recreated within a
- loop.
-
- .. note::
-
- This was the default behavior before IPython 5.2
-
+ that an embedded IPython will never activate again for the given call
+ location. This is useful to permanently disable a shell that is being
+ called inside a loop: once you've figured out what you needed from it,
+ you may then kill it and the program will then continue to run without
+ the interactive shell interfering again.
+
+
+ Kill Instance Option
+ --------------------
+
+ If for some reasons you need to kill the location where the instance is
+ created and not called, for example if you create a single instance in
+ one place and debug in many locations, you can use the ``--instance``
+ option to kill this specific instance. Like for the ``call location``
+ killing an "instance" should work even if it is recreated within a
+ loop.
+
+ .. note::
+
+ This was the default behavior before IPython 5.2
+
"""
- args = magic_arguments.parse_argstring(self.kill_embedded, parameter_s)
- print(args)
- if args.instance:
- # let no ask
- if not args.yes:
- kill = ask_yes_no(
- "Are you sure you want to kill this embedded instance? [y/N] ", 'n')
- else:
- kill = True
- if kill:
- self.shell._disable_init_location()
- print("This embedded IPython instance will not reactivate anymore "
- "once you exit.")
- else:
- if not args.yes:
- kill = ask_yes_no(
- "Are you sure you want to kill this embedded call_location? [y/N] ", 'n')
- else:
- kill = True
- if kill:
- self.shell.embedded_active = False
- print("This embedded IPython call location will not reactivate anymore "
- "once you exit.")
-
- if args.exit:
- # Ask-exit does not really ask, it just set internals flags to exit
- # on next loop.
- self.shell.ask_exit()
-
-
+ args = magic_arguments.parse_argstring(self.kill_embedded, parameter_s)
+ print(args)
+ if args.instance:
+ # let no ask
+ if not args.yes:
+ kill = ask_yes_no(
+ "Are you sure you want to kill this embedded instance? [y/N] ", 'n')
+ else:
+ kill = True
+ if kill:
+ self.shell._disable_init_location()
+ print("This embedded IPython instance will not reactivate anymore "
+ "once you exit.")
+ else:
+ if not args.yes:
+ kill = ask_yes_no(
+ "Are you sure you want to kill this embedded call_location? [y/N] ", 'n')
+ else:
+ kill = True
+ if kill:
+ self.shell.embedded_active = False
+ print("This embedded IPython call location will not reactivate anymore "
+ "once you exit.")
+
+ if args.exit:
+ # Ask-exit does not really ask, it just set internals flags to exit
+ # on next loop.
+ self.shell.ask_exit()
+
+
@line_magic
def exit_raise(self, parameter_s=''):
"""%exit_raise Make the current embedded kernel exit and raise and exception.
@@ -126,34 +126,34 @@ class InteractiveShellEmbed(TerminalInteractiveShell):
@property
def embedded_active(self):
- return (self._call_location_id not in InteractiveShellEmbed._inactive_locations)\
- and (self._init_location_id not in InteractiveShellEmbed._inactive_locations)
-
- def _disable_init_location(self):
- """Disable the current Instance creation location"""
- InteractiveShellEmbed._inactive_locations.add(self._init_location_id)
+ return (self._call_location_id not in InteractiveShellEmbed._inactive_locations)\
+ and (self._init_location_id not in InteractiveShellEmbed._inactive_locations)
+ def _disable_init_location(self):
+ """Disable the current Instance creation location"""
+ InteractiveShellEmbed._inactive_locations.add(self._init_location_id)
+
@embedded_active.setter
def embedded_active(self, value):
- if value:
- InteractiveShellEmbed._inactive_locations.discard(
- self._call_location_id)
- InteractiveShellEmbed._inactive_locations.discard(
- self._init_location_id)
+ if value:
+ InteractiveShellEmbed._inactive_locations.discard(
+ self._call_location_id)
+ InteractiveShellEmbed._inactive_locations.discard(
+ self._init_location_id)
else:
- InteractiveShellEmbed._inactive_locations.add(
- self._call_location_id)
+ InteractiveShellEmbed._inactive_locations.add(
+ self._call_location_id)
def __init__(self, **kw):
if kw.get('user_global_ns', None) is not None:
- raise DeprecationWarning(
- "Key word argument `user_global_ns` has been replaced by `user_module` since IPython 4.0.")
+ raise DeprecationWarning(
+ "Key word argument `user_global_ns` has been replaced by `user_module` since IPython 4.0.")
- clid = kw.pop('_init_location_id', None)
- if not clid:
- frame = sys._getframe(1)
- clid = '%s:%s' % (frame.f_code.co_filename, frame.f_lineno)
- self._init_location_id = clid
+ clid = kw.pop('_init_location_id', None)
+ if not clid:
+ frame = sys._getframe(1)
+ clid = '%s:%s' % (frame.f_code.co_filename, frame.f_lineno)
+ self._init_location_id = clid
super(InteractiveShellEmbed,self).__init__(**kw)
@@ -164,9 +164,9 @@ class InteractiveShellEmbed(TerminalInteractiveShell):
call_pdb=self.pdb)
def init_sys_modules(self):
- """
- Explicitly overwrite :mod:`IPython.core.interactiveshell` to do nothing.
- """
+ """
+ Explicitly overwrite :mod:`IPython.core.interactiveshell` to do nothing.
+ """
pass
def init_magics(self):
@@ -174,7 +174,7 @@ class InteractiveShellEmbed(TerminalInteractiveShell):
self.register_magics(EmbeddedMagics)
def __call__(self, header='', local_ns=None, module=None, dummy=None,
- stack_depth=1, global_ns=None, compile_flags=None, **kw):
+ stack_depth=1, global_ns=None, compile_flags=None, **kw):
"""Activate the interactive interpreter.
__call__(self,header='',local_ns=None,module=None,dummy=None) -> Start
@@ -191,16 +191,16 @@ class InteractiveShellEmbed(TerminalInteractiveShell):
can still have a specific call work by making it as IPShell(dummy=False).
"""
- # we are called, set the underlying interactiveshell not to exit.
- self.keep_running = True
-
+ # we are called, set the underlying interactiveshell not to exit.
+ self.keep_running = True
+
# If the user has turned it off, go away
- clid = kw.pop('_call_location_id', None)
- if not clid:
- frame = sys._getframe(1)
- clid = '%s:%s' % (frame.f_code.co_filename, frame.f_lineno)
- self._call_location_id = clid
-
+ clid = kw.pop('_call_location_id', None)
+ if not clid:
+ frame = sys._getframe(1)
+ clid = '%s:%s' % (frame.f_code.co_filename, frame.f_lineno)
+ self._call_location_id = clid
+
if not self.embedded_active:
return
@@ -379,10 +379,10 @@ def embed(**kwargs):
cls = type(saved_shell_instance)
cls.clear_instance()
frame = sys._getframe(1)
- shell = InteractiveShellEmbed.instance(_init_location_id='%s:%s' % (
- frame.f_code.co_filename, frame.f_lineno), **kwargs)
- shell(header=header, stack_depth=2, compile_flags=compile_flags,
- _call_location_id='%s:%s' % (frame.f_code.co_filename, frame.f_lineno))
+ shell = InteractiveShellEmbed.instance(_init_location_id='%s:%s' % (
+ frame.f_code.co_filename, frame.f_lineno), **kwargs)
+ shell(header=header, stack_depth=2, compile_flags=compile_flags,
+ _call_location_id='%s:%s' % (frame.f_code.co_filename, frame.f_lineno))
InteractiveShellEmbed.clear_instance()
#restore previous instance
if saved_shell_instance is not None:
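
The %kill_embedded changes above track a call location separately from the instance-creation location. A sketch of the loop scenario its docstring describes (illustrative only, not part of the commit; IPython.embed is the public entry point wrapping the embed() shown in the diff):

    from IPython import embed

    for i in range(5):
        # Each call site gets a _call_location_id; running %kill_embedded inside
        # the shell disables this site for the remaining iterations, while
        # %kill_embedded --instance disables the whole instance.
        embed(header='iteration %d' % i)
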
diff --git a/contrib/python/ipython/py2/IPython/terminal/interactiveshell.py b/contrib/python/ipython/py2/IPython/terminal/interactiveshell.py
index f67cc6b502..247235c1fd 100644
--- a/contrib/python/ipython/py2/IPython/terminal/interactiveshell.py
+++ b/contrib/python/ipython/py2/IPython/terminal/interactiveshell.py
@@ -8,12 +8,12 @@ from warnings import warn
from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
from IPython.utils import io
-from IPython.utils.py3compat import PY3, cast_unicode_py2, input, string_types
+from IPython.utils.py3compat import PY3, cast_unicode_py2, input, string_types
from IPython.utils.terminal import toggle_set_term_title, set_term_title
from IPython.utils.process import abbrev_cwd
-from traitlets import Bool, Unicode, Dict, Integer, observe, Instance, Type, default, Enum, Union
+from traitlets import Bool, Unicode, Dict, Integer, observe, Instance, Type, default, Enum, Union
-from prompt_toolkit.document import Document
+from prompt_toolkit.document import Document
from prompt_toolkit.enums import DEFAULT_BUFFER, EditingMode
from prompt_toolkit.filters import (HasFocus, Condition, IsDone)
from prompt_toolkit.history import InMemoryHistory
@@ -23,13 +23,13 @@ from prompt_toolkit.key_binding.manager import KeyBindingManager
from prompt_toolkit.layout.processors import ConditionalProcessor, HighlightMatchingBracketProcessor
from prompt_toolkit.styles import PygmentsStyle, DynamicStyle
-from pygments.styles import get_style_by_name
-from pygments.style import Style
+from pygments.styles import get_style_by_name
+from pygments.style import Style
from pygments.token import Token
from .debugger import TerminalPdb, Pdb
from .magics import TerminalMagics
-from .pt_inputhooks import get_inputhook_name_and_func
+from .pt_inputhooks import get_inputhook_name_and_func
from .prompts import Prompts, ClassicPrompts, RichPromptDisplayHook
from .ptutils import IPythonPTCompleter, IPythonPTLexer
from .shortcuts import register_ipython_shortcuts
@@ -104,7 +104,7 @@ class TerminalInteractiveShell(InteractiveShell):
_pt_app = None
simple_prompt = Bool(_use_simple_prompt,
- help="""Use `raw_input` for the REPL, without completion and prompt colors.
+ help="""Use `raw_input` for the REPL, without completion and prompt colors.
Useful when controlling IPython as a subprocess, and piping STDIN/OUT/ERR. Known usage are:
IPython own testing machinery, and emacs inferior-shell integration through elpy.
@@ -134,21 +134,21 @@ class TerminalInteractiveShell(InteractiveShell):
help="Enable mouse support in the prompt"
).tag(config=True)
- # We don't load the list of styles for the help string, because loading
- # Pygments plugins takes time and can cause unexpected errors.
- highlighting_style = Union([Unicode('legacy'), Type(klass=Style)],
- help="""The name or class of a Pygments style to use for syntax
- highlighting. To see available styles, run `pygmentize -L styles`."""
+ # We don't load the list of styles for the help string, because loading
+ # Pygments plugins takes time and can cause unexpected errors.
+ highlighting_style = Union([Unicode('legacy'), Type(klass=Style)],
+ help="""The name or class of a Pygments style to use for syntax
+ highlighting. To see available styles, run `pygmentize -L styles`."""
).tag(config=True)
-
+
@observe('highlighting_style')
@observe('colors')
def _highlighting_style_changed(self, change):
self.refresh_style()
def refresh_style(self):
- self._style = self._make_style_from_name_or_cls(self.highlighting_style)
+ self._style = self._make_style_from_name_or_cls(self.highlighting_style)
highlighting_style_overrides = Dict(
@@ -186,7 +186,7 @@ class TerminalInteractiveShell(InteractiveShell):
help="Automatically set the terminal title"
).tag(config=True)
- display_completions = Enum(('column', 'multicolumn','readlinelike'),
+ display_completions = Enum(('column', 'multicolumn','readlinelike'),
help= ( "Options for displaying tab completions, 'column', 'multicolumn', and "
"'readlinelike'. These options are for `prompt_toolkit`, see "
"`prompt_toolkit` documentation for more information."
@@ -194,14 +194,14 @@ class TerminalInteractiveShell(InteractiveShell):
default_value='multicolumn').tag(config=True)
highlight_matching_brackets = Bool(True,
- help="Highlight matching brackets.",
- ).tag(config=True)
-
- extra_open_editor_shortcuts = Bool(False,
- help="Enable vi (v) or Emacs (C-X C-E) shortcuts to open an external editor. "
- "This is in addition to the F2 binding, which is always enabled."
+ help="Highlight matching brackets.",
).tag(config=True)
+ extra_open_editor_shortcuts = Bool(False,
+ help="Enable vi (v) or Emacs (C-X C-E) shortcuts to open an external editor. "
+ "This is in addition to the F2 binding, which is always enabled."
+ ).tag(config=True)
+
@observe('term_title')
def init_term_title(self, change=None):
# Enable or disable the terminal title.
@@ -215,29 +215,29 @@ class TerminalInteractiveShell(InteractiveShell):
super(TerminalInteractiveShell, self).init_display_formatter()
# terminal only supports plain text
self.display_formatter.active_types = ['text/plain']
- # disable `_ipython_display_`
- self.display_formatter.ipython_display_formatter.enabled = False
+ # disable `_ipython_display_`
+ self.display_formatter.ipython_display_formatter.enabled = False
def init_prompt_toolkit_cli(self):
if self.simple_prompt:
# Fall back to plain non-interactive output for tests.
# This is very limited, and only accepts a single line.
def prompt():
- isp = self.input_splitter
- prompt_text = "".join(x[1] for x in self.prompts.in_prompt_tokens())
- prompt_continuation = "".join(x[1] for x in self.prompts.continuation_prompt_tokens())
- while isp.push_accepts_more():
- line = cast_unicode_py2(input(prompt_text))
- isp.push(line)
- prompt_text = prompt_continuation
- return isp.source_reset()
+ isp = self.input_splitter
+ prompt_text = "".join(x[1] for x in self.prompts.in_prompt_tokens())
+ prompt_continuation = "".join(x[1] for x in self.prompts.continuation_prompt_tokens())
+ while isp.push_accepts_more():
+ line = cast_unicode_py2(input(prompt_text))
+ isp.push(line)
+ prompt_text = prompt_continuation
+ return isp.source_reset()
self.prompt_for_code = prompt
return
# Set up keyboard shortcuts
- kbmanager = KeyBindingManager.for_prompt(
- enable_open_in_editor=self.extra_open_editor_shortcuts,
- )
+ kbmanager = KeyBindingManager.for_prompt(
+ enable_open_in_editor=self.extra_open_editor_shortcuts,
+ )
register_ipython_shortcuts(kbmanager.registry, self)
# Pre-populate history from IPython's history database
@@ -249,24 +249,24 @@ class TerminalInteractiveShell(InteractiveShell):
cell = cell.rstrip()
if cell and (cell != last_cell):
history.append(cell)
- last_cell = cell
+ last_cell = cell
- self._style = self._make_style_from_name_or_cls(self.highlighting_style)
- self.style = DynamicStyle(lambda: self._style)
+ self._style = self._make_style_from_name_or_cls(self.highlighting_style)
+ self.style = DynamicStyle(lambda: self._style)
editing_mode = getattr(EditingMode, self.editing_mode.upper())
- def patch_stdout(**kwargs):
- return self.pt_cli.patch_stdout_context(**kwargs)
-
+ def patch_stdout(**kwargs):
+ return self.pt_cli.patch_stdout_context(**kwargs)
+
self._pt_app = create_prompt_application(
editing_mode=editing_mode,
key_bindings_registry=kbmanager.registry,
history=history,
- completer=IPythonPTCompleter(shell=self,
- patch_stdout=patch_stdout),
+ completer=IPythonPTCompleter(shell=self,
+ patch_stdout=patch_stdout),
enable_history_search=True,
- style=self.style,
+ style=self.style,
mouse_support=self.mouse_support,
**self._layout_options()
)
@@ -275,14 +275,14 @@ class TerminalInteractiveShell(InteractiveShell):
self._pt_app, eventloop=self._eventloop,
output=create_output(true_color=self.true_color))
- def _make_style_from_name_or_cls(self, name_or_cls):
+ def _make_style_from_name_or_cls(self, name_or_cls):
"""
Small wrapper that make an IPython compatible style from a style name
- We need that to add style for prompt ... etc.
+ We need that to add style for prompt ... etc.
"""
style_overrides = {}
- if name_or_cls == 'legacy':
+ if name_or_cls == 'legacy':
legacy = self.colors.lower()
if legacy == 'linux':
style_cls = get_style_by_name('monokai')
@@ -307,26 +307,26 @@ class TerminalInteractiveShell(InteractiveShell):
Token.OutPrompt: '#990000',
Token.OutPromptNum: '#ff0000 bold',
})
-
- # Hack: Due to limited color support on the Windows console
- # the prompt colors will be wrong without this
- if os.name == 'nt':
- style_overrides.update({
- Token.Prompt: '#ansidarkgreen',
- Token.PromptNum: '#ansigreen bold',
- Token.OutPrompt: '#ansidarkred',
- Token.OutPromptNum: '#ansired bold',
- })
+
+ # Hack: Due to limited color support on the Windows console
+ # the prompt colors will be wrong without this
+ if os.name == 'nt':
+ style_overrides.update({
+ Token.Prompt: '#ansidarkgreen',
+ Token.PromptNum: '#ansigreen bold',
+ Token.OutPrompt: '#ansidarkred',
+ Token.OutPromptNum: '#ansired bold',
+ })
elif legacy =='nocolor':
style_cls=_NoStyle
style_overrides = {}
else :
raise ValueError('Got unknown colors: ', legacy)
else :
- if isinstance(name_or_cls, string_types):
- style_cls = get_style_by_name(name_or_cls)
- else:
- style_cls = name_or_cls
+ if isinstance(name_or_cls, string_types):
+ style_cls = get_style_by_name(name_or_cls)
+ else:
+ style_cls = name_or_cls
style_overrides = {
Token.Prompt: '#009900',
Token.PromptNum: '#00ff00 bold',
@@ -373,11 +373,11 @@ class TerminalInteractiveShell(InteractiveShell):
return document.text
def enable_win_unicode_console(self):
- if sys.version_info >= (3, 6):
- # Since PEP 528, Python uses the unicode APIs for the Windows
- # console by default, so WUC shouldn't be needed.
- return
-
+ if sys.version_info >= (3, 6):
+ # Since PEP 528, Python uses the unicode APIs for the Windows
+ # console by default, so WUC shouldn't be needed.
+ return
+
import win_unicode_console
if PY3:
@@ -451,18 +451,18 @@ class TerminalInteractiveShell(InteractiveShell):
def pre_prompt(self):
if self.rl_next_input:
- # We can't set the buffer here, because it will be reset just after
- # this. Adding a callable to pre_run_callables does what we need
- # after the buffer is reset.
- s = cast_unicode_py2(self.rl_next_input)
- def set_doc():
- self.pt_cli.application.buffer.document = Document(s)
- if hasattr(self.pt_cli, 'pre_run_callables'):
- self.pt_cli.pre_run_callables.append(set_doc)
- else:
- # Older version of prompt_toolkit; it's OK to set the document
- # directly here.
- set_doc()
+ # We can't set the buffer here, because it will be reset just after
+ # this. Adding a callable to pre_run_callables does what we need
+ # after the buffer is reset.
+ s = cast_unicode_py2(self.rl_next_input)
+ def set_doc():
+ self.pt_cli.application.buffer.document = Document(s)
+ if hasattr(self.pt_cli, 'pre_run_callables'):
+ self.pt_cli.pre_run_callables.append(set_doc)
+ else:
+ # Older version of prompt_toolkit; it's OK to set the document
+ # directly here.
+ set_doc()
self.rl_next_input = None
def interact(self, display_banner=DISPLAY_BANNER_DEPRECATED):
@@ -494,28 +494,28 @@ class TerminalInteractiveShell(InteractiveShell):
try:
self.interact()
break
- except KeyboardInterrupt as e:
- print("\n%s escaped interact()\n" % type(e).__name__)
- finally:
- # An interrupt during the eventloop will mess up the
- # internal state of the prompt_toolkit library.
- # Stopping the eventloop fixes this, see
- # https://github.com/ipython/ipython/pull/9867
- if hasattr(self, '_eventloop'):
- self._eventloop.stop()
+ except KeyboardInterrupt as e:
+ print("\n%s escaped interact()\n" % type(e).__name__)
+ finally:
+ # An interrupt during the eventloop will mess up the
+ # internal state of the prompt_toolkit library.
+ # Stopping the eventloop fixes this, see
+ # https://github.com/ipython/ipython/pull/9867
+ if hasattr(self, '_eventloop'):
+ self._eventloop.stop()
_inputhook = None
def inputhook(self, context):
if self._inputhook is not None:
self._inputhook(context)
- active_eventloop = None
+ active_eventloop = None
def enable_gui(self, gui=None):
if gui:
- self.active_eventloop, self._inputhook =\
- get_inputhook_name_and_func(gui)
+ self.active_eventloop, self._inputhook =\
+ get_inputhook_name_and_func(gui)
else:
- self.active_eventloop = self._inputhook = None
+ self.active_eventloop = self._inputhook = None
# Run !system commands directly, not through pipes, so terminal programs
# work correctly.
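
Several traits touched above (highlighting_style, display_completions, highlight_matching_brackets, extra_open_editor_shortcuts) are tagged config=True, so they can be set from a profile. A sketch of the matching ipython_config.py lines (illustrative only; the values are ones named in the diff):

    c = get_config()
    c.TerminalInteractiveShell.display_completions = 'readlinelike'
    c.TerminalInteractiveShell.highlighting_style = 'monokai'    # any Pygments style name or class
    c.TerminalInteractiveShell.highlight_matching_brackets = True
    c.TerminalInteractiveShell.extra_open_editor_shortcuts = True
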
diff --git a/contrib/python/ipython/py2/IPython/terminal/ipapp.py b/contrib/python/ipython/py2/IPython/terminal/ipapp.py
index 6b25aaa3e3..5f6ba29962 100755
--- a/contrib/python/ipython/py2/IPython/terminal/ipapp.py
+++ b/contrib/python/ipython/py2/IPython/terminal/ipapp.py
@@ -35,7 +35,7 @@ from IPython.extensions.storemagic import StoreMagics
from .interactiveshell import TerminalInteractiveShell
from IPython.paths import get_ipython_dir
from traitlets import (
- Bool, List, Dict, default, observe, Type
+ Bool, List, Dict, default, observe, Type
)
#-----------------------------------------------------------------------------
@@ -185,13 +185,13 @@ class TerminalIPythonApp(BaseIPythonApplication, InteractiveShellApp):
flags = Dict(flags)
aliases = Dict(aliases)
classes = List()
-
- interactive_shell_class = Type(
- klass=object, # use default_value otherwise which only allow subclasses.
- default_value=TerminalInteractiveShell,
- help="Class to use to instantiate the TerminalInteractiveShell object. Useful for custom Frontends"
- ).tag(config=True)
-
+
+ interactive_shell_class = Type(
+ klass=object, # use default_value otherwise which only allow subclasses.
+ default_value=TerminalInteractiveShell,
+ help="Class to use to instantiate the TerminalInteractiveShell object. Useful for custom Frontends"
+ ).tag(config=True)
+
@default('classes')
def _classes_default(self):
"""This has to be in a method, for TerminalIPythonApp to be available."""
@@ -327,7 +327,7 @@ class TerminalIPythonApp(BaseIPythonApplication, InteractiveShellApp):
# shell.display_banner should always be False for the terminal
# based app, because we call shell.show_banner() by hand below
# so the banner shows *before* all extension loading stuff.
- self.shell = self.interactive_shell_class.instance(parent=self,
+ self.shell = self.interactive_shell_class.instance(parent=self,
profile_dir=self.profile_dir,
ipython_dir=self.ipython_dir, user_ns=self.user_ns)
self.shell.configurables.append(self)
@@ -365,10 +365,10 @@ def load_default_config(ipython_dir=None):
ipython_dir = get_ipython_dir()
profile_dir = os.path.join(ipython_dir, 'profile_default')
- app = TerminalIPythonApp()
- app.config_file_paths.append(profile_dir)
- app.load_config_file()
- return app.config
+ app = TerminalIPythonApp()
+ app.config_file_paths.append(profile_dir)
+ app.load_config_file()
+ return app.config
launch_new_instance = TerminalIPythonApp.launch_instance
diff --git a/contrib/python/ipython/py2/IPython/terminal/magics.py b/contrib/python/ipython/py2/IPython/terminal/magics.py
index 3a844a0397..c8b5ea5a35 100644
--- a/contrib/python/ipython/py2/IPython/terminal/magics.py
+++ b/contrib/python/ipython/py2/IPython/terminal/magics.py
@@ -82,9 +82,9 @@ class TerminalMagics(Magics):
@line_magic
def autoindent(self, parameter_s = ''):
- """Toggle autoindent on/off (deprecated)"""
- print("%autoindent is deprecated since IPython 5: you can now paste "
- "multiple lines without turning autoindentation off.")
+ """Toggle autoindent on/off (deprecated)"""
+ print("%autoindent is deprecated since IPython 5: you can now paste "
+ "multiple lines without turning autoindentation off.")
self.shell.set_autoindent()
print("Automatic indentation is:",['OFF','ON'][self.shell.autoindent])
diff --git a/contrib/python/ipython/py2/IPython/terminal/prompts.py b/contrib/python/ipython/py2/IPython/terminal/prompts.py
index 43c2170503..d153289c43 100644
--- a/contrib/python/ipython/py2/IPython/terminal/prompts.py
+++ b/contrib/python/ipython/py2/IPython/terminal/prompts.py
@@ -63,18 +63,18 @@ class RichPromptDisplayHook(DisplayHook):
"""Subclass of base display hook using coloured prompt"""
def write_output_prompt(self):
sys.stdout.write(self.shell.separate_out)
- # If we're not displaying a prompt, it effectively ends with a newline,
- # because the output will be left-aligned.
- self.prompt_end_newline = True
-
+ # If we're not displaying a prompt, it effectively ends with a newline,
+ # because the output will be left-aligned.
+ self.prompt_end_newline = True
+
if self.do_full_cache:
tokens = self.shell.prompts.out_prompt_tokens()
- prompt_txt = ''.join(s for t, s in tokens)
- if prompt_txt and not prompt_txt.endswith('\n'):
- # Ask for a newline before multiline output
- self.prompt_end_newline = False
-
+ prompt_txt = ''.join(s for t, s in tokens)
+ if prompt_txt and not prompt_txt.endswith('\n'):
+ # Ask for a newline before multiline output
+ self.prompt_end_newline = False
+
if self.shell.pt_cli:
self.shell.pt_cli.print_tokens(tokens)
else:
- sys.stdout.write(prompt_txt)
+ sys.stdout.write(prompt_txt)
diff --git a/contrib/python/ipython/py2/IPython/terminal/pt_inputhooks/__init__.py b/contrib/python/ipython/py2/IPython/terminal/pt_inputhooks/__init__.py
index 3766973e82..3f45bc1cd1 100644
--- a/contrib/python/ipython/py2/IPython/terminal/pt_inputhooks/__init__.py
+++ b/contrib/python/ipython/py2/IPython/terminal/pt_inputhooks/__init__.py
@@ -30,20 +30,20 @@ class UnknownBackend(KeyError):
"Supported event loops are: {}").format(self.name,
', '.join(backends + sorted(registered)))
-def get_inputhook_name_and_func(gui):
+def get_inputhook_name_and_func(gui):
if gui in registered:
- return gui, registered[gui]
+ return gui, registered[gui]
if gui not in backends:
raise UnknownBackend(gui)
if gui in aliases:
- return get_inputhook_name_and_func(aliases[gui])
+ return get_inputhook_name_and_func(aliases[gui])
- gui_mod = gui
+ gui_mod = gui
if gui == 'qt5':
os.environ['QT_API'] = 'pyqt5'
- gui_mod = 'qt'
+ gui_mod = 'qt'
- mod = importlib.import_module('IPython.terminal.pt_inputhooks.'+gui_mod)
- return gui, mod.inputhook
+ mod = importlib.import_module('IPython.terminal.pt_inputhooks.'+gui_mod)
+ return gui, mod.inputhook
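
A sketch of how the resolver above is used (illustrative only; the 'qt5' case needs a Qt binding installed, since it sets QT_API and imports IPython.terminal.pt_inputhooks.qt):

    from IPython.terminal.pt_inputhooks import get_inputhook_name_and_func

    # Registered hooks and aliases are checked first; 'qt5' then loads the 'qt'
    # hook module and returns the name together with its inputhook(context).
    name, hook = get_inputhook_name_and_func('qt5')
    print(name, callable(hook))
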
diff --git a/contrib/python/ipython/py2/IPython/terminal/pt_inputhooks/qt.py b/contrib/python/ipython/py2/IPython/terminal/pt_inputhooks/qt.py
index 7395ac39eb..536050e8cd 100644
--- a/contrib/python/ipython/py2/IPython/terminal/pt_inputhooks/qt.py
+++ b/contrib/python/ipython/py2/IPython/terminal/pt_inputhooks/qt.py
@@ -39,11 +39,11 @@ def inputhook(context):
else:
# On POSIX platforms, we can use a file descriptor to quit the event
# loop when there is input ready to read.
- notifier = QtCore.QSocketNotifier(context.fileno(),
- QtCore.QSocketNotifier.Read)
- # connect the callback we care about before we turn it on
- notifier.activated.connect(event_loop.exit)
+ notifier = QtCore.QSocketNotifier(context.fileno(),
+ QtCore.QSocketNotifier.Read)
+ # connect the callback we care about before we turn it on
+ notifier.activated.connect(event_loop.exit)
notifier.setEnabled(True)
- # only start the event loop we are not already flipped
- if not context.input_is_ready():
- event_loop.exec_()
+ # only start the event loop we are not already flipped
+ if not context.input_is_ready():
+ event_loop.exec_()
diff --git a/contrib/python/ipython/py2/IPython/terminal/ptutils.py b/contrib/python/ipython/py2/IPython/terminal/ptutils.py
index c9ff705642..03e33155b1 100644
--- a/contrib/python/ipython/py2/IPython/terminal/ptutils.py
+++ b/contrib/python/ipython/py2/IPython/terminal/ptutils.py
@@ -22,14 +22,14 @@ import pygments.lexers as pygments_lexers
class IPythonPTCompleter(Completer):
"""Adaptor to provide IPython completions to prompt_toolkit"""
- def __init__(self, ipy_completer=None, shell=None, patch_stdout=None):
+ def __init__(self, ipy_completer=None, shell=None, patch_stdout=None):
if shell is None and ipy_completer is None:
raise TypeError("Please pass shell=an InteractiveShell instance.")
self._ipy_completer = ipy_completer
self.shell = shell
- if patch_stdout is None:
- raise TypeError("Please pass patch_stdout")
- self.patch_stdout = patch_stdout
+ if patch_stdout is None:
+ raise TypeError("Please pass patch_stdout")
+ self.patch_stdout = patch_stdout
@property
def ipy_completer(self):
@@ -42,14 +42,14 @@ class IPythonPTCompleter(Completer):
if not document.current_line.strip():
return
- # Some bits of our completion system may print stuff (e.g. if a module
- # is imported). This context manager ensures that doesn't interfere with
- # the prompt.
- with self.patch_stdout():
- used, matches = self.ipy_completer.complete(
- line_buffer=document.current_line,
- cursor_pos=document.cursor_position_col
- )
+ # Some bits of our completion system may print stuff (e.g. if a module
+ # is imported). This context manager ensures that doesn't interfere with
+ # the prompt.
+ with self.patch_stdout():
+ used, matches = self.ipy_completer.complete(
+ line_buffer=document.current_line,
+ cursor_pos=document.cursor_position_col
+ )
start_pos = -len(used)
for m in matches:
if not m:
diff --git a/contrib/python/ipython/py2/IPython/terminal/shortcuts.py b/contrib/python/ipython/py2/IPython/terminal/shortcuts.py
index 8f0ae43e84..71ce01fe9c 100644
--- a/contrib/python/ipython/py2/IPython/terminal/shortcuts.py
+++ b/contrib/python/ipython/py2/IPython/terminal/shortcuts.py
@@ -62,10 +62,10 @@ def register_ipython_shortcuts(registry, shell):
filter=(HasFocus(DEFAULT_BUFFER)
& EmacsInsertMode()))(newline_with_copy_margin)
- registry.add_binding(Keys.F2,
- filter=HasFocus(DEFAULT_BUFFER)
- )(open_input_in_editor)
-
+ registry.add_binding(Keys.F2,
+ filter=HasFocus(DEFAULT_BUFFER)
+ )(open_input_in_editor)
+
if shell.display_completions == 'readlinelike':
registry.add_binding(Keys.ControlI,
filter=(HasFocus(DEFAULT_BUFFER)
@@ -96,18 +96,18 @@ def newline_or_execute_outer(shell):
b.cancel_completion()
return
- # If there's only one line, treat it as if the cursor is at the end.
- # See https://github.com/ipython/ipython/issues/10425
- if d.line_count == 1:
- check_text = d.text
- else:
- check_text = d.text[:d.cursor_position]
- status, indent = shell.input_splitter.check_complete(check_text + '\n')
-
- if not (d.on_last_line or
- d.cursor_position_row >= d.line_count - d.empty_line_count_at_the_end()
- ):
- b.insert_text('\n' + (' ' * (indent or 0)))
+ # If there's only one line, treat it as if the cursor is at the end.
+ # See https://github.com/ipython/ipython/issues/10425
+ if d.line_count == 1:
+ check_text = d.text
+ else:
+ check_text = d.text[:d.cursor_position]
+ status, indent = shell.input_splitter.check_complete(check_text + '\n')
+
+ if not (d.on_last_line or
+ d.cursor_position_row >= d.line_count - d.empty_line_count_at_the_end()
+ ):
+ b.insert_text('\n' + (' ' * (indent or 0)))
return
if (status != 'incomplete') and b.accept_action.is_returnable:
@@ -181,9 +181,9 @@ def newline_with_copy_margin(event):
pos_diff = cursor_start_pos - cursor_end_pos
b.cursor_right(count=pos_diff)
-def open_input_in_editor(event):
- event.cli.current_buffer.tempfile_suffix = ".py"
- event.cli.current_buffer.open_in_editor(event.cli)
+def open_input_in_editor(event):
+ event.cli.current_buffer.tempfile_suffix = ".py"
+ event.cli.current_buffer.open_in_editor(event.cli)
if sys.platform == 'win32':
diff --git a/contrib/python/ipython/py2/IPython/testing/decorators.py b/contrib/python/ipython/py2/IPython/testing/decorators.py
index c9807ce70e..3f18773292 100644
--- a/contrib/python/ipython/py2/IPython/testing/decorators.py
+++ b/contrib/python/ipython/py2/IPython/testing/decorators.py
@@ -67,7 +67,7 @@ def as_unittest(func):
# Utility functions
-def apply_wrapper(wrapper, func):
+def apply_wrapper(wrapper, func):
"""Apply a wrapper to a function for decoration.
This mixes Michele Simionato's decorator tool with nose's make_decorator,
@@ -76,14 +76,14 @@ def apply_wrapper(wrapper, func):
This will ensure that wrapped functions can still be well introspected via
IPython, for example.
"""
- warnings.warn("The function `apply_wrapper` is deprecated since IPython 4.0",
- DeprecationWarning, stacklevel=2)
+ warnings.warn("The function `apply_wrapper` is deprecated since IPython 4.0",
+ DeprecationWarning, stacklevel=2)
import nose.tools
return decorator(wrapper,nose.tools.make_decorator(func)(wrapper))
-def make_label_dec(label, ds=None):
+def make_label_dec(label, ds=None):
"""Factory function to create a decorator that applies one or more labels.
Parameters
@@ -128,8 +128,8 @@ def make_label_dec(label, ds=None):
True
"""
- warnings.warn("The function `make_label_dec` is deprecated since IPython 4.0",
- DeprecationWarning, stacklevel=2)
+ warnings.warn("The function `make_label_dec` is deprecated since IPython 4.0",
+ DeprecationWarning, stacklevel=2)
if isinstance(label, string_types):
labels = [label]
else:
@@ -285,8 +285,8 @@ def decorated_dummy(dec, name):
import IPython.testing.decorators as dec
setup = dec.decorated_dummy(dec.skip_if_no_x11, __name__)
"""
- warnings.warn("The function `decorated_dummy` is deprecated since IPython 4.0",
- DeprecationWarning, stacklevel=2)
+ warnings.warn("The function `decorated_dummy` is deprecated since IPython 4.0",
+ DeprecationWarning, stacklevel=2)
dummy = lambda: None
dummy.__name__ = name
return dec(dummy)
@@ -319,8 +319,8 @@ skip_if_no_x11 = skipif(_x11_skip_cond, _x11_skip_msg)
# not a decorator itself, returns a dummy function to be used as setup
def skip_file_no_x11(name):
- warnings.warn("The function `skip_file_no_x11` is deprecated since IPython 4.0",
- DeprecationWarning, stacklevel=2)
+ warnings.warn("The function `skip_file_no_x11` is deprecated since IPython 4.0",
+ DeprecationWarning, stacklevel=2)
return decorated_dummy(skip_if_no_x11, name) if _x11_skip_cond else None
# Other skip decorators
@@ -375,8 +375,8 @@ def onlyif_any_cmd_exists(*commands):
"""
Decorator to skip test unless at least one of `commands` is found.
"""
- warnings.warn("The function `onlyif_any_cmd_exists` is deprecated since IPython 4.0",
- DeprecationWarning, stacklevel=2)
+ warnings.warn("The function `onlyif_any_cmd_exists` is deprecated since IPython 4.0",
+ DeprecationWarning, stacklevel=2)
for cmd in commands:
if which(cmd):
return null_deco
diff --git a/contrib/python/ipython/py2/IPython/testing/iptest.py b/contrib/python/ipython/py2/IPython/testing/iptest.py
index 4018264125..ed6d55d9bc 100644
--- a/contrib/python/ipython/py2/IPython/testing/iptest.py
+++ b/contrib/python/ipython/py2/IPython/testing/iptest.py
@@ -50,16 +50,16 @@ if sys.version_info > (3,0):
warnings.filterwarnings('error', message=".*{'config': True}.*", category=DeprecationWarning, module='IPy.*')
warnings.filterwarnings('default', message='.*', category=Warning, module='IPy.*')
-warnings.filterwarnings('error', message='.*apply_wrapper.*', category=DeprecationWarning, module='.*')
-warnings.filterwarnings('error', message='.*make_label_dec', category=DeprecationWarning, module='.*')
-warnings.filterwarnings('error', message='.*decorated_dummy.*', category=DeprecationWarning, module='.*')
-warnings.filterwarnings('error', message='.*skip_file_no_x11.*', category=DeprecationWarning, module='.*')
-warnings.filterwarnings('error', message='.*onlyif_any_cmd_exists.*', category=DeprecationWarning, module='.*')
-
-warnings.filterwarnings('error', message='.*disable_gui.*', category=DeprecationWarning, module='.*')
-
-warnings.filterwarnings('error', message='.*ExceptionColors global is deprecated.*', category=DeprecationWarning, module='.*')
-
+warnings.filterwarnings('error', message='.*apply_wrapper.*', category=DeprecationWarning, module='.*')
+warnings.filterwarnings('error', message='.*make_label_dec', category=DeprecationWarning, module='.*')
+warnings.filterwarnings('error', message='.*decorated_dummy.*', category=DeprecationWarning, module='.*')
+warnings.filterwarnings('error', message='.*skip_file_no_x11.*', category=DeprecationWarning, module='.*')
+warnings.filterwarnings('error', message='.*onlyif_any_cmd_exists.*', category=DeprecationWarning, module='.*')
+
+warnings.filterwarnings('error', message='.*disable_gui.*', category=DeprecationWarning, module='.*')
+
+warnings.filterwarnings('error', message='.*ExceptionColors global is deprecated.*', category=DeprecationWarning, module='.*')
+
if version_info < (6,):
# nose.tools renames all things from `camelCase` to `snake_case` which raise an
# warning with the runner they also import from standard import library. (as of Dec 2015)
diff --git a/contrib/python/ipython/py2/IPython/testing/tools.py b/contrib/python/ipython/py2/IPython/testing/tools.py
index 23bf6a68cb..1a2ba6fd43 100644
--- a/contrib/python/ipython/py2/IPython/testing/tools.py
+++ b/contrib/python/ipython/py2/IPython/testing/tools.py
@@ -7,8 +7,8 @@ Authors
from __future__ import absolute_import
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
import os
import re
@@ -18,11 +18,11 @@ import tempfile
from contextlib import contextmanager
from io import StringIO
from subprocess import Popen, PIPE
-try:
- from unittest.mock import patch
-except ImportError:
- # Python 2 compatibility
- from mock import patch
+try:
+ from unittest.mock import patch
+except ImportError:
+ # Python 2 compatibility
+ from mock import patch
try:
# These tools are used by parts of the runtime, so we make the nose
@@ -438,28 +438,28 @@ def make_tempfile(name):
finally:
os.unlink(name)
-def fake_input(inputs):
- """Temporarily replace the input() function to return the given values
-
- Use as a context manager:
-
- with fake_input(['result1', 'result2']):
- ...
-
- Values are returned in order. If input() is called again after the last value
- was used, EOFError is raised.
- """
- it = iter(inputs)
- def mock_input(prompt=''):
- try:
- return next(it)
- except StopIteration:
- raise EOFError('No more inputs given')
-
- input_name = '%s.%s' % (py3compat.builtin_mod_name,
- 'input' if py3compat.PY3 else 'raw_input')
- return patch(input_name, mock_input)
-
+def fake_input(inputs):
+ """Temporarily replace the input() function to return the given values
+
+ Use as a context manager:
+
+ with fake_input(['result1', 'result2']):
+ ...
+
+ Values are returned in order. If input() is called again after the last value
+ was used, EOFError is raised.
+ """
+ it = iter(inputs)
+ def mock_input(prompt=''):
+ try:
+ return next(it)
+ except StopIteration:
+ raise EOFError('No more inputs given')
+
+ input_name = '%s.%s' % (py3compat.builtin_mod_name,
+ 'input' if py3compat.PY3 else 'raw_input')
+ return patch(input_name, mock_input)
+
def help_output_test(subcommand=''):
"""test that `ipython [subcommand] -h` works"""
cmd = get_ipython_cmd() + [subcommand, '-h']
@@ -478,6 +478,6 @@ def help_all_output_test(subcommand=''):
nt.assert_equal(rc, 0, err)
nt.assert_not_in("Traceback", err)
nt.assert_in("Options", out)
- nt.assert_in("Class", out)
+ nt.assert_in("Class", out)
return out, err
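
A usage sketch for the fake_input helper restored above, following its docstring (illustrative only; shown with Python 3 semantics, where builtins.input is patched):

    from IPython.testing.tools import fake_input

    def ask_twice():
        return [input(), input()]

    with fake_input(['first', 'second']):
        assert ask_twice() == ['first', 'second']
    # Another input() call inside the block would raise
    # EOFError('No more inputs given'), per the StopIteration branch above.
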
diff --git a/contrib/python/ipython/py2/IPython/utils/_get_terminal_size.py b/contrib/python/ipython/py2/IPython/utils/_get_terminal_size.py
index b2c989e7aa..31eef93abd 100644
--- a/contrib/python/ipython/py2/IPython/utils/_get_terminal_size.py
+++ b/contrib/python/ipython/py2/IPython/utils/_get_terminal_size.py
@@ -1,131 +1,131 @@
-# vendored version of backports.get_terminal_size as nemesapece package are a
-# mess and break, especially on ubuntu. This file is under MIT Licence.
-# See https://pypi.python.org/pypi/backports.shutil_get_terminal_size
-#
-# commit: afc5714b1545a5a3aa44cfb5e078d39165bf76ab (Feb 20, 2016)
-# from
-# https://github.com/chrippa/backports.shutil_get_terminal_size
-#
-# The MIT License (MIT)
-#
-# Copyright (c) 2014 Christopher Rosell
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-# THE SOFTWARE.
-#
-"""This is a backport of shutil.get_terminal_size from Python 3.3.
-
-The original implementation is in C, but here we use the ctypes and
-fcntl modules to create a pure Python version of os.get_terminal_size.
-"""
-
-import os
-import struct
-import sys
-
-from collections import namedtuple
-
-__all__ = ["get_terminal_size"]
-
-
-terminal_size = namedtuple("terminal_size", "columns lines")
-
-try:
- from ctypes import windll, create_string_buffer, WinError
-
- _handle_ids = {
- 0: -10,
- 1: -11,
- 2: -12,
- }
-
- def _get_terminal_size(fd):
- handle = windll.kernel32.GetStdHandle(_handle_ids[fd])
- if handle == 0:
- raise OSError('handle cannot be retrieved')
- if handle == -1:
- raise WinError()
- csbi = create_string_buffer(22)
- res = windll.kernel32.GetConsoleScreenBufferInfo(handle, csbi)
- if res:
- res = struct.unpack("hhhhHhhhhhh", csbi.raw)
- left, top, right, bottom = res[5:9]
- columns = right - left + 1
- lines = bottom - top + 1
- return terminal_size(columns, lines)
- else:
- raise WinError()
-
-except ImportError:
- import fcntl
- import termios
-
- def _get_terminal_size(fd):
- try:
- res = fcntl.ioctl(fd, termios.TIOCGWINSZ, b"\x00" * 4)
- except IOError as e:
- raise OSError(e)
- lines, columns = struct.unpack("hh", res)
-
- return terminal_size(columns, lines)
-
-
-def get_terminal_size(fallback=(80, 24)):
- """Get the size of the terminal window.
-
- For each of the two dimensions, the environment variable, COLUMNS
- and LINES respectively, is checked. If the variable is defined and
- the value is a positive integer, it is used.
-
- When COLUMNS or LINES is not defined, which is the common case,
- the terminal connected to sys.__stdout__ is queried
- by invoking os.get_terminal_size.
-
- If the terminal size cannot be successfully queried, either because
- the system doesn't support querying, or because we are not
- connected to a terminal, the value given in fallback parameter
- is used. Fallback defaults to (80, 24) which is the default
- size used by many terminal emulators.
-
- The value returned is a named tuple of type os.terminal_size.
- """
- # Try the environment first
- try:
- columns = int(os.environ["COLUMNS"])
- except (KeyError, ValueError):
- columns = 0
-
- try:
- lines = int(os.environ["LINES"])
- except (KeyError, ValueError):
- lines = 0
-
- # Only query if necessary
- if columns <= 0 or lines <= 0:
- try:
- size = _get_terminal_size(sys.__stdout__.fileno())
- except (NameError, OSError):
- size = terminal_size(*fallback)
-
- if columns <= 0:
- columns = size.columns
- if lines <= 0:
- lines = size.lines
-
- return terminal_size(columns, lines)
-
+# vendored version of backports.get_terminal_size as nemesapece package are a
+# mess and break, especially on ubuntu. This file is under MIT Licence.
+# See https://pypi.python.org/pypi/backports.shutil_get_terminal_size
+#
+# commit: afc5714b1545a5a3aa44cfb5e078d39165bf76ab (Feb 20, 2016)
+# from
+# https://github.com/chrippa/backports.shutil_get_terminal_size
+#
+# The MIT License (MIT)
+#
+# Copyright (c) 2014 Christopher Rosell
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+"""This is a backport of shutil.get_terminal_size from Python 3.3.
+
+The original implementation is in C, but here we use the ctypes and
+fcntl modules to create a pure Python version of os.get_terminal_size.
+"""
+
+import os
+import struct
+import sys
+
+from collections import namedtuple
+
+__all__ = ["get_terminal_size"]
+
+
+terminal_size = namedtuple("terminal_size", "columns lines")
+
+try:
+ from ctypes import windll, create_string_buffer, WinError
+
+ _handle_ids = {
+ 0: -10,
+ 1: -11,
+ 2: -12,
+ }
+
+ def _get_terminal_size(fd):
+ handle = windll.kernel32.GetStdHandle(_handle_ids[fd])
+ if handle == 0:
+ raise OSError('handle cannot be retrieved')
+ if handle == -1:
+ raise WinError()
+ csbi = create_string_buffer(22)
+ res = windll.kernel32.GetConsoleScreenBufferInfo(handle, csbi)
+ if res:
+ res = struct.unpack("hhhhHhhhhhh", csbi.raw)
+ left, top, right, bottom = res[5:9]
+ columns = right - left + 1
+ lines = bottom - top + 1
+ return terminal_size(columns, lines)
+ else:
+ raise WinError()
+
+except ImportError:
+ import fcntl
+ import termios
+
+ def _get_terminal_size(fd):
+ try:
+ res = fcntl.ioctl(fd, termios.TIOCGWINSZ, b"\x00" * 4)
+ except IOError as e:
+ raise OSError(e)
+ lines, columns = struct.unpack("hh", res)
+
+ return terminal_size(columns, lines)
+
+
+def get_terminal_size(fallback=(80, 24)):
+ """Get the size of the terminal window.
+
+ For each of the two dimensions, the environment variable, COLUMNS
+ and LINES respectively, is checked. If the variable is defined and
+ the value is a positive integer, it is used.
+
+ When COLUMNS or LINES is not defined, which is the common case,
+ the terminal connected to sys.__stdout__ is queried
+ by invoking os.get_terminal_size.
+
+ If the terminal size cannot be successfully queried, either because
+ the system doesn't support querying, or because we are not
+ connected to a terminal, the value given in fallback parameter
+ is used. Fallback defaults to (80, 24) which is the default
+ size used by many terminal emulators.
+
+ The value returned is a named tuple of type os.terminal_size.
+ """
+ # Try the environment first
+ try:
+ columns = int(os.environ["COLUMNS"])
+ except (KeyError, ValueError):
+ columns = 0
+
+ try:
+ lines = int(os.environ["LINES"])
+ except (KeyError, ValueError):
+ lines = 0
+
+ # Only query if necessary
+ if columns <= 0 or lines <= 0:
+ try:
+ size = _get_terminal_size(sys.__stdout__.fileno())
+ except (NameError, OSError):
+ size = terminal_size(*fallback)
+
+ if columns <= 0:
+ columns = size.columns
+ if lines <= 0:
+ lines = size.lines
+
+ return terminal_size(columns, lines)
+
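
A usage sketch for the vendored backport restored above (illustrative only; assumes the module is importable at the path shown in the diff header):

    import os
    from IPython.utils._get_terminal_size import get_terminal_size

    # COLUMNS/LINES take precedence over querying the terminal.
    os.environ['COLUMNS'], os.environ['LINES'] = '100', '40'
    print(get_terminal_size())                  # terminal_size(columns=100, lines=40)

    # Without the override the tty is queried; the fallback is used if that
    # fails (e.g. no controlling terminal).
    del os.environ['COLUMNS'], os.environ['LINES']
    print(get_terminal_size(fallback=(80, 24)))
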
diff --git a/contrib/python/ipython/py2/IPython/utils/_signatures.py b/contrib/python/ipython/py2/IPython/utils/_signatures.py
index 20f52b98ed..9ffab265db 100644
--- a/contrib/python/ipython/py2/IPython/utils/_signatures.py
+++ b/contrib/python/ipython/py2/IPython/utils/_signatures.py
@@ -21,7 +21,7 @@ import itertools
import functools
import re
import types
-import inspect
+import inspect
# patch for single-file
@@ -72,7 +72,7 @@ def signature(obj):
if not callable(obj):
raise TypeError('{0!r} is not a callable object'.format(obj))
- if inspect.ismethod(obj):
+ if inspect.ismethod(obj):
if obj.__self__ is None:
# Unbound method - treat it as a function (no distinction in Py 3)
obj = obj.__func__
@@ -97,7 +97,7 @@ def signature(obj):
else:
return signature(wrapped)
- if inspect.isfunction(obj):
+ if inspect.isfunction(obj):
return Signature.from_function(obj)
if isinstance(obj, functools.partial):
@@ -512,7 +512,7 @@ class Signature(object):
def from_function(cls, func):
'''Constructs Signature for the given python function'''
- if not inspect.isfunction(func):
+ if not inspect.isfunction(func):
raise TypeError('{0!r} is not a Python function'.format(func))
Parameter = cls._parameter_cls
diff --git a/contrib/python/ipython/py2/IPython/utils/capture.py b/contrib/python/ipython/py2/IPython/utils/capture.py
index d8f919568c..e09ff5543e 100644
--- a/contrib/python/ipython/py2/IPython/utils/capture.py
+++ b/contrib/python/ipython/py2/IPython/utils/capture.py
@@ -21,17 +21,17 @@ else:
class RichOutput(object):
- def __init__(self, data=None, metadata=None, transient=None, update=False):
+ def __init__(self, data=None, metadata=None, transient=None, update=False):
self.data = data or {}
self.metadata = metadata or {}
- self.transient = transient or {}
- self.update = update
-
+ self.transient = transient or {}
+ self.update = update
+
def display(self):
from IPython.display import publish_display_data
- publish_display_data(data=self.data, metadata=self.metadata,
- transient=self.transient, update=self.update)
-
+ publish_display_data(data=self.data, metadata=self.metadata,
+ transient=self.transient, update=self.update)
+
def _repr_mime_(self, mime):
if mime not in self.data:
return
@@ -43,22 +43,22 @@ class RichOutput(object):
def _repr_html_(self):
return self._repr_mime_("text/html")
-
+
def _repr_latex_(self):
return self._repr_mime_("text/latex")
-
+
def _repr_json_(self):
return self._repr_mime_("application/json")
-
+
def _repr_javascript_(self):
return self._repr_mime_("application/javascript")
-
+
def _repr_png_(self):
return self._repr_mime_("image/png")
-
+
def _repr_jpeg_(self):
return self._repr_mime_("image/jpeg")
-
+
def _repr_svg_(self):
return self._repr_mime_("image/svg+xml")
@@ -75,35 +75,35 @@ class CapturedIO(object):
Additionally, there's a ``c.show()`` method which will print all of the
above in the same order, and can be invoked simply via ``c()``.
"""
-
+
def __init__(self, stdout, stderr, outputs=None):
self._stdout = stdout
self._stderr = stderr
if outputs is None:
outputs = []
self._outputs = outputs
-
+
def __str__(self):
return self.stdout
-
+
@property
def stdout(self):
"Captured standard output"
if not self._stdout:
return ''
return self._stdout.getvalue()
-
+
@property
def stderr(self):
"Captured standard error"
if not self._stderr:
return ''
return self._stderr.getvalue()
-
+
@property
def outputs(self):
"""A list of the captured rich display outputs, if any.
-
+
If you have a CapturedIO object ``c``, these can be displayed in IPython
using::
@@ -111,17 +111,17 @@ class CapturedIO(object):
for o in c.outputs:
display(o)
"""
- return [ RichOutput(**kargs) for kargs in self._outputs ]
-
+ return [ RichOutput(**kargs) for kargs in self._outputs ]
+
def show(self):
"""write my output to sys.stdout/err as appropriate"""
sys.stdout.write(self.stdout)
sys.stderr.write(self.stderr)
sys.stdout.flush()
sys.stderr.flush()
- for kargs in self._outputs:
- RichOutput(**kargs).display()
-
+ for kargs in self._outputs:
+ RichOutput(**kargs).display()
+
__call__ = show
@@ -130,27 +130,27 @@ class capture_output(object):
stdout = True
stderr = True
display = True
-
+
def __init__(self, stdout=True, stderr=True, display=True):
self.stdout = stdout
self.stderr = stderr
self.display = display
self.shell = None
-
+
def __enter__(self):
from IPython.core.getipython import get_ipython
from IPython.core.displaypub import CapturingDisplayPublisher
- from IPython.core.displayhook import CapturingDisplayHook
-
+ from IPython.core.displayhook import CapturingDisplayHook
+
self.sys_stdout = sys.stdout
self.sys_stderr = sys.stderr
-
+
if self.display:
self.shell = get_ipython()
if self.shell is None:
self.save_display_pub = None
self.display = False
-
+
stdout = stderr = outputs = None
if self.stdout:
stdout = sys.stdout = StringIO()
@@ -160,17 +160,17 @@ class capture_output(object):
self.save_display_pub = self.shell.display_pub
self.shell.display_pub = CapturingDisplayPublisher()
outputs = self.shell.display_pub.outputs
- self.save_display_hook = sys.displayhook
- sys.displayhook = CapturingDisplayHook(shell=self.shell,
- outputs=outputs)
-
+ self.save_display_hook = sys.displayhook
+ sys.displayhook = CapturingDisplayHook(shell=self.shell,
+ outputs=outputs)
+
return CapturedIO(stdout, stderr, outputs)
-
+
def __exit__(self, exc_type, exc_value, traceback):
sys.stdout = self.sys_stdout
sys.stderr = self.sys_stderr
if self.display and self.shell:
self.shell.display_pub = self.save_display_pub
- sys.displayhook = self.save_display_hook
+ sys.displayhook = self.save_display_hook
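As a usage note for the capture_output context manager reindented above, a minimal sketch: stdout/stderr capture works in any Python process, while the rich-display part (c.outputs) is only populated when running inside an IPython session.

    # Sketch of capture_output / CapturedIO from IPython.utils.capture.
    from IPython.utils.capture import capture_output

    with capture_output() as c:
        print("hello")

    assert c.stdout == "hello\n"   # captured text via the stdout property
    c.show()                        # replays the captured output; same as c()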
diff --git a/contrib/python/ipython/py2/IPython/utils/io.py b/contrib/python/ipython/py2/IPython/utils/io.py
index 036d6e3926..cbbd094017 100644
--- a/contrib/python/ipython/py2/IPython/utils/io.py
+++ b/contrib/python/ipython/py2/IPython/utils/io.py
@@ -39,12 +39,12 @@ class IOStream:
def clone(meth):
return not hasattr(self, meth) and not meth.startswith('_')
for meth in filter(clone, dir(stream)):
- try:
- val = getattr(stream, meth)
- except AttributeError:
- pass
- else:
- setattr(self, meth, val)
+ try:
+ val = getattr(stream, meth)
+ except AttributeError:
+ pass
+ else:
+ setattr(self, meth, val)
def __repr__(self):
cls = self.__class__
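The reindented block above copies every public attribute of the wrapped stream onto the IOStream instance via getattr/setattr. A standalone sketch of that cloning pattern, under the assumption of a hypothetical Wrapper class (not IPython's own):

    # Hypothetical stand-in illustrating the attribute-cloning pattern above.
    import io

    class Wrapper(object):
        def __init__(self, stream):
            self.stream = stream
            # Copy public attributes we do not already define ourselves.
            def clone(name):
                return not hasattr(self, name) and not name.startswith('_')
            for name in filter(clone, dir(stream)):
                try:
                    value = getattr(stream, name)
                except AttributeError:
                    pass
                else:
                    setattr(self, name, value)

    w = Wrapper(io.StringIO())
    w.write("delegated")      # write() was cloned from the wrapped stream
    print(w.getvalue())       # -> delegated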
diff --git a/contrib/python/ipython/py2/IPython/utils/path.py b/contrib/python/ipython/py2/IPython/utils/path.py
index fa850812c7..e4489a71e0 100644
--- a/contrib/python/ipython/py2/IPython/utils/path.py
+++ b/contrib/python/ipython/py2/IPython/utils/path.py
@@ -77,7 +77,7 @@ def unquote_filename(name, win32=(sys.platform=='win32')):
unquoting is now taken care of by :func:`IPython.utils.process.arg_split`.
"""
warn("'unquote_filename' is deprecated since IPython 5.0 and should not "
- "be used anymore", DeprecationWarning, stacklevel=2)
+ "be used anymore", DeprecationWarning, stacklevel=2)
if win32:
if name.startswith(("'", '"')) and name.endswith(("'", '"')):
name = name[1:-1]
@@ -104,7 +104,7 @@ def get_py_filename(name, force_win32=None):
if force_win32 is not None:
warn("The 'force_win32' argument to 'get_py_filename' is deprecated "
"since IPython 5.0 and should not be used anymore",
- DeprecationWarning, stacklevel=2)
+ DeprecationWarning, stacklevel=2)
if not os.path.isfile(name) and not name.endswith('.py'):
name += '.py'
if os.path.isfile(name):
@@ -255,31 +255,31 @@ def get_xdg_cache_dir():
@undoc
def get_ipython_dir():
- warn("get_ipython_dir has moved to the IPython.paths module since IPython 4.0.", stacklevel=2)
+ warn("get_ipython_dir has moved to the IPython.paths module since IPython 4.0.", stacklevel=2)
from IPython.paths import get_ipython_dir
return get_ipython_dir()
@undoc
def get_ipython_cache_dir():
- warn("get_ipython_cache_dir has moved to the IPython.paths module since IPython 4.0.", stacklevel=2)
+ warn("get_ipython_cache_dir has moved to the IPython.paths module since IPython 4.0.", stacklevel=2)
from IPython.paths import get_ipython_cache_dir
return get_ipython_cache_dir()
@undoc
def get_ipython_package_dir():
- warn("get_ipython_package_dir has moved to the IPython.paths module since IPython 4.0.", stacklevel=2)
+ warn("get_ipython_package_dir has moved to the IPython.paths module since IPython 4.0.", stacklevel=2)
from IPython.paths import get_ipython_package_dir
return get_ipython_package_dir()
@undoc
def get_ipython_module_path(module_str):
- warn("get_ipython_module_path has moved to the IPython.paths module since IPython 4.0.", stacklevel=2)
+ warn("get_ipython_module_path has moved to the IPython.paths module since IPython 4.0.", stacklevel=2)
from IPython.paths import get_ipython_module_path
return get_ipython_module_path(module_str)
@undoc
def locate_profile(profile='default'):
- warn("locate_profile has moved to the IPython.paths module since IPython 4.0.", stacklevel=2)
+ warn("locate_profile has moved to the IPython.paths module since IPython 4.0.", stacklevel=2)
from IPython.paths import locate_profile
return locate_profile(profile=profile)
@@ -370,7 +370,7 @@ def target_update(target,deps,cmd):
def filehash(path):
"""Make an MD5 hash of a file, ignoring any differences in line
ending characters."""
- warn("filehash() is deprecated since IPython 4.0", DeprecationWarning, stacklevel=2)
+ warn("filehash() is deprecated since IPython 4.0", DeprecationWarning, stacklevel=2)
with open(path, "rU") as f:
return md5(py3compat.str_to_bytes(f.read())).hexdigest()
diff --git a/contrib/python/ipython/py2/IPython/utils/terminal.py b/contrib/python/ipython/py2/IPython/utils/terminal.py
index e92c410c79..afebe10e52 100644
--- a/contrib/python/ipython/py2/IPython/utils/terminal.py
+++ b/contrib/python/ipython/py2/IPython/utils/terminal.py
@@ -9,8 +9,8 @@ Authors:
* Alexander Belchenko (e-mail: bialix AT ukr.net)
"""
-from __future__ import absolute_import
-
+from __future__ import absolute_import
+
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
@@ -21,10 +21,10 @@ try:
from shutil import get_terminal_size as _get_terminal_size
except ImportError:
# use backport on Python 2
- try:
- from backports.shutil_get_terminal_size import get_terminal_size as _get_terminal_size
- except ImportError:
- from ._get_terminal_size import get_terminal_size as _get_terminal_size
+ try:
+ from backports.shutil_get_terminal_size import get_terminal_size as _get_terminal_size
+ except ImportError:
+ from ._get_terminal_size import get_terminal_size as _get_terminal_size
from . import py3compat
diff --git a/contrib/python/packaging/py2/packaging/_structures.py b/contrib/python/packaging/py2/packaging/_structures.py
index 800d5c5588..eccc677d8d 100644
--- a/contrib/python/packaging/py2/packaging/_structures.py
+++ b/contrib/python/packaging/py2/packaging/_structures.py
@@ -41,7 +41,7 @@ class InfinityType(object):
# type: (object) -> NegativeInfinityType
return NegativeInfinity
-
+
Infinity = InfinityType()
@@ -82,5 +82,5 @@ class NegativeInfinityType(object):
# type: (object) -> InfinityType
return Infinity
-
+
NegativeInfinity = NegativeInfinityType()
diff --git a/contrib/python/packaging/py2/packaging/requirements.py b/contrib/python/packaging/py2/packaging/requirements.py
index aa69d50d1a..4675aad33c 100644
--- a/contrib/python/packaging/py2/packaging/requirements.py
+++ b/contrib/python/packaging/py2/packaging/requirements.py
@@ -80,8 +80,8 @@ MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
lambda s, l, t: Marker(s[t._original_start : t._original_end])
)
-MARKER_SEPARATOR = SEMICOLON
-MARKER = MARKER_SEPARATOR + MARKER_EXPR
+MARKER_SEPARATOR = SEMICOLON
+MARKER = MARKER_SEPARATOR + MARKER_EXPR
VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)
@@ -89,9 +89,9 @@ URL_AND_MARKER = URL + Optional(MARKER)
NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
-# pyparsing isn't thread safe during initialization, so we do it eagerly, see
-# issue #104
-REQUIREMENT.parseString("x[]")
+# pyparsing isn't thread safe during initialization, so we do it eagerly, see
+# issue #104
+REQUIREMENT.parseString("x[]")
class Requirement(object):
@@ -127,7 +127,7 @@ class Requirement(object):
elif not (parsed_url.scheme and parsed_url.netloc) or (
not parsed_url.scheme and not parsed_url.netloc
):
- raise InvalidRequirement("Invalid URL: {0}".format(req.url))
+ raise InvalidRequirement("Invalid URL: {0}".format(req.url))
self.url = req.url # type: TOptional[str]
else:
self.url = None
diff --git a/contrib/python/packaging/py2/packaging/specifiers.py b/contrib/python/packaging/py2/packaging/specifiers.py
index a6a83c1fe9..695d88be1e 100644
--- a/contrib/python/packaging/py2/packaging/specifiers.py
+++ b/contrib/python/packaging/py2/packaging/specifiers.py
@@ -238,7 +238,7 @@ class _IndividualSpecifier(BaseSpecifier):
):
found_prereleases.append(version)
# Either this is not a prerelease, or we should have been
- # accepting prereleases from the beginning.
+ # accepting prereleases from the beginning.
else:
yielded = True
yield version
@@ -582,7 +582,7 @@ class Specifier(_IndividualSpecifier):
return False
# Ensure that we do not allow a local version of the version mentioned
- # in the specifier, which is technically greater than, to match.
+ # in the specifier, which is technically greater than, to match.
if prospective.local is not None:
if Version(prospective.base_version) == Version(spec.base_version):
return False
diff --git a/contrib/python/packaging/py2/packaging/utils.py b/contrib/python/packaging/py2/packaging/utils.py
index 6e8c2a3e5b..341ca66c62 100644
--- a/contrib/python/packaging/py2/packaging/utils.py
+++ b/contrib/python/packaging/py2/packaging/utils.py
@@ -7,11 +7,11 @@ import re
from ._typing import TYPE_CHECKING, cast
from .tags import Tag, parse_tag
-from .version import InvalidVersion, Version
+from .version import InvalidVersion, Version
if TYPE_CHECKING: # pragma: no cover
from typing import FrozenSet, NewType, Tuple, Union
-
+
BuildTag = Union[Tuple[()], Tuple[int, str]]
NormalizedName = NewType("NormalizedName", str)
else:
@@ -41,48 +41,48 @@ def canonicalize_name(name):
# This is taken from PEP 503.
value = _canonicalize_regex.sub("-", name).lower()
return cast(NormalizedName, value)
-
-
+
+
def canonicalize_version(version):
# type: (Union[Version, str]) -> Union[Version, str]
- """
+ """
This is very similar to Version.__str__, but has one subtle difference
- with the way it handles the release segment.
- """
+ with the way it handles the release segment.
+ """
if not isinstance(version, Version):
try:
version = Version(version)
except InvalidVersion:
# Legacy versions cannot be normalized
return version
-
- parts = []
-
- # Epoch
- if version.epoch != 0:
- parts.append("{0}!".format(version.epoch))
-
- # Release segment
- # NB: This strips trailing '.0's to normalize
+
+ parts = []
+
+ # Epoch
+ if version.epoch != 0:
+ parts.append("{0}!".format(version.epoch))
+
+ # Release segment
+ # NB: This strips trailing '.0's to normalize
parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release)))
-
- # Pre-release
- if version.pre is not None:
- parts.append("".join(str(x) for x in version.pre))
-
- # Post-release
- if version.post is not None:
- parts.append(".post{0}".format(version.post))
-
- # Development release
- if version.dev is not None:
- parts.append(".dev{0}".format(version.dev))
-
- # Local version segment
- if version.local is not None:
- parts.append("+{0}".format(version.local))
-
- return "".join(parts)
+
+ # Pre-release
+ if version.pre is not None:
+ parts.append("".join(str(x) for x in version.pre))
+
+ # Post-release
+ if version.post is not None:
+ parts.append(".post{0}".format(version.post))
+
+ # Development release
+ if version.dev is not None:
+ parts.append(".dev{0}".format(version.dev))
+
+ # Local version segment
+ if version.local is not None:
+ parts.append("+{0}".format(version.local))
+
+ return "".join(parts)
def parse_wheel_filename(filename):
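To make the normalization above concrete, a short sketch of canonicalize_version() as implemented in this hunk; exact behaviour for unparseable strings depends on the installed packaging release, so treat the last line as a property of this vendored copy.

    # Sketch of packaging.utils.canonicalize_version: trailing ".0" release
    # components are stripped, while epoch, pre/post/dev and local segments
    # are preserved; legacy (unparseable) versions are returned unchanged.
    from packaging.utils import canonicalize_version

    print(canonicalize_version("1.4.0"))          # "1.4"
    print(canonicalize_version("2.0.0.post1"))    # "2.post1"
    print(canonicalize_version("not-a-version"))  # returned as-is (legacy)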
diff --git a/contrib/python/packaging/py2/packaging/version.py b/contrib/python/packaging/py2/packaging/version.py
index 517d91f248..4384913407 100644
--- a/contrib/python/packaging/py2/packaging/version.py
+++ b/contrib/python/packaging/py2/packaging/version.py
@@ -149,31 +149,31 @@ class LegacyVersion(_BaseVersion):
return self._version
@property
- def epoch(self):
+ def epoch(self):
# type: () -> int
- return -1
-
- @property
- def release(self):
+ return -1
+
+ @property
+ def release(self):
# type: () -> None
- return None
-
- @property
- def pre(self):
+ return None
+
+ @property
+ def pre(self):
# type: () -> None
- return None
-
- @property
- def post(self):
+ return None
+
+ @property
+ def post(self):
# type: () -> None
- return None
-
- @property
- def dev(self):
+ return None
+
+ @property
+ def dev(self):
# type: () -> None
- return None
-
- @property
+ return None
+
+ @property
def local(self):
# type: () -> None
return None
@@ -188,12 +188,12 @@ class LegacyVersion(_BaseVersion):
# type: () -> bool
return False
- @property
- def is_devrelease(self):
+ @property
+ def is_devrelease(self):
# type: () -> bool
- return False
-
+ return False
+
_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
_legacy_version_replacement_map = {
@@ -250,7 +250,7 @@ def _legacy_cmpkey(version):
return epoch, tuple(parts)
-
+
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
@@ -328,67 +328,67 @@ class Version(_BaseVersion):
parts = []
# Epoch
- if self.epoch != 0:
- parts.append("{0}!".format(self.epoch))
+ if self.epoch != 0:
+ parts.append("{0}!".format(self.epoch))
# Release segment
- parts.append(".".join(str(x) for x in self.release))
+ parts.append(".".join(str(x) for x in self.release))
# Pre-release
- if self.pre is not None:
- parts.append("".join(str(x) for x in self.pre))
+ if self.pre is not None:
+ parts.append("".join(str(x) for x in self.pre))
# Post-release
- if self.post is not None:
- parts.append(".post{0}".format(self.post))
+ if self.post is not None:
+ parts.append(".post{0}".format(self.post))
# Development release
- if self.dev is not None:
- parts.append(".dev{0}".format(self.dev))
+ if self.dev is not None:
+ parts.append(".dev{0}".format(self.dev))
# Local version segment
- if self.local is not None:
- parts.append("+{0}".format(self.local))
+ if self.local is not None:
+ parts.append("+{0}".format(self.local))
return "".join(parts)
@property
- def epoch(self):
+ def epoch(self):
# type: () -> int
_epoch = self._version.epoch # type: int
return _epoch
-
- @property
- def release(self):
+
+ @property
+ def release(self):
# type: () -> Tuple[int, ...]
_release = self._version.release # type: Tuple[int, ...]
return _release
-
- @property
- def pre(self):
+
+ @property
+ def pre(self):
# type: () -> Optional[Tuple[str, int]]
_pre = self._version.pre # type: Optional[Tuple[str, int]]
return _pre
-
- @property
- def post(self):
+
+ @property
+ def post(self):
# type: () -> Optional[Tuple[str, int]]
- return self._version.post[1] if self._version.post else None
-
- @property
- def dev(self):
+ return self._version.post[1] if self._version.post else None
+
+ @property
+ def dev(self):
# type: () -> Optional[Tuple[str, int]]
- return self._version.dev[1] if self._version.dev else None
-
- @property
- def local(self):
+ return self._version.dev[1] if self._version.dev else None
+
+ @property
+ def local(self):
# type: () -> Optional[str]
- if self._version.local:
- return ".".join(str(x) for x in self._version.local)
- else:
- return None
-
- @property
+ if self._version.local:
+ return ".".join(str(x) for x in self._version.local)
+ else:
+ return None
+
+ @property
def public(self):
# type: () -> str
return str(self).split("+", 1)[0]
@@ -399,34 +399,34 @@ class Version(_BaseVersion):
parts = []
# Epoch
- if self.epoch != 0:
- parts.append("{0}!".format(self.epoch))
+ if self.epoch != 0:
+ parts.append("{0}!".format(self.epoch))
# Release segment
- parts.append(".".join(str(x) for x in self.release))
+ parts.append(".".join(str(x) for x in self.release))
return "".join(parts)
@property
def is_prerelease(self):
# type: () -> bool
- return self.dev is not None or self.pre is not None
+ return self.dev is not None or self.pre is not None
@property
def is_postrelease(self):
# type: () -> bool
- return self.post is not None
+ return self.post is not None
- @property
- def is_devrelease(self):
+ @property
+ def is_devrelease(self):
# type: () -> bool
- return self.dev is not None
+ return self.dev is not None
@property
def major(self):
# type: () -> int
return self.release[0] if len(self.release) >= 1 else 0
-
+
@property
def minor(self):
# type: () -> int
@@ -476,7 +476,7 @@ def _parse_letter_version(
return None
-_local_version_separators = re.compile(r"[\._-]")
+_local_version_separators = re.compile(r"[\._-]")
def _parse_local_version(local):
@@ -487,7 +487,7 @@ def _parse_local_version(local):
if local is not None:
return tuple(
part.lower() if not part.isdigit() else int(part)
- for part in _local_version_separators.split(local)
+ for part in _local_version_separators.split(local)
)
return None
diff --git a/contrib/python/packaging/py3/packaging/_structures.py b/contrib/python/packaging/py3/packaging/_structures.py
index 90a6465f96..e6339ed9be 100644
--- a/contrib/python/packaging/py3/packaging/_structures.py
+++ b/contrib/python/packaging/py3/packaging/_structures.py
@@ -28,7 +28,7 @@ class InfinityType:
def __neg__(self: object) -> "NegativeInfinityType":
return NegativeInfinity
-
+
Infinity = InfinityType()
@@ -57,5 +57,5 @@ class NegativeInfinityType:
def __neg__(self: object) -> InfinityType:
return Infinity
-
+
NegativeInfinity = NegativeInfinityType()
diff --git a/contrib/python/packaging/py3/packaging/requirements.py b/contrib/python/packaging/py3/packaging/requirements.py
index 53f9a3aa42..6a439cd668 100644
--- a/contrib/python/packaging/py3/packaging/requirements.py
+++ b/contrib/python/packaging/py3/packaging/requirements.py
@@ -70,8 +70,8 @@ MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
lambda s, l, t: Marker(s[t._original_start : t._original_end])
)
-MARKER_SEPARATOR = SEMICOLON
-MARKER = MARKER_SEPARATOR + MARKER_EXPR
+MARKER_SEPARATOR = SEMICOLON
+MARKER = MARKER_SEPARATOR + MARKER_EXPR
VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)
@@ -79,9 +79,9 @@ URL_AND_MARKER = URL + Optional(MARKER)
NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
-# pyparsing isn't thread safe during initialization, so we do it eagerly, see
-# issue #104
-REQUIREMENT.parseString("x[]")
+# pyparsing isn't thread safe during initialization, so we do it eagerly, see
+# issue #104
+REQUIREMENT.parseString("x[]")
class Requirement:
diff --git a/contrib/python/packaging/py3/packaging/specifiers.py b/contrib/python/packaging/py3/packaging/specifiers.py
index 0e218a6f9f..76202dc3d3 100644
--- a/contrib/python/packaging/py3/packaging/specifiers.py
+++ b/contrib/python/packaging/py3/packaging/specifiers.py
@@ -211,7 +211,7 @@ class _IndividualSpecifier(BaseSpecifier):
):
found_prereleases.append(version)
# Either this is not a prerelease, or we should have been
- # accepting prereleases from the beginning.
+ # accepting prereleases from the beginning.
else:
yielded = True
yield version
@@ -536,7 +536,7 @@ class Specifier(_IndividualSpecifier):
return False
# Ensure that we do not allow a local version of the version mentioned
- # in the specifier, which is technically greater than, to match.
+ # in the specifier, which is technically greater than, to match.
if prospective.local is not None:
if Version(prospective.base_version) == Version(spec.base_version):
return False
diff --git a/contrib/python/packaging/py3/packaging/utils.py b/contrib/python/packaging/py3/packaging/utils.py
index bab11b80c6..2da24c30e9 100644
--- a/contrib/python/packaging/py3/packaging/utils.py
+++ b/contrib/python/packaging/py3/packaging/utils.py
@@ -6,11 +6,11 @@ import re
from typing import FrozenSet, NewType, Tuple, Union, cast
from .tags import Tag, parse_tag
-from .version import InvalidVersion, Version
+from .version import InvalidVersion, Version
BuildTag = Union[Tuple[()], Tuple[int, str]]
NormalizedName = NewType("NormalizedName", str)
-
+
class InvalidWheelFilename(ValueError):
"""
@@ -33,13 +33,13 @@ def canonicalize_name(name: str) -> NormalizedName:
# This is taken from PEP 503.
value = _canonicalize_regex.sub("-", name).lower()
return cast(NormalizedName, value)
-
-
+
+
def canonicalize_version(version: Union[Version, str]) -> str:
- """
+ """
This is very similar to Version.__str__, but has one subtle difference
- with the way it handles the release segment.
- """
+ with the way it handles the release segment.
+ """
if isinstance(version, str):
try:
parsed = Version(version)
@@ -48,34 +48,34 @@ def canonicalize_version(version: Union[Version, str]) -> str:
return version
else:
parsed = version
-
- parts = []
-
- # Epoch
+
+ parts = []
+
+ # Epoch
if parsed.epoch != 0:
parts.append(f"{parsed.epoch}!")
-
- # Release segment
- # NB: This strips trailing '.0's to normalize
+
+ # Release segment
+ # NB: This strips trailing '.0's to normalize
parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in parsed.release)))
-
- # Pre-release
+
+ # Pre-release
if parsed.pre is not None:
parts.append("".join(str(x) for x in parsed.pre))
-
- # Post-release
+
+ # Post-release
if parsed.post is not None:
parts.append(f".post{parsed.post}")
-
- # Development release
+
+ # Development release
if parsed.dev is not None:
parts.append(f".dev{parsed.dev}")
-
- # Local version segment
+
+ # Local version segment
if parsed.local is not None:
parts.append(f"+{parsed.local}")
-
- return "".join(parts)
+
+ return "".join(parts)
def parse_wheel_filename(
diff --git a/contrib/python/packaging/py3/packaging/version.py b/contrib/python/packaging/py3/packaging/version.py
index de9a09a4ed..91dba8ee9b 100644
--- a/contrib/python/packaging/py3/packaging/version.py
+++ b/contrib/python/packaging/py3/packaging/version.py
@@ -130,25 +130,25 @@ class LegacyVersion(_BaseVersion):
@property
def epoch(self) -> int:
- return -1
-
- @property
+ return -1
+
+ @property
def release(self) -> None:
- return None
-
- @property
+ return None
+
+ @property
def pre(self) -> None:
- return None
-
- @property
+ return None
+
+ @property
def post(self) -> None:
- return None
-
- @property
+ return None
+
+ @property
def dev(self) -> None:
- return None
-
- @property
+ return None
+
+ @property
def local(self) -> None:
return None
@@ -160,11 +160,11 @@ class LegacyVersion(_BaseVersion):
def is_postrelease(self) -> bool:
return False
- @property
+ @property
def is_devrelease(self) -> bool:
- return False
-
+ return False
+
_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
_legacy_version_replacement_map = {
@@ -219,7 +219,7 @@ def _legacy_cmpkey(version: str) -> LegacyCmpKey:
return epoch, tuple(parts)
-
+
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
@@ -294,26 +294,26 @@ class Version(_BaseVersion):
parts = []
# Epoch
- if self.epoch != 0:
+ if self.epoch != 0:
parts.append(f"{self.epoch}!")
# Release segment
- parts.append(".".join(str(x) for x in self.release))
+ parts.append(".".join(str(x) for x in self.release))
# Pre-release
- if self.pre is not None:
- parts.append("".join(str(x) for x in self.pre))
+ if self.pre is not None:
+ parts.append("".join(str(x) for x in self.pre))
# Post-release
- if self.post is not None:
+ if self.post is not None:
parts.append(f".post{self.post}")
# Development release
- if self.dev is not None:
+ if self.dev is not None:
parts.append(f".dev{self.dev}")
# Local version segment
- if self.local is not None:
+ if self.local is not None:
parts.append(f"+{self.local}")
return "".join(parts)
@@ -322,33 +322,33 @@ class Version(_BaseVersion):
def epoch(self) -> int:
_epoch: int = self._version.epoch
return _epoch
-
- @property
+
+ @property
def release(self) -> Tuple[int, ...]:
_release: Tuple[int, ...] = self._version.release
return _release
-
- @property
+
+ @property
def pre(self) -> Optional[Tuple[str, int]]:
_pre: Optional[Tuple[str, int]] = self._version.pre
return _pre
-
- @property
+
+ @property
def post(self) -> Optional[int]:
- return self._version.post[1] if self._version.post else None
-
- @property
+ return self._version.post[1] if self._version.post else None
+
+ @property
def dev(self) -> Optional[int]:
- return self._version.dev[1] if self._version.dev else None
-
- @property
+ return self._version.dev[1] if self._version.dev else None
+
+ @property
def local(self) -> Optional[str]:
- if self._version.local:
- return ".".join(str(x) for x in self._version.local)
- else:
- return None
-
- @property
+ if self._version.local:
+ return ".".join(str(x) for x in self._version.local)
+ else:
+ return None
+
+ @property
def public(self) -> str:
return str(self).split("+", 1)[0]
@@ -357,30 +357,30 @@ class Version(_BaseVersion):
parts = []
# Epoch
- if self.epoch != 0:
+ if self.epoch != 0:
parts.append(f"{self.epoch}!")
# Release segment
- parts.append(".".join(str(x) for x in self.release))
+ parts.append(".".join(str(x) for x in self.release))
return "".join(parts)
@property
def is_prerelease(self) -> bool:
- return self.dev is not None or self.pre is not None
+ return self.dev is not None or self.pre is not None
@property
def is_postrelease(self) -> bool:
- return self.post is not None
+ return self.post is not None
- @property
+ @property
def is_devrelease(self) -> bool:
- return self.dev is not None
+ return self.dev is not None
@property
def major(self) -> int:
return self.release[0] if len(self.release) >= 1 else 0
-
+
@property
def minor(self) -> int:
return self.release[1] if len(self.release) >= 2 else 0
@@ -426,7 +426,7 @@ def _parse_letter_version(
return None
-_local_version_separators = re.compile(r"[\._-]")
+_local_version_separators = re.compile(r"[\._-]")
def _parse_local_version(local: str) -> Optional[LocalType]:
@@ -436,7 +436,7 @@ def _parse_local_version(local: str) -> Optional[LocalType]:
if local is not None:
return tuple(
part.lower() if not part.isdigit() else int(part)
- for part in _local_version_separators.split(local)
+ for part in _local_version_separators.split(local)
)
return None
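For reference, a short sketch of the Version accessors reindented above, using a PEP 440 version string that exercises every segment:

    # Sketch of the packaging.version.Version properties shown in this hunk.
    from packaging.version import Version

    v = Version("1!2.3.4rc1.post2.dev3+ubuntu.1")
    print(v.epoch)      # 1
    print(v.release)    # (2, 3, 4)
    print(v.pre)        # ('rc', 1)
    print(v.post)       # 2
    print(v.dev)        # 3
    print(v.local)      # 'ubuntu.1'
    print(v.public)     # everything before the '+'
    print(v.is_prerelease, v.is_postrelease, v.is_devrelease)  # True True True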
diff --git a/contrib/python/pickleshare/pickleshare.py b/contrib/python/pickleshare/pickleshare.py
index 086f84f6ea..1ed4a96a9c 100644
--- a/contrib/python/pickleshare/pickleshare.py
+++ b/contrib/python/pickleshare/pickleshare.py
@@ -26,7 +26,7 @@ This module is certainly not ZODB, but can be used for low-load
(non-mission-critical) situations where tiny code size trumps the
advanced features of a "real" object database.
-Installation guide: pip install pickleshare
+Installation guide: pip install pickleshare
Author: Ville Vainio <vivainio@gmail.com>
License: MIT open source license.
@@ -36,52 +36,52 @@ License: MIT open source license.
from __future__ import print_function
-__version__ = "0.7.5"
-
-try:
- from pathlib import Path
-except ImportError:
- # Python 2 backport
- from pathlib2 import Path
+__version__ = "0.7.5"
+try:
+ from pathlib import Path
+except ImportError:
+ # Python 2 backport
+ from pathlib2 import Path
+
import os,stat,time
try:
- import collections.abc as collections_abc
-except ImportError:
- import collections as collections_abc
-try:
+ import collections.abc as collections_abc
+except ImportError:
+ import collections as collections_abc
+try:
import cPickle as pickle
except ImportError:
import pickle
import errno
-import sys
-
-if sys.version_info[0] >= 3:
- string_types = (str,)
-else:
- string_types = (str, unicode)
+import sys
+if sys.version_info[0] >= 3:
+ string_types = (str,)
+else:
+ string_types = (str, unicode)
+
def gethashfile(key):
return ("%02x" % abs(hash(key) % 256))[-2:]
_sentinel = object()
-class PickleShareDB(collections_abc.MutableMapping):
+class PickleShareDB(collections_abc.MutableMapping):
""" The main 'connection' object for PickleShare database """
def __init__(self,root):
""" Return a db object that will manage the specied directory"""
- if not isinstance(root, string_types):
- root = str(root)
- root = os.path.abspath(os.path.expanduser(root))
- self.root = Path(root)
- if not self.root.is_dir():
- # catching the exception is necessary if multiple processes are concurrently trying to create a folder
- # exists_ok keyword argument of mkdir does the same but only from Python 3.5
- try:
- self.root.mkdir(parents=True)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
+ if not isinstance(root, string_types):
+ root = str(root)
+ root = os.path.abspath(os.path.expanduser(root))
+ self.root = Path(root)
+ if not self.root.is_dir():
+ # catching the exception is necessary if multiple processes are concurrently trying to create a folder
+ # exists_ok keyword argument of mkdir does the same but only from Python 3.5
+ try:
+ self.root.mkdir(parents=True)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
# cache has { 'key' : (obj, orig_mod_time) }
self.cache = {}
@@ -110,14 +110,14 @@ class PickleShareDB(collections_abc.MutableMapping):
""" db['key'] = 5 """
fil = self.root / key
parent = fil.parent
- if parent and not parent.is_dir():
- parent.mkdir(parents=True)
+ if parent and not parent.is_dir():
+ parent.mkdir(parents=True)
# We specify protocol 2, so that we can mostly go between Python 2
# and Python 3. We can upgrade to protocol 3 when Python 2 is obsolete.
with fil.open('wb') as f:
pickle.dump(value, f, protocol=2)
try:
- self.cache[fil] = (value, fil.stat().st_mtime)
+ self.cache[fil] = (value, fil.stat().st_mtime)
except OSError as e:
if e.errno != errno.ENOENT:
raise
@@ -125,8 +125,8 @@ class PickleShareDB(collections_abc.MutableMapping):
def hset(self, hashroot, key, value):
""" hashed set """
hroot = self.root / hashroot
- if not hroot.is_dir():
- hroot.mkdir()
+ if not hroot.is_dir():
+ hroot.mkdir()
hfile = hroot / gethashfile(key)
d = self.get(hfile, {})
d.update( {key : value})
@@ -193,9 +193,9 @@ class PickleShareDB(collections_abc.MutableMapping):
self[hashroot + '/xx'] = all
for f in hfiles:
p = self.root / f
- if p.name == 'xx':
+ if p.name == 'xx':
continue
- p.unlink()
+ p.unlink()
@@ -204,7 +204,7 @@ class PickleShareDB(collections_abc.MutableMapping):
fil = self.root / key
self.cache.pop(fil,None)
try:
- fil.unlink()
+ fil.unlink()
except OSError:
# notfound and permission denied are ok - we
# lost, the other process wins the conflict
@@ -212,16 +212,16 @@ class PickleShareDB(collections_abc.MutableMapping):
def _normalized(self, p):
""" Make a key suitable for user's eyes """
- return str(p.relative_to(self.root)).replace('\\','/')
+ return str(p.relative_to(self.root)).replace('\\','/')
def keys(self, globpat = None):
""" All keys in DB, or all keys matching a glob"""
if globpat is None:
- files = self.root.rglob('*')
+ files = self.root.rglob('*')
else:
- files = self.root.glob(globpat)
- return [self._normalized(p) for p in files if p.is_file()]
+ files = self.root.glob(globpat)
+ return [self._normalized(p) for p in files if p.is_file()]
def __iter__(self):
return iter(self.keys())
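A minimal usage sketch for the PickleShareDB class reindented above; keys behave like relative file paths under the database root, and values are pickled to individual files.

    # Sketch of pickleshare.PickleShareDB usage with a temporary directory.
    import tempfile
    from pickleshare import PickleShareDB

    db = PickleShareDB(tempfile.mkdtemp())
    db['settings/theme'] = {'dark': True}   # stored as a pickled file on disk
    print(db['settings/theme'])             # {'dark': True}
    print(db.keys())                        # ['settings/theme']
    del db['settings/theme']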
diff --git a/contrib/python/pickleshare/ya.make b/contrib/python/pickleshare/ya.make
index e24c2cdad7..d1a1376083 100644
--- a/contrib/python/pickleshare/ya.make
+++ b/contrib/python/pickleshare/ya.make
@@ -4,7 +4,7 @@ PY23_LIBRARY()
OWNER(borman g:python-contrib)
-VERSION(0.7.5)
+VERSION(0.7.5)
LICENSE(MIT)
diff --git a/contrib/python/pycparser/pycparser/__init__.py b/contrib/python/pycparser/pycparser/__init__.py
index d82eb2d6fb..5c179e725f 100644
--- a/contrib/python/pycparser/pycparser/__init__.py
+++ b/contrib/python/pycparser/pycparser/__init__.py
@@ -4,14 +4,14 @@
# This package file exports some convenience functions for
# interacting with pycparser
#
-# Eli Bendersky [https://eli.thegreenplace.net/]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
__all__ = ['c_lexer', 'c_parser', 'c_ast']
__version__ = '2.21'
-import io
-from subprocess import check_output
+import io
+from subprocess import check_output
from .c_parser import CParser
@@ -39,7 +39,7 @@ def preprocess_file(filename, cpp_path='cpp', cpp_args=''):
try:
# Note the use of universal_newlines to treat all newlines
# as \n for Python's purpose
- text = check_output(path_list, universal_newlines=True)
+ text = check_output(path_list, universal_newlines=True)
except OSError as e:
raise RuntimeError("Unable to invoke 'cpp'. " +
'Make sure its path was passed correctly\n' +
@@ -82,7 +82,7 @@ def parse_file(filename, use_cpp=False, cpp_path='cpp', cpp_args='',
if use_cpp:
text = preprocess_file(filename, cpp_path, cpp_args)
else:
- with io.open(filename) as f:
+ with io.open(filename) as f:
text = f.read()
if parser is None:
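A hedged sketch of the parse_file() entry point touched above. Assumptions: 'example.c' is a hypothetical input file, the 'cpp' binary is on PATH when use_cpp=True, and the include path passed in cpp_args is purely illustrative.

    # Sketch of pycparser.parse_file: with use_cpp=True the file is first
    # run through the C preprocessor; otherwise it must already be
    # preprocessed (no #include / #define directives left).
    from pycparser import parse_file

    ast = parse_file('example.c', use_cpp=True,
                     cpp_args='-Iutils/fake_libc_include')  # illustrative path
    ast.show()   # dump the parsed AST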
diff --git a/contrib/python/pycparser/pycparser/_ast_gen.py b/contrib/python/pycparser/pycparser/_ast_gen.py
index 0f7d330ba6..176a0adb4b 100644
--- a/contrib/python/pycparser/pycparser/_ast_gen.py
+++ b/contrib/python/pycparser/pycparser/_ast_gen.py
@@ -7,7 +7,7 @@
# The design of this module was inspired by astgen.py from the
# Python 2.5 code-base.
#
-# Eli Bendersky [https://eli.thegreenplace.net/]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
from string import Template
@@ -62,7 +62,7 @@ class NodeCfg(object):
contents: a list of contents - attributes and child nodes
See comment at the top of the configuration file for details.
"""
-
+
def __init__(self, name, contents):
self.name = name
self.all_entries = []
@@ -84,7 +84,7 @@ class NodeCfg(object):
def generate_source(self):
src = self._gen_init()
src += '\n' + self._gen_children()
- src += '\n' + self._gen_iter()
+ src += '\n' + self._gen_iter()
src += '\n' + self._gen_attr_names()
return src
@@ -132,33 +132,33 @@ class NodeCfg(object):
return src
- def _gen_iter(self):
- src = ' def __iter__(self):\n'
-
- if self.all_entries:
- for child in self.child:
- src += (
- ' if self.%(child)s is not None:\n' +
- ' yield self.%(child)s\n') % (dict(child=child))
-
- for seq_child in self.seq_child:
- src += (
- ' for child in (self.%(child)s or []):\n'
- ' yield child\n') % (dict(child=seq_child))
-
- if not (self.child or self.seq_child):
- # Empty generator
- src += (
- ' return\n' +
- ' yield\n')
- else:
- # Empty generator
- src += (
- ' return\n' +
- ' yield\n')
-
- return src
-
+ def _gen_iter(self):
+ src = ' def __iter__(self):\n'
+
+ if self.all_entries:
+ for child in self.child:
+ src += (
+ ' if self.%(child)s is not None:\n' +
+ ' yield self.%(child)s\n') % (dict(child=child))
+
+ for seq_child in self.seq_child:
+ src += (
+ ' for child in (self.%(child)s or []):\n'
+ ' yield child\n') % (dict(child=seq_child))
+
+ if not (self.child or self.seq_child):
+ # Empty generator
+ src += (
+ ' return\n' +
+ ' yield\n')
+ else:
+ # Empty generator
+ src += (
+ ' return\n' +
+ ' yield\n')
+
+ return src
+
def _gen_attr_names(self):
src = " attr_names = (" + ''.join("%r, " % nm for nm in self.attr) + ')'
return src
@@ -178,7 +178,7 @@ r'''#-----------------------------------------------------------------
#
# AST Node classes.
#
-# Eli Bendersky [https://eli.thegreenplace.net/]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
@@ -187,38 +187,38 @@ r'''#-----------------------------------------------------------------
_PROLOGUE_CODE = r'''
import sys
-def _repr(obj):
- """
- Get the representation of an object, with dedicated pprint-like format for lists.
- """
- if isinstance(obj, list):
- return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]'
- else:
+def _repr(obj):
+ """
+ Get the representation of an object, with dedicated pprint-like format for lists.
+ """
+ if isinstance(obj, list):
+ return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]'
+ else:
return repr(obj)
class Node(object):
__slots__ = ()
""" Abstract base class for AST nodes.
"""
- def __repr__(self):
- """ Generates a python representation of the current node
- """
- result = self.__class__.__name__ + '('
-
- indent = ''
- separator = ''
- for name in self.__slots__[:-2]:
- result += separator
- result += indent
- result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__)))))
+ def __repr__(self):
+ """ Generates a python representation of the current node
+ """
+ result = self.__class__.__name__ + '('
- separator = ','
- indent = '\n ' + (' ' * len(self.__class__.__name__))
+ indent = ''
+ separator = ''
+ for name in self.__slots__[:-2]:
+ result += separator
+ result += indent
+ result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__)))))
- result += indent + ')'
+ separator = ','
+ indent = '\n ' + (' ' * len(self.__class__.__name__))
- return result
+ result += indent + ')'
+ return result
+
def children(self):
""" A sequence of all children that are Nodes
"""
@@ -308,29 +308,29 @@ class NodeVisitor(object):
* Modeled after Python's own AST visiting facilities
(the ast module of Python 3.0)
"""
-
- _method_cache = None
-
+
+ _method_cache = None
+
def visit(self, node):
""" Visit a node.
"""
-
- if self._method_cache is None:
- self._method_cache = {}
-
- visitor = self._method_cache.get(node.__class__.__name__, None)
- if visitor is None:
- method = 'visit_' + node.__class__.__name__
- visitor = getattr(self, method, self.generic_visit)
- self._method_cache[node.__class__.__name__] = visitor
-
+
+ if self._method_cache is None:
+ self._method_cache = {}
+
+ visitor = self._method_cache.get(node.__class__.__name__, None)
+ if visitor is None:
+ method = 'visit_' + node.__class__.__name__
+ visitor = getattr(self, method, self.generic_visit)
+ self._method_cache[node.__class__.__name__] = visitor
+
return visitor(node)
def generic_visit(self, node):
""" Called if no explicit visitor function exists for a
node. Implements preorder visiting of the node.
"""
- for c in node:
+ for c in node:
self.visit(c)
'''
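To illustrate the NodeVisitor machinery reindented above (visit() resolves visit_<ClassName> once per node type and caches it in _method_cache), a small subclass sketch; the FuncCallCounter class and the source snippet are illustrative, not part of pycparser.

    # Sketch of subclassing pycparser's NodeVisitor to count function calls.
    from pycparser import c_parser, c_ast

    class FuncCallCounter(c_ast.NodeVisitor):
        def __init__(self):
            self.count = 0
        def visit_FuncCall(self, node):
            self.count += 1
            self.generic_visit(node)   # keep walking the call's children

    src = 'int main(void) { foo(); bar(); return 0; }'
    ast = c_parser.CParser().parse(src)
    counter = FuncCallCounter()
    counter.visit(ast)
    print(counter.count)   # 2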
diff --git a/contrib/python/pycparser/pycparser/_build_tables.py b/contrib/python/pycparser/pycparser/_build_tables.py
index 958381ad0f..761cc4cf1a 100644
--- a/contrib/python/pycparser/pycparser/_build_tables.py
+++ b/contrib/python/pycparser/pycparser/_build_tables.py
@@ -6,7 +6,7 @@
# Also generates AST code from the configuration file.
# Should be called from the pycparser directory.
#
-# Eli Bendersky [https://eli.thegreenplace.net/]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
diff --git a/contrib/python/pycparser/pycparser/_c_ast.cfg b/contrib/python/pycparser/pycparser/_c_ast.cfg
index 0626533e8a..6f254f4d32 100644
--- a/contrib/python/pycparser/pycparser/_c_ast.cfg
+++ b/contrib/python/pycparser/pycparser/_c_ast.cfg
@@ -9,7 +9,7 @@
# <name>** - a sequence of child nodes
# <name> - an attribute
#
-# Eli Bendersky [https://eli.thegreenplace.net/]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
diff --git a/contrib/python/pycparser/pycparser/ast_transforms.py b/contrib/python/pycparser/pycparser/ast_transforms.py
index 367dcf54c5..633ae74b3a 100644
--- a/contrib/python/pycparser/pycparser/ast_transforms.py
+++ b/contrib/python/pycparser/pycparser/ast_transforms.py
@@ -3,7 +3,7 @@
#
# Some utilities used by the parser to create a friendlier AST.
#
-# Eli Bendersky [https://eli.thegreenplace.net/]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#------------------------------------------------------------------------------
diff --git a/contrib/python/pycparser/pycparser/c_ast.py b/contrib/python/pycparser/pycparser/c_ast.py
index 6575a2ad39..fc07648fef 100644
--- a/contrib/python/pycparser/pycparser/c_ast.py
+++ b/contrib/python/pycparser/pycparser/c_ast.py
@@ -11,45 +11,45 @@
#
# AST Node classes.
#
-# Eli Bendersky [https://eli.thegreenplace.net/]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
import sys
-def _repr(obj):
- """
- Get the representation of an object, with dedicated pprint-like format for lists.
- """
- if isinstance(obj, list):
- return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]'
- else:
+def _repr(obj):
+ """
+ Get the representation of an object, with dedicated pprint-like format for lists.
+ """
+ if isinstance(obj, list):
+ return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]'
+ else:
return repr(obj)
class Node(object):
__slots__ = ()
""" Abstract base class for AST nodes.
"""
- def __repr__(self):
- """ Generates a python representation of the current node
- """
- result = self.__class__.__name__ + '('
-
- indent = ''
- separator = ''
- for name in self.__slots__[:-2]:
- result += separator
- result += indent
- result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__)))))
+ def __repr__(self):
+ """ Generates a python representation of the current node
+ """
+ result = self.__class__.__name__ + '('
- separator = ','
- indent = '\n ' + (' ' * len(self.__class__.__name__))
+ indent = ''
+ separator = ''
+ for name in self.__slots__[:-2]:
+ result += separator
+ result += indent
+ result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__)))))
- result += indent + ')'
+ separator = ','
+ indent = '\n ' + (' ' * len(self.__class__.__name__))
- return result
+ result += indent + ')'
+ return result
+
def children(self):
""" A sequence of all children that are Nodes
"""
@@ -139,29 +139,29 @@ class NodeVisitor(object):
* Modeled after Python's own AST visiting facilities
(the ast module of Python 3.0)
"""
-
- _method_cache = None
-
+
+ _method_cache = None
+
def visit(self, node):
""" Visit a node.
"""
-
- if self._method_cache is None:
- self._method_cache = {}
-
- visitor = self._method_cache.get(node.__class__.__name__, None)
- if visitor is None:
- method = 'visit_' + node.__class__.__name__
- visitor = getattr(self, method, self.generic_visit)
- self._method_cache[node.__class__.__name__] = visitor
-
+
+ if self._method_cache is None:
+ self._method_cache = {}
+
+ visitor = self._method_cache.get(node.__class__.__name__, None)
+ if visitor is None:
+ method = 'visit_' + node.__class__.__name__
+ visitor = getattr(self, method, self.generic_visit)
+ self._method_cache[node.__class__.__name__] = visitor
+
return visitor(node)
def generic_visit(self, node):
""" Called if no explicit visitor function exists for a
node. Implements preorder visiting of the node.
"""
- for c in node:
+ for c in node:
self.visit(c)
class ArrayDecl(Node):
@@ -178,12 +178,12 @@ class ArrayDecl(Node):
if self.dim is not None: nodelist.append(("dim", self.dim))
return tuple(nodelist)
- def __iter__(self):
- if self.type is not None:
- yield self.type
- if self.dim is not None:
- yield self.dim
-
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+ if self.dim is not None:
+ yield self.dim
+
attr_names = ('dim_quals', )
class ArrayRef(Node):
@@ -199,12 +199,12 @@ class ArrayRef(Node):
if self.subscript is not None: nodelist.append(("subscript", self.subscript))
return tuple(nodelist)
- def __iter__(self):
- if self.name is not None:
- yield self.name
- if self.subscript is not None:
- yield self.subscript
-
+ def __iter__(self):
+ if self.name is not None:
+ yield self.name
+ if self.subscript is not None:
+ yield self.subscript
+
attr_names = ()
class Assignment(Node):
@@ -221,12 +221,12 @@ class Assignment(Node):
if self.rvalue is not None: nodelist.append(("rvalue", self.rvalue))
return tuple(nodelist)
- def __iter__(self):
- if self.lvalue is not None:
- yield self.lvalue
- if self.rvalue is not None:
- yield self.rvalue
-
+ def __iter__(self):
+ if self.lvalue is not None:
+ yield self.lvalue
+ if self.rvalue is not None:
+ yield self.rvalue
+
attr_names = ('op', )
class Alignas(Node):
@@ -260,12 +260,12 @@ class BinaryOp(Node):
if self.right is not None: nodelist.append(("right", self.right))
return tuple(nodelist)
- def __iter__(self):
- if self.left is not None:
- yield self.left
- if self.right is not None:
- yield self.right
-
+ def __iter__(self):
+ if self.left is not None:
+ yield self.left
+ if self.right is not None:
+ yield self.right
+
attr_names = ('op', )
class Break(Node):
@@ -276,10 +276,10 @@ class Break(Node):
def children(self):
return ()
- def __iter__(self):
- return
- yield
-
+ def __iter__(self):
+ return
+ yield
+
attr_names = ()
class Case(Node):
@@ -296,12 +296,12 @@ class Case(Node):
nodelist.append(("stmts[%d]" % i, child))
return tuple(nodelist)
- def __iter__(self):
- if self.expr is not None:
- yield self.expr
- for child in (self.stmts or []):
- yield child
-
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+ for child in (self.stmts or []):
+ yield child
+
attr_names = ()
class Cast(Node):
@@ -317,12 +317,12 @@ class Cast(Node):
if self.expr is not None: nodelist.append(("expr", self.expr))
return tuple(nodelist)
- def __iter__(self):
- if self.to_type is not None:
- yield self.to_type
- if self.expr is not None:
- yield self.expr
-
+ def __iter__(self):
+ if self.to_type is not None:
+ yield self.to_type
+ if self.expr is not None:
+ yield self.expr
+
attr_names = ()
class Compound(Node):
@@ -337,10 +337,10 @@ class Compound(Node):
nodelist.append(("block_items[%d]" % i, child))
return tuple(nodelist)
- def __iter__(self):
- for child in (self.block_items or []):
- yield child
-
+ def __iter__(self):
+ for child in (self.block_items or []):
+ yield child
+
attr_names = ()
class CompoundLiteral(Node):
@@ -356,12 +356,12 @@ class CompoundLiteral(Node):
if self.init is not None: nodelist.append(("init", self.init))
return tuple(nodelist)
- def __iter__(self):
- if self.type is not None:
- yield self.type
- if self.init is not None:
- yield self.init
-
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+ if self.init is not None:
+ yield self.init
+
attr_names = ()
class Constant(Node):
@@ -375,10 +375,10 @@ class Constant(Node):
nodelist = []
return tuple(nodelist)
- def __iter__(self):
- return
- yield
-
+ def __iter__(self):
+ return
+ yield
+
attr_names = ('type', 'value', )
class Continue(Node):
@@ -389,10 +389,10 @@ class Continue(Node):
def children(self):
return ()
- def __iter__(self):
- return
- yield
-
+ def __iter__(self):
+ return
+ yield
+
attr_names = ()
class Decl(Node):
@@ -415,14 +415,14 @@ class Decl(Node):
if self.bitsize is not None: nodelist.append(("bitsize", self.bitsize))
return tuple(nodelist)
- def __iter__(self):
- if self.type is not None:
- yield self.type
- if self.init is not None:
- yield self.init
- if self.bitsize is not None:
- yield self.bitsize
-
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+ if self.init is not None:
+ yield self.init
+ if self.bitsize is not None:
+ yield self.bitsize
+
attr_names = ('name', 'quals', 'align', 'storage', 'funcspec', )
class DeclList(Node):
@@ -437,10 +437,10 @@ class DeclList(Node):
nodelist.append(("decls[%d]" % i, child))
return tuple(nodelist)
- def __iter__(self):
- for child in (self.decls or []):
- yield child
-
+ def __iter__(self):
+ for child in (self.decls or []):
+ yield child
+
attr_names = ()
class Default(Node):
@@ -455,10 +455,10 @@ class Default(Node):
nodelist.append(("stmts[%d]" % i, child))
return tuple(nodelist)
- def __iter__(self):
- for child in (self.stmts or []):
- yield child
-
+ def __iter__(self):
+ for child in (self.stmts or []):
+ yield child
+
attr_names = ()
class DoWhile(Node):
@@ -474,12 +474,12 @@ class DoWhile(Node):
if self.stmt is not None: nodelist.append(("stmt", self.stmt))
return tuple(nodelist)
- def __iter__(self):
- if self.cond is not None:
- yield self.cond
- if self.stmt is not None:
- yield self.stmt
-
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.stmt is not None:
+ yield self.stmt
+
attr_names = ()
class EllipsisParam(Node):
@@ -490,10 +490,10 @@ class EllipsisParam(Node):
def children(self):
return ()
- def __iter__(self):
- return
- yield
-
+ def __iter__(self):
+ return
+ yield
+
attr_names = ()
class EmptyStatement(Node):
@@ -504,10 +504,10 @@ class EmptyStatement(Node):
def children(self):
return ()
- def __iter__(self):
- return
- yield
-
+ def __iter__(self):
+ return
+ yield
+
attr_names = ()
class Enum(Node):
@@ -522,10 +522,10 @@ class Enum(Node):
if self.values is not None: nodelist.append(("values", self.values))
return tuple(nodelist)
- def __iter__(self):
- if self.values is not None:
- yield self.values
-
+ def __iter__(self):
+ if self.values is not None:
+ yield self.values
+
attr_names = ('name', )
class Enumerator(Node):
@@ -540,10 +540,10 @@ class Enumerator(Node):
if self.value is not None: nodelist.append(("value", self.value))
return tuple(nodelist)
- def __iter__(self):
- if self.value is not None:
- yield self.value
-
+ def __iter__(self):
+ if self.value is not None:
+ yield self.value
+
attr_names = ('name', )
class EnumeratorList(Node):
@@ -558,10 +558,10 @@ class EnumeratorList(Node):
nodelist.append(("enumerators[%d]" % i, child))
return tuple(nodelist)
- def __iter__(self):
- for child in (self.enumerators or []):
- yield child
-
+ def __iter__(self):
+ for child in (self.enumerators or []):
+ yield child
+
attr_names = ()
class ExprList(Node):
@@ -576,10 +576,10 @@ class ExprList(Node):
nodelist.append(("exprs[%d]" % i, child))
return tuple(nodelist)
- def __iter__(self):
- for child in (self.exprs or []):
- yield child
-
+ def __iter__(self):
+ for child in (self.exprs or []):
+ yield child
+
attr_names = ()
class FileAST(Node):
@@ -594,10 +594,10 @@ class FileAST(Node):
nodelist.append(("ext[%d]" % i, child))
return tuple(nodelist)
- def __iter__(self):
- for child in (self.ext or []):
- yield child
-
+ def __iter__(self):
+ for child in (self.ext or []):
+ yield child
+
attr_names = ()
class For(Node):
@@ -617,16 +617,16 @@ class For(Node):
if self.stmt is not None: nodelist.append(("stmt", self.stmt))
return tuple(nodelist)
- def __iter__(self):
- if self.init is not None:
- yield self.init
- if self.cond is not None:
- yield self.cond
- if self.next is not None:
- yield self.next
- if self.stmt is not None:
- yield self.stmt
-
+ def __iter__(self):
+ if self.init is not None:
+ yield self.init
+ if self.cond is not None:
+ yield self.cond
+ if self.next is not None:
+ yield self.next
+ if self.stmt is not None:
+ yield self.stmt
+
attr_names = ()
class FuncCall(Node):
@@ -642,12 +642,12 @@ class FuncCall(Node):
if self.args is not None: nodelist.append(("args", self.args))
return tuple(nodelist)
- def __iter__(self):
- if self.name is not None:
- yield self.name
- if self.args is not None:
- yield self.args
-
+ def __iter__(self):
+ if self.name is not None:
+ yield self.name
+ if self.args is not None:
+ yield self.args
+
attr_names = ()
class FuncDecl(Node):
@@ -663,12 +663,12 @@ class FuncDecl(Node):
if self.type is not None: nodelist.append(("type", self.type))
return tuple(nodelist)
- def __iter__(self):
- if self.args is not None:
- yield self.args
- if self.type is not None:
- yield self.type
-
+ def __iter__(self):
+ if self.args is not None:
+ yield self.args
+ if self.type is not None:
+ yield self.type
+
attr_names = ()
class FuncDef(Node):
@@ -687,14 +687,14 @@ class FuncDef(Node):
nodelist.append(("param_decls[%d]" % i, child))
return tuple(nodelist)
- def __iter__(self):
- if self.decl is not None:
- yield self.decl
- if self.body is not None:
- yield self.body
- for child in (self.param_decls or []):
- yield child
-
+ def __iter__(self):
+ if self.decl is not None:
+ yield self.decl
+ if self.body is not None:
+ yield self.body
+ for child in (self.param_decls or []):
+ yield child
+
attr_names = ()
class Goto(Node):
@@ -707,10 +707,10 @@ class Goto(Node):
nodelist = []
return tuple(nodelist)
- def __iter__(self):
- return
- yield
-
+ def __iter__(self):
+ return
+ yield
+
attr_names = ('name', )
class ID(Node):
@@ -723,10 +723,10 @@ class ID(Node):
nodelist = []
return tuple(nodelist)
- def __iter__(self):
- return
- yield
-
+ def __iter__(self):
+ return
+ yield
+
attr_names = ('name', )
class IdentifierType(Node):
@@ -739,10 +739,10 @@ class IdentifierType(Node):
nodelist = []
return tuple(nodelist)
- def __iter__(self):
- return
- yield
-
+ def __iter__(self):
+ return
+ yield
+
attr_names = ('names', )
class If(Node):
@@ -760,14 +760,14 @@ class If(Node):
if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse))
return tuple(nodelist)
- def __iter__(self):
- if self.cond is not None:
- yield self.cond
- if self.iftrue is not None:
- yield self.iftrue
- if self.iffalse is not None:
- yield self.iffalse
-
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.iftrue is not None:
+ yield self.iftrue
+ if self.iffalse is not None:
+ yield self.iffalse
+
attr_names = ()
class InitList(Node):
@@ -782,10 +782,10 @@ class InitList(Node):
nodelist.append(("exprs[%d]" % i, child))
return tuple(nodelist)
- def __iter__(self):
- for child in (self.exprs or []):
- yield child
-
+ def __iter__(self):
+ for child in (self.exprs or []):
+ yield child
+
attr_names = ()
class Label(Node):
@@ -800,10 +800,10 @@ class Label(Node):
if self.stmt is not None: nodelist.append(("stmt", self.stmt))
return tuple(nodelist)
- def __iter__(self):
- if self.stmt is not None:
- yield self.stmt
-
+ def __iter__(self):
+ if self.stmt is not None:
+ yield self.stmt
+
attr_names = ('name', )
class NamedInitializer(Node):
@@ -820,12 +820,12 @@ class NamedInitializer(Node):
nodelist.append(("name[%d]" % i, child))
return tuple(nodelist)
- def __iter__(self):
- if self.expr is not None:
- yield self.expr
- for child in (self.name or []):
- yield child
-
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+ for child in (self.name or []):
+ yield child
+
attr_names = ()
class ParamList(Node):
@@ -840,10 +840,10 @@ class ParamList(Node):
nodelist.append(("params[%d]" % i, child))
return tuple(nodelist)
- def __iter__(self):
- for child in (self.params or []):
- yield child
-
+ def __iter__(self):
+ for child in (self.params or []):
+ yield child
+
attr_names = ()
class PtrDecl(Node):
@@ -858,10 +858,10 @@ class PtrDecl(Node):
if self.type is not None: nodelist.append(("type", self.type))
return tuple(nodelist)
- def __iter__(self):
- if self.type is not None:
- yield self.type
-
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
attr_names = ('quals', )
class Return(Node):
@@ -875,10 +875,10 @@ class Return(Node):
if self.expr is not None: nodelist.append(("expr", self.expr))
return tuple(nodelist)
- def __iter__(self):
- if self.expr is not None:
- yield self.expr
-
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+
attr_names = ()
class StaticAssert(Node):
@@ -915,10 +915,10 @@ class Struct(Node):
nodelist.append(("decls[%d]" % i, child))
return tuple(nodelist)
- def __iter__(self):
- for child in (self.decls or []):
- yield child
-
+ def __iter__(self):
+ for child in (self.decls or []):
+ yield child
+
attr_names = ('name', )
class StructRef(Node):
@@ -935,12 +935,12 @@ class StructRef(Node):
if self.field is not None: nodelist.append(("field", self.field))
return tuple(nodelist)
- def __iter__(self):
- if self.name is not None:
- yield self.name
- if self.field is not None:
- yield self.field
-
+ def __iter__(self):
+ if self.name is not None:
+ yield self.name
+ if self.field is not None:
+ yield self.field
+
attr_names = ('type', )
class Switch(Node):
@@ -956,12 +956,12 @@ class Switch(Node):
if self.stmt is not None: nodelist.append(("stmt", self.stmt))
return tuple(nodelist)
- def __iter__(self):
- if self.cond is not None:
- yield self.cond
- if self.stmt is not None:
- yield self.stmt
-
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.stmt is not None:
+ yield self.stmt
+
attr_names = ()
class TernaryOp(Node):
@@ -979,14 +979,14 @@ class TernaryOp(Node):
if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse))
return tuple(nodelist)
- def __iter__(self):
- if self.cond is not None:
- yield self.cond
- if self.iftrue is not None:
- yield self.iftrue
- if self.iffalse is not None:
- yield self.iffalse
-
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.iftrue is not None:
+ yield self.iftrue
+ if self.iffalse is not None:
+ yield self.iffalse
+
attr_names = ()
class TypeDecl(Node):
@@ -1003,10 +1003,10 @@ class TypeDecl(Node):
if self.type is not None: nodelist.append(("type", self.type))
return tuple(nodelist)
- def __iter__(self):
- if self.type is not None:
- yield self.type
-
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
attr_names = ('declname', 'quals', 'align', )
class Typedef(Node):
@@ -1023,10 +1023,10 @@ class Typedef(Node):
if self.type is not None: nodelist.append(("type", self.type))
return tuple(nodelist)
- def __iter__(self):
- if self.type is not None:
- yield self.type
-
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
attr_names = ('name', 'quals', 'storage', )
class Typename(Node):
@@ -1043,10 +1043,10 @@ class Typename(Node):
if self.type is not None: nodelist.append(("type", self.type))
return tuple(nodelist)
- def __iter__(self):
- if self.type is not None:
- yield self.type
-
+ def __iter__(self):
+ if self.type is not None:
+ yield self.type
+
attr_names = ('name', 'quals', 'align', )
class UnaryOp(Node):
@@ -1061,10 +1061,10 @@ class UnaryOp(Node):
if self.expr is not None: nodelist.append(("expr", self.expr))
return tuple(nodelist)
- def __iter__(self):
- if self.expr is not None:
- yield self.expr
-
+ def __iter__(self):
+ if self.expr is not None:
+ yield self.expr
+
attr_names = ('op', )
class Union(Node):
@@ -1080,10 +1080,10 @@ class Union(Node):
nodelist.append(("decls[%d]" % i, child))
return tuple(nodelist)
- def __iter__(self):
- for child in (self.decls or []):
- yield child
-
+ def __iter__(self):
+ for child in (self.decls or []):
+ yield child
+
attr_names = ('name', )
class While(Node):
@@ -1099,12 +1099,12 @@ class While(Node):
if self.stmt is not None: nodelist.append(("stmt", self.stmt))
return tuple(nodelist)
- def __iter__(self):
- if self.cond is not None:
- yield self.cond
- if self.stmt is not None:
- yield self.stmt
-
+ def __iter__(self):
+ if self.cond is not None:
+ yield self.cond
+ if self.stmt is not None:
+ yield self.stmt
+
attr_names = ()
class Pragma(Node):
@@ -1117,9 +1117,9 @@ class Pragma(Node):
nodelist = []
return tuple(nodelist)
- def __iter__(self):
- return
- yield
-
+ def __iter__(self):
+ return
+ yield
+
attr_names = ('string', )
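With __iter__ defined on the node classes, client code can walk an AST with plain for loops instead of subclassing NodeVisitor. A hedged usage sketch, assuming the rest of the generated c_ast.py (including FileAST, not shown in these hunks) gains the same __iter__ methods; count_nodes is a made-up helper:

from pycparser import c_parser

def count_nodes(node):
    # every node yields its non-None children, so plain recursion is enough
    return 1 + sum(count_nodes(child) for child in node)

ast = c_parser.CParser().parse("int add(int a, int b) { return a + b; }")
print(count_nodes(ast))  # total number of AST nodes in the snippet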
diff --git a/contrib/python/pycparser/pycparser/c_generator.py b/contrib/python/pycparser/pycparser/c_generator.py
index 1057b2c62e..7edeca7fcd 100644
--- a/contrib/python/pycparser/pycparser/c_generator.py
+++ b/contrib/python/pycparser/pycparser/c_generator.py
@@ -3,7 +3,7 @@
#
# C code generator from pycparser AST nodes.
#
-# Eli Bendersky [https://eli.thegreenplace.net/]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#------------------------------------------------------------------------------
from . import c_ast
@@ -43,7 +43,7 @@ class CGenerator(object):
def visit_ID(self, n):
return n.name
-
+
def visit_Pragma(self, n):
ret = '#pragma'
if n.string:
@@ -178,24 +178,24 @@ class CGenerator(object):
return ', '.join(visited_subexprs)
def visit_Enum(self, n):
- return self._generate_struct_union_enum(n, name='enum')
+ return self._generate_struct_union_enum(n, name='enum')
def visit_Alignas(self, n):
return '_Alignas({})'.format(self.visit(n.alignment))
- def visit_Enumerator(self, n):
- if not n.value:
- return '{indent}{name},\n'.format(
- indent=self._make_indent(),
- name=n.name,
- )
- else:
- return '{indent}{name} = {value},\n'.format(
- indent=self._make_indent(),
- name=n.name,
- value=self.visit(n.value),
- )
-
+ def visit_Enumerator(self, n):
+ if not n.value:
+ return '{indent}{name},\n'.format(
+ indent=self._make_indent(),
+ name=n.name,
+ )
+ else:
+ return '{indent}{name} = {value},\n'.format(
+ indent=self._make_indent(),
+ name=n.name,
+ value=self.visit(n.value),
+ )
+
def visit_FuncDef(self, n):
decl = self.visit(n.decl)
self.indent_level = 0
@@ -226,10 +226,10 @@ class CGenerator(object):
s += self._make_indent() + '}\n'
return s
- def visit_CompoundLiteral(self, n):
- return '(' + self.visit(n.type) + '){' + self.visit(n.init) + '}'
-
-
+ def visit_CompoundLiteral(self, n):
+ return '(' + self.visit(n.type) + '){' + self.visit(n.init) + '}'
+
+
def visit_EmptyStatement(self, n):
return ';'
@@ -325,21 +325,21 @@ class CGenerator(object):
return '...'
def visit_Struct(self, n):
- return self._generate_struct_union_enum(n, 'struct')
+ return self._generate_struct_union_enum(n, 'struct')
def visit_Typename(self, n):
return self._generate_type(n.type)
def visit_Union(self, n):
- return self._generate_struct_union_enum(n, 'union')
+ return self._generate_struct_union_enum(n, 'union')
def visit_NamedInitializer(self, n):
s = ''
for name in n.name:
if isinstance(name, c_ast.ID):
s += '.' + name.name
- else:
- s += '[' + self.visit(name) + ']'
+ else:
+ s += '[' + self.visit(name) + ']'
s += ' = ' + self._visit_expr(n.expr)
return s
@@ -355,37 +355,37 @@ class CGenerator(object):
def visit_PtrDecl(self, n):
return self._generate_type(n, emit_declname=False)
- def _generate_struct_union_enum(self, n, name):
- """ Generates code for structs, unions, and enums. name should be
- 'struct', 'union', or 'enum'.
+ def _generate_struct_union_enum(self, n, name):
+ """ Generates code for structs, unions, and enums. name should be
+ 'struct', 'union', or 'enum'.
"""
- if name in ('struct', 'union'):
- members = n.decls
- body_function = self._generate_struct_union_body
- else:
- assert name == 'enum'
- members = None if n.values is None else n.values.enumerators
- body_function = self._generate_enum_body
+ if name in ('struct', 'union'):
+ members = n.decls
+ body_function = self._generate_struct_union_body
+ else:
+ assert name == 'enum'
+ members = None if n.values is None else n.values.enumerators
+ body_function = self._generate_enum_body
s = name + ' ' + (n.name or '')
- if members is not None:
- # None means no members
- # Empty sequence means an empty list of members
+ if members is not None:
+ # None means no members
+ # Empty sequence means an empty list of members
s += '\n'
s += self._make_indent()
self.indent_level += 2
s += '{\n'
- s += body_function(members)
+ s += body_function(members)
self.indent_level -= 2
s += self._make_indent() + '}'
return s
- def _generate_struct_union_body(self, members):
- return ''.join(self._generate_stmt(decl) for decl in members)
-
- def _generate_enum_body(self, members):
- # `[:-2] + '\n'` removes the final `,` from the enumerator list
- return ''.join(self.visit(value) for value in members)[:-2] + '\n'
-
+ def _generate_struct_union_body(self, members):
+ return ''.join(self._generate_stmt(decl) for decl in members)
+
+ def _generate_enum_body(self, members):
+ # `[:-2] + '\n'` removes the final `,` from the enumerator list
+ return ''.join(self.visit(value) for value in members)[:-2] + '\n'
+
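A short usage sketch for the generator paths touched above (visit_Enumerator, _generate_struct_union_enum and the struct/union/enum body helpers): parse a struct and an enum, then regenerate C text through CGenerator.

from pycparser import c_parser, c_generator

src = "struct point { int x; int y; }; enum color { RED, GREEN = 3 };"
ast = c_parser.CParser().parse(src)
print(c_generator.CGenerator().visit(ast))  # pretty-printed C for both declarations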
def _generate_stmt(self, n, add_indent=False):
""" Generation from a statement node. This method exists as a wrapper
for individual visit_* methods to handle different treatment of
@@ -498,5 +498,5 @@ class CGenerator(object):
""" Returns True for nodes that are "simple" - i.e. nodes that always
have higher precedence than operators.
"""
- return isinstance(n, (c_ast.Constant, c_ast.ID, c_ast.ArrayRef,
- c_ast.StructRef, c_ast.FuncCall))
+ return isinstance(n, (c_ast.Constant, c_ast.ID, c_ast.ArrayRef,
+ c_ast.StructRef, c_ast.FuncCall))
diff --git a/contrib/python/pycparser/pycparser/c_lexer.py b/contrib/python/pycparser/pycparser/c_lexer.py
index d68d8ebfa3..5ac2231352 100644
--- a/contrib/python/pycparser/pycparser/c_lexer.py
+++ b/contrib/python/pycparser/pycparser/c_lexer.py
@@ -3,7 +3,7 @@
#
# CLexer class: lexer for the C language
#
-# Eli Bendersky [https://eli.thegreenplace.net/]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#------------------------------------------------------------------------------
import re
@@ -51,8 +51,8 @@ class CLexer(object):
# Allow either "# line" or "# <num>" to support GCC's
# cpp output
#
- self.line_pattern = re.compile(r'([ \t]*line\W)|([ \t]*\d+)')
- self.pragma_pattern = re.compile(r'[ \t]*pragma\W')
+ self.line_pattern = re.compile(r'([ \t]*line\W)|([ \t]*\d+)')
+ self.pragma_pattern = re.compile(r'[ \t]*pragma\W')
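The two patterns above only decide which preprocessor-directive state the lexer enters. An illustration of what they accept (regexes copied verbatim, test strings made up):

import re

line_pattern = re.compile(r'([ \t]*line\W)|([ \t]*\d+)')
pragma_pattern = re.compile(r'[ \t]*pragma\W')

assert line_pattern.match('line 7 "a.c"')     # "# line 7" style directive
assert line_pattern.match(' 7 "a.c"')         # bare "# 7" form emitted by GCC's cpp
assert pragma_pattern.match(' pragma once\n')
assert not pragma_pattern.match('pragmatic')  # \W requires a non-word character after "pragma"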
def build(self, **kwargs):
""" Builds the lexer from the specification. Must be
@@ -105,7 +105,7 @@ class CLexer(object):
'REGISTER', 'OFFSETOF',
'RESTRICT', 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT',
'SWITCH', 'TYPEDEF', 'UNION', 'UNSIGNED', 'VOID',
- 'VOLATILE', 'WHILE', '__INT128',
+ 'VOLATILE', 'WHILE', '__INT128',
)
keywords_new = (
diff --git a/contrib/python/pycparser/pycparser/c_parser.py b/contrib/python/pycparser/pycparser/c_parser.py
index 640a759406..87d10e8486 100644
--- a/contrib/python/pycparser/pycparser/c_parser.py
+++ b/contrib/python/pycparser/pycparser/c_parser.py
@@ -3,7 +3,7 @@
#
# CParser class: Parser and AST builder for the C language
#
-# Eli Bendersky [https://eli.thegreenplace.net/]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#------------------------------------------------------------------------------
from .ply import yacc
@@ -14,7 +14,7 @@ from .plyparser import PLYParser, ParseError, parameterized, template
from .ast_transforms import fix_switch_cases, fix_atomic_specifiers
-@template
+@template
class CParser(PLYParser):
def __init__(
self,
@@ -41,11 +41,11 @@ class CParser(PLYParser):
When releasing with a stable lexer, set to True
to save the re-generation of the lexer table on
each run.
-
+
lexer:
Set this parameter to define the lexer to use if
you're not using the default CLexer.
-
+
lextab:
Points to the lex table that's used for optimized
mode. Only if you're modifying the lexer and want
@@ -90,12 +90,12 @@ class CParser(PLYParser):
'abstract_declarator',
'assignment_expression',
'declaration_list',
- 'declaration_specifiers_no_type',
+ 'declaration_specifiers_no_type',
'designation',
'expression',
'identifier_list',
'init_declarator_list',
- 'id_init_declarator_list',
+ 'id_init_declarator_list',
'initializer_list',
'parameter_type_list',
'block_item_list',
@@ -340,7 +340,7 @@ class CParser(PLYParser):
coord=typename[0].coord)
return decl
- def _add_declaration_specifier(self, declspec, newspec, kind, append=False):
+ def _add_declaration_specifier(self, declspec, newspec, kind, append=False):
""" Declaration specifiers are represented by a dictionary
with the entries:
* qual: a list of type qualifiers
@@ -351,18 +351,18 @@ class CParser(PLYParser):
This method is given a declaration specifier, and a
new specifier of a given kind.
- If `append` is True, the new specifier is added to the end of
- the specifiers list, otherwise it's added at the beginning.
+ If `append` is True, the new specifier is added to the end of
+ the specifiers list, otherwise it's added at the beginning.
Returns the declaration specifier, with the new
specifier incorporated.
"""
spec = declspec or dict(qual=[], storage=[], type=[], function=[], alignment=[])
-
- if append:
- spec[kind].append(newspec)
- else:
- spec[kind].insert(0, newspec)
-
+
+ if append:
+ spec[kind].append(newspec)
+ else:
+ spec[kind].insert(0, newspec)
+
return spec
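A standalone sketch of the append/prepend behaviour documented above, using a plain dict shaped like the parser's specifier record (illustration only, not a call into the parser):

spec = dict(qual=[], storage=[], type=['int'], function=[], alignment=[])

spec['type'].append('unsigned')   # what append=True does: the new specifier goes last
spec['qual'].insert(0, 'const')   # the default branch: the new specifier goes first

assert spec['type'] == ['int', 'unsigned']
assert spec['qual'] == ['const']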
def _build_declarations(self, spec, decls, typedef_namespace=False):
@@ -569,21 +569,21 @@ class CParser(PLYParser):
""" pp_directive : PPHASH
"""
self._parse_error('Directives not supported yet',
- self._token_coord(p, 1))
+ self._token_coord(p, 1))
def p_pppragma_directive(self, p):
""" pppragma_directive : PPPRAGMA
| PPPRAGMA PPPRAGMASTR
"""
if len(p) == 3:
- p[0] = c_ast.Pragma(p[2], self._token_coord(p, 2))
+ p[0] = c_ast.Pragma(p[2], self._token_coord(p, 2))
else:
- p[0] = c_ast.Pragma("", self._token_coord(p, 1))
+ p[0] = c_ast.Pragma("", self._token_coord(p, 1))
# In function definitions, the declarator can be followed by
# a declaration list, for old "K&R style" function definitions.
def p_function_definition_1(self, p):
- """ function_definition : id_declarator declaration_list_opt compound_statement
+ """ function_definition : id_declarator declaration_list_opt compound_statement
"""
# no declaration specifiers - 'int' becomes the default type
spec = dict(
@@ -591,7 +591,7 @@ class CParser(PLYParser):
alignment=[],
storage=[],
type=[c_ast.IdentifierType(['int'],
- coord=self._token_coord(p, 1))],
+ coord=self._token_coord(p, 1))],
function=[])
p[0] = self._build_function_definition(
@@ -601,7 +601,7 @@ class CParser(PLYParser):
body=p[3])
def p_function_definition_2(self, p):
- """ function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement
+ """ function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement
"""
spec = p[1]
@@ -634,53 +634,53 @@ class CParser(PLYParser):
# such as labeled_statements, selection_statements, and
# iteration_statements, causing a misleading structure in the AST. For
# example, consider the following C code.
- #
- # for (int i = 0; i < 3; i++)
- # #pragma omp critical
- # sum += 1;
- #
+ #
+ # for (int i = 0; i < 3; i++)
+ # #pragma omp critical
+ # sum += 1;
+ #
# This code will compile and execute "sum += 1;" as the body of the for
# loop. Previous implementations of PyCParser would render the AST for this
- # block of code as follows:
- #
- # For:
- # DeclList:
- # Decl: i, [], [], []
- # TypeDecl: i, []
- # IdentifierType: ['int']
- # Constant: int, 0
- # BinaryOp: <
- # ID: i
- # Constant: int, 3
- # UnaryOp: p++
- # ID: i
- # Pragma: omp critical
- # Assignment: +=
- # ID: sum
- # Constant: int, 1
- #
- # This AST misleadingly takes the Pragma as the body of the loop and the
- # assignment then becomes a sibling of the loop.
- #
- # To solve edge cases like these, the pragmacomp_or_statement rule groups
- # a pragma and its following statement (which would otherwise be orphaned)
- # using a compound block, effectively turning the above code into:
- #
- # for (int i = 0; i < 3; i++) {
- # #pragma omp critical
- # sum += 1;
- # }
- def p_pragmacomp_or_statement(self, p):
- """ pragmacomp_or_statement : pppragma_directive statement
- | statement
- """
- if isinstance(p[1], c_ast.Pragma) and len(p) == 3:
- p[0] = c_ast.Compound(
- block_items=[p[1], p[2]],
- coord=self._token_coord(p, 1))
- else:
- p[0] = p[1]
-
+ # block of code as follows:
+ #
+ # For:
+ # DeclList:
+ # Decl: i, [], [], []
+ # TypeDecl: i, []
+ # IdentifierType: ['int']
+ # Constant: int, 0
+ # BinaryOp: <
+ # ID: i
+ # Constant: int, 3
+ # UnaryOp: p++
+ # ID: i
+ # Pragma: omp critical
+ # Assignment: +=
+ # ID: sum
+ # Constant: int, 1
+ #
+ # This AST misleadingly takes the Pragma as the body of the loop and the
+ # assignment then becomes a sibling of the loop.
+ #
+ # To solve edge cases like these, the pragmacomp_or_statement rule groups
+ # a pragma and its following statement (which would otherwise be orphaned)
+ # using a compound block, effectively turning the above code into:
+ #
+ # for (int i = 0; i < 3; i++) {
+ # #pragma omp critical
+ # sum += 1;
+ # }
+ def p_pragmacomp_or_statement(self, p):
+ """ pragmacomp_or_statement : pppragma_directive statement
+ | statement
+ """
+ if isinstance(p[1], c_ast.Pragma) and len(p) == 3:
+ p[0] = c_ast.Compound(
+ block_items=[p[1], p[2]],
+ coord=self._token_coord(p, 1))
+ else:
+ p[0] = p[1]
+
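A hedged illustration of the rule above: the pragma and the statement that follows it are wrapped in a single Compound, so the assignment stays the loop body instead of becoming a sibling of the For node.

from pycparser import c_parser

src = r'''
void f(void) {
    int sum = 0;
    for (int i = 0; i < 3; i++)
        #pragma omp critical
        sum += 1;
}
'''
ast = c_parser.CParser().parse(src)
ast.show()  # the For body appears as Compound: [Pragma: omp critical, Assignment: +=]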
# In C, declarations can come several in a line:
# int x, *px, romulo = 5;
#
@@ -692,7 +692,7 @@ class CParser(PLYParser):
#
def p_decl_body(self, p):
""" decl_body : declaration_specifiers init_declarator_list_opt
- | declaration_specifiers_no_type id_init_declarator_list_opt
+ | declaration_specifiers_no_type id_init_declarator_list_opt
"""
spec = p[1]
@@ -766,73 +766,73 @@ class CParser(PLYParser):
"""
p[0] = p[1] if len(p) == 2 else p[1] + p[2]
- # To know when declaration-specifiers end and declarators begin,
- # we require declaration-specifiers to have at least one
- # type-specifier, and disallow typedef-names after we've seen any
- # type-specifier. These are both required by the spec.
- #
- def p_declaration_specifiers_no_type_1(self, p):
- """ declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt
+ # To know when declaration-specifiers end and declarators begin,
+ # we require declaration-specifiers to have at least one
+ # type-specifier, and disallow typedef-names after we've seen any
+ # type-specifier. These are both required by the spec.
+ #
+ def p_declaration_specifiers_no_type_1(self, p):
+ """ declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt
"""
p[0] = self._add_declaration_specifier(p[2], p[1], 'qual')
- def p_declaration_specifiers_no_type_2(self, p):
- """ declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt
- """
- p[0] = self._add_declaration_specifier(p[2], p[1], 'storage')
-
- def p_declaration_specifiers_no_type_3(self, p):
- """ declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt
- """
- p[0] = self._add_declaration_specifier(p[2], p[1], 'function')
-
+ def p_declaration_specifiers_no_type_2(self, p):
+ """ declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt
+ """
+ p[0] = self._add_declaration_specifier(p[2], p[1], 'storage')
+
+ def p_declaration_specifiers_no_type_3(self, p):
+ """ declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt
+ """
+ p[0] = self._add_declaration_specifier(p[2], p[1], 'function')
+
# Without this, `typedef _Atomic(T) U` will parse incorrectly because the
# _Atomic qualifier will match, instead of the specifier.
def p_declaration_specifiers_no_type_4(self, p):
""" declaration_specifiers_no_type : atomic_specifier declaration_specifiers_no_type_opt
"""
p[0] = self._add_declaration_specifier(p[2], p[1], 'type')
-
+
def p_declaration_specifiers_no_type_5(self, p):
""" declaration_specifiers_no_type : alignment_specifier declaration_specifiers_no_type_opt
"""
p[0] = self._add_declaration_specifier(p[2], p[1], 'alignment')
- def p_declaration_specifiers_1(self, p):
- """ declaration_specifiers : declaration_specifiers type_qualifier
- """
- p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
-
+ def p_declaration_specifiers_1(self, p):
+ """ declaration_specifiers : declaration_specifiers type_qualifier
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
+
def p_declaration_specifiers_2(self, p):
- """ declaration_specifiers : declaration_specifiers storage_class_specifier
+ """ declaration_specifiers : declaration_specifiers storage_class_specifier
"""
- p[0] = self._add_declaration_specifier(p[1], p[2], 'storage', append=True)
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'storage', append=True)
def p_declaration_specifiers_3(self, p):
- """ declaration_specifiers : declaration_specifiers function_specifier
+ """ declaration_specifiers : declaration_specifiers function_specifier
"""
- p[0] = self._add_declaration_specifier(p[1], p[2], 'function', append=True)
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'function', append=True)
def p_declaration_specifiers_4(self, p):
- """ declaration_specifiers : declaration_specifiers type_specifier_no_typeid
- """
- p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
-
- def p_declaration_specifiers_5(self, p):
- """ declaration_specifiers : type_specifier
- """
- p[0] = self._add_declaration_specifier(None, p[1], 'type')
-
- def p_declaration_specifiers_6(self, p):
- """ declaration_specifiers : declaration_specifiers_no_type type_specifier
- """
- p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
-
+ """ declaration_specifiers : declaration_specifiers type_specifier_no_typeid
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
+
+ def p_declaration_specifiers_5(self, p):
+ """ declaration_specifiers : type_specifier
+ """
+ p[0] = self._add_declaration_specifier(None, p[1], 'type')
+
+ def p_declaration_specifiers_6(self, p):
+ """ declaration_specifiers : declaration_specifiers_no_type type_specifier
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
+
def p_declaration_specifiers_7(self, p):
""" declaration_specifiers : declaration_specifiers alignment_specifier
"""
p[0] = self._add_declaration_specifier(p[1], p[2], 'alignment', append=True)
-
+
def p_storage_class_specifier(self, p):
""" storage_class_specifier : AUTO
| REGISTER
@@ -849,27 +849,27 @@ class CParser(PLYParser):
"""
p[0] = p[1]
- def p_type_specifier_no_typeid(self, p):
- """ type_specifier_no_typeid : VOID
- | _BOOL
- | CHAR
- | SHORT
- | INT
- | LONG
- | FLOAT
- | DOUBLE
- | _COMPLEX
- | SIGNED
- | UNSIGNED
- | __INT128
- """
- p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
-
- def p_type_specifier(self, p):
+ def p_type_specifier_no_typeid(self, p):
+ """ type_specifier_no_typeid : VOID
+ | _BOOL
+ | CHAR
+ | SHORT
+ | INT
+ | LONG
+ | FLOAT
+ | DOUBLE
+ | _COMPLEX
+ | SIGNED
+ | UNSIGNED
+ | __INT128
+ """
+ p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
+
+ def p_type_specifier(self, p):
""" type_specifier : typedef_name
| enum_specifier
| struct_or_union_specifier
- | type_specifier_no_typeid
+ | type_specifier_no_typeid
| atomic_specifier
"""
p[0] = p[1]
@@ -890,7 +890,7 @@ class CParser(PLYParser):
"""
p[0] = p[1]
- def p_init_declarator_list(self, p):
+ def p_init_declarator_list(self, p):
""" init_declarator_list : init_declarator
| init_declarator_list COMMA init_declarator
"""
@@ -905,40 +905,40 @@ class CParser(PLYParser):
"""
p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))
- def p_id_init_declarator_list(self, p):
- """ id_init_declarator_list : id_init_declarator
- | id_init_declarator_list COMMA init_declarator
- """
- p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
-
- def p_id_init_declarator(self, p):
- """ id_init_declarator : id_declarator
- | id_declarator EQUALS initializer
- """
- p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))
-
- # Require at least one type specifier in a specifier-qualifier-list
- #
+ def p_id_init_declarator_list(self, p):
+ """ id_init_declarator_list : id_init_declarator
+ | id_init_declarator_list COMMA init_declarator
+ """
+ p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
+
+ def p_id_init_declarator(self, p):
+ """ id_init_declarator : id_declarator
+ | id_declarator EQUALS initializer
+ """
+ p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))
+
+ # Require at least one type specifier in a specifier-qualifier-list
+ #
def p_specifier_qualifier_list_1(self, p):
- """ specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid
+ """ specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid
"""
- p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
def p_specifier_qualifier_list_2(self, p):
- """ specifier_qualifier_list : specifier_qualifier_list type_qualifier
- """
- p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
-
- def p_specifier_qualifier_list_3(self, p):
- """ specifier_qualifier_list : type_specifier
- """
- p[0] = self._add_declaration_specifier(None, p[1], 'type')
-
- def p_specifier_qualifier_list_4(self, p):
- """ specifier_qualifier_list : type_qualifier_list type_specifier
- """
+ """ specifier_qualifier_list : specifier_qualifier_list type_qualifier
+ """
+ p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
+
+ def p_specifier_qualifier_list_3(self, p):
+ """ specifier_qualifier_list : type_specifier
+ """
+ p[0] = self._add_declaration_specifier(None, p[1], 'type')
+
+ def p_specifier_qualifier_list_4(self, p):
+ """ specifier_qualifier_list : type_qualifier_list type_specifier
+ """
p[0] = dict(qual=p[1], alignment=[], storage=[], type=[p[2]], function=[])
-
+
def p_specifier_qualifier_list_5(self, p):
""" specifier_qualifier_list : alignment_specifier
"""
@@ -957,48 +957,48 @@ class CParser(PLYParser):
| struct_or_union TYPEID
"""
klass = self._select_struct_union_class(p[1])
- # None means no list of members
+ # None means no list of members
p[0] = klass(
name=p[2],
decls=None,
- coord=self._token_coord(p, 2))
+ coord=self._token_coord(p, 2))
def p_struct_or_union_specifier_2(self, p):
""" struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close
- | struct_or_union brace_open brace_close
+ | struct_or_union brace_open brace_close
"""
klass = self._select_struct_union_class(p[1])
- if len(p) == 4:
- # Empty sequence means an empty list of members
- p[0] = klass(
- name=None,
- decls=[],
- coord=self._token_coord(p, 2))
- else:
- p[0] = klass(
- name=None,
- decls=p[3],
- coord=self._token_coord(p, 2))
-
-
+ if len(p) == 4:
+ # Empty sequence means an empty list of members
+ p[0] = klass(
+ name=None,
+ decls=[],
+ coord=self._token_coord(p, 2))
+ else:
+ p[0] = klass(
+ name=None,
+ decls=p[3],
+ coord=self._token_coord(p, 2))
+
+
def p_struct_or_union_specifier_3(self, p):
""" struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close
- | struct_or_union ID brace_open brace_close
+ | struct_or_union ID brace_open brace_close
| struct_or_union TYPEID brace_open struct_declaration_list brace_close
- | struct_or_union TYPEID brace_open brace_close
+ | struct_or_union TYPEID brace_open brace_close
"""
klass = self._select_struct_union_class(p[1])
- if len(p) == 5:
- # Empty sequence means an empty list of members
- p[0] = klass(
- name=p[2],
- decls=[],
- coord=self._token_coord(p, 2))
- else:
- p[0] = klass(
- name=p[2],
- decls=p[4],
- coord=self._token_coord(p, 2))
+ if len(p) == 5:
+ # Empty sequence means an empty list of members
+ p[0] = klass(
+ name=p[2],
+ decls=[],
+ coord=self._token_coord(p, 2))
+ else:
+ p[0] = klass(
+ name=p[2],
+ decls=p[4],
+ coord=self._token_coord(p, 2))
def p_struct_or_union(self, p):
""" struct_or_union : STRUCT
@@ -1056,14 +1056,14 @@ class CParser(PLYParser):
p[0] = decls
def p_struct_declaration_2(self, p):
- """ struct_declaration : SEMI
+ """ struct_declaration : SEMI
"""
- p[0] = None
+ p[0] = None
def p_struct_declaration_3(self, p):
- """ struct_declaration : pppragma_directive
+ """ struct_declaration : pppragma_directive
"""
- p[0] = [p[1]]
+ p[0] = [p[1]]
def p_struct_declarator_list(self, p):
""" struct_declarator_list : struct_declarator
@@ -1092,18 +1092,18 @@ class CParser(PLYParser):
""" enum_specifier : ENUM ID
| ENUM TYPEID
"""
- p[0] = c_ast.Enum(p[2], None, self._token_coord(p, 1))
+ p[0] = c_ast.Enum(p[2], None, self._token_coord(p, 1))
def p_enum_specifier_2(self, p):
""" enum_specifier : ENUM brace_open enumerator_list brace_close
"""
- p[0] = c_ast.Enum(None, p[3], self._token_coord(p, 1))
+ p[0] = c_ast.Enum(None, p[3], self._token_coord(p, 1))
def p_enum_specifier_3(self, p):
""" enum_specifier : ENUM ID brace_open enumerator_list brace_close
| ENUM TYPEID brace_open enumerator_list brace_close
"""
- p[0] = c_ast.Enum(p[2], p[4], self._token_coord(p, 1))
+ p[0] = c_ast.Enum(p[2], p[4], self._token_coord(p, 1))
def p_enumerator_list(self, p):
""" enumerator_list : enumerator
@@ -1131,53 +1131,53 @@ class CParser(PLYParser):
if len(p) == 2:
enumerator = c_ast.Enumerator(
p[1], None,
- self._token_coord(p, 1))
+ self._token_coord(p, 1))
else:
enumerator = c_ast.Enumerator(
p[1], p[3],
- self._token_coord(p, 1))
+ self._token_coord(p, 1))
self._add_identifier(enumerator.name, enumerator.coord)
p[0] = enumerator
- def p_declarator(self, p):
- """ declarator : id_declarator
- | typeid_declarator
+ def p_declarator(self, p):
+ """ declarator : id_declarator
+ | typeid_declarator
"""
p[0] = p[1]
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
- def p_xxx_declarator_1(self, p):
- """ xxx_declarator : direct_xxx_declarator
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_xxx_declarator_1(self, p):
+ """ xxx_declarator : direct_xxx_declarator
"""
- p[0] = p[1]
+ p[0] = p[1]
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
- def p_xxx_declarator_2(self, p):
- """ xxx_declarator : pointer direct_xxx_declarator
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_xxx_declarator_2(self, p):
+ """ xxx_declarator : pointer direct_xxx_declarator
"""
- p[0] = self._type_modify_decl(p[2], p[1])
+ p[0] = self._type_modify_decl(p[2], p[1])
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
- def p_direct_xxx_declarator_1(self, p):
- """ direct_xxx_declarator : yyy
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_direct_xxx_declarator_1(self, p):
+ """ direct_xxx_declarator : yyy
"""
p[0] = c_ast.TypeDecl(
declname=p[1],
type=None,
quals=None,
align=None,
- coord=self._token_coord(p, 1))
+ coord=self._token_coord(p, 1))
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'))
- def p_direct_xxx_declarator_2(self, p):
- """ direct_xxx_declarator : LPAREN xxx_declarator RPAREN
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'))
+ def p_direct_xxx_declarator_2(self, p):
+ """ direct_xxx_declarator : LPAREN xxx_declarator RPAREN
"""
p[0] = p[2]
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
- def p_direct_xxx_declarator_3(self, p):
- """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_direct_xxx_declarator_3(self, p):
+ """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET
"""
quals = (p[3] if len(p) > 5 else []) or []
# Accept dimension qualifiers
@@ -1190,10 +1190,10 @@ class CParser(PLYParser):
p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
- def p_direct_xxx_declarator_4(self, p):
- """ direct_xxx_declarator : direct_xxx_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET
- | direct_xxx_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_direct_xxx_declarator_4(self, p):
+ """ direct_xxx_declarator : direct_xxx_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET
+ | direct_xxx_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET
"""
# Using slice notation for PLY objects doesn't work in Python 3 for the
# version of PLY embedded with pycparser; see PLY Google Code issue 30.
@@ -1212,22 +1212,22 @@ class CParser(PLYParser):
# Special for VLAs
#
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
- def p_direct_xxx_declarator_5(self, p):
- """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_direct_xxx_declarator_5(self, p):
+ """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET
"""
arr = c_ast.ArrayDecl(
type=None,
- dim=c_ast.ID(p[4], self._token_coord(p, 4)),
+ dim=c_ast.ID(p[4], self._token_coord(p, 4)),
dim_quals=p[3] if p[3] is not None else [],
coord=p[1].coord)
p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
- @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
- def p_direct_xxx_declarator_6(self, p):
- """ direct_xxx_declarator : direct_xxx_declarator LPAREN parameter_type_list RPAREN
- | direct_xxx_declarator LPAREN identifier_list_opt RPAREN
+ @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+ def p_direct_xxx_declarator_6(self, p):
+ """ direct_xxx_declarator : direct_xxx_declarator LPAREN parameter_type_list RPAREN
+ | direct_xxx_declarator LPAREN identifier_list_opt RPAREN
"""
func = c_ast.FuncDecl(
args=p[3],
@@ -1257,7 +1257,7 @@ class CParser(PLYParser):
""" pointer : TIMES type_qualifier_list_opt
| TIMES type_qualifier_list_opt pointer
"""
- coord = self._token_coord(p, 1)
+ coord = self._token_coord(p, 1)
# Pointer decls nest from inside out. This is important when different
# levels have different qualifiers. For example:
#
@@ -1265,7 +1265,7 @@ class CParser(PLYParser):
#
# Means "pointer to const pointer to char"
#
- # While:
+ # While:
#
# char ** const p;
#
@@ -1294,7 +1294,7 @@ class CParser(PLYParser):
| parameter_list COMMA ELLIPSIS
"""
if len(p) > 2:
- p[1].params.append(c_ast.EllipsisParam(self._token_coord(p, 3)))
+ p[1].params.append(c_ast.EllipsisParam(self._token_coord(p, 3)))
p[0] = p[1]
@@ -1308,24 +1308,24 @@ class CParser(PLYParser):
p[1].params.append(p[3])
p[0] = p[1]
- # From ISO/IEC 9899:TC2, 6.7.5.3.11:
- # "If, in a parameter declaration, an identifier can be treated either
- # as a typedef name or as a parameter name, it shall be taken as a
- # typedef name."
- #
- # Inside a parameter declaration, once we've reduced declaration specifiers,
- # if we shift in an LPAREN and see a TYPEID, it could be either an abstract
- # declarator or a declarator nested inside parens. This rule tells us to
- # always treat it as an abstract declarator. Therefore, we only accept
- # `id_declarator`s and `typeid_noparen_declarator`s.
+ # From ISO/IEC 9899:TC2, 6.7.5.3.11:
+ # "If, in a parameter declaration, an identifier can be treated either
+ # as a typedef name or as a parameter name, it shall be taken as a
+ # typedef name."
+ #
+ # Inside a parameter declaration, once we've reduced declaration specifiers,
+ # if we shift in an LPAREN and see a TYPEID, it could be either an abstract
+ # declarator or a declarator nested inside parens. This rule tells us to
+ # always treat it as an abstract declarator. Therefore, we only accept
+ # `id_declarator`s and `typeid_noparen_declarator`s.
def p_parameter_declaration_1(self, p):
- """ parameter_declaration : declaration_specifiers id_declarator
- | declaration_specifiers typeid_noparen_declarator
+ """ parameter_declaration : declaration_specifiers id_declarator
+ | declaration_specifiers typeid_noparen_declarator
"""
spec = p[1]
if not spec['type']:
spec['type'] = [c_ast.IdentifierType(['int'],
- coord=self._token_coord(p, 1))]
+ coord=self._token_coord(p, 1))]
p[0] = self._build_declarations(
spec=spec,
decls=[dict(decl=p[2])])[0]
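A hedged example of the ISO/IEC 9899 rule quoted above: once T is a typedef name, the inner (T) in f's parameter list is read as an abstract declarator ("function taking T, returning int"), not as a parameter named T of type int.

from pycparser import c_parser

ast = c_parser.CParser().parse("typedef char T; void f(int (T));")
ast.show()  # f's parameter is rendered as a function declarator over typename T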
@@ -1336,7 +1336,7 @@ class CParser(PLYParser):
spec = p[1]
if not spec['type']:
spec['type'] = [c_ast.IdentifierType(['int'],
- coord=self._token_coord(p, 1))]
+ coord=self._token_coord(p, 1))]
# Parameters can have the same names as typedefs. The trouble is that
# the parameter's name gets grouped into declaration_specifiers, making
@@ -1356,7 +1356,7 @@ class CParser(PLYParser):
quals=spec['qual'],
align=None,
type=p[2] or c_ast.TypeDecl(None, None, None, None),
- coord=self._token_coord(p, 2))
+ coord=self._token_coord(p, 2))
typename = spec['type']
decl = self._fix_decl_name_type(decl, typename)
@@ -1382,7 +1382,7 @@ class CParser(PLYParser):
| brace_open initializer_list COMMA brace_close
"""
if p[2] is None:
- p[0] = c_ast.InitList([], self._token_coord(p, 1))
+ p[0] = c_ast.InitList([], self._token_coord(p, 1))
else:
p[0] = p[2]
@@ -1426,7 +1426,7 @@ class CParser(PLYParser):
quals=p[1]['qual'][:],
align=None,
type=p[2] or c_ast.TypeDecl(None, None, None, None),
- coord=self._token_coord(p, 2))
+ coord=self._token_coord(p, 2))
p[0] = self._fix_decl_name_type(typename, p[1]['type'])
@@ -1476,14 +1476,14 @@ class CParser(PLYParser):
type=c_ast.TypeDecl(None, None, None, None),
dim=p[3] if len(p) > 4 else p[2],
dim_quals=quals,
- coord=self._token_coord(p, 1))
+ coord=self._token_coord(p, 1))
def p_direct_abstract_declarator_4(self, p):
""" direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET
"""
arr = c_ast.ArrayDecl(
type=None,
- dim=c_ast.ID(p[3], self._token_coord(p, 3)),
+ dim=c_ast.ID(p[3], self._token_coord(p, 3)),
dim_quals=[],
coord=p[1].coord)
@@ -1494,9 +1494,9 @@ class CParser(PLYParser):
"""
p[0] = c_ast.ArrayDecl(
type=c_ast.TypeDecl(None, None, None, None),
- dim=c_ast.ID(p[3], self._token_coord(p, 3)),
+ dim=c_ast.ID(p[3], self._token_coord(p, 3)),
dim_quals=[],
- coord=self._token_coord(p, 1))
+ coord=self._token_coord(p, 1))
def p_direct_abstract_declarator_6(self, p):
""" direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN
@@ -1514,7 +1514,7 @@ class CParser(PLYParser):
p[0] = c_ast.FuncDecl(
args=p[2],
type=c_ast.TypeDecl(None, None, None, None),
- coord=self._token_coord(p, 1))
+ coord=self._token_coord(p, 1))
# declaration is a list, statement isn't. To make it consistent, block_item
# will always be a list
@@ -1538,72 +1538,72 @@ class CParser(PLYParser):
""" compound_statement : brace_open block_item_list_opt brace_close """
p[0] = c_ast.Compound(
block_items=p[2],
- coord=self._token_coord(p, 1))
+ coord=self._token_coord(p, 1))
def p_labeled_statement_1(self, p):
- """ labeled_statement : ID COLON pragmacomp_or_statement """
- p[0] = c_ast.Label(p[1], p[3], self._token_coord(p, 1))
+ """ labeled_statement : ID COLON pragmacomp_or_statement """
+ p[0] = c_ast.Label(p[1], p[3], self._token_coord(p, 1))
def p_labeled_statement_2(self, p):
- """ labeled_statement : CASE constant_expression COLON pragmacomp_or_statement """
- p[0] = c_ast.Case(p[2], [p[4]], self._token_coord(p, 1))
+ """ labeled_statement : CASE constant_expression COLON pragmacomp_or_statement """
+ p[0] = c_ast.Case(p[2], [p[4]], self._token_coord(p, 1))
def p_labeled_statement_3(self, p):
- """ labeled_statement : DEFAULT COLON pragmacomp_or_statement """
- p[0] = c_ast.Default([p[3]], self._token_coord(p, 1))
+ """ labeled_statement : DEFAULT COLON pragmacomp_or_statement """
+ p[0] = c_ast.Default([p[3]], self._token_coord(p, 1))
def p_selection_statement_1(self, p):
- """ selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement """
- p[0] = c_ast.If(p[3], p[5], None, self._token_coord(p, 1))
+ """ selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement """
+ p[0] = c_ast.If(p[3], p[5], None, self._token_coord(p, 1))
def p_selection_statement_2(self, p):
- """ selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement """
- p[0] = c_ast.If(p[3], p[5], p[7], self._token_coord(p, 1))
+ """ selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement """
+ p[0] = c_ast.If(p[3], p[5], p[7], self._token_coord(p, 1))
def p_selection_statement_3(self, p):
- """ selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement """
+ """ selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement """
p[0] = fix_switch_cases(
- c_ast.Switch(p[3], p[5], self._token_coord(p, 1)))
+ c_ast.Switch(p[3], p[5], self._token_coord(p, 1)))
def p_iteration_statement_1(self, p):
- """ iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement """
- p[0] = c_ast.While(p[3], p[5], self._token_coord(p, 1))
+ """ iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement """
+ p[0] = c_ast.While(p[3], p[5], self._token_coord(p, 1))
def p_iteration_statement_2(self, p):
- """ iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI """
- p[0] = c_ast.DoWhile(p[5], p[2], self._token_coord(p, 1))
+ """ iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI """
+ p[0] = c_ast.DoWhile(p[5], p[2], self._token_coord(p, 1))
def p_iteration_statement_3(self, p):
- """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """
- p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._token_coord(p, 1))
+ """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """
+ p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._token_coord(p, 1))
def p_iteration_statement_4(self, p):
- """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """
- p[0] = c_ast.For(c_ast.DeclList(p[3], self._token_coord(p, 1)),
- p[4], p[6], p[8], self._token_coord(p, 1))
+ """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """
+ p[0] = c_ast.For(c_ast.DeclList(p[3], self._token_coord(p, 1)),
+ p[4], p[6], p[8], self._token_coord(p, 1))
def p_jump_statement_1(self, p):
""" jump_statement : GOTO ID SEMI """
- p[0] = c_ast.Goto(p[2], self._token_coord(p, 1))
+ p[0] = c_ast.Goto(p[2], self._token_coord(p, 1))
def p_jump_statement_2(self, p):
""" jump_statement : BREAK SEMI """
- p[0] = c_ast.Break(self._token_coord(p, 1))
+ p[0] = c_ast.Break(self._token_coord(p, 1))
def p_jump_statement_3(self, p):
""" jump_statement : CONTINUE SEMI """
- p[0] = c_ast.Continue(self._token_coord(p, 1))
+ p[0] = c_ast.Continue(self._token_coord(p, 1))
def p_jump_statement_4(self, p):
""" jump_statement : RETURN expression SEMI
| RETURN SEMI
"""
- p[0] = c_ast.Return(p[2] if len(p) == 4 else None, self._token_coord(p, 1))
+ p[0] = c_ast.Return(p[2] if len(p) == 4 else None, self._token_coord(p, 1))
def p_expression_statement(self, p):
""" expression_statement : expression_opt SEMI """
if p[1] is None:
- p[0] = c_ast.EmptyStatement(self._token_coord(p, 2))
+ p[0] = c_ast.EmptyStatement(self._token_coord(p, 2))
else:
p[0] = p[1]
@@ -1626,7 +1626,7 @@ class CParser(PLYParser):
def p_typedef_name(self, p):
""" typedef_name : TYPEID """
- p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
+ p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
def p_assignment_expression(self, p):
""" assignment_expression : conditional_expression
@@ -1702,7 +1702,7 @@ class CParser(PLYParser):
def p_cast_expression_2(self, p):
""" cast_expression : LPAREN type_name RPAREN cast_expression """
- p[0] = c_ast.Cast(p[2], p[4], self._token_coord(p, 1))
+ p[0] = c_ast.Cast(p[2], p[4], self._token_coord(p, 1))
def p_unary_expression_1(self, p):
""" unary_expression : postfix_expression """
@@ -1723,7 +1723,7 @@ class CParser(PLYParser):
p[0] = c_ast.UnaryOp(
p[1],
p[2] if len(p) == 3 else p[3],
- self._token_coord(p, 1))
+ self._token_coord(p, 1))
def p_unary_operator(self, p):
""" unary_operator : AND
@@ -1755,7 +1755,7 @@ class CParser(PLYParser):
| postfix_expression ARROW ID
| postfix_expression ARROW TYPEID
"""
- field = c_ast.ID(p[3], self._token_coord(p, 3))
+ field = c_ast.ID(p[3], self._token_coord(p, 3))
p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord)
def p_postfix_expression_5(self, p):
@@ -1791,7 +1791,7 @@ class CParser(PLYParser):
def p_primary_expression_5(self, p):
""" primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN
"""
- coord = self._token_coord(p, 1)
+ coord = self._token_coord(p, 1)
p[0] = c_ast.FuncCall(c_ast.ID(p[1], coord),
c_ast.ExprList([p[3], p[5]], coord),
coord)
@@ -1822,7 +1822,7 @@ class CParser(PLYParser):
def p_identifier(self, p):
""" identifier : ID """
- p[0] = c_ast.ID(p[1], self._token_coord(p, 1))
+ p[0] = c_ast.ID(p[1], self._token_coord(p, 1))
def p_constant_1(self, p):
""" constant : INT_CONST_DEC
@@ -1851,18 +1851,18 @@ class CParser(PLYParser):
""" constant : FLOAT_CONST
| HEX_FLOAT_CONST
"""
- if 'x' in p[1].lower():
- t = 'float'
- else:
- if p[1][-1] in ('f', 'F'):
- t = 'float'
- elif p[1][-1] in ('l', 'L'):
- t = 'long double'
- else:
- t = 'double'
-
+ if 'x' in p[1].lower():
+ t = 'float'
+ else:
+ if p[1][-1] in ('f', 'F'):
+ t = 'float'
+ elif p[1][-1] in ('l', 'L'):
+ t = 'long double'
+ else:
+ t = 'double'
+
p[0] = c_ast.Constant(
- t, p[1], self._token_coord(p, 1))
+ t, p[1], self._token_coord(p, 1))
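The branch above derives the constant's C type from its spelling; a standalone restatement of that decision table (float_const_type is a made-up name, illustration only):

def float_const_type(text):
    if 'x' in text.lower():            # hexadecimal float constant
        return 'float'
    if text[-1] in ('f', 'F'):
        return 'float'
    if text[-1] in ('l', 'L'):
        return 'long double'
    return 'double'

assert float_const_type('1.5f') == 'float'
assert float_const_type('2.0L') == 'long double'
assert float_const_type('3.14') == 'double'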
def p_constant_3(self, p):
""" constant : CHAR_CONST
@@ -1872,7 +1872,7 @@ class CParser(PLYParser):
| U32CHAR_CONST
"""
p[0] = c_ast.Constant(
- 'char', p[1], self._token_coord(p, 1))
+ 'char', p[1], self._token_coord(p, 1))
# The "unified" string and wstring literal rules are for supporting
# concatenation of adjacent string literals.
@@ -1885,7 +1885,7 @@ class CParser(PLYParser):
"""
if len(p) == 2: # single literal
p[0] = c_ast.Constant(
- 'string', p[1], self._token_coord(p, 1))
+ 'string', p[1], self._token_coord(p, 1))
else:
p[1].value = p[1].value[:-1] + p[2][1:]
p[0] = p[1]
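The slicing above, p[1].value[:-1] + p[2][1:], drops the closing quote of the first literal and the opening quote of the second, folding adjacent literals into one constant. A hedged usage check:

from pycparser import c_parser

ast = c_parser.CParser().parse('char *s = "abc" "def";')
ast.show()  # the initializer shows up as a single Constant: string, "abcdef"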
@@ -1902,7 +1902,7 @@ class CParser(PLYParser):
"""
if len(p) == 2: # single literal
p[0] = c_ast.Constant(
- 'string', p[1], self._token_coord(p, 1))
+ 'string', p[1], self._token_coord(p, 1))
else:
p[1].value = p[1].value.rstrip()[:-1] + p[2][2:]
p[0] = p[1]
diff --git a/contrib/python/pycparser/pycparser/lextab.py b/contrib/python/pycparser/pycparser/lextab.py
index 444b4656d5..18f819fb13 100644
--- a/contrib/python/pycparser/pycparser/lextab.py
+++ b/contrib/python/pycparser/pycparser/lextab.py
@@ -1,7 +1,7 @@
-# lextab.py. This file automatically created by PLY (version 3.10). Don't edit!
-_tabversion = '3.10'
+# lextab.py. This file automatically created by PLY (version 3.10). Don't edit!
+_tabversion = '3.10'
_lextokens = set(('INT_CONST_CHAR', 'VOID', 'LBRACKET', 'WCHAR_CONST', 'FLOAT_CONST', 'MINUS', 'RPAREN', 'STRUCT', 'LONG', 'PLUS', 'ELLIPSIS', 'U32STRING_LITERAL', 'GT', 'GOTO', 'ENUM', 'PERIOD', 'GE', 'INT_CONST_DEC', 'ARROW', '_STATIC_ASSERT', '__INT128', 'HEX_FLOAT_CONST', 'DOUBLE', 'MINUSEQUAL', 'INT_CONST_OCT', 'TIMESEQUAL', 'OR', 'SHORT', 'RETURN', 'RSHIFTEQUAL', '_ALIGNAS', 'RESTRICT', 'STATIC', 'SIZEOF', 'UNSIGNED', 'PLUSPLUS', 'COLON', 'WSTRING_LITERAL', 'DIVIDE', 'FOR', 'UNION', 'EQUALS', 'ELSE', 'ANDEQUAL', 'EQ', 'AND', 'TYPEID', 'LBRACE', 'PPHASH', 'INT', 'SIGNED', 'CONTINUE', 'NOT', 'OREQUAL', 'MOD', 'RSHIFT', 'DEFAULT', '_NORETURN', 'CHAR', 'WHILE', 'DIVEQUAL', '_ALIGNOF', 'EXTERN', 'LNOT', 'CASE', 'LAND', 'REGISTER', 'MODEQUAL', 'NE', 'SWITCH', 'INT_CONST_HEX', '_COMPLEX', 'PPPRAGMASTR', 'PLUSEQUAL', 'U32CHAR_CONST', 'CONDOP', 'U8STRING_LITERAL', 'BREAK', 'VOLATILE', 'PPPRAGMA', 'INLINE', 'INT_CONST_BIN', 'DO', 'U8CHAR_CONST', 'CONST', 'U16STRING_LITERAL', 'LOR', 'CHAR_CONST', 'LSHIFT', 'RBRACE', '_BOOL', 'LE', 'SEMI', '_THREAD_LOCAL', 'LT', 'COMMA', 'U16CHAR_CONST', 'OFFSETOF', '_ATOMIC', 'TYPEDEF', 'XOR', 'AUTO', 'TIMES', 'LPAREN', 'MINUSMINUS', 'ID', 'IF', 'STRING_LITERAL', 'FLOAT', 'XOREQUAL', 'LSHIFTEQUAL', 'RBRACKET'))
-_lexreflags = 64
+_lexreflags = 64
_lexliterals = ''
_lexstateinfo = {'ppline': 'exclusive', 'pppragma': 'exclusive', 'INITIAL': 'inclusive'}
_lexstatere = {'ppline': [('(?P<t_ppline_FILENAME>"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_ppline_LINE_NUMBER>(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P<t_ppline_NEWLINE>\\n)|(?P<t_ppline_PPLINE>line)', [None, ('t_ppline_FILENAME', 'FILENAME'), None, None, ('t_ppline_LINE_NUMBER', 'LINE_NUMBER'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_ppline_NEWLINE', 'NEWLINE'), ('t_ppline_PPLINE', 'PPLINE')])], 'pppragma': [('(?P<t_pppragma_NEWLINE>\\n)|(?P<t_pppragma_PPPRAGMA>pragma)|(?P<t_pppragma_STR>.+)', [None, ('t_pppragma_NEWLINE', 'NEWLINE'), ('t_pppragma_PPPRAGMA', 'PPPRAGMA'), ('t_pppragma_STR', 'STR')])], 'INITIAL': [('(?P<t_PPHASH>[ \\t]*\\#)|(?P<t_NEWLINE>\\n+)|(?P<t_LBRACE>\\{)|(?P<t_RBRACE>\\})|(?P<t_FLOAT_CONST>((((([0-9]*\\.[0-9]+)|([0-9]+\\.))([eE][-+]?[0-9]+)?)|([0-9]+([eE][-+]?[0-9]+)))[FfLl]?))|(?P<t_HEX_FLOAT_CONST>(0[xX]([0-9a-fA-F]+|((([0-9a-fA-F]+)?\\.[0-9a-fA-F]+)|([0-9a-fA-F]+\\.)))([pP][+-]?[0-9]+)[FfLl]?))|(?P<t_INT_CONST_HEX>0[xX][0-9a-fA-F]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P<t_INT_CONST_BIN>0[bB][01]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)', [None, ('t_PPHASH', 'PPHASH'), ('t_NEWLINE', 'NEWLINE'), ('t_LBRACE', 'LBRACE'), ('t_RBRACE', 'RBRACE'), ('t_FLOAT_CONST', 'FLOAT_CONST'), None, None, None, None, None, None, None, None, None, ('t_HEX_FLOAT_CONST', 'HEX_FLOAT_CONST'), None, None, None, None, None, None, None, ('t_INT_CONST_HEX', 'INT_CONST_HEX'), None, None, None, None, None, None, None, ('t_INT_CONST_BIN', 'INT_CONST_BIN')]), ('(?P<t_BAD_CONST_OCT>0[0-7]*[89])|(?P<t_INT_CONST_OCT>0[0-7]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P<t_INT_CONST_DEC>(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P<t_INT_CONST_CHAR>\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F])))){2,4}\')|(?P<t_CHAR_CONST>\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?P<t_WCHAR_CONST>L\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?P<t_U8CHAR_CONST>u8\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?P<t_U16CHAR_CONST>u\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?P<t_U32CHAR_CONST>U\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')', [None, ('t_BAD_CONST_OCT', 'BAD_CONST_OCT'), ('t_INT_CONST_OCT', 'INT_CONST_OCT'), None, None, None, None, None, None, None, ('t_INT_CONST_DEC', 'INT_CONST_DEC'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_INT_CONST_CHAR', 'INT_CONST_CHAR'), None, None, None, None, None, None, ('t_CHAR_CONST', 'CHAR_CONST'), None, None, None, None, None, None, ('t_WCHAR_CONST', 'WCHAR_CONST'), None, None, None, None, None, None, ('t_U8CHAR_CONST', 'U8CHAR_CONST'), None, None, None, None, None, None, ('t_U16CHAR_CONST', 'U16CHAR_CONST'), None, None, None, None, None, None, ('t_U32CHAR_CONST', 
'U32CHAR_CONST')]), ('(?P<t_UNMATCHED_QUOTE>(\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))*\\n)|(\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))*$))|(?P<t_BAD_CHAR_CONST>(\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))[^\'\n]+\')|(\'\')|(\'([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-9])[^\'\\n]*\'))|(?P<t_WSTRING_LITERAL>L"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_U8STRING_LITERAL>u8"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_U16STRING_LITERAL>u"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_U32STRING_LITERAL>U"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_BAD_STRING_LITERAL>"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-9])([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_ID>[a-zA-Z_$][0-9a-zA-Z_$]*)|(?P<t_STRING_LITERAL>"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P<t_ELLIPSIS>\\.\\.\\.)|(?P<t_PLUSPLUS>\\+\\+)|(?P<t_LOR>\\|\\|)|(?P<t_XOREQUAL>\\^=)|(?P<t_OREQUAL>\\|=)|(?P<t_LSHIFTEQUAL><<=)|(?P<t_RSHIFTEQUAL>>>=)|(?P<t_PLUSEQUAL>\\+=)|(?P<t_TIMESEQUAL>\\*=)', [None, ('t_UNMATCHED_QUOTE', 'UNMATCHED_QUOTE'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_BAD_CHAR_CONST', 'BAD_CHAR_CONST'), None, None, None, None, None, None, None, None, None, None, ('t_WSTRING_LITERAL', 'WSTRING_LITERAL'), None, None, ('t_U8STRING_LITERAL', 'U8STRING_LITERAL'), None, None, ('t_U16STRING_LITERAL', 'U16STRING_LITERAL'), None, None, ('t_U32STRING_LITERAL', 'U32STRING_LITERAL'), None, None, ('t_BAD_STRING_LITERAL', 'BAD_STRING_LITERAL'), None, None, None, None, None, ('t_ID', 'ID'), (None, 'STRING_LITERAL'), None, None, (None, 'ELLIPSIS'), (None, 'PLUSPLUS'), (None, 'LOR'), (None, 'XOREQUAL'), (None, 'OREQUAL'), (None, 'LSHIFTEQUAL'), (None, 'RSHIFTEQUAL'), (None, 'PLUSEQUAL'), (None, 'TIMESEQUAL')]), ('(?P<t_PLUS>\\+)|(?P<t_MODEQUAL>%=)|(?P<t_DIVEQUAL>/=)|(?P<t_RBRACKET>\\])|(?P<t_CONDOP>\\?)|(?P<t_XOR>\\^)|(?P<t_LSHIFT><<)|(?P<t_LE><=)|(?P<t_LPAREN>\\()|(?P<t_ARROW>->)|(?P<t_EQ>==)|(?P<t_NE>!=)|(?P<t_MINUSMINUS>--)|(?P<t_OR>\\|)|(?P<t_TIMES>\\*)|(?P<t_LBRACKET>\\[)|(?P<t_GE>>=)|(?P<t_RPAREN>\\))|(?P<t_LAND>&&)|(?P<t_RSHIFT>>>)|(?P<t_MINUSEQUAL>-=)|(?P<t_PERIOD>\\.)|(?P<t_ANDEQUAL>&=)|(?P<t_EQUALS>=)|(?P<t_LT><)|(?P<t_COMMA>,)|(?P<t_DIVIDE>/)|(?P<t_AND>&)|(?P<t_MOD>%)|(?P<t_SEMI>;)|(?P<t_MINUS>-)|(?P<t_GT>>)|(?P<t_COLON>:)|(?P<t_NOT>~)|(?P<t_LNOT>!)', [None, (None, 'PLUS'), (None, 'MODEQUAL'), (None, 'DIVEQUAL'), (None, 'RBRACKET'), (None, 'CONDOP'), (None, 'XOR'), (None, 'LSHIFT'), (None, 'LE'), (None, 'LPAREN'), (None, 'ARROW'), (None, 'EQ'), (None, 'NE'), (None, 'MINUSMINUS'), (None, 'OR'), (None, 'TIMES'), (None, 'LBRACKET'), (None, 'GE'), (None, 'RPAREN'), (None, 'LAND'), (None, 'RSHIFT'), (None, 'MINUSEQUAL'), (None, 'PERIOD'), (None, 'ANDEQUAL'), (None, 'EQUALS'), (None, 'LT'), (None, 'COMMA'), (None, 'DIVIDE'), (None, 'AND'), (None, 'MOD'), (None, 'SEMI'), (None, 'MINUS'), (None, 'GT'), (None, 'COLON'), (None, 'NOT'), (None, 'LNOT')])]}
diff --git a/contrib/python/pycparser/pycparser/ply/__init__.py b/contrib/python/pycparser/pycparser/ply/__init__.py
index 6e53cddcf6..ed040086b5 100644
--- a/contrib/python/pycparser/pycparser/ply/__init__.py
+++ b/contrib/python/pycparser/pycparser/ply/__init__.py
@@ -1,5 +1,5 @@
# PLY package
# Author: David Beazley (dave@dabeaz.com)
-__version__ = '3.9'
+__version__ = '3.9'
__all__ = ['lex','yacc']
diff --git a/contrib/python/pycparser/pycparser/ply/cpp.py b/contrib/python/pycparser/pycparser/ply/cpp.py
index 86273eac77..2f31763ef7 100644
--- a/contrib/python/pycparser/pycparser/ply/cpp.py
+++ b/contrib/python/pycparser/pycparser/ply/cpp.py
@@ -2,20 +2,20 @@
# cpp.py
#
# Author: David Beazley (http://www.dabeaz.com)
-# Copyright (C) 2017
+# Copyright (C) 2017
# All rights reserved
#
-# This module implements an ANSI-C style lexical preprocessor for PLY.
+# This module implements an ANSI-C style lexical preprocessor for PLY.
# -----------------------------------------------------------------------------
-import sys
-
-# Some Python 3 compatibility shims
-if sys.version_info.major < 3:
- STRING_TYPES = (str, unicode)
-else:
- STRING_TYPES = str
- xrange = range
-
+import sys
+
+# Some Python 3 compatibility shims
+if sys.version_info.major < 3:
+ STRING_TYPES = (str, unicode)
+else:
+ STRING_TYPES = str
+ xrange = range
+
# -----------------------------------------------------------------------------
# Default preprocessor lexer definitions. These tokens are enough to get
# a basic preprocessor working. Other modules may import these if they want
@@ -75,8 +75,8 @@ def t_CPP_COMMENT2(t):
r'(//.*?(\n|$))'
# replace with '/n'
t.type = 'CPP_WS'; t.value = '\n'
- return t
-
+ return t
+
def t_error(t):
t.type = t.value[0]
t.value = t.value[0]
@@ -90,8 +90,8 @@ import os.path
# -----------------------------------------------------------------------------
# trigraph()
-#
-# Given an input string, this function replaces all trigraph sequences.
+#
+# Given an input string, this function replaces all trigraph sequences.
# The following mapping is used:
#
# ??= #
@@ -261,7 +261,7 @@ class Preprocessor(object):
# ----------------------------------------------------------------------
# add_path()
#
- # Adds a search path to the preprocessor.
+ # Adds a search path to the preprocessor.
# ----------------------------------------------------------------------
def add_path(self,path):
@@ -305,7 +305,7 @@ class Preprocessor(object):
# ----------------------------------------------------------------------
# tokenstrip()
- #
+ #
# Remove leading/trailing whitespace tokens from a token list
# ----------------------------------------------------------------------
@@ -331,7 +331,7 @@ class Preprocessor(object):
# argument. Each argument is represented by a list of tokens.
#
# When collecting arguments, leading and trailing whitespace is removed
- # from each argument.
+ # from each argument.
#
# This function properly handles nested parenthesis and commas---these do not
# define new arguments.
@@ -343,7 +343,7 @@ class Preprocessor(object):
current_arg = []
nesting = 1
tokenlen = len(tokenlist)
-
+
# Search for the opening '('.
i = 0
while (i < tokenlen) and (tokenlist[i].type in self.t_WS):
@@ -377,7 +377,7 @@ class Preprocessor(object):
else:
current_arg.append(t)
i += 1
-
+
# Missing end argument
self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments")
return 0, [],[]
@@ -389,9 +389,9 @@ class Preprocessor(object):
# This is used to speed up macro expansion later on---we'll know
# right away where to apply patches to the value to form the expansion
# ----------------------------------------------------------------------
-
+
def macro_prescan(self,macro):
- macro.patch = [] # Standard macro arguments
+ macro.patch = [] # Standard macro arguments
macro.str_patch = [] # String conversion expansion
macro.var_comma_patch = [] # Variadic macro comma patch
i = 0
@@ -438,7 +438,7 @@ class Preprocessor(object):
rep = [copy.copy(_x) for _x in macro.value]
# Make string expansion patches. These do not alter the length of the replacement sequence
-
+
str_expansion = {}
for argnum, i in macro.str_patch:
if argnum not in str_expansion:
@@ -456,7 +456,7 @@ class Preprocessor(object):
# Make all other patches. The order of these matters. It is assumed that the patch list
# has been sorted in reverse order of patch location since replacements will cause the
# size of the replacement sequence to expand from the patch point.
-
+
expanded = { }
for ptype, argnum, i in macro.patch:
# Concatenation. Argument is left unexpanded
@@ -493,7 +493,7 @@ class Preprocessor(object):
if t.value in self.macros and t.value not in expanded:
# Yes, we found a macro match
expanded[t.value] = True
-
+
m = self.macros[t.value]
if not m.arglist:
# A simple macro
@@ -525,7 +525,7 @@ class Preprocessor(object):
else:
args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1]
del args[len(m.arglist):]
-
+
# Get macro replacement text
rep = self.macro_expand_args(m,args)
rep = self.expand_macros(rep,expanded)
@@ -538,13 +538,13 @@ class Preprocessor(object):
elif t.value == '__LINE__':
t.type = self.t_INTEGER
t.value = self.t_INTEGER_TYPE(t.lineno)
-
+
i += 1
return tokens
- # ----------------------------------------------------------------------
+ # ----------------------------------------------------------------------
# evalexpr()
- #
+ #
# Evaluate an expression token sequence for the purposes of evaluating
# integral expressions.
# ----------------------------------------------------------------------
@@ -591,14 +591,14 @@ class Preprocessor(object):
tokens[i].value = str(tokens[i].value)
while tokens[i].value[-1] not in "0123456789abcdefABCDEF":
tokens[i].value = tokens[i].value[:-1]
-
+
expr = "".join([str(x.value) for x in tokens])
expr = expr.replace("&&"," and ")
expr = expr.replace("||"," or ")
expr = expr.replace("!"," not ")
try:
result = eval(expr)
- except Exception:
+ except Exception:
self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression")
result = 0
return result
@@ -616,7 +616,7 @@ class Preprocessor(object):
if not source:
source = ""
-
+
self.define("__FILE__ \"%s\"" % source)
self.source = source
@@ -635,7 +635,7 @@ class Preprocessor(object):
for tok in x:
if tok.type in self.t_WS and '\n' in tok.value:
chunk.append(tok)
-
+
dirtokens = self.tokenstrip(x[i+1:])
if dirtokens:
name = dirtokens[0].value
@@ -643,7 +643,7 @@ class Preprocessor(object):
else:
name = ""
args = []
-
+
if name == 'define':
if enable:
for tok in self.expand_macros(chunk):
@@ -703,7 +703,7 @@ class Preprocessor(object):
iftrigger = True
else:
self.error(self.source,dirtokens[0].lineno,"Misplaced #elif")
-
+
elif name == 'else':
if ifstack:
if ifstack[-1][0]:
@@ -789,7 +789,7 @@ class Preprocessor(object):
# ----------------------------------------------------------------------
def define(self,tokens):
- if isinstance(tokens,STRING_TYPES):
+ if isinstance(tokens,STRING_TYPES):
tokens = self.tokenize(tokens)
linetok = tokens
@@ -873,7 +873,7 @@ class Preprocessor(object):
def parse(self,input,source=None,ignore={}):
self.ignore = ignore
self.parser = self.parsegen(input,source)
-
+
# ----------------------------------------------------------------------
# token()
#
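
The cpp.py hunks above touch only whitespace; the code they pass through is PLY's chained preprocessor (macro collection, prescan, expansion, and #if evaluation). For orientation, a minimal usage sketch follows. It assumes the vendored modules import as pycparser.ply.lex and pycparser.ply.cpp, as laid out in this tree, and the printed output is approximate.

from pycparser.ply import lex
from pycparser.ply import cpp

lexer = lex.lex(module=cpp)           # build a lexer from cpp.py's token rules
pp = cpp.Preprocessor(lexer)
pp.define('SQUARE(x) ((x)*(x))')      # same entry point the #define directive uses
pp.parse('int y = SQUARE(3);', source='<string>')

out = []
while True:
    tok = pp.token()                  # expanded token stream
    if tok is None:
        break
    out.append(str(tok.value))
print(''.join(out))                   # roughly: int y = ((3)*(3));
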
diff --git a/contrib/python/pycparser/pycparser/ply/lex.py b/contrib/python/pycparser/pycparser/ply/lex.py
index 4bdd76ca06..2b3cadb875 100644
--- a/contrib/python/pycparser/pycparser/ply/lex.py
+++ b/contrib/python/pycparser/pycparser/ply/lex.py
@@ -1,7 +1,7 @@
# -----------------------------------------------------------------------------
# ply: lex.py
#
-# Copyright (C) 2001-2017
+# Copyright (C) 2001-2017
# David M. Beazley (Dabeaz LLC)
# All rights reserved.
#
@@ -31,8 +31,8 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# -----------------------------------------------------------------------------
-__version__ = '3.10'
-__tabversion__ = '3.10'
+__version__ = '3.10'
+__tabversion__ = '3.10'
import re
import sys
@@ -179,12 +179,12 @@ class Lexer:
with open(filename, 'w') as tf:
tf.write('# %s.py. This file automatically created by PLY (version %s). Don\'t edit!\n' % (basetabmodule, __version__))
tf.write('_tabversion = %s\n' % repr(__tabversion__))
- tf.write('_lextokens = set(%s)\n' % repr(tuple(self.lextokens)))
+ tf.write('_lextokens = set(%s)\n' % repr(tuple(self.lextokens)))
tf.write('_lexreflags = %s\n' % repr(self.lexreflags))
tf.write('_lexliterals = %s\n' % repr(self.lexliterals))
tf.write('_lexstateinfo = %s\n' % repr(self.lexstateinfo))
- # Rewrite the lexstatere table, replacing function objects with function names
+ # Rewrite the lexstatere table, replacing function objects with function names
tabre = {}
for statename, lre in self.lexstatere.items():
titem = []
@@ -230,7 +230,7 @@ class Lexer:
titem = []
txtitem = []
for pat, func_name in lre:
- titem.append((re.compile(pat, lextab._lexreflags), _names_to_funcs(func_name, fdict)))
+ titem.append((re.compile(pat, lextab._lexreflags), _names_to_funcs(func_name, fdict)))
self.lexstatere[statename] = titem
self.lexstateretext[statename] = txtitem
@@ -495,7 +495,7 @@ def _form_master_re(relist, reflags, ldict, toknames):
return []
regex = '|'.join(relist)
try:
- lexre = re.compile(regex, reflags)
+ lexre = re.compile(regex, reflags)
# Build the index to function map for the matching engine
lexindexfunc = [None] * (max(lexre.groupindex.values()) + 1)
@@ -536,7 +536,7 @@ def _statetoken(s, names):
for i, part in enumerate(parts[1:], 1):
if part not in names and part != 'ANY':
break
-
+
if i > 1:
states = tuple(parts[1:i])
else:
@@ -758,7 +758,7 @@ class LexerReflect(object):
continue
try:
- c = re.compile('(?P<%s>%s)' % (fname, _get_regex(f)), self.reflags)
+ c = re.compile('(?P<%s>%s)' % (fname, _get_regex(f)), self.reflags)
if c.match(''):
self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file, line, f.__name__)
self.error = True
@@ -782,7 +782,7 @@ class LexerReflect(object):
continue
try:
- c = re.compile('(?P<%s>%s)' % (name, r), self.reflags)
+ c = re.compile('(?P<%s>%s)' % (name, r), self.reflags)
if (c.match('')):
self.log.error("Regular expression for rule '%s' matches empty string", name)
self.error = True
@@ -830,10 +830,10 @@ class LexerReflect(object):
# -----------------------------------------------------------------------------
def validate_module(self, module):
- try:
- lines, linen = inspect.getsourcelines(module)
- except IOError:
- return
+ try:
+ lines, linen = inspect.getsourcelines(module)
+ except IOError:
+ return
fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(')
sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=')
@@ -861,7 +861,7 @@ class LexerReflect(object):
# Build all of the regular expression rules from definitions in the supplied module
# -----------------------------------------------------------------------------
def lex(module=None, object=None, debug=False, optimize=False, lextab='lextab',
- reflags=int(re.VERBOSE), nowarn=False, outputdir=None, debuglog=None, errorlog=None):
+ reflags=int(re.VERBOSE), nowarn=False, outputdir=None, debuglog=None, errorlog=None):
if lextab is None:
lextab = 'lextab'
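
The lex.py changes above are likewise whitespace-only; the surrounding code builds the combined master regular expression (_form_master_re) and validates the reflected t_* rules (LexerReflect). A small self-contained sketch of that reflection API, assuming the same vendored import path; the token names are invented for the example.

from pycparser.ply import lex

tokens = ('NUMBER', 'PLUS')

t_PLUS = r'\+'            # string rule: the name alone fixes the token type
t_ignore = ' \t'          # ignored characters (checked but not compiled)

def t_NUMBER(t):          # function rule: the docstring is the regex
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    print("Illegal character %r" % t.value[0])
    t.lexer.skip(1)

lexer = lex.lex()         # compiles the combined pattern seen in _form_master_re()
lexer.input('12 + 34')
while True:
    tok = lexer.token()
    if tok is None:
        break
    print(tok.type, tok.value)   # NUMBER 12, PLUS +, NUMBER 34
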
diff --git a/contrib/python/pycparser/pycparser/ply/yacc.py b/contrib/python/pycparser/pycparser/ply/yacc.py
index 20b4f2863c..7ca728ce46 100644
--- a/contrib/python/pycparser/pycparser/ply/yacc.py
+++ b/contrib/python/pycparser/pycparser/ply/yacc.py
@@ -1,7 +1,7 @@
# -----------------------------------------------------------------------------
# ply: yacc.py
#
-# Copyright (C) 2001-2017
+# Copyright (C) 2001-2017
# David M. Beazley (Dabeaz LLC)
# All rights reserved.
#
@@ -67,8 +67,8 @@ import inspect
import base64
import warnings
-__version__ = '3.10'
-__tabversion__ = '3.10'
+__version__ = '3.10'
+__tabversion__ = '3.10'
#-----------------------------------------------------------------------------
# === User configurable parameters ===
@@ -309,7 +309,7 @@ class LRParser:
# certain kinds of advanced parsing situations where the lexer and parser interact with
# each other or change states (i.e., manipulation of scope, lexer states, etc.).
#
- # See: https://www.gnu.org/software/bison/manual/html_node/Default-Reductions.html#Default-Reductions
+ # See: https://www.gnu.org/software/bison/manual/html_node/Default-Reductions.html#Default-Reductions
def set_defaulted_states(self):
self.defaulted_states = {}
for state, actions in self.action.items():
@@ -497,8 +497,8 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
del symstack[-plen:]
- self.state = state
- p.callable(pslice)
+ self.state = state
+ p.callable(pslice)
del statestack[-plen:]
#--! DEBUG
debug.info('Result : %s', format_result(pslice[0]))
@@ -508,16 +508,16 @@ class LRParser:
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- symstack.extend(targ[1:-1]) # Put the production slice back on the stack
- statestack.pop() # Pop back one state (before the reduce)
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ symstack.extend(targ[1:-1]) # Put the production slice back on the stack
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
- sym.value = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
-
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -540,7 +540,7 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
- self.state = state
+ self.state = state
p.callable(pslice)
#--! DEBUG
debug.info('Result : %s', format_result(pslice[0]))
@@ -550,15 +550,15 @@ class LRParser:
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- statestack.pop() # Pop back one state (before the reduce)
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
- sym.value = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
-
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -597,7 +597,7 @@ class LRParser:
if self.errorfunc:
if errtoken and not hasattr(errtoken, 'lexer'):
errtoken.lexer = lexer
- self.state = state
+ self.state = state
tok = call_errorfunc(self.errorfunc, errtoken, self)
if self.errorok:
# User must have done some kind of panic
@@ -817,24 +817,24 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
del symstack[-plen:]
- self.state = state
- p.callable(pslice)
+ self.state = state
+ p.callable(pslice)
del statestack[-plen:]
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- symstack.extend(targ[1:-1]) # Put the production slice back on the stack
- statestack.pop() # Pop back one state (before the reduce)
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ symstack.extend(targ[1:-1]) # Put the production slice back on the stack
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
- sym.value = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
-
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -857,22 +857,22 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
- self.state = state
+ self.state = state
p.callable(pslice)
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- statestack.pop() # Pop back one state (before the reduce)
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
- sym.value = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
-
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -903,7 +903,7 @@ class LRParser:
if self.errorfunc:
if errtoken and not hasattr(errtoken, 'lexer'):
errtoken.lexer = lexer
- self.state = state
+ self.state = state
tok = call_errorfunc(self.errorfunc, errtoken, self)
if self.errorok:
# User must have done some kind of panic
@@ -1114,24 +1114,24 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
del symstack[-plen:]
- self.state = state
- p.callable(pslice)
+ self.state = state
+ p.callable(pslice)
del statestack[-plen:]
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- symstack.extend(targ[1:-1]) # Put the production slice back on the stack
- statestack.pop() # Pop back one state (before the reduce)
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ symstack.extend(targ[1:-1]) # Put the production slice back on the stack
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
- sym.value = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
-
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -1149,22 +1149,22 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
- self.state = state
+ self.state = state
p.callable(pslice)
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- statestack.pop() # Pop back one state (before the reduce)
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
- sym.value = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
-
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -1195,7 +1195,7 @@ class LRParser:
if self.errorfunc:
if errtoken and not hasattr(errtoken, 'lexer'):
errtoken.lexer = lexer
- self.state = state
+ self.state = state
tok = call_errorfunc(self.errorfunc, errtoken, self)
if self.errorok:
# User must have done some kind of panic
@@ -2000,7 +2000,7 @@ class LRTable(object):
import cPickle as pickle
except ImportError:
import pickle
-
+
if not os.path.exists(filename):
raise ImportError
@@ -2585,13 +2585,13 @@ class LRGeneratedTable(LRTable):
# Need to decide on shift or reduce here
# By default we favor shifting. Need to add
# some precedence rules here.
-
- # Shift precedence comes from the token
- sprec, slevel = Precedence.get(a, ('right', 0))
-
- # Reduce precedence comes from rule being reduced (p)
- rprec, rlevel = Productions[p.number].prec
-
+
+ # Shift precedence comes from the token
+ sprec, slevel = Precedence.get(a, ('right', 0))
+
+ # Reduce precedence comes from rule being reduced (p)
+ rprec, rlevel = Productions[p.number].prec
+
if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')):
# We really need to reduce here.
st_action[a] = -p.number
@@ -2649,13 +2649,13 @@ class LRGeneratedTable(LRTable):
# - if precedence of reduce rule is higher, we reduce.
# - if precedence of reduce is same and left assoc, we reduce.
# - otherwise we shift
-
- # Shift precedence comes from the token
- sprec, slevel = Precedence.get(a, ('right', 0))
-
- # Reduce precedence comes from the rule that could have been reduced
+
+ # Shift precedence comes from the token
+ sprec, slevel = Precedence.get(a, ('right', 0))
+
+ # Reduce precedence comes from the rule that could have been reduced
rprec, rlevel = Productions[st_actionp[a].number].prec
-
+
if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')):
# We decide to shift here... highest precedence to shift
Productions[st_actionp[a].number].reduced -= 1
@@ -2968,20 +2968,20 @@ class ParserReflect(object):
# Compute a signature over the grammar
def signature(self):
- parts = []
+ parts = []
try:
if self.start:
- parts.append(self.start)
+ parts.append(self.start)
if self.prec:
- parts.append(''.join([''.join(p) for p in self.prec]))
+ parts.append(''.join([''.join(p) for p in self.prec]))
if self.tokens:
- parts.append(' '.join(self.tokens))
+ parts.append(' '.join(self.tokens))
for f in self.pfuncs:
if f[3]:
- parts.append(f[3])
+ parts.append(f[3])
except (TypeError, ValueError):
pass
- return ''.join(parts)
+ return ''.join(parts)
# -----------------------------------------------------------------------------
# validate_modules()
@@ -2999,10 +2999,10 @@ class ParserReflect(object):
fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(')
for module in self.modules:
- try:
- lines, linen = inspect.getsourcelines(module)
- except IOError:
- continue
+ try:
+ lines, linen = inspect.getsourcelines(module)
+ except IOError:
+ continue
counthash = {}
for linen, line in enumerate(lines):
@@ -3130,7 +3130,7 @@ class ParserReflect(object):
if not name.startswith('p_') or name == 'p_error':
continue
if isinstance(item, (types.FunctionType, types.MethodType)):
- line = getattr(item, 'co_firstlineno', item.__code__.co_firstlineno)
+ line = getattr(item, 'co_firstlineno', item.__code__.co_firstlineno)
module = inspect.getmodule(item)
p_functions.append((line, module, name, item.__doc__))
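
The yacc.py hunks above also change only whitespace, inside the LR parser's error-recovery path (push the lookahead back, pop one state, synthesize an 'error' symbol) and the shift/reduce precedence resolution. A minimal grammar that exercises that precedence table, again assuming the vendored pycparser.ply modules; the token and rule names are invented for the example.

from pycparser.ply import lex, yacc

tokens = ('NUMBER', 'PLUS', 'TIMES')
t_PLUS = r'\+'
t_TIMES = r'\*'
t_ignore = ' '

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    t.lexer.skip(1)

# Shift/reduce conflicts are resolved from this table (the slevel/rlevel
# comparison in the hunks above); TIMES binds tighter than PLUS.
precedence = (
    ('left', 'PLUS'),
    ('left', 'TIMES'),
)

def p_expr_binop(p):
    '''expr : expr PLUS expr
            | expr TIMES expr'''
    p[0] = p[1] + p[3] if p[2] == '+' else p[1] * p[3]

def p_expr_number(p):
    'expr : NUMBER'
    p[0] = p[1]

def p_error(p):
    print("Syntax error at %r" % (p,))

lexer = lex.lex()
parser = yacc.yacc(write_tables=False, debug=False)   # no parsetab.py / parser.out
print(parser.parse('2 + 3 * 4', lexer=lexer))          # -> 14
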
diff --git a/contrib/python/pycparser/pycparser/plyparser.py b/contrib/python/pycparser/pycparser/plyparser.py
index b8f4c4395e..43189489e5 100644
--- a/contrib/python/pycparser/pycparser/plyparser.py
+++ b/contrib/python/pycparser/pycparser/plyparser.py
@@ -4,11 +4,11 @@
# PLYParser class and other utilities for simplifying programming
# parsers with PLY
#
-# Eli Bendersky [https://eli.thegreenplace.net/]
+# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#-----------------------------------------------------------------
-import warnings
+import warnings
class Coord(object):
""" Coordinates of a syntactic element. Consists of:
@@ -52,82 +52,82 @@ class PLYParser(object):
line=lineno,
column=column)
- def _token_coord(self, p, token_idx):
+ def _token_coord(self, p, token_idx):
""" Returns the coordinates for the YaccProduction object 'p' indexed
- with 'token_idx'. The coordinate includes the 'lineno' and
- 'column'. Both follow the lex semantic, starting from 1.
- """
- last_cr = p.lexer.lexer.lexdata.rfind('\n', 0, p.lexpos(token_idx))
- if last_cr < 0:
- last_cr = -1
- column = (p.lexpos(token_idx) - (last_cr))
- return self._coord(p.lineno(token_idx), column)
-
+ with 'token_idx'. The coordinate includes the 'lineno' and
+ 'column'. Both follow the lex semantic, starting from 1.
+ """
+ last_cr = p.lexer.lexer.lexdata.rfind('\n', 0, p.lexpos(token_idx))
+ if last_cr < 0:
+ last_cr = -1
+ column = (p.lexpos(token_idx) - (last_cr))
+ return self._coord(p.lineno(token_idx), column)
+
def _parse_error(self, msg, coord):
raise ParseError("%s: %s" % (coord, msg))
-
-
-def parameterized(*params):
- """ Decorator to create parameterized rules.
-
- Parameterized rule methods must be named starting with 'p_' and contain
- 'xxx', and their docstrings may contain 'xxx' and 'yyy'. These will be
- replaced by the given parameter tuples. For example, ``p_xxx_rule()`` with
- docstring 'xxx_rule : yyy' when decorated with
- ``@parameterized(('id', 'ID'))`` produces ``p_id_rule()`` with the docstring
- 'id_rule : ID'. Using multiple tuples produces multiple rules.
- """
- def decorate(rule_func):
- rule_func._params = params
- return rule_func
- return decorate
-
-
-def template(cls):
- """ Class decorator to generate rules from parameterized rule templates.
-
- See `parameterized` for more information on parameterized rules.
- """
- issued_nodoc_warning = False
- for attr_name in dir(cls):
- if attr_name.startswith('p_'):
- method = getattr(cls, attr_name)
- if hasattr(method, '_params'):
- # Remove the template method
- delattr(cls, attr_name)
- # Create parameterized rules from this method; only run this if
- # the method has a docstring. This is to address an issue when
- # pycparser's users are installed in -OO mode which strips
- # docstrings away.
- # See: https://github.com/eliben/pycparser/pull/198/ and
- # https://github.com/eliben/pycparser/issues/197
- # for discussion.
- if method.__doc__ is not None:
- _create_param_rules(cls, method)
- elif not issued_nodoc_warning:
- warnings.warn(
- 'parsing methods must have __doc__ for pycparser to work properly',
- RuntimeWarning,
- stacklevel=2)
- issued_nodoc_warning = True
- return cls
-
-
-def _create_param_rules(cls, func):
- """ Create ply.yacc rules based on a parameterized rule function
-
- Generates new methods (one per each pair of parameters) based on the
- template rule function `func`, and attaches them to `cls`. The rule
- function's parameters must be accessible via its `_params` attribute.
- """
- for xxx, yyy in func._params:
- # Use the template method's body for each new method
- def param_rule(self, p):
- func(self, p)
-
- # Substitute in the params for the grammar rule and function name
- param_rule.__doc__ = func.__doc__.replace('xxx', xxx).replace('yyy', yyy)
- param_rule.__name__ = func.__name__.replace('xxx', xxx)
-
- # Attach the new method to the class
- setattr(cls, param_rule.__name__, param_rule)
+
+
+def parameterized(*params):
+ """ Decorator to create parameterized rules.
+
+ Parameterized rule methods must be named starting with 'p_' and contain
+ 'xxx', and their docstrings may contain 'xxx' and 'yyy'. These will be
+ replaced by the given parameter tuples. For example, ``p_xxx_rule()`` with
+ docstring 'xxx_rule : yyy' when decorated with
+ ``@parameterized(('id', 'ID'))`` produces ``p_id_rule()`` with the docstring
+ 'id_rule : ID'. Using multiple tuples produces multiple rules.
+ """
+ def decorate(rule_func):
+ rule_func._params = params
+ return rule_func
+ return decorate
+
+
+def template(cls):
+ """ Class decorator to generate rules from parameterized rule templates.
+
+ See `parameterized` for more information on parameterized rules.
+ """
+ issued_nodoc_warning = False
+ for attr_name in dir(cls):
+ if attr_name.startswith('p_'):
+ method = getattr(cls, attr_name)
+ if hasattr(method, '_params'):
+ # Remove the template method
+ delattr(cls, attr_name)
+ # Create parameterized rules from this method; only run this if
+ # the method has a docstring. This is to address an issue when
+ # pycparser's users are installed in -OO mode which strips
+ # docstrings away.
+ # See: https://github.com/eliben/pycparser/pull/198/ and
+ # https://github.com/eliben/pycparser/issues/197
+ # for discussion.
+ if method.__doc__ is not None:
+ _create_param_rules(cls, method)
+ elif not issued_nodoc_warning:
+ warnings.warn(
+ 'parsing methods must have __doc__ for pycparser to work properly',
+ RuntimeWarning,
+ stacklevel=2)
+ issued_nodoc_warning = True
+ return cls
+
+
+def _create_param_rules(cls, func):
+ """ Create ply.yacc rules based on a parameterized rule function
+
+ Generates new methods (one per each pair of parameters) based on the
+ template rule function `func`, and attaches them to `cls`. The rule
+ function's parameters must be accessible via its `_params` attribute.
+ """
+ for xxx, yyy in func._params:
+ # Use the template method's body for each new method
+ def param_rule(self, p):
+ func(self, p)
+
+ # Substitute in the params for the grammar rule and function name
+ param_rule.__doc__ = func.__doc__.replace('xxx', xxx).replace('yyy', yyy)
+ param_rule.__name__ = func.__name__.replace('xxx', xxx)
+
+ # Attach the new method to the class
+ setattr(cls, param_rule.__name__, param_rule)
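
The plyparser.py hunk above moves the parameterized/template decorators without changing them. A minimal sketch of what they do, assuming they remain importable from pycparser.plyparser as in this tree; the Rules class and its rule name are made up for illustration.

from pycparser.plyparser import parameterized, template

@template
class Rules(object):
    @parameterized(('abstract_declarator', 'abstract_declarator'),
                   ('assignment_expression', 'assignment_expression'))
    def p_xxx_opt(self, p):
        """ xxx_opt : empty
                    | yyy
        """
        p[0] = p[1]

# template() removed p_xxx_opt and generated one method per parameter tuple,
# substituting xxx/yyy in the name and docstring (the grammar ply.yacc sees).
print(Rules.p_abstract_declarator_opt.__doc__)
# -> " abstract_declarator_opt : empty ... | abstract_declarator"
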
diff --git a/contrib/python/pycparser/pycparser/yacctab.py b/contrib/python/pycparser/pycparser/yacctab.py
index 0622c36602..c877170b69 100644
--- a/contrib/python/pycparser/pycparser/yacctab.py
+++ b/contrib/python/pycparser/pycparser/yacctab.py
@@ -1,7 +1,7 @@
# yacctab.py
# This file is automatically generated. Do not edit.
-_tabversion = '3.10'
+_tabversion = '3.10'
_lr_method = 'LALR'
@@ -26,63 +26,63 @@ for _k, _v in _lr_goto_items.items():
del _lr_goto_items
_lr_productions = [
("S' -> translation_unit_or_empty","S'",1,None,None,None),
- ('abstract_declarator_opt -> empty','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',43),
- ('abstract_declarator_opt -> abstract_declarator','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',44),
- ('assignment_expression_opt -> empty','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',43),
- ('assignment_expression_opt -> assignment_expression','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',44),
- ('block_item_list_opt -> empty','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',43),
- ('block_item_list_opt -> block_item_list','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',44),
- ('declaration_list_opt -> empty','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',43),
- ('declaration_list_opt -> declaration_list','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',44),
- ('declaration_specifiers_no_type_opt -> empty','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',43),
- ('declaration_specifiers_no_type_opt -> declaration_specifiers_no_type','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',44),
- ('designation_opt -> empty','designation_opt',1,'p_designation_opt','plyparser.py',43),
- ('designation_opt -> designation','designation_opt',1,'p_designation_opt','plyparser.py',44),
- ('expression_opt -> empty','expression_opt',1,'p_expression_opt','plyparser.py',43),
- ('expression_opt -> expression','expression_opt',1,'p_expression_opt','plyparser.py',44),
- ('id_init_declarator_list_opt -> empty','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',43),
- ('id_init_declarator_list_opt -> id_init_declarator_list','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',44),
- ('identifier_list_opt -> empty','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',43),
- ('identifier_list_opt -> identifier_list','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',44),
- ('init_declarator_list_opt -> empty','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',43),
- ('init_declarator_list_opt -> init_declarator_list','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',44),
- ('initializer_list_opt -> empty','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',43),
- ('initializer_list_opt -> initializer_list','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',44),
- ('parameter_type_list_opt -> empty','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',43),
- ('parameter_type_list_opt -> parameter_type_list','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',44),
- ('struct_declarator_list_opt -> empty','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',43),
- ('struct_declarator_list_opt -> struct_declarator_list','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',44),
- ('type_qualifier_list_opt -> empty','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',43),
- ('type_qualifier_list_opt -> type_qualifier_list','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',44),
- ('direct_id_declarator -> ID','direct_id_declarator',1,'p_direct_id_declarator_1','plyparser.py',126),
- ('direct_id_declarator -> LPAREN id_declarator RPAREN','direct_id_declarator',3,'p_direct_id_declarator_2','plyparser.py',126),
- ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_3','plyparser.py',126),
- ('direct_id_declarator -> direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',126),
- ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',127),
- ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_5','plyparser.py',126),
- ('direct_id_declarator -> direct_id_declarator LPAREN parameter_type_list RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',126),
- ('direct_id_declarator -> direct_id_declarator LPAREN identifier_list_opt RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',127),
- ('direct_typeid_declarator -> TYPEID','direct_typeid_declarator',1,'p_direct_typeid_declarator_1','plyparser.py',126),
- ('direct_typeid_declarator -> LPAREN typeid_declarator RPAREN','direct_typeid_declarator',3,'p_direct_typeid_declarator_2','plyparser.py',126),
- ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_3','plyparser.py',126),
- ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',126),
- ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',127),
- ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_5','plyparser.py',126),
- ('direct_typeid_declarator -> direct_typeid_declarator LPAREN parameter_type_list RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',126),
- ('direct_typeid_declarator -> direct_typeid_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',127),
- ('direct_typeid_noparen_declarator -> TYPEID','direct_typeid_noparen_declarator',1,'p_direct_typeid_noparen_declarator_1','plyparser.py',126),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_3','plyparser.py',126),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',126),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',127),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_5','plyparser.py',126),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',126),
- ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',127),
- ('id_declarator -> direct_id_declarator','id_declarator',1,'p_id_declarator_1','plyparser.py',126),
- ('id_declarator -> pointer direct_id_declarator','id_declarator',2,'p_id_declarator_2','plyparser.py',126),
- ('typeid_declarator -> direct_typeid_declarator','typeid_declarator',1,'p_typeid_declarator_1','plyparser.py',126),
- ('typeid_declarator -> pointer direct_typeid_declarator','typeid_declarator',2,'p_typeid_declarator_2','plyparser.py',126),
- ('typeid_noparen_declarator -> direct_typeid_noparen_declarator','typeid_noparen_declarator',1,'p_typeid_noparen_declarator_1','plyparser.py',126),
- ('typeid_noparen_declarator -> pointer direct_typeid_noparen_declarator','typeid_noparen_declarator',2,'p_typeid_noparen_declarator_2','plyparser.py',126),
+ ('abstract_declarator_opt -> empty','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',43),
+ ('abstract_declarator_opt -> abstract_declarator','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',44),
+ ('assignment_expression_opt -> empty','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',43),
+ ('assignment_expression_opt -> assignment_expression','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',44),
+ ('block_item_list_opt -> empty','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',43),
+ ('block_item_list_opt -> block_item_list','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',44),
+ ('declaration_list_opt -> empty','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',43),
+ ('declaration_list_opt -> declaration_list','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',44),
+ ('declaration_specifiers_no_type_opt -> empty','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',43),
+ ('declaration_specifiers_no_type_opt -> declaration_specifiers_no_type','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',44),
+ ('designation_opt -> empty','designation_opt',1,'p_designation_opt','plyparser.py',43),
+ ('designation_opt -> designation','designation_opt',1,'p_designation_opt','plyparser.py',44),
+ ('expression_opt -> empty','expression_opt',1,'p_expression_opt','plyparser.py',43),
+ ('expression_opt -> expression','expression_opt',1,'p_expression_opt','plyparser.py',44),
+ ('id_init_declarator_list_opt -> empty','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',43),
+ ('id_init_declarator_list_opt -> id_init_declarator_list','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',44),
+ ('identifier_list_opt -> empty','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',43),
+ ('identifier_list_opt -> identifier_list','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',44),
+ ('init_declarator_list_opt -> empty','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',43),
+ ('init_declarator_list_opt -> init_declarator_list','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',44),
+ ('initializer_list_opt -> empty','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',43),
+ ('initializer_list_opt -> initializer_list','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',44),
+ ('parameter_type_list_opt -> empty','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',43),
+ ('parameter_type_list_opt -> parameter_type_list','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',44),
+ ('struct_declarator_list_opt -> empty','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',43),
+ ('struct_declarator_list_opt -> struct_declarator_list','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',44),
+ ('type_qualifier_list_opt -> empty','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',43),
+ ('type_qualifier_list_opt -> type_qualifier_list','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',44),
+ ('direct_id_declarator -> ID','direct_id_declarator',1,'p_direct_id_declarator_1','plyparser.py',126),
+ ('direct_id_declarator -> LPAREN id_declarator RPAREN','direct_id_declarator',3,'p_direct_id_declarator_2','plyparser.py',126),
+ ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_3','plyparser.py',126),
+ ('direct_id_declarator -> direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',126),
+ ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',127),
+ ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_5','plyparser.py',126),
+ ('direct_id_declarator -> direct_id_declarator LPAREN parameter_type_list RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',126),
+ ('direct_id_declarator -> direct_id_declarator LPAREN identifier_list_opt RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',127),
+ ('direct_typeid_declarator -> TYPEID','direct_typeid_declarator',1,'p_direct_typeid_declarator_1','plyparser.py',126),
+ ('direct_typeid_declarator -> LPAREN typeid_declarator RPAREN','direct_typeid_declarator',3,'p_direct_typeid_declarator_2','plyparser.py',126),
+ ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_3','plyparser.py',126),
+ ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',126),
+ ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',127),
+ ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_5','plyparser.py',126),
+ ('direct_typeid_declarator -> direct_typeid_declarator LPAREN parameter_type_list RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',126),
+ ('direct_typeid_declarator -> direct_typeid_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',127),
+ ('direct_typeid_noparen_declarator -> TYPEID','direct_typeid_noparen_declarator',1,'p_direct_typeid_noparen_declarator_1','plyparser.py',126),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_3','plyparser.py',126),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',126),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',127),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_5','plyparser.py',126),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',126),
+ ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',127),
+ ('id_declarator -> direct_id_declarator','id_declarator',1,'p_id_declarator_1','plyparser.py',126),
+ ('id_declarator -> pointer direct_id_declarator','id_declarator',2,'p_id_declarator_2','plyparser.py',126),
+ ('typeid_declarator -> direct_typeid_declarator','typeid_declarator',1,'p_typeid_declarator_1','plyparser.py',126),
+ ('typeid_declarator -> pointer direct_typeid_declarator','typeid_declarator',2,'p_typeid_declarator_2','plyparser.py',126),
+ ('typeid_noparen_declarator -> direct_typeid_noparen_declarator','typeid_noparen_declarator',1,'p_typeid_noparen_declarator_1','plyparser.py',126),
+ ('typeid_noparen_declarator -> pointer direct_typeid_noparen_declarator','typeid_noparen_declarator',2,'p_typeid_noparen_declarator_2','plyparser.py',126),
('translation_unit_or_empty -> translation_unit','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',509),
('translation_unit_or_empty -> empty','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',510),
('translation_unit -> external_declaration','translation_unit',1,'p_translation_unit_1','c_parser.py',518),
diff --git a/contrib/python/six/six.py b/contrib/python/six/six.py
index 4cba03c75f..bb1958ae94 100644
--- a/contrib/python/six/six.py
+++ b/contrib/python/six/six.py
@@ -18,8 +18,8 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
-"""Utilities for writing code that runs on Python 2 and 3"""
-
+"""Utilities for writing code that runs on Python 2 and 3"""
+
from __future__ import absolute_import
import functools
@@ -257,7 +257,7 @@ _moved_attributes = [
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
- MovedAttribute("getoutput", "commands", "subprocess"),
+ MovedAttribute("getoutput", "commands", "subprocess"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
@@ -281,8 +281,8 @@ _moved_attributes = [
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
- MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
- MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
@@ -357,12 +357,12 @@ _urllib_parse_moved_attributes = [
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
- MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
+ MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
- MovedAttribute("splitvalue", "urllib", "urllib.parse"),
+ MovedAttribute("splitvalue", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
@@ -438,8 +438,8 @@ _urllib_request_moved_attributes = [
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
- MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
- MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
+ MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
+ MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
@@ -711,15 +711,15 @@ if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
- try:
- if value is None:
- value = tp()
- if value.__traceback__ is not tb:
- raise value.with_traceback(tb)
- raise value
- finally:
- value = None
- tb = None
+ try:
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+ finally:
+ value = None
+ tb = None
else:
def exec_(_code_, _globs_=None, _locs_=None):
@@ -735,19 +735,19 @@ else:
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
- try:
- raise tp, value, tb
- finally:
- tb = None
+ try:
+ raise tp, value, tb
+ finally:
+ tb = None
""")
if sys.version_info[:2] > (3,):
exec_("""def raise_from(value, from_value):
- try:
- raise value from from_value
- finally:
- value = None
+ try:
+ raise value from from_value
+ finally:
+ value = None
""")
else:
def raise_from(value, from_value):
@@ -858,7 +858,7 @@ def with_metaclass(meta, *bases):
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
- class metaclass(type):
+ class metaclass(type):
def __new__(cls, name, this_bases, d):
if sys.version_info[:2] >= (3, 7):
@@ -870,10 +870,10 @@ def with_metaclass(meta, *bases):
else:
resolved_bases = bases
return meta(name, resolved_bases, d)
-
- @classmethod
- def __prepare__(cls, name, this_bases):
- return meta.__prepare__(name, bases)
+
+ @classmethod
+ def __prepare__(cls, name, this_bases):
+ return meta.__prepare__(name, bases)
return type.__new__(metaclass, 'temporary_class', (), {})
@@ -889,72 +889,72 @@ def add_metaclass(metaclass):
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
- if hasattr(cls, '__qualname__'):
- orig_vars['__qualname__'] = cls.__qualname__
+ if hasattr(cls, '__qualname__'):
+ orig_vars['__qualname__'] = cls.__qualname__
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
-def ensure_binary(s, encoding='utf-8', errors='strict'):
- """Coerce **s** to six.binary_type.
-
- For Python 2:
- - `unicode` -> encoded to `str`
- - `str` -> `str`
-
- For Python 3:
- - `str` -> encoded to `bytes`
- - `bytes` -> `bytes`
- """
+def ensure_binary(s, encoding='utf-8', errors='strict'):
+ """Coerce **s** to six.binary_type.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> encoded to `bytes`
+ - `bytes` -> `bytes`
+ """
if isinstance(s, binary_type):
return s
- if isinstance(s, text_type):
- return s.encode(encoding, errors)
+ if isinstance(s, text_type):
+ return s.encode(encoding, errors)
raise TypeError("not expecting type '%s'" % type(s))
-
-
-def ensure_str(s, encoding='utf-8', errors='strict'):
- """Coerce *s* to `str`.
-
- For Python 2:
- - `unicode` -> encoded to `str`
- - `str` -> `str`
-
- For Python 3:
- - `str` -> `str`
- - `bytes` -> decoded to `str`
- """
+
+
+def ensure_str(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to `str`.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
# Optimization: Fast return for the common case.
if type(s) is str:
return s
- if PY2 and isinstance(s, text_type):
+ if PY2 and isinstance(s, text_type):
return s.encode(encoding, errors)
- elif PY3 and isinstance(s, binary_type):
+ elif PY3 and isinstance(s, binary_type):
return s.decode(encoding, errors)
elif not isinstance(s, (text_type, binary_type)):
raise TypeError("not expecting type '%s'" % type(s))
- return s
-
-
-def ensure_text(s, encoding='utf-8', errors='strict'):
- """Coerce *s* to six.text_type.
-
- For Python 2:
- - `unicode` -> `unicode`
- - `str` -> `unicode`
-
- For Python 3:
- - `str` -> `str`
- - `bytes` -> decoded to `str`
- """
- if isinstance(s, binary_type):
- return s.decode(encoding, errors)
- elif isinstance(s, text_type):
- return s
- else:
- raise TypeError("not expecting type '%s'" % type(s))
-
-
+ return s
+
+
+def ensure_text(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to six.text_type.
+
+ For Python 2:
+ - `unicode` -> `unicode`
+ - `str` -> `unicode`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif isinstance(s, text_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
def python_2_unicode_compatible(klass):
"""
A class decorator that defines __unicode__ and __str__ methods under Python 2.
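
The six.py hunks above reflow whitespace around the ensure_binary / ensure_str / ensure_text helpers and the metaclass utilities without altering behaviour. A quick sketch of how they behave on Python 3 (on Python 2, ensure_str() encodes instead of decoding):

import six

assert six.ensure_binary(u'caf\u00e9') == b'caf\xc3\xa9'   # text  -> UTF-8 bytes
assert six.ensure_text(b'caf\xc3\xa9') == u'caf\u00e9'     # bytes -> text
assert six.ensure_str(b'abc') == 'abc'                     # native str either way

# with_metaclass(): the dummy-metaclass trick described in the comment above.
class Meta(type):
    pass

class Base(six.with_metaclass(Meta, object)):
    pass

assert type(Base) is Meta
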
diff --git a/contrib/python/six/ya.make b/contrib/python/six/ya.make
index e0c7849214..446e57edc3 100644
--- a/contrib/python/six/ya.make
+++ b/contrib/python/six/ya.make
@@ -5,7 +5,7 @@ PY23_LIBRARY()
LICENSE(MIT)
VERSION(1.16.0)
-
+
PY_SRCS(
TOP_LEVEL
six.py
diff --git a/contrib/python/traitlets/py2/traitlets/traitlets.py b/contrib/python/traitlets/py2/traitlets/traitlets.py
index c07daf7400..c60dabe0db 100644
--- a/contrib/python/traitlets/py2/traitlets/traitlets.py
+++ b/contrib/python/traitlets/py2/traitlets/traitlets.py
@@ -416,8 +416,8 @@ class TraitType(BaseDescriptor):
read_only = False
info_text = 'any value'
- def __init__(self, default_value=Undefined, allow_none=False, read_only=None, help=None,
- config=None, **kwargs):
+ def __init__(self, default_value=Undefined, allow_none=False, read_only=None, help=None,
+ config=None, **kwargs):
"""Declare a traitlet.
If *allow_none* is True, None is a valid value in addition to any
@@ -457,8 +457,8 @@ class TraitType(BaseDescriptor):
self.metadata = kwargs
else:
self.metadata = self.metadata.copy()
- if config is not None:
- self.metadata['config'] = config
+ if config is not None:
+ self.metadata['config'] = config
# We add help to the metadata during a deprecation period so that
# code that looks for the help string there can find it.
diff --git a/contrib/python/traitlets/py3/traitlets/traitlets.py b/contrib/python/traitlets/py3/traitlets/traitlets.py
index 6bdf7414d3..f85dce335f 100644
--- a/contrib/python/traitlets/py3/traitlets/traitlets.py
+++ b/contrib/python/traitlets/py3/traitlets/traitlets.py
@@ -432,8 +432,8 @@ class TraitType(BaseDescriptor):
info_text = 'any value'
default_value = Undefined
- def __init__(self, default_value=Undefined, allow_none=False, read_only=None, help=None,
- config=None, **kwargs):
+ def __init__(self, default_value=Undefined, allow_none=False, read_only=None, help=None,
+ config=None, **kwargs):
"""Declare a traitlet.
If *allow_none* is True, None is a valid value in addition to any
@@ -475,8 +475,8 @@ class TraitType(BaseDescriptor):
self.metadata = kwargs
else:
self.metadata = self.metadata.copy()
- if config is not None:
- self.metadata['config'] = config
+ if config is not None:
+ self.metadata['config'] = config
# We add help to the metadata during a deprecation period so that
# code that looks for the help string there can find it.
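
Both traitlets copies above receive the same whitespace-only change around TraitType.__init__ and its deprecated config= keyword. A minimal sketch of the keyword arguments involved (allow_none, read_only, help), using the public traitlets API; the Job class is invented for illustration.

from traitlets import HasTraits, Int, TraitError

class Job(HasTraits):
    retries = Int(3, allow_none=True, help="How many times to retry")
    job_id = Int(0, read_only=True)

j = Job()
j.retries = None              # accepted because allow_none=True
try:
    j.job_id = 42             # read-only traits reject plain assignment
except TraitError as err:
    print("rejected:", err)
j.set_trait('job_id', 42)     # sanctioned way to set a read-only trait
print(j.retries, j.job_id)    # -> None 42
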
diff --git a/contrib/python/ya.make b/contrib/python/ya.make
index d01ced9f3a..cbfe01c481 100644
--- a/contrib/python/ya.make
+++ b/contrib/python/ya.make
@@ -220,7 +220,7 @@ RECURSE(
django-ajax-selects
django-alive
django-autoconfig
- django-appconf
+ django-appconf
django-bootstrap3
django-braces
django-bulk-update
@@ -264,7 +264,7 @@ RECURSE(
django-json-widget
django-markwhat
django-model-choices
- django-model-utils
+ django-model-utils
django-modeladmin-reorder
django-modeltranslation
django-moderation
@@ -352,15 +352,15 @@ RECURSE(
face
facebook-business
factory-boy
- Faker
+ Faker
fakeredis
falcon
falcon-cors
falcon-multipart
fallocate
fancycompleter
- fastapi
- fastapi-utils
+ fastapi
+ fastapi-utils
fastdtw
fasteners
fastjsonschema
@@ -437,9 +437,9 @@ RECURSE(
geobuf
geoindex
gevent
- gino
+ gino
gitdb2
- github3.py
+ github3.py
GitPython
glob2
glom
@@ -639,7 +639,7 @@ RECURSE(
mpegdash
mpi4py
mpmath
- msal
+ msal
msgpack
mujson
multidict
@@ -659,7 +659,7 @@ RECURSE(
nbconvert
nbformat
ncclient
- ndg-httpsclient
+ ndg-httpsclient
nest-asyncio
nested-diff
netaddr
@@ -992,7 +992,7 @@ RECURSE(
setproctitle
setuptools
sgmllib3k
- sh
+ sh
Shapely
shortuuid
simplediff
@@ -1032,7 +1032,7 @@ RECURSE(
sshpubkeys
sshtunnel
stack-data
- starlette
+ starlette
statsd
statsmodels
stevedore
@@ -1058,7 +1058,7 @@ RECURSE(
terminado
terminaltables
testpath
- text-unidecode
+ text-unidecode
textdata
texttable
textwrap3
@@ -1088,7 +1088,7 @@ RECURSE(
trollius
trollsift
Twiggy
- twiggy-goodies
+ twiggy-goodies
Twisted
txaio
txredisapi
@@ -1151,7 +1151,7 @@ RECURSE(
weighted-levenshtein
Werkzeug
wheel
- whitenoise
+ whitenoise
whodap
wmctrl
wrapt
diff --git a/library/python/ya.make b/library/python/ya.make
index 2e1eb6e0e1..99bd0a06d0 100644
--- a/library/python/ya.make
+++ b/library/python/ya.make
@@ -7,17 +7,17 @@ RECURSE(
archive/benchmark
archive/test
archive/test/data
- asgi_yauth
- async_clients
+ asgi_yauth
+ async_clients
auth_client_parser
awssdk-extensions
awssdk_async_extensions
base64
base64/test
bclclient
- blackbox
- blackbox/tests
- blackbox/tvm2
+ blackbox
+ blackbox/tests
+ blackbox/tvm2
bloom
boost_test
bstr
@@ -52,16 +52,16 @@ RECURSE(
cyson/ut
deploy_formatter
deprecated
- dir-sync
+ dir-sync
django
django/example
- django-idm-api
- django-multic
+ django-idm-api
+ django-multic
django-sform
django-sform/tests
- django_alive
+ django_alive
django_celery_monitoring
- django_russian
+ django_russian
django_template_common
django_tools_log_context
dssclient
@@ -86,15 +86,15 @@ RECURSE(
geolocation/ut
geohash
geohash/ut
- golovan_stats_aggregator
- granular_settings
- granular_settings/tests
+ golovan_stats_aggregator
+ granular_settings
+ granular_settings/tests
guid
guid/test
guid/at_fork_test
gunicorn
hnsw
- ids
+ ids
import_test
infected_masks
infected_masks/ut
@@ -116,7 +116,7 @@ RECURSE(
luigi/luigid_static
maths
messagebus
- metrics_framework
+ metrics_framework
mime_types
monitoring
monlib
@@ -139,16 +139,16 @@ RECURSE(
path/tests
protobuf
pymain
- pyscopg2
+ pyscopg2
pytest
pytest-mongodb
pytest/allure
pytest/empty
pytest/plugins
python-blackboxer
- python-django-tanker
- python-django-yauth/tests
- python-django-yauth
+ python-django-tanker
+ python-django-yauth/tests
+ python-django-yauth
reactor
redis_utils
reservoir_sampling
@@ -174,8 +174,8 @@ RECURSE(
spyt
ssh_client
ssh_sign
- startrek_python_client
- startrek_python_client/tests_int
+ startrek_python_client
+ startrek_python_client/tests_int
statface_client
step
strings
@@ -195,8 +195,8 @@ RECURSE(
thread/test
tskv
tvmauth
- tvm2
- tvm2/tests
+ tvm2
+ tvm2/tests
type_info
type_info/test
unique_id
@@ -206,12 +206,12 @@ RECURSE(
wiki
windows
windows/ut
- yandex_tracker_client
+ yandex_tracker_client
yenv
yt
yt/test
- ylock
- ylock/tests
+ ylock
+ ylock/tests
zipatch
)