author     orivej <orivej@yandex-team.ru>                 2022-02-10 16:45:01 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>   2022-02-10 16:45:01 +0300
commit     2d37894b1b037cf24231090eda8589bbb44fb6fc (patch)
tree       be835aa92c6248212e705f25388ebafcf84bc7a1 /contrib/tools/cython/Cython/Build
parent     718c552901d703c502ccbefdfc3c9028d608b947 (diff)
download   ydb-2d37894b1b037cf24231090eda8589bbb44fb6fc.tar.gz
Restoring authorship annotation for <orivej@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'contrib/tools/cython/Cython/Build')
-rw-r--r--  contrib/tools/cython/Cython/Build/Cythonize.py                 94
-rw-r--r--  contrib/tools/cython/Cython/Build/Dependencies.py             638
-rw-r--r--  contrib/tools/cython/Cython/Build/Distutils.py                  2
-rw-r--r--  contrib/tools/cython/Cython/Build/Inline.py                   160
-rw-r--r--  contrib/tools/cython/Cython/Build/IpythonMagic.py              44
-rw-r--r--  contrib/tools/cython/Cython/Build/Tests/TestCyCache.py        212
-rw-r--r--  contrib/tools/cython/Cython/Build/Tests/TestInline.py          38
-rw-r--r--  contrib/tools/cython/Cython/Build/Tests/TestIpythonMagic.py    64
-rw-r--r--  contrib/tools/cython/Cython/Build/Tests/TestStripLiterals.py    4
-rw-r--r--  contrib/tools/cython/Cython/Build/__init__.py                   2
10 files changed, 629 insertions(+), 629 deletions(-)
diff --git a/contrib/tools/cython/Cython/Build/Cythonize.py b/contrib/tools/cython/Cython/Build/Cythonize.py
index caa3cebc0e..c85b6eabab 100644
--- a/contrib/tools/cython/Cython/Build/Cythonize.py
+++ b/contrib/tools/cython/Cython/Build/Cythonize.py
@@ -21,27 +21,27 @@ except ImportError:
class _FakePool(object):
def map_async(self, func, args):
- try:
- from itertools import imap
- except ImportError:
- imap=map
+ try:
+ from itertools import imap
+ except ImportError:
+ imap=map
for _ in imap(func, args):
pass
- def close(self):
- pass
+ def close(self):
+ pass
+
+ def terminate(self):
+ pass
+
+ def join(self):
+ pass
- def terminate(self):
- pass
- def join(self):
- pass
-
-
def parse_directives(option, name, value, parser):
dest = option.dest
old_directives = dict(getattr(parser.values, dest,
- Options.get_directive_defaults()))
+ Options.get_directive_defaults()))
directives = Options.parse_directive_list(
value, relaxed_bool=True, current_settings=old_directives)
setattr(parser.values, dest, directives)
@@ -60,13 +60,13 @@ def parse_options(option, name, value, parser):
setattr(parser.values, dest, options)
-def parse_compile_time_env(option, name, value, parser):
- dest = option.dest
- old_env = dict(getattr(parser.values, dest, {}))
- new_env = Options.parse_compile_time_env(value, current_settings=old_env)
- setattr(parser.values, dest, new_env)
-
-
+def parse_compile_time_env(option, name, value, parser):
+ dest = option.dest
+ old_env = dict(getattr(parser.values, dest, {}))
+ new_env = Options.parse_compile_time_env(value, current_settings=old_env)
+ setattr(parser.values, dest, new_env)
+
+
def find_package_base(path):
base_dir, package_path = os.path.split(path)
while os.path.isfile(os.path.join(base_dir, '__init__.py')):
@@ -77,9 +77,9 @@ def find_package_base(path):
def cython_compile(path_pattern, options):
pool = None
- all_paths = map(os.path.abspath, extended_iglob(path_pattern))
+ all_paths = map(os.path.abspath, extended_iglob(path_pattern))
try:
- for path in all_paths:
+ for path in all_paths:
if options.build_inplace:
base_dir = path
while not os.path.isdir(base_dir) or is_package_dir(base_dir):
@@ -89,7 +89,7 @@ def cython_compile(path_pattern, options):
if os.path.isdir(path):
# recursively compiling a package
- paths = [os.path.join(path, '**', '*.{py,pyx}')]
+ paths = [os.path.join(path, '**', '*.{py,pyx}')]
else:
# assume it's a file(-like thing)
paths = [path]
@@ -100,7 +100,7 @@ def cython_compile(path_pattern, options):
exclude_failures=options.keep_going,
exclude=options.excludes,
compiler_directives=options.directives,
- compile_time_env=options.compile_time_env,
+ compile_time_env=options.compile_time_env,
force=options.force,
quiet=options.quiet,
depfile=options.depfile,
@@ -153,26 +153,26 @@ def parse_args(args):
from optparse import OptionParser
parser = OptionParser(usage='%prog [options] [sources and packages]+')
- parser.add_option('-X', '--directive', metavar='NAME=VALUE,...',
- dest='directives', default={}, type="str",
- action='callback', callback=parse_directives,
+ parser.add_option('-X', '--directive', metavar='NAME=VALUE,...',
+ dest='directives', default={}, type="str",
+ action='callback', callback=parse_directives,
help='set a compiler directive')
- parser.add_option('-E', '--compile-time-env', metavar='NAME=VALUE,...',
- dest='compile_time_env', default={}, type="str",
- action='callback', callback=parse_compile_time_env,
- help='set a compile time environment variable')
- parser.add_option('-s', '--option', metavar='NAME=VALUE',
- dest='options', default={}, type="str",
- action='callback', callback=parse_options,
+ parser.add_option('-E', '--compile-time-env', metavar='NAME=VALUE,...',
+ dest='compile_time_env', default={}, type="str",
+ action='callback', callback=parse_compile_time_env,
+ help='set a compile time environment variable')
+ parser.add_option('-s', '--option', metavar='NAME=VALUE',
+ dest='options', default={}, type="str",
+ action='callback', callback=parse_options,
help='set a cythonize option')
- parser.add_option('-2', dest='language_level', action='store_const', const=2, default=None,
- help='use Python 2 syntax mode by default')
- parser.add_option('-3', dest='language_level', action='store_const', const=3,
+ parser.add_option('-2', dest='language_level', action='store_const', const=2, default=None,
+ help='use Python 2 syntax mode by default')
+ parser.add_option('-3', dest='language_level', action='store_const', const=3,
+ help='use Python 3 syntax mode by default')
+ parser.add_option('--3str', dest='language_level', action='store_const', const='3str',
help='use Python 3 syntax mode by default')
- parser.add_option('--3str', dest='language_level', action='store_const', const='3str',
- help='use Python 3 syntax mode by default')
- parser.add_option('-a', '--annotate', dest='annotate', action='store_true',
- help='generate annotated HTML page for source files')
+ parser.add_option('-a', '--annotate', dest='annotate', action='store_true',
+ help='generate annotated HTML page for source files')
parser.add_option('-x', '--exclude', metavar='PATTERN', dest='excludes',
action='append', default=[],
@@ -204,9 +204,9 @@ def parse_args(args):
options.build = True
if multiprocessing is None:
options.parallel = 0
- if options.language_level:
- assert options.language_level in (2, 3, '3str')
- options.options['language_level'] = options.language_level
+ if options.language_level:
+ assert options.language_level in (2, 3, '3str')
+ options.options['language_level'] = options.language_level
return options, args
@@ -218,9 +218,9 @@ def main(args=None):
Options.error_on_unknown_names = False
Options.error_on_uninitialized = False
- if options.annotate:
- Options.annotate = True
-
+ if options.annotate:
+ Options.annotate = True
+
for path in paths:
cython_compile(path, options)
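
For reference, a minimal sketch of driving this front end programmatically rather than through the cythonize script; the path pkg/*.pyx is a placeholder, and only options parsed above (-3, -a, -X) are used:

    # Sketch only: 'pkg' is a hypothetical directory of .pyx sources.
    from Cython.Build.Cythonize import main

    # Roughly equivalent to the shell command:
    #   cythonize -3 -a -X boundscheck=False 'pkg/*.pyx'
    main(['-3', '-a', '-X', 'boundscheck=False', 'pkg/*.pyx'])
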
diff --git a/contrib/tools/cython/Cython/Build/Dependencies.py b/contrib/tools/cython/Cython/Build/Dependencies.py
index ac5a3a10c0..7eb55e2607 100644
--- a/contrib/tools/cython/Cython/Build/Dependencies.py
+++ b/contrib/tools/cython/Cython/Build/Dependencies.py
@@ -1,22 +1,22 @@
-from __future__ import absolute_import, print_function
+from __future__ import absolute_import, print_function
import cython
from .. import __version__
-import collections
-import contextlib
-import hashlib
+import collections
+import contextlib
+import hashlib
import os
import shutil
import subprocess
import re, sys, time
-import warnings
+import warnings
from glob import iglob
from io import open as io_open
from os.path import relpath as _relpath
from distutils.extension import Extension
from distutils.util import strtobool
-import zipfile
+import zipfile
try:
from collections.abc import Iterable
@@ -32,24 +32,24 @@ except ImportError:
gzip_ext = ''
try:
- import zlib
- zipfile_compression_mode = zipfile.ZIP_DEFLATED
-except ImportError:
- zipfile_compression_mode = zipfile.ZIP_STORED
-
-try:
+ import zlib
+ zipfile_compression_mode = zipfile.ZIP_DEFLATED
+except ImportError:
+ zipfile_compression_mode = zipfile.ZIP_STORED
+
+try:
import pythran
except:
- pythran = None
+ pythran = None
from .. import Utils
-from ..Utils import (cached_function, cached_method, path_exists,
- safe_makedirs, copy_file_to_dir_if_newer, is_package_dir, replace_suffix)
+from ..Utils import (cached_function, cached_method, path_exists,
+ safe_makedirs, copy_file_to_dir_if_newer, is_package_dir, replace_suffix)
from ..Compiler.Main import Context, CompilationOptions, default_options
join_path = cached_function(os.path.join)
-copy_once_if_newer = cached_function(copy_file_to_dir_if_newer)
-safe_makedirs_once = cached_function(safe_makedirs)
+copy_once_if_newer = cached_function(copy_file_to_dir_if_newer)
+safe_makedirs_once = cached_function(safe_makedirs)
if sys.version_info[0] < 3:
# stupid Py2 distutils enforces str type in list of sources
@@ -57,7 +57,7 @@ if sys.version_info[0] < 3:
if _fs_encoding is None:
_fs_encoding = sys.getdefaultencoding()
def encode_filename_in_py2(filename):
- if not isinstance(filename, bytes):
+ if not isinstance(filename, bytes):
return filename.encode(_fs_encoding)
return filename
else:
@@ -65,7 +65,7 @@ else:
return filename
basestring = str
-
+
def _make_relative(file_paths, base=None):
if not base:
base = os.getcwd()
@@ -76,14 +76,14 @@ def _make_relative(file_paths, base=None):
def extended_iglob(pattern):
- if '{' in pattern:
- m = re.match('(.*){([^}]+)}(.*)', pattern)
- if m:
- before, switch, after = m.groups()
- for case in switch.split(','):
- for path in extended_iglob(before + case + after):
- yield path
- return
+ if '{' in pattern:
+ m = re.match('(.*){([^}]+)}(.*)', pattern)
+ if m:
+ before, switch, after = m.groups()
+ for case in switch.split(','):
+ for path in extended_iglob(before + case + after):
+ yield path
+ return
if '**/' in pattern:
seen = set()
first, rest = pattern.split('**/', 1)
@@ -104,60 +104,60 @@ def extended_iglob(pattern):
for path in iglob(pattern):
yield path
-
-def nonempty(it, error_msg="expected non-empty iterator"):
- empty = True
- for value in it:
- empty = False
- yield value
- if empty:
- raise ValueError(error_msg)
-
-
+
+def nonempty(it, error_msg="expected non-empty iterator"):
+ empty = True
+ for value in it:
+ empty = False
+ yield value
+ if empty:
+ raise ValueError(error_msg)
+
+
@cached_function
def file_hash(filename):
- path = os.path.normpath(filename)
- prefix = ('%d:%s' % (len(path), path)).encode("UTF-8")
+ path = os.path.normpath(filename)
+ prefix = ('%d:%s' % (len(path), path)).encode("UTF-8")
m = hashlib.md5(prefix)
- with open(path, 'rb') as f:
+ with open(path, 'rb') as f:
data = f.read(65000)
while data:
m.update(data)
data = f.read(65000)
return m.hexdigest()
-
-def update_pythran_extension(ext):
- if pythran is None:
- raise RuntimeError("You first need to install Pythran to use the np_pythran directive.")
- try:
- pythran_ext = pythran.config.make_extension(python=True)
- except TypeError: # older pythran version only
- pythran_ext = pythran.config.make_extension()
-
- ext.include_dirs.extend(pythran_ext['include_dirs'])
- ext.extra_compile_args.extend(pythran_ext['extra_compile_args'])
- ext.extra_link_args.extend(pythran_ext['extra_link_args'])
- ext.define_macros.extend(pythran_ext['define_macros'])
- ext.undef_macros.extend(pythran_ext['undef_macros'])
- ext.library_dirs.extend(pythran_ext['library_dirs'])
- ext.libraries.extend(pythran_ext['libraries'])
- ext.language = 'c++'
-
- # These options are not compatible with the way normal Cython extensions work
- for bad_option in ["-fwhole-program", "-fvisibility=hidden"]:
- try:
- ext.extra_compile_args.remove(bad_option)
- except ValueError:
- pass
-
-
+
+def update_pythran_extension(ext):
+ if pythran is None:
+ raise RuntimeError("You first need to install Pythran to use the np_pythran directive.")
+ try:
+ pythran_ext = pythran.config.make_extension(python=True)
+ except TypeError: # older pythran version only
+ pythran_ext = pythran.config.make_extension()
+
+ ext.include_dirs.extend(pythran_ext['include_dirs'])
+ ext.extra_compile_args.extend(pythran_ext['extra_compile_args'])
+ ext.extra_link_args.extend(pythran_ext['extra_link_args'])
+ ext.define_macros.extend(pythran_ext['define_macros'])
+ ext.undef_macros.extend(pythran_ext['undef_macros'])
+ ext.library_dirs.extend(pythran_ext['library_dirs'])
+ ext.libraries.extend(pythran_ext['libraries'])
+ ext.language = 'c++'
+
+ # These options are not compatible with the way normal Cython extensions work
+ for bad_option in ["-fwhole-program", "-fvisibility=hidden"]:
+ try:
+ ext.extra_compile_args.remove(bad_option)
+ except ValueError:
+ pass
+
+
def parse_list(s):
"""
- >>> parse_list("")
- []
- >>> parse_list("a")
- ['a']
+ >>> parse_list("")
+ []
+ >>> parse_list("a")
+ ['a']
>>> parse_list("a b c")
['a', 'b', 'c']
>>> parse_list("[a, b, c]")
@@ -167,7 +167,7 @@ def parse_list(s):
>>> parse_list('[a, ",a", "a,", ",", ]')
['a', ',a', 'a,', ',']
"""
- if len(s) >= 2 and s[0] == '[' and s[-1] == ']':
+ if len(s) >= 2 and s[0] == '[' and s[-1] == ']':
s = s[1:-1]
delimiter = ','
else:
@@ -181,7 +181,7 @@ def parse_list(s):
return literal
return [unquote(item) for item in s.split(delimiter) if item.strip()]
-
+
transitive_str = object()
transitive_list = object()
bool_or = object()
@@ -204,8 +204,8 @@ distutils_settings = {
'np_pythran': bool_or
}
-
-@cython.locals(start=cython.Py_ssize_t, end=cython.Py_ssize_t)
+
+@cython.locals(start=cython.Py_ssize_t, end=cython.Py_ssize_t)
def line_iter(source):
if isinstance(source, basestring):
start = 0
@@ -220,30 +220,30 @@ def line_iter(source):
for line in source:
yield line
-
+
class DistutilsInfo(object):
def __init__(self, source=None, exn=None):
self.values = {}
if source is not None:
for line in line_iter(source):
- line = line.lstrip()
- if not line:
- continue
- if line[0] != '#':
+ line = line.lstrip()
+ if not line:
+ continue
+ if line[0] != '#':
break
- line = line[1:].lstrip()
+ line = line[1:].lstrip()
kind = next((k for k in ("distutils:","cython:") if line.startswith(k)), None)
- if kind is not None:
+ if kind is not None:
key, _, value = [s.strip() for s in line[len(kind):].partition('=')]
type = distutils_settings.get(key, None)
if line.startswith("cython:") and type is None: continue
if type in (list, transitive_list):
value = parse_list(value)
if key == 'define_macros':
- value = [tuple(macro.split('=', 1))
- if '=' in macro else (macro, None)
- for macro in value]
+ value = [tuple(macro.split('=', 1))
+ if '=' in macro else (macro, None)
+ for macro in value]
if type is bool_or:
value = strtobool(value)
self.values[key] = value
@@ -264,13 +264,13 @@ class DistutilsInfo(object):
self.values[key] = value
elif type is transitive_list:
if key in self.values:
- # Change a *copy* of the list (Trac #845)
- all = self.values[key][:]
+ # Change a *copy* of the list (Trac #845)
+ all = self.values[key][:]
for v in value:
if v not in all:
all.append(v)
- value = all
- self.values[key] = value
+ value = all
+ self.values[key] = value
elif type is bool_or:
self.values[key] = self.values.get(key, False) | value
return self
@@ -301,14 +301,14 @@ class DistutilsInfo(object):
for key, value in self.values.items():
type = distutils_settings[key]
if type in [list, transitive_list]:
- value = getattr(extension, key) + list(value)
- setattr(extension, key, value)
-
-
-@cython.locals(start=cython.Py_ssize_t, q=cython.Py_ssize_t,
- single_q=cython.Py_ssize_t, double_q=cython.Py_ssize_t,
- hash_mark=cython.Py_ssize_t, end=cython.Py_ssize_t,
- k=cython.Py_ssize_t, counter=cython.Py_ssize_t, quote_len=cython.Py_ssize_t)
+ value = getattr(extension, key) + list(value)
+ setattr(extension, key, value)
+
+
+@cython.locals(start=cython.Py_ssize_t, q=cython.Py_ssize_t,
+ single_q=cython.Py_ssize_t, double_q=cython.Py_ssize_t,
+ hash_mark=cython.Py_ssize_t, end=cython.Py_ssize_t,
+ k=cython.Py_ssize_t, counter=cython.Py_ssize_t, quote_len=cython.Py_ssize_t)
def strip_string_literals(code, prefix='__Pyx_L'):
"""
Normalizes every string literal to be of the form '__Pyx_Lxxx',
@@ -333,8 +333,8 @@ def strip_string_literals(code, prefix='__Pyx_L'):
if double_q < q:
double_q = code.find('"', q)
q = min(single_q, double_q)
- if q == -1:
- q = max(single_q, double_q)
+ if q == -1:
+ q = max(single_q, double_q)
# We're done.
if q == -1 and hash_mark == -1:
@@ -350,8 +350,8 @@ def strip_string_literals(code, prefix='__Pyx_L'):
if k % 2 == 0:
q += 1
continue
- if code[q] == quote_type and (
- quote_len == 1 or (code_len > q + 2 and quote_type == code[q+1] == code[q+2])):
+ if code[q] == quote_type and (
+ quote_len == 1 or (code_len > q + 2 and quote_type == code[q+1] == code[q+2])):
counter += 1
label = "%s%s_" % (prefix, counter)
literals[label] = code[start+quote_len:q]
@@ -396,23 +396,23 @@ def strip_string_literals(code, prefix='__Pyx_L'):
return "".join(new_code), literals
-# We need to allow spaces to allow for conditional compilation like
-# IF ...:
-# cimport ...
-dependency_regex = re.compile(r"(?:^\s*from +([0-9a-zA-Z_.]+) +cimport)|"
- r"(?:^\s*cimport +([0-9a-zA-Z_.]+(?: *, *[0-9a-zA-Z_.]+)*))|"
- r"(?:^\s*cdef +extern +from +['\"]([^'\"]+)['\"])|"
- r"(?:^\s*include +['\"]([^'\"]+)['\"])", re.M)
-dependency_after_from_regex = re.compile(
- r"(?:^\s+\(([0-9a-zA-Z_., ]*)\)[#\n])|"
- r"(?:^\s+([0-9a-zA-Z_., ]*)[#\n])",
- re.M)
+# We need to allow spaces to allow for conditional compilation like
+# IF ...:
+# cimport ...
+dependency_regex = re.compile(r"(?:^\s*from +([0-9a-zA-Z_.]+) +cimport)|"
+ r"(?:^\s*cimport +([0-9a-zA-Z_.]+(?: *, *[0-9a-zA-Z_.]+)*))|"
+ r"(?:^\s*cdef +extern +from +['\"]([^'\"]+)['\"])|"
+ r"(?:^\s*include +['\"]([^'\"]+)['\"])", re.M)
+dependency_after_from_regex = re.compile(
+ r"(?:^\s+\(([0-9a-zA-Z_., ]*)\)[#\n])|"
+ r"(?:^\s+([0-9a-zA-Z_., ]*)[#\n])",
+ re.M)
+
-
def normalize_existing(base_path, rel_paths):
return normalize_existing0(os.path.dirname(base_path), tuple(set(rel_paths)))
-
+
@cached_function
def normalize_existing0(base_dir, rel_paths):
"""
@@ -440,7 +440,7 @@ def normalize_existing0(base_dir, rel_paths):
normalized.append(rel)
return (normalized, needed_base)
-
+
def resolve_depends(depends, include_dirs):
include_dirs = tuple(include_dirs)
resolved = []
@@ -450,7 +450,7 @@ def resolve_depends(depends, include_dirs):
resolved.append(path)
return resolved
-
+
@cached_function
def resolve_depend(depend, include_dirs):
if depend[0] == '<' and depend[-1] == '>':
@@ -461,16 +461,16 @@ def resolve_depend(depend, include_dirs):
return os.path.normpath(path)
return None
-
+
@cached_function
def package(filename):
dir = os.path.dirname(os.path.abspath(str(filename)))
- if dir != filename and is_package_dir(dir):
+ if dir != filename and is_package_dir(dir):
return package(dir) + (os.path.basename(dir),)
else:
return ()
-
+
@cached_function
def fully_qualified_name(filename):
module = os.path.splitext(os.path.basename(filename))[0]
@@ -479,10 +479,10 @@ def fully_qualified_name(filename):
@cached_function
def parse_dependencies(source_filename):
- # Actual parsing is way too slow, so we use regular expressions.
+ # Actual parsing is way too slow, so we use regular expressions.
# The only catch is that we must strip comments and string
# literals ahead of time.
- with Utils.open_source_file(source_filename, error_handling='ignore') as fh:
+ with Utils.open_source_file(source_filename, error_handling='ignore') as fh:
source = fh.read()
distutils_info = DistutilsInfo(source)
source, literals = strip_string_literals(source)
@@ -492,19 +492,19 @@ def parse_dependencies(source_filename):
cimports = []
includes = []
externs = []
- for m in dependency_regex.finditer(source):
- cimport_from, cimport_list, extern, include = m.groups()
+ for m in dependency_regex.finditer(source):
+ cimport_from, cimport_list, extern, include = m.groups()
if cimport_from:
cimports.append(cimport_from)
- m_after_from = dependency_after_from_regex.search(source, pos=m.end())
- if m_after_from:
- multiline, one_line = m_after_from.groups()
- subimports = multiline or one_line
- cimports.extend("{0}.{1}".format(cimport_from, s.strip())
- for s in subimports.split(','))
-
- elif cimport_list:
- cimports.extend(x.strip() for x in cimport_list.split(","))
+ m_after_from = dependency_after_from_regex.search(source, pos=m.end())
+ if m_after_from:
+ multiline, one_line = m_after_from.groups()
+ subimports = multiline or one_line
+ cimports.extend("{0}.{1}".format(cimport_from, s.strip())
+ for s in subimports.split(','))
+
+ elif cimport_list:
+ cimports.extend(x.strip() for x in cimport_list.split(","))
elif extern:
externs.append(literals[extern])
else:
@@ -520,8 +520,8 @@ class DependencyTree(object):
self._transitive_cache = {}
def parse_dependencies(self, source_filename):
- if path_exists(source_filename):
- source_filename = os.path.normpath(source_filename)
+ if path_exists(source_filename):
+ source_filename = os.path.normpath(source_filename)
return parse_dependencies(source_filename)
@cached_method
@@ -599,8 +599,8 @@ class DependencyTree(object):
pxd_list = [filename[:-4] + '.pxd']
else:
pxd_list = []
- # Cimports generates all possible combinations package.module
- # when imported as from package cimport module.
+ # Cimports generates all possible combinations package.module
+ # when imported as from package cimport module.
for module in self.cimports(filename):
if module[:7] == 'cython.' or module == 'cython':
continue
@@ -629,32 +629,32 @@ class DependencyTree(object):
def newest_dependency(self, filename):
return max([self.extract_timestamp(f) for f in self.all_dependencies(filename)])
- def transitive_fingerprint(self, filename, module, compilation_options):
- r"""
- Return a fingerprint of a cython file that is about to be cythonized.
-
- Fingerprints are looked up in future compilations. If the fingerprint
- is found, the cythonization can be skipped. The fingerprint must
- incorporate everything that has an influence on the generated code.
- """
+ def transitive_fingerprint(self, filename, module, compilation_options):
+ r"""
+ Return a fingerprint of a cython file that is about to be cythonized.
+
+ Fingerprints are looked up in future compilations. If the fingerprint
+ is found, the cythonization can be skipped. The fingerprint must
+ incorporate everything that has an influence on the generated code.
+ """
try:
m = hashlib.md5(__version__.encode('UTF-8'))
m.update(file_hash(filename).encode('UTF-8'))
for x in sorted(self.all_dependencies(filename)):
if os.path.splitext(x)[1] not in ('.c', '.cpp', '.h'):
m.update(file_hash(x).encode('UTF-8'))
- # Include the module attributes that change the compilation result
- # in the fingerprint. We do not iterate over module.__dict__ and
- # include almost everything here as users might extend Extension
- # with arbitrary (random) attributes that would lead to cache
- # misses.
- m.update(str((
- module.language,
- getattr(module, 'py_limited_api', False),
- getattr(module, 'np_pythran', False)
- )).encode('UTF-8'))
-
- m.update(compilation_options.get_fingerprint().encode('UTF-8'))
+ # Include the module attributes that change the compilation result
+ # in the fingerprint. We do not iterate over module.__dict__ and
+ # include almost everything here as users might extend Extension
+ # with arbitrary (random) attributes that would lead to cache
+ # misses.
+ m.update(str((
+ module.language,
+ getattr(module, 'py_limited_api', False),
+ getattr(module, 'np_pythran', False)
+ )).encode('UTF-8'))
+
+ m.update(compilation_options.get_fingerprint().encode('UTF-8'))
return m.hexdigest()
except IOError:
return None
@@ -719,9 +719,9 @@ class DependencyTree(object):
finally:
del stack[node]
-
+
_dep_tree = None
-
+
def create_dependency_tree(ctx=None, quiet=False):
global _dep_tree
if _dep_tree is None:
@@ -746,15 +746,15 @@ def default_create_extension(template, kwds):
# This may be useful for advanced users?
-def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=False, language=None,
- exclude_failures=False):
- if language is not None:
- print('Warning: passing language={0!r} to cythonize() is deprecated. '
- 'Instead, put "# distutils: language={0}" in your .pyx or .pxd file(s)'.format(language))
- if exclude is None:
- exclude = []
- if patterns is None:
- return [], {}
+def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=False, language=None,
+ exclude_failures=False):
+ if language is not None:
+ print('Warning: passing language={0!r} to cythonize() is deprecated. '
+ 'Instead, put "# distutils: language={0}" in your .pyx or .pxd file(s)'.format(language))
+ if exclude is None:
+ exclude = []
+ if patterns is None:
+ return [], {}
elif isinstance(patterns, basestring) or not isinstance(patterns, Iterable):
patterns = [patterns]
explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)])
@@ -767,17 +767,17 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
to_exclude.update(map(os.path.abspath, extended_iglob(pattern)))
module_list = []
- module_metadata = {}
-
- # workaround for setuptools
- if 'setuptools' in sys.modules:
- Extension_distutils = sys.modules['setuptools.extension']._Extension
- Extension_setuptools = sys.modules['setuptools'].Extension
- else:
- # dummy class, in case we do not have setuptools
- Extension_distutils = Extension
- class Extension_setuptools(Extension): pass
-
+ module_metadata = {}
+
+ # workaround for setuptools
+ if 'setuptools' in sys.modules:
+ Extension_distutils = sys.modules['setuptools.extension']._Extension
+ Extension_setuptools = sys.modules['setuptools'].Extension
+ else:
+ # dummy class, in case we do not have setuptools
+ Extension_distutils = Extension
+ class Extension_setuptools(Extension): pass
+
# if no create_extension() function is defined, use a simple
# default function.
create_extension = ctx.options.create_extension or default_create_extension
@@ -788,11 +788,11 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
template = Extension(pattern, []) # Fake Extension without sources
name = '*'
base = None
- ext_language = language
- elif isinstance(pattern, (Extension_distutils, Extension_setuptools)):
- cython_sources = [s for s in pattern.sources
- if os.path.splitext(s)[1] in ('.py', '.pyx')]
- if cython_sources:
+ ext_language = language
+ elif isinstance(pattern, (Extension_distutils, Extension_setuptools)):
+ cython_sources = [s for s in pattern.sources
+ if os.path.splitext(s)[1] in ('.py', '.pyx')]
+ if cython_sources:
filepattern = cython_sources[0]
if len(cython_sources) > 1:
print("Warning: Multiple cython sources found for extension '%s': %s\n"
@@ -805,25 +805,25 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
template = pattern
name = template.name
base = DistutilsInfo(exn=template)
- ext_language = None # do not override whatever the Extension says
+ ext_language = None # do not override whatever the Extension says
else:
- msg = str("pattern is not of type str nor subclass of Extension (%s)"
- " but of type %s and class %s" % (repr(Extension),
- type(pattern),
- pattern.__class__))
- raise TypeError(msg)
+ msg = str("pattern is not of type str nor subclass of Extension (%s)"
+ " but of type %s and class %s" % (repr(Extension),
+ type(pattern),
+ pattern.__class__))
+ raise TypeError(msg)
- for file in nonempty(sorted(extended_iglob(filepattern)), "'%s' doesn't match any files" % filepattern):
+ for file in nonempty(sorted(extended_iglob(filepattern)), "'%s' doesn't match any files" % filepattern):
if os.path.abspath(file) in to_exclude:
continue
- module_name = deps.fully_qualified_name(file)
+ module_name = deps.fully_qualified_name(file)
if '*' in name:
if module_name in explicit_modules:
continue
elif name:
module_name = name
- Utils.raise_error_if_module_name_forbidden(module_name)
+ Utils.raise_error_if_module_name_forbidden(module_name)
if module_name not in seen:
try:
@@ -848,9 +848,9 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
sources.append(source)
kwds['sources'] = sources
- if ext_language and 'language' not in kwds:
- kwds['language'] = ext_language
-
+ if ext_language and 'language' not in kwds:
+ kwds['language'] = ext_language
+
np_pythran = kwds.pop('np_pythran', False)
# Create the new extension
@@ -864,7 +864,7 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
# generated C file but otherwise has no purpose)
module_metadata[module_name] = metadata
- if file not in m.sources:
+ if file not in m.sources:
# Old setuptools unconditionally replaces .pyx with .c/.cpp
target_file = os.path.splitext(file)[0] + ('.cpp' if m.language == 'c++' else '.c')
try:
@@ -872,93 +872,93 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=
except ValueError:
# never seen this in the wild, but probably better to warn about this unexpected case
print("Warning: Cython source file not found in sources list, adding %s" % file)
- m.sources.insert(0, file)
+ m.sources.insert(0, file)
seen.add(name)
- return module_list, module_metadata
+ return module_list, module_metadata
# This is the user-exposed entry point.
-def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, force=False, language=None,
+def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, force=False, language=None,
exclude_failures=False, **options):
"""
Compile a set of source modules into C/C++ files and return a list of distutils
Extension objects for them.
- :param module_list: As module list, pass either a glob pattern, a list of glob
- patterns or a list of Extension objects. The latter
- allows you to configure the extensions separately
- through the normal distutils options.
- You can also pass Extension objects that have
- glob patterns as their sources. Then, cythonize
- will resolve the pattern and create a
- copy of the Extension for every matching file.
-
- :param exclude: When passing glob patterns as ``module_list``, you can exclude certain
- module names explicitly by passing them into the ``exclude`` option.
-
- :param nthreads: The number of concurrent builds for parallel compilation
- (requires the ``multiprocessing`` module).
-
- :param aliases: If you want to use compiler directives like ``# distutils: ...`` but
- can only know at compile time (when running the ``setup.py``) which values
- to use, you can use aliases and pass a dictionary mapping those aliases
- to Python strings when calling :func:`cythonize`. As an example, say you
- want to use the compiler
- directive ``# distutils: include_dirs = ../static_libs/include/``
- but this path isn't always fixed and you want to find it when running
- the ``setup.py``. You can then do ``# distutils: include_dirs = MY_HEADERS``,
- find the value of ``MY_HEADERS`` in the ``setup.py``, put it in a python
- variable called ``foo`` as a string, and then call
- ``cythonize(..., aliases={'MY_HEADERS': foo})``.
+ :param module_list: As module list, pass either a glob pattern, a list of glob
+ patterns or a list of Extension objects. The latter
+ allows you to configure the extensions separately
+ through the normal distutils options.
+ You can also pass Extension objects that have
+ glob patterns as their sources. Then, cythonize
+ will resolve the pattern and create a
+ copy of the Extension for every matching file.
+
+ :param exclude: When passing glob patterns as ``module_list``, you can exclude certain
+ module names explicitly by passing them into the ``exclude`` option.
+
+ :param nthreads: The number of concurrent builds for parallel compilation
+ (requires the ``multiprocessing`` module).
+
+ :param aliases: If you want to use compiler directives like ``# distutils: ...`` but
+ can only know at compile time (when running the ``setup.py``) which values
+ to use, you can use aliases and pass a dictionary mapping those aliases
+ to Python strings when calling :func:`cythonize`. As an example, say you
+ want to use the compiler
+ directive ``# distutils: include_dirs = ../static_libs/include/``
+ but this path isn't always fixed and you want to find it when running
+ the ``setup.py``. You can then do ``# distutils: include_dirs = MY_HEADERS``,
+ find the value of ``MY_HEADERS`` in the ``setup.py``, put it in a python
+ variable called ``foo`` as a string, and then call
+ ``cythonize(..., aliases={'MY_HEADERS': foo})``.
:param quiet: If True, Cython won't print error, warning, or status messages during the
compilation.
- :param force: Forces the recompilation of the Cython modules, even if the timestamps
- don't indicate that a recompilation is necessary.
-
- :param language: To globally enable C++ mode, you can pass ``language='c++'``. Otherwise, this
- will be determined at a per-file level based on compiler directives. This
- affects only modules found based on file names. Extension instances passed
- into :func:`cythonize` will not be changed. It is recommended to rather
- use the compiler directive ``# distutils: language = c++`` than this option.
-
- :param exclude_failures: For a broad 'try to compile' mode that ignores compilation
- failures and simply excludes the failed extensions,
- pass ``exclude_failures=True``. Note that this only
- really makes sense for compiling ``.py`` files which can also
- be used without compilation.
-
- :param annotate: If ``True``, will produce a HTML file for each of the ``.pyx`` or ``.py``
- files compiled. The HTML file gives an indication
- of how much Python interaction there is in
- each of the source code lines, compared to plain C code.
- It also allows you to see the C/C++ code
- generated for each line of Cython code. This report is invaluable when
- optimizing a function for speed,
- and for determining when to :ref:`release the GIL <nogil>`:
- in general, a ``nogil`` block may contain only "white" code.
- See examples in :ref:`determining_where_to_add_types` or
- :ref:`primes`.
-
- :param compiler_directives: Allow to set compiler directives in the ``setup.py`` like this:
- ``compiler_directives={'embedsignature': True}``.
- See :ref:`compiler-directives`.
+ :param force: Forces the recompilation of the Cython modules, even if the timestamps
+ don't indicate that a recompilation is necessary.
+
+ :param language: To globally enable C++ mode, you can pass ``language='c++'``. Otherwise, this
+ will be determined at a per-file level based on compiler directives. This
+ affects only modules found based on file names. Extension instances passed
+ into :func:`cythonize` will not be changed. It is recommended to rather
+ use the compiler directive ``# distutils: language = c++`` than this option.
+
+ :param exclude_failures: For a broad 'try to compile' mode that ignores compilation
+ failures and simply excludes the failed extensions,
+ pass ``exclude_failures=True``. Note that this only
+ really makes sense for compiling ``.py`` files which can also
+ be used without compilation.
+
+ :param annotate: If ``True``, will produce a HTML file for each of the ``.pyx`` or ``.py``
+ files compiled. The HTML file gives an indication
+ of how much Python interaction there is in
+ each of the source code lines, compared to plain C code.
+ It also allows you to see the C/C++ code
+ generated for each line of Cython code. This report is invaluable when
+ optimizing a function for speed,
+ and for determining when to :ref:`release the GIL <nogil>`:
+ in general, a ``nogil`` block may contain only "white" code.
+ See examples in :ref:`determining_where_to_add_types` or
+ :ref:`primes`.
+
+ :param compiler_directives: Allow to set compiler directives in the ``setup.py`` like this:
+ ``compiler_directives={'embedsignature': True}``.
+ See :ref:`compiler-directives`.
:param depfile: produce depfiles for the sources if True.
"""
- if exclude is None:
- exclude = []
+ if exclude is None:
+ exclude = []
if 'include_path' not in options:
options['include_path'] = ['.']
if 'common_utility_include_dir' in options:
- safe_makedirs(options['common_utility_include_dir'])
+ safe_makedirs(options['common_utility_include_dir'])
depfile = options.pop('depfile', None)
- if pythran is None:
- pythran_options = None
- else:
+ if pythran is None:
+ pythran_options = None
+ else:
pythran_options = CompilationOptions(**options)
pythran_options.cplus = True
pythran_options.np_pythran = True
@@ -967,13 +967,13 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
cpp_options = CompilationOptions(**options); cpp_options.cplus = True
ctx = c_options.create_context()
options = c_options
- module_list, module_metadata = create_extension_list(
+ module_list, module_metadata = create_extension_list(
module_list,
exclude=exclude,
ctx=ctx,
quiet=quiet,
exclude_failures=exclude_failures,
- language=language,
+ language=language,
aliases=aliases)
deps = create_dependency_tree(ctx, quiet=quiet)
build_dir = getattr(options, 'build_dir', None)
@@ -1021,11 +1021,11 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
# setup for out of place build directory if enabled
if build_dir:
- if os.path.isabs(c_file):
- warnings.warn("build_dir has no effect for absolute source paths")
+ if os.path.isabs(c_file):
+ warnings.warn("build_dir has no effect for absolute source paths")
c_file = os.path.join(build_dir, c_file)
dir = os.path.dirname(c_file)
- safe_makedirs_once(dir)
+ safe_makedirs_once(dir)
# write out the depfile, if requested
if depfile:
@@ -1066,8 +1066,8 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
print("Compiling %s because it changed." % source)
else:
print("Compiling %s because it depends on %s." % (source, dep))
- if not force and options.cache:
- fingerprint = deps.transitive_fingerprint(source, m, options)
+ if not force and options.cache:
+ fingerprint = deps.transitive_fingerprint(source, m, options)
else:
fingerprint = None
to_compile.append((
@@ -1082,19 +1082,19 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
copy_to_build_dir(source)
m.sources = new_sources
- if options.cache:
+ if options.cache:
if not os.path.exists(options.cache):
os.makedirs(options.cache)
to_compile.sort()
- # Drop "priority" component of "to_compile" entries and add a
- # simple progress indicator.
- N = len(to_compile)
- progress_fmt = "[{0:%d}/{1}] " % len(str(N))
- for i in range(N):
- progress = progress_fmt.format(i+1, N)
- to_compile[i] = to_compile[i][1:] + (progress,)
-
- if N <= 1:
+ # Drop "priority" component of "to_compile" entries and add a
+ # simple progress indicator.
+ N = len(to_compile)
+ progress_fmt = "[{0:%d}/{1}] " % len(str(N))
+ for i in range(N):
+ progress = progress_fmt.format(i+1, N)
+ to_compile[i] = to_compile[i][1:] + (progress,)
+
+ if N <= 1:
nthreads = 0
if nthreads:
# Requires multiprocessing (or Python >= 2.6)
@@ -1124,11 +1124,11 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
pool.join()
if not nthreads:
for args in to_compile:
- cythonize_one(*args)
+ cythonize_one(*args)
if exclude_failures:
failed_modules = set()
- for c_file, modules in modules_by_cfile.items():
+ for c_file, modules in modules_by_cfile.items():
if not os.path.exists(c_file):
failed_modules.update(modules)
elif os.path.getsize(c_file) < 200:
@@ -1145,7 +1145,7 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
print("Failed compilations: %s" % ', '.join(sorted([
module.name for module in failed_modules])))
- if options.cache:
+ if options.cache:
cleanup_cache(options.cache, getattr(options, 'cache_size', 1024 * 1024 * 100))
# cythonize() is often followed by the (non-Python-buffered)
# compiler output, flush now to avoid interleaving output.
@@ -1185,10 +1185,10 @@ if os.environ.get('XML_RESULTS'):
output.close()
return with_record
else:
- def record_results(func):
- return func
+ def record_results(func):
+ return func
+
-
# TODO: Share context? Issue: pyx processing leaks into pxd module
@record_results
def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
@@ -1202,38 +1202,38 @@ def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
safe_makedirs(options.cache)
# Cython-generated c files are highly compressible.
# (E.g. a compression ratio of about 10 for Sage).
- fingerprint_file_base = join_path(
- options.cache, "%s-%s" % (os.path.basename(c_file), fingerprint))
- gz_fingerprint_file = fingerprint_file_base + gzip_ext
- zip_fingerprint_file = fingerprint_file_base + '.zip'
- if os.path.exists(gz_fingerprint_file) or os.path.exists(zip_fingerprint_file):
+ fingerprint_file_base = join_path(
+ options.cache, "%s-%s" % (os.path.basename(c_file), fingerprint))
+ gz_fingerprint_file = fingerprint_file_base + gzip_ext
+ zip_fingerprint_file = fingerprint_file_base + '.zip'
+ if os.path.exists(gz_fingerprint_file) or os.path.exists(zip_fingerprint_file):
if not quiet:
- print("%sFound compiled %s in cache" % (progress, pyx_file))
- if os.path.exists(gz_fingerprint_file):
- os.utime(gz_fingerprint_file, None)
- with contextlib.closing(gzip_open(gz_fingerprint_file, 'rb')) as g:
- with contextlib.closing(open(c_file, 'wb')) as f:
- shutil.copyfileobj(g, f)
- else:
- os.utime(zip_fingerprint_file, None)
- dirname = os.path.dirname(c_file)
- with contextlib.closing(zipfile.ZipFile(zip_fingerprint_file)) as z:
- for artifact in z.namelist():
- z.extract(artifact, os.path.join(dirname, artifact))
+ print("%sFound compiled %s in cache" % (progress, pyx_file))
+ if os.path.exists(gz_fingerprint_file):
+ os.utime(gz_fingerprint_file, None)
+ with contextlib.closing(gzip_open(gz_fingerprint_file, 'rb')) as g:
+ with contextlib.closing(open(c_file, 'wb')) as f:
+ shutil.copyfileobj(g, f)
+ else:
+ os.utime(zip_fingerprint_file, None)
+ dirname = os.path.dirname(c_file)
+ with contextlib.closing(zipfile.ZipFile(zip_fingerprint_file)) as z:
+ for artifact in z.namelist():
+ z.extract(artifact, os.path.join(dirname, artifact))
return
if not quiet:
- print("%sCythonizing %s" % (progress, pyx_file))
+ print("%sCythonizing %s" % (progress, pyx_file))
if options is None:
options = CompilationOptions(default_options)
options.output_file = c_file
- options.embedded_metadata = embedded_metadata
+ options.embedded_metadata = embedded_metadata
any_failures = 0
try:
result = compile_single(pyx_file, options, full_module_name=full_module_name)
if result.num_errors > 0:
any_failures = 1
- except (EnvironmentError, PyrexError) as e:
+ except (EnvironmentError, PyrexError) as e:
sys.stderr.write('%s\n' % e)
any_failures = 1
# XXX
@@ -1251,27 +1251,27 @@ def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
elif os.path.exists(c_file):
os.remove(c_file)
elif fingerprint:
- artifacts = list(filter(None, [
- getattr(result, attr, None)
- for attr in ('c_file', 'h_file', 'api_file', 'i_file')]))
- if len(artifacts) == 1:
- fingerprint_file = gz_fingerprint_file
- with contextlib.closing(open(c_file, 'rb')) as f:
- with contextlib.closing(gzip_open(fingerprint_file + '.tmp', 'wb')) as g:
- shutil.copyfileobj(f, g)
- else:
- fingerprint_file = zip_fingerprint_file
- with contextlib.closing(zipfile.ZipFile(
- fingerprint_file + '.tmp', 'w', zipfile_compression_mode)) as zip:
- for artifact in artifacts:
- zip.write(artifact, os.path.basename(artifact))
- os.rename(fingerprint_file + '.tmp', fingerprint_file)
+ artifacts = list(filter(None, [
+ getattr(result, attr, None)
+ for attr in ('c_file', 'h_file', 'api_file', 'i_file')]))
+ if len(artifacts) == 1:
+ fingerprint_file = gz_fingerprint_file
+ with contextlib.closing(open(c_file, 'rb')) as f:
+ with contextlib.closing(gzip_open(fingerprint_file + '.tmp', 'wb')) as g:
+ shutil.copyfileobj(f, g)
+ else:
+ fingerprint_file = zip_fingerprint_file
+ with contextlib.closing(zipfile.ZipFile(
+ fingerprint_file + '.tmp', 'w', zipfile_compression_mode)) as zip:
+ for artifact in artifacts:
+ zip.write(artifact, os.path.basename(artifact))
+ os.rename(fingerprint_file + '.tmp', fingerprint_file)
def cythonize_one_helper(m):
import traceback
try:
- return cythonize_one(*m)
+ return cythonize_one(*m)
except Exception:
traceback.print_exc()
raise
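
For reference, a minimal setup.py sketch using the cythonize() options documented in the docstring above; the package name mypkg and the MY_HEADERS path are placeholders drawn from that docstring:

    # Sketch only; 'mypkg' and the include path are hypothetical.
    from setuptools import setup
    from Cython.Build import cythonize

    setup(
        name="mypkg",
        ext_modules=cythonize(
            "mypkg/*.pyx",                      # module_list as a glob pattern
            exclude=["mypkg/broken.pyx"],       # skip specific matches
            nthreads=4,                         # parallel cythonization
            compiler_directives={"embedsignature": True, "language_level": 3},
            aliases={"MY_HEADERS": "../static_libs/include/"},
            annotate=True,                      # write an HTML annotation per source
        ),
    )
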
diff --git a/contrib/tools/cython/Cython/Build/Distutils.py b/contrib/tools/cython/Cython/Build/Distutils.py
index 4008d21f52..3efcc0d7b5 100644
--- a/contrib/tools/cython/Cython/Build/Distutils.py
+++ b/contrib/tools/cython/Cython/Build/Distutils.py
@@ -1 +1 @@
-from Cython.Distutils.build_ext import build_ext
+from Cython.Distutils.build_ext import build_ext
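
For reference, a sketch of how this re-export is typically consumed in a setup.py; the module name mymod is a placeholder:

    # Sketch only; 'mymod' is a hypothetical extension module.
    from setuptools import setup, Extension
    from Cython.Build.Distutils import build_ext  # the re-export shown above

    setup(
        ext_modules=[Extension("mymod", ["mymod.pyx"])],
        cmdclass={"build_ext": build_ext},  # Cython's build_ext compiles the .pyx source
    )
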
diff --git a/contrib/tools/cython/Cython/Build/Inline.py b/contrib/tools/cython/Cython/Build/Inline.py
index cd48e49a70..db6d2640a5 100644
--- a/contrib/tools/cython/Cython/Build/Inline.py
+++ b/contrib/tools/cython/Cython/Build/Inline.py
@@ -15,19 +15,19 @@ from ..Compiler.Main import Context, default_options
from ..Compiler.Visitor import CythonTransform, EnvTransform
from ..Compiler.ParseTreeTransforms import SkipDeclarations
from ..Compiler.TreeFragment import parse_from_strings
-from ..Compiler.StringEncoding import _unicode
+from ..Compiler.StringEncoding import _unicode
from .Dependencies import strip_string_literals, cythonize, cached_function
from ..Compiler import Pipeline
from ..Utils import get_cython_cache_dir
import cython as cython_module
-
+
IS_PY3 = sys.version_info >= (3,)
# A utility function to convert user-supplied ASCII strings to unicode.
if not IS_PY3:
def to_unicode(s):
- if isinstance(s, bytes):
+ if isinstance(s, bytes):
return s.decode('ascii')
else:
return s
@@ -59,7 +59,7 @@ class UnboundSymbols(EnvTransform, SkipDeclarations):
super(UnboundSymbols, self).__call__(node)
return self.unbound
-
+
@cached_function
def unbound_symbols(code, context=None):
code = to_unicode(code)
@@ -77,9 +77,9 @@ def unbound_symbols(code, context=None):
import builtins
except ImportError:
import __builtin__ as builtins
- return tuple(UnboundSymbols()(tree) - set(dir(builtins)))
+ return tuple(UnboundSymbols()(tree) - set(dir(builtins)))
+
-
def unsafe_type(arg, context=None):
py_type = type(arg)
if py_type is int:
@@ -87,10 +87,10 @@ def unsafe_type(arg, context=None):
else:
return safe_type(arg, context)
-
+
def safe_type(arg, context=None):
py_type = type(arg)
- if py_type in (list, tuple, dict, str):
+ if py_type in (list, tuple, dict, str):
return py_type.__name__
elif py_type is complex:
return 'double complex'
@@ -101,7 +101,7 @@ def safe_type(arg, context=None):
elif 'numpy' in sys.modules and isinstance(arg, sys.modules['numpy'].ndarray):
return 'numpy.ndarray[numpy.%s_t, ndim=%s]' % (arg.dtype.name, arg.ndim)
else:
- for base_type in py_type.__mro__:
+ for base_type in py_type.__mro__:
if base_type.__module__ in ('__builtin__', 'builtins'):
return 'object'
module = context.find_module(base_type.__module__, need_pxd=False)
@@ -111,7 +111,7 @@ def safe_type(arg, context=None):
return '%s.%s' % (base_type.__module__, base_type.__name__)
return 'object'
-
+
def _get_build_extension():
dist = Distribution()
# Ensure the build respects distutils configuration by parsing
@@ -122,66 +122,66 @@ def _get_build_extension():
build_extension.finalize_options()
return build_extension
-
+
@cached_function
def _create_context(cython_include_dirs):
return Context(list(cython_include_dirs), default_options)
-
-_cython_inline_cache = {}
-_cython_inline_default_context = _create_context(('.',))
-
-
-def _populate_unbound(kwds, unbound_symbols, locals=None, globals=None):
- for symbol in unbound_symbols:
- if symbol not in kwds:
- if locals is None or globals is None:
- calling_frame = inspect.currentframe().f_back.f_back.f_back
- if locals is None:
- locals = calling_frame.f_locals
- if globals is None:
- globals = calling_frame.f_globals
- if symbol in locals:
- kwds[symbol] = locals[symbol]
- elif symbol in globals:
- kwds[symbol] = globals[symbol]
- else:
- print("Couldn't find %r" % symbol)
-
+
+_cython_inline_cache = {}
+_cython_inline_default_context = _create_context(('.',))
+
+
+def _populate_unbound(kwds, unbound_symbols, locals=None, globals=None):
+ for symbol in unbound_symbols:
+ if symbol not in kwds:
+ if locals is None or globals is None:
+ calling_frame = inspect.currentframe().f_back.f_back.f_back
+ if locals is None:
+ locals = calling_frame.f_locals
+ if globals is None:
+ globals = calling_frame.f_globals
+ if symbol in locals:
+ kwds[symbol] = locals[symbol]
+ elif symbol in globals:
+ kwds[symbol] = globals[symbol]
+ else:
+ print("Couldn't find %r" % symbol)
+
def _inline_key(orig_code, arg_sigs, language_level):
key = orig_code, arg_sigs, sys.version_info, sys.executable, language_level, Cython.__version__
return hashlib.sha1(_unicode(key).encode('utf-8')).hexdigest()
-def cython_inline(code, get_type=unsafe_type,
- lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
- cython_include_dirs=None, cython_compiler_directives=None,
- force=False, quiet=False, locals=None, globals=None, language_level=None, **kwds):
-
+def cython_inline(code, get_type=unsafe_type,
+ lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
+ cython_include_dirs=None, cython_compiler_directives=None,
+ force=False, quiet=False, locals=None, globals=None, language_level=None, **kwds):
+
if get_type is None:
get_type = lambda x: 'object'
- ctx = _create_context(tuple(cython_include_dirs)) if cython_include_dirs else _cython_inline_default_context
-
+ ctx = _create_context(tuple(cython_include_dirs)) if cython_include_dirs else _cython_inline_default_context
+
cython_compiler_directives = dict(cython_compiler_directives) if cython_compiler_directives else {}
if language_level is None and 'language_level' not in cython_compiler_directives:
language_level = '3str'
if language_level is not None:
cython_compiler_directives['language_level'] = language_level
- # Fast path if this has been called in this session.
- _unbound_symbols = _cython_inline_cache.get(code)
- if _unbound_symbols is not None:
- _populate_unbound(kwds, _unbound_symbols, locals, globals)
- args = sorted(kwds.items())
- arg_sigs = tuple([(get_type(value, ctx), arg) for arg, value in args])
+ # Fast path if this has been called in this session.
+ _unbound_symbols = _cython_inline_cache.get(code)
+ if _unbound_symbols is not None:
+ _populate_unbound(kwds, _unbound_symbols, locals, globals)
+ args = sorted(kwds.items())
+ arg_sigs = tuple([(get_type(value, ctx), arg) for arg, value in args])
key_hash = _inline_key(code, arg_sigs, language_level)
invoke = _cython_inline_cache.get((code, arg_sigs, key_hash))
- if invoke is not None:
- arg_list = [arg[1] for arg in args]
- return invoke(*arg_list)
-
- orig_code = code
+ if invoke is not None:
+ arg_list = [arg[1] for arg in args]
+ return invoke(*arg_list)
+
+ orig_code = code
code = to_unicode(code)
code, literals = strip_string_literals(code)
code = strip_common_indent(code)
@@ -190,19 +190,19 @@ def cython_inline(code, get_type=unsafe_type,
if globals is None:
globals = inspect.currentframe().f_back.f_back.f_globals
try:
- _cython_inline_cache[orig_code] = _unbound_symbols = unbound_symbols(code)
- _populate_unbound(kwds, _unbound_symbols, locals, globals)
+ _cython_inline_cache[orig_code] = _unbound_symbols = unbound_symbols(code)
+ _populate_unbound(kwds, _unbound_symbols, locals, globals)
except AssertionError:
if not quiet:
# Parsing from strings not fully supported (e.g. cimports).
print("Could not parse code as a string (to extract unbound symbols).")
-
+
cimports = []
- for name, arg in list(kwds.items()):
+ for name, arg in list(kwds.items()):
if arg is cython_module:
cimports.append('\ncimport cython as %s' % name)
del kwds[name]
- arg_names = sorted(kwds)
+ arg_names = sorted(kwds)
arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names])
key_hash = _inline_key(orig_code, arg_sigs, language_level)
module_name = "_cython_inline_" + key_hash
@@ -261,11 +261,11 @@ def __invoke(%(params)s):
extra_compile_args = cflags)
if build_extension is None:
build_extension = _get_build_extension()
- build_extension.extensions = cythonize(
- [extension],
- include_path=cython_include_dirs or ['.'],
- compiler_directives=cython_compiler_directives,
- quiet=quiet)
+ build_extension.extensions = cythonize(
+ [extension],
+ include_path=cython_include_dirs or ['.'],
+ compiler_directives=cython_compiler_directives,
+ quiet=quiet)
build_extension.build_temp = os.path.dirname(pyx_file)
build_extension.build_lib = lib_dir
build_extension.run()
@@ -281,29 +281,29 @@ def __invoke(%(params)s):
# overridden with actual value upon the first cython_inline invocation
cython_inline.so_ext = None
-_find_non_space = re.compile('[^ ]').search
-
-
+_find_non_space = re.compile('[^ ]').search
+
+
def strip_common_indent(code):
min_indent = None
- lines = code.splitlines()
+ lines = code.splitlines()
for line in lines:
- match = _find_non_space(line)
+ match = _find_non_space(line)
if not match:
- continue # blank
+ continue # blank
indent = match.start()
if line[indent] == '#':
- continue # comment
- if min_indent is None or min_indent > indent:
+ continue # comment
+ if min_indent is None or min_indent > indent:
min_indent = indent
for ix, line in enumerate(lines):
- match = _find_non_space(line)
- if not match or not line or line[indent:indent+1] == '#':
+ match = _find_non_space(line)
+ if not match or not line or line[indent:indent+1] == '#':
continue
- lines[ix] = line[min_indent:]
+ lines[ix] = line[min_indent:]
return '\n'.join(lines)
-
+
module_statement = re.compile(r'^((cdef +(extern|class))|cimport|(from .+ cimport)|(from .+ import +[*]))')
def extract_func_code(code):
module = []
@@ -331,7 +331,7 @@ except ImportError:
all[varargs] = arg_values[len(args):]
for name, value in zip(args, arg_values):
all[name] = value
- for name, value in list(kwd_values.items()):
+ for name, value in list(kwd_values.items()):
if name in args:
if name in all:
raise TypeError("Duplicate argument %s" % name)
@@ -339,7 +339,7 @@ except ImportError:
if kwds is not None:
all[kwds] = kwd_values
elif kwd_values:
- raise TypeError("Unexpected keyword arguments: %s" % list(kwd_values))
+ raise TypeError("Unexpected keyword arguments: %s" % list(kwd_values))
if defaults is None:
defaults = ()
first_default = len(args) - len(defaults)
@@ -351,7 +351,7 @@ except ImportError:
raise TypeError("Missing argument: %s" % name)
return all
-
+
def get_body(source):
ix = source.index(':')
if source[:5] == 'lambda':
@@ -359,7 +359,7 @@ def get_body(source):
else:
return source[ix+1:]
-
+
# Lots to be done here... It would be especially cool if compiled functions
# could invoke each other quickly.
class RuntimeCompiledFunction(object):
@@ -370,7 +370,7 @@ class RuntimeCompiledFunction(object):
def __call__(self, *args, **kwds):
all = getcallargs(self._f, *args, **kwds)
- if IS_PY3:
- return cython_inline(self._body, locals=self._f.__globals__, globals=self._f.__globals__, **all)
- else:
- return cython_inline(self._body, locals=self._f.func_globals, globals=self._f.func_globals, **all)
+ if IS_PY3:
+ return cython_inline(self._body, locals=self._f.__globals__, globals=self._f.__globals__, **all)
+ else:
+ return cython_inline(self._body, locals=self._f.func_globals, globals=self._f.func_globals, **all)
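
For reference, a minimal sketch of cython_inline() as defined above; it assumes a working C compiler and caches builds under get_cython_cache_dir()/inline:

    # Sketch only.
    from Cython.Build.Inline import cython_inline

    a, b = 10, 20
    # Unbound names in the snippet ('a', 'b') are resolved from the calling
    # frame's locals/globals, or can be passed explicitly as keywords.
    print(cython_inline("return a + b"))            # -> 30
    print(cython_inline("return a * b", a=3, b=4))  # -> 12
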
diff --git a/contrib/tools/cython/Cython/Build/IpythonMagic.py b/contrib/tools/cython/Cython/Build/IpythonMagic.py
index 80bba08bfc..7abb97ec70 100644
--- a/contrib/tools/cython/Cython/Build/IpythonMagic.py
+++ b/contrib/tools/cython/Cython/Build/IpythonMagic.py
@@ -14,7 +14,7 @@ Magic command interface for interactive work with Cython
Usage
=====
-To enable the magics below, execute ``%load_ext cython``.
+To enable the magics below, execute ``%load_ext cython``.
``%%cython``
@@ -41,7 +41,7 @@ Parts of this code were taken from Cython.inline.
#
# Distributed under the terms of the Modified BSD License.
#
-# The full license is in the file ipython-COPYING.rst, distributed with this software.
+# The full license is in the file ipython-COPYING.rst, distributed with this software.
#-----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
@@ -75,11 +75,11 @@ from distutils.command.build_ext import build_ext
from IPython.core import display
from IPython.core import magic_arguments
from IPython.core.magic import Magics, magics_class, cell_magic
-try:
- from IPython.paths import get_ipython_cache_dir
-except ImportError:
- # older IPython version
- from IPython.utils.path import get_ipython_cache_dir
+try:
+ from IPython.paths import get_ipython_cache_dir
+except ImportError:
+ # older IPython version
+ from IPython.utils.path import get_ipython_cache_dir
from IPython.utils.text import dedent
from ..Shadow import __version__ as cython_version
@@ -175,15 +175,15 @@ class CythonMagics(Magics):
f.write(cell)
if 'pyximport' not in sys.modules or not self._pyximport_installed:
import pyximport
- pyximport.install()
+ pyximport.install()
self._pyximport_installed = True
if module_name in self._reloads:
module = self._reloads[module_name]
- # Note: reloading extension modules is not actually supported
- # (requires PEP-489 reinitialisation support).
- # Don't know why this should ever have worked as it reads here.
- # All we really need to do is to update the globals below.
- #reload(module)
+ # Note: reloading extension modules is not actually supported
+ # (requires PEP-489 reinitialisation support).
+ # Don't know why this should ever have worked as it reads here.
+ # All we really need to do is to update the globals below.
+ #reload(module)
else:
__import__(module_name)
module = sys.modules[module_name]
@@ -200,14 +200,14 @@ class CythonMagics(Magics):
help="Output a C++ rather than C file."
)
@magic_arguments.argument(
- '-3', dest='language_level', action='store_const', const=3, default=None,
- help="Select Python 3 syntax."
- )
- @magic_arguments.argument(
- '-2', dest='language_level', action='store_const', const=2, default=None,
- help="Select Python 2 syntax."
- )
- @magic_arguments.argument(
+ '-3', dest='language_level', action='store_const', const=3, default=None,
+ help="Select Python 3 syntax."
+ )
+ @magic_arguments.argument(
+ '-2', dest='language_level', action='store_const', const=2, default=None,
+ help="Select Python 2 syntax."
+ )
+ @magic_arguments.argument(
'-f', '--force', action='store_true', default=False,
help="Force the compilation of a new module, even if the source has been "
"previously compiled."
@@ -233,7 +233,7 @@ class CythonMagics(Magics):
)
@magic_arguments.argument(
'-L', dest='library_dirs', metavar='dir', action='append', default=[],
- help="Add a path to the list of library directories (can be specified "
+ help="Add a path to the list of library directories (can be specified "
"multiple times)."
)
@magic_arguments.argument(
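The restored argument declarations cover, among others, -3/-2 for the language level, -f/--force to rebuild even when the source is unchanged, and -L to extend the library search path. A cell header combining them might look like this (the path is a placeholder, and -L only matters when the cell links external libraries):

# %%cython -3 -f -L /opt/mylibs
# def f(int x):
#     return 2 * x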
diff --git a/contrib/tools/cython/Cython/Build/Tests/TestCyCache.py b/contrib/tools/cython/Cython/Build/Tests/TestCyCache.py
index a0ed3f389a..a3224b4175 100644
--- a/contrib/tools/cython/Cython/Build/Tests/TestCyCache.py
+++ b/contrib/tools/cython/Cython/Build/Tests/TestCyCache.py
@@ -1,106 +1,106 @@
-import difflib
-import glob
-import gzip
-import os
-import tempfile
-
-import Cython.Build.Dependencies
-import Cython.Utils
-from Cython.TestUtils import CythonTest
-
-
-class TestCyCache(CythonTest):
-
- def setUp(self):
- CythonTest.setUp(self)
- self.temp_dir = tempfile.mkdtemp(
- prefix='cycache-test',
- dir='TEST_TMP' if os.path.isdir('TEST_TMP') else None)
- self.src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
- self.cache_dir = tempfile.mkdtemp(prefix='cache', dir=self.temp_dir)
-
- def cache_files(self, file_glob):
- return glob.glob(os.path.join(self.cache_dir, file_glob))
-
- def fresh_cythonize(self, *args, **kwargs):
- Cython.Utils.clear_function_caches()
- Cython.Build.Dependencies._dep_tree = None # discard method caches
- Cython.Build.Dependencies.cythonize(*args, **kwargs)
-
- def test_cycache_switch(self):
- content1 = 'value = 1\n'
- content2 = 'value = 2\n'
- a_pyx = os.path.join(self.src_dir, 'a.pyx')
- a_c = a_pyx[:-4] + '.c'
-
- open(a_pyx, 'w').write(content1)
- self.fresh_cythonize(a_pyx, cache=self.cache_dir)
- self.fresh_cythonize(a_pyx, cache=self.cache_dir)
- self.assertEqual(1, len(self.cache_files('a.c*')))
- a_contents1 = open(a_c).read()
- os.unlink(a_c)
-
- open(a_pyx, 'w').write(content2)
- self.fresh_cythonize(a_pyx, cache=self.cache_dir)
- a_contents2 = open(a_c).read()
- os.unlink(a_c)
-
- self.assertNotEqual(a_contents1, a_contents2, 'C file not changed!')
- self.assertEqual(2, len(self.cache_files('a.c*')))
-
- open(a_pyx, 'w').write(content1)
- self.fresh_cythonize(a_pyx, cache=self.cache_dir)
- self.assertEqual(2, len(self.cache_files('a.c*')))
- a_contents = open(a_c).read()
- self.assertEqual(
- a_contents, a_contents1,
- msg='\n'.join(list(difflib.unified_diff(
- a_contents.split('\n'), a_contents1.split('\n')))[:10]))
-
- def test_cycache_uses_cache(self):
- a_pyx = os.path.join(self.src_dir, 'a.pyx')
- a_c = a_pyx[:-4] + '.c'
- open(a_pyx, 'w').write('pass')
- self.fresh_cythonize(a_pyx, cache=self.cache_dir)
- a_cache = os.path.join(self.cache_dir, os.listdir(self.cache_dir)[0])
- gzip.GzipFile(a_cache, 'wb').write('fake stuff'.encode('ascii'))
- os.unlink(a_c)
- self.fresh_cythonize(a_pyx, cache=self.cache_dir)
- a_contents = open(a_c).read()
- self.assertEqual(a_contents, 'fake stuff',
- 'Unexpected contents: %s...' % a_contents[:100])
-
- def test_multi_file_output(self):
- a_pyx = os.path.join(self.src_dir, 'a.pyx')
- a_c = a_pyx[:-4] + '.c'
- a_h = a_pyx[:-4] + '.h'
- a_api_h = a_pyx[:-4] + '_api.h'
- open(a_pyx, 'w').write('cdef public api int foo(int x): return x\n')
- self.fresh_cythonize(a_pyx, cache=self.cache_dir)
- expected = [a_c, a_h, a_api_h]
- for output in expected:
- self.assertTrue(os.path.exists(output), output)
- os.unlink(output)
- self.fresh_cythonize(a_pyx, cache=self.cache_dir)
- for output in expected:
- self.assertTrue(os.path.exists(output), output)
-
- def test_options_invalidation(self):
- hash_pyx = os.path.join(self.src_dir, 'options.pyx')
- hash_c = hash_pyx[:-len('.pyx')] + '.c'
-
- open(hash_pyx, 'w').write('pass')
- self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False)
- self.assertEqual(1, len(self.cache_files('options.c*')))
-
- os.unlink(hash_c)
- self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=True)
- self.assertEqual(2, len(self.cache_files('options.c*')))
-
- os.unlink(hash_c)
- self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=False)
- self.assertEqual(2, len(self.cache_files('options.c*')))
-
- os.unlink(hash_c)
- self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=True)
- self.assertEqual(2, len(self.cache_files('options.c*')))
+import difflib
+import glob
+import gzip
+import os
+import tempfile
+
+import Cython.Build.Dependencies
+import Cython.Utils
+from Cython.TestUtils import CythonTest
+
+
+class TestCyCache(CythonTest):
+
+ def setUp(self):
+ CythonTest.setUp(self)
+ self.temp_dir = tempfile.mkdtemp(
+ prefix='cycache-test',
+ dir='TEST_TMP' if os.path.isdir('TEST_TMP') else None)
+ self.src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
+ self.cache_dir = tempfile.mkdtemp(prefix='cache', dir=self.temp_dir)
+
+ def cache_files(self, file_glob):
+ return glob.glob(os.path.join(self.cache_dir, file_glob))
+
+ def fresh_cythonize(self, *args, **kwargs):
+ Cython.Utils.clear_function_caches()
+ Cython.Build.Dependencies._dep_tree = None # discard method caches
+ Cython.Build.Dependencies.cythonize(*args, **kwargs)
+
+ def test_cycache_switch(self):
+ content1 = 'value = 1\n'
+ content2 = 'value = 2\n'
+ a_pyx = os.path.join(self.src_dir, 'a.pyx')
+ a_c = a_pyx[:-4] + '.c'
+
+ open(a_pyx, 'w').write(content1)
+ self.fresh_cythonize(a_pyx, cache=self.cache_dir)
+ self.fresh_cythonize(a_pyx, cache=self.cache_dir)
+ self.assertEqual(1, len(self.cache_files('a.c*')))
+ a_contents1 = open(a_c).read()
+ os.unlink(a_c)
+
+ open(a_pyx, 'w').write(content2)
+ self.fresh_cythonize(a_pyx, cache=self.cache_dir)
+ a_contents2 = open(a_c).read()
+ os.unlink(a_c)
+
+ self.assertNotEqual(a_contents1, a_contents2, 'C file not changed!')
+ self.assertEqual(2, len(self.cache_files('a.c*')))
+
+ open(a_pyx, 'w').write(content1)
+ self.fresh_cythonize(a_pyx, cache=self.cache_dir)
+ self.assertEqual(2, len(self.cache_files('a.c*')))
+ a_contents = open(a_c).read()
+ self.assertEqual(
+ a_contents, a_contents1,
+ msg='\n'.join(list(difflib.unified_diff(
+ a_contents.split('\n'), a_contents1.split('\n')))[:10]))
+
+ def test_cycache_uses_cache(self):
+ a_pyx = os.path.join(self.src_dir, 'a.pyx')
+ a_c = a_pyx[:-4] + '.c'
+ open(a_pyx, 'w').write('pass')
+ self.fresh_cythonize(a_pyx, cache=self.cache_dir)
+ a_cache = os.path.join(self.cache_dir, os.listdir(self.cache_dir)[0])
+ gzip.GzipFile(a_cache, 'wb').write('fake stuff'.encode('ascii'))
+ os.unlink(a_c)
+ self.fresh_cythonize(a_pyx, cache=self.cache_dir)
+ a_contents = open(a_c).read()
+ self.assertEqual(a_contents, 'fake stuff',
+ 'Unexpected contents: %s...' % a_contents[:100])
+
+ def test_multi_file_output(self):
+ a_pyx = os.path.join(self.src_dir, 'a.pyx')
+ a_c = a_pyx[:-4] + '.c'
+ a_h = a_pyx[:-4] + '.h'
+ a_api_h = a_pyx[:-4] + '_api.h'
+ open(a_pyx, 'w').write('cdef public api int foo(int x): return x\n')
+ self.fresh_cythonize(a_pyx, cache=self.cache_dir)
+ expected = [a_c, a_h, a_api_h]
+ for output in expected:
+ self.assertTrue(os.path.exists(output), output)
+ os.unlink(output)
+ self.fresh_cythonize(a_pyx, cache=self.cache_dir)
+ for output in expected:
+ self.assertTrue(os.path.exists(output), output)
+
+ def test_options_invalidation(self):
+ hash_pyx = os.path.join(self.src_dir, 'options.pyx')
+ hash_c = hash_pyx[:-len('.pyx')] + '.c'
+
+ open(hash_pyx, 'w').write('pass')
+ self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False)
+ self.assertEqual(1, len(self.cache_files('options.c*')))
+
+ os.unlink(hash_c)
+ self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=True)
+ self.assertEqual(2, len(self.cache_files('options.c*')))
+
+ os.unlink(hash_c)
+ self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=False)
+ self.assertEqual(2, len(self.cache_files('options.c*')))
+
+ os.unlink(hash_c)
+ self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=True)
+ self.assertEqual(2, len(self.cache_files('options.c*')))
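These tests drive the cache= option of cythonize() directly: a second run with an unchanged source and unchanged options reuses the stored C output, while changed sources or options add new cache entries. Outside the test harness the same behaviour is reached with something like the following (the cache directory is a placeholder):

from Cython.Build import cythonize

# First call compiles a.pyx and stores the generated C file in the cache;
# identical re-runs restore it instead of invoking the compiler again.
cythonize("a.pyx", cache="/tmp/cython-cache")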
diff --git a/contrib/tools/cython/Cython/Build/Tests/TestInline.py b/contrib/tools/cython/Cython/Build/Tests/TestInline.py
index 30fab094b5..d209488083 100644
--- a/contrib/tools/cython/Cython/Build/Tests/TestInline.py
+++ b/contrib/tools/cython/Cython/Build/Tests/TestInline.py
@@ -17,8 +17,8 @@ class TestInline(CythonTest):
def setUp(self):
CythonTest.setUp(self)
self.test_kwds = dict(test_kwds)
- if os.path.isdir('TEST_TMP'):
- lib_dir = os.path.join('TEST_TMP','inline')
+ if os.path.isdir('TEST_TMP'):
+ lib_dir = os.path.join('TEST_TMP','inline')
else:
lib_dir = tempfile.mkdtemp(prefix='cython_inline_')
self.test_kwds['lib_dir'] = lib_dir
@@ -45,35 +45,35 @@ class TestInline(CythonTest):
a = 1
cdef double b = 2
cdef c = []
- """, **self.test_kwds), dict(a=1, b=2.0, c=[]))
+ """, **self.test_kwds), dict(a=1, b=2.0, c=[]))
def test_def_node(self):
- foo = inline("def foo(x): return x * x", **self.test_kwds)['foo']
+ foo = inline("def foo(x): return x * x", **self.test_kwds)['foo']
self.assertEqual(foo(7), 49)
- def test_class_ref(self):
- class Type(object):
- pass
- tp = inline("Type")['Type']
- self.assertEqual(tp, Type)
-
+ def test_class_ref(self):
+ class Type(object):
+ pass
+ tp = inline("Type")['Type']
+ self.assertEqual(tp, Type)
+
def test_pure(self):
import cython as cy
b = inline("""
b = cy.declare(float, a)
c = cy.declare(cy.pointer(cy.float), &b)
return b
- """, a=3, **self.test_kwds)
+ """, a=3, **self.test_kwds)
self.assertEqual(type(b), float)
- def test_compiler_directives(self):
- self.assertEqual(
- inline('return sum(x)',
- x=[1, 2, 3],
- cython_compiler_directives={'boundscheck': False}),
- 6
- )
-
+ def test_compiler_directives(self):
+ self.assertEqual(
+ inline('return sum(x)',
+ x=[1, 2, 3],
+ cython_compiler_directives={'boundscheck': False}),
+ 6
+ )
+
def test_lang_version(self):
# GH-3419. Caching for inline code didn't always respect compiler directives.
inline_divcode = "def f(int a, int b): return a/b"
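test_compiler_directives above passes cython_compiler_directives straight through to inline(); for reference, the same call works outside the test suite roughly as follows:

import cython

total = cython.inline(
    "return sum(x)",
    x=[1, 2, 3],
    cython_compiler_directives={'boundscheck': False},
)
assert total == 6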
diff --git a/contrib/tools/cython/Cython/Build/Tests/TestIpythonMagic.py b/contrib/tools/cython/Cython/Build/Tests/TestIpythonMagic.py
index 9d2e1531a5..24213091b2 100644
--- a/contrib/tools/cython/Cython/Build/Tests/TestIpythonMagic.py
+++ b/contrib/tools/cython/Cython/Build/Tests/TestIpythonMagic.py
@@ -29,24 +29,24 @@ except ImportError:
pass
code = u"""\
-def f(x):
+def f(x):
return 2*x
"""
cython3_code = u"""\
-def f(int x):
- return 2 / x
+def f(int x):
+ return 2 / x
-def call(x):
- return f(*(x,))
+def call(x):
+ return f(*(x,))
"""
-
+
pgo_cython3_code = cython3_code + u"""\
def main():
for _ in range(100): call(5)
main()
"""
-
+
if sys.platform == 'win32':
# not using IPython's decorators here because they depend on "nose"
@@ -114,34 +114,34 @@ class TestIPythonMagic(CythonTest):
ip.ex('import mymodule; g = mymodule.f(10)')
self.assertEqual(ip.user_ns['g'], 20.0)
- def test_cython_language_level(self):
- # The Cython cell defines the functions f() and call().
+ def test_cython_language_level(self):
+ # The Cython cell defines the functions f() and call().
ip = self._ip
- ip.run_cell_magic('cython', '', cython3_code)
- ip.ex('g = f(10); h = call(10)')
- if sys.version_info[0] < 3:
- self.assertEqual(ip.user_ns['g'], 2 // 10)
- self.assertEqual(ip.user_ns['h'], 2 // 10)
- else:
- self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
- self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)
-
- def test_cython3(self):
- # The Cython cell defines the functions f() and call().
+ ip.run_cell_magic('cython', '', cython3_code)
+ ip.ex('g = f(10); h = call(10)')
+ if sys.version_info[0] < 3:
+ self.assertEqual(ip.user_ns['g'], 2 // 10)
+ self.assertEqual(ip.user_ns['h'], 2 // 10)
+ else:
+ self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
+ self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)
+
+ def test_cython3(self):
+ # The Cython cell defines the functions f() and call().
ip = self._ip
- ip.run_cell_magic('cython', '-3', cython3_code)
- ip.ex('g = f(10); h = call(10)')
- self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
- self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)
-
- def test_cython2(self):
- # The Cython cell defines the functions f() and call().
+ ip.run_cell_magic('cython', '-3', cython3_code)
+ ip.ex('g = f(10); h = call(10)')
+ self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
+ self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)
+
+ def test_cython2(self):
+ # The Cython cell defines the functions f() and call().
ip = self._ip
- ip.run_cell_magic('cython', '-2', cython3_code)
- ip.ex('g = f(10); h = call(10)')
- self.assertEqual(ip.user_ns['g'], 2 // 10)
- self.assertEqual(ip.user_ns['h'], 2 // 10)
-
+ ip.run_cell_magic('cython', '-2', cython3_code)
+ ip.ex('g = f(10); h = call(10)')
+ self.assertEqual(ip.user_ns['g'], 2 // 10)
+ self.assertEqual(ip.user_ns['h'], 2 // 10)
+
@skip_win32('Skip on Windows')
def test_cython3_pgo(self):
# The Cython cell defines the functions f() and call().
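The three restored tests run the same cell body and only vary the language level: under -3 (or Python 3 semantics) 2 / x is true division and yields 0.2, while under -2 it is floor division and yields 0. Sketched outside the test harness (session is hypothetical):

# In [1]: %%cython -2
#    ...: def f(int x):
#    ...:     return 2 / x
#
# In [2]: f(10)
# Out[2]: 0        # floor division under Python 2 semantics
#
# With %%cython -3 the same cell returns 0.2.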
diff --git a/contrib/tools/cython/Cython/Build/Tests/TestStripLiterals.py b/contrib/tools/cython/Cython/Build/Tests/TestStripLiterals.py
index 494a4e03b1..a7572a5083 100644
--- a/contrib/tools/cython/Cython/Build/Tests/TestStripLiterals.py
+++ b/contrib/tools/cython/Cython/Build/Tests/TestStripLiterals.py
@@ -6,10 +6,10 @@ class TestStripLiterals(CythonTest):
def t(self, before, expected):
actual, literals = strip_string_literals(before, prefix="_L")
- self.assertEqual(expected, actual)
+ self.assertEqual(expected, actual)
for key, value in literals.items():
actual = actual.replace(key, value)
- self.assertEqual(before, actual)
+ self.assertEqual(before, actual)
def test_empty(self):
self.t("", "")
diff --git a/contrib/tools/cython/Cython/Build/__init__.py b/contrib/tools/cython/Cython/Build/__init__.py
index 38bc609706..d6f3986597 100644
--- a/contrib/tools/cython/Cython/Build/__init__.py
+++ b/contrib/tools/cython/Cython/Build/__init__.py
@@ -1,2 +1,2 @@
from .Dependencies import cythonize
-from .Distutils import build_ext
+from .Distutils import build_ext
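The package __init__ re-exports exactly these two names, which is the pair a typical setup script combines (project name and module glob below are placeholders):

from setuptools import setup
from Cython.Build import build_ext, cythonize

setup(
    name="example",
    ext_modules=cythonize("src/*.pyx"),
    cmdclass={"build_ext": build_ext},
)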