author | shadchin <shadchin@yandex-team.ru> | 2022-02-10 16:44:39 +0300
committer | Daniil Cherednik <dcherednik@yandex-team.ru> | 2022-02-10 16:44:39 +0300
commit | e9656aae26e0358d5378e5b63dcac5c8dbe0e4d0 (patch)
tree | 64175d5cadab313b3e7039ebaa06c5bc3295e274 /contrib/tools/cython/Cython
parent | 2598ef1d0aee359b4b6d5fdd1758916d5907d04f (diff)
download | ydb-e9656aae26e0358d5378e5b63dcac5c8dbe0e4d0.tar.gz
Restoring authorship annotation for <shadchin@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'contrib/tools/cython/Cython')
82 files changed, 2095 insertions, 2095 deletions
diff --git a/contrib/tools/cython/Cython/Build/Cythonize.py b/contrib/tools/cython/Cython/Build/Cythonize.py index 13a6637f41..c85b6eabab 100644 --- a/contrib/tools/cython/Cython/Build/Cythonize.py +++ b/contrib/tools/cython/Cython/Build/Cythonize.py @@ -103,7 +103,7 @@ def cython_compile(path_pattern, options): compile_time_env=options.compile_time_env, force=options.force, quiet=options.quiet, - depfile=options.depfile, + depfile=options.depfile, **options.options) if ext_modules and options.build: @@ -195,7 +195,7 @@ def parse_args(args): help='increase Python compatibility by ignoring some compile time errors') parser.add_option('-k', '--keep-going', dest='keep_going', action='store_true', help='compile as much as possible, ignore compilation failures') - parser.add_option('-M', '--depfile', action='store_true', help='produce depfiles for the sources') + parser.add_option('-M', '--depfile', action='store_true', help='produce depfiles for the sources') options, args = parser.parse_args(args) if not args: diff --git a/contrib/tools/cython/Cython/Build/Dependencies.py b/contrib/tools/cython/Cython/Build/Dependencies.py index 0799ea0351..7eb55e2607 100644 --- a/contrib/tools/cython/Cython/Build/Dependencies.py +++ b/contrib/tools/cython/Cython/Build/Dependencies.py @@ -19,11 +19,11 @@ from distutils.util import strtobool import zipfile try: - from collections.abc import Iterable -except ImportError: - from collections import Iterable - -try: + from collections.abc import Iterable +except ImportError: + from collections import Iterable + +try: import gzip gzip_open = gzip.open gzip_ext = '.gz' @@ -322,8 +322,8 @@ def strip_string_literals(code, prefix='__Pyx_L'): in_quote = False hash_mark = single_q = double_q = -1 code_len = len(code) - quote_type = None - quote_len = -1 + quote_type = None + quote_len = -1 while True: if hash_mark < q: @@ -755,7 +755,7 @@ def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet= exclude = [] if patterns is None: return [], {} - elif isinstance(patterns, basestring) or not isinstance(patterns, Iterable): + elif isinstance(patterns, basestring) or not isinstance(patterns, Iterable): patterns = [patterns] explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)]) seen = set() @@ -911,8 +911,8 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, variable called ``foo`` as a string, and then call ``cythonize(..., aliases={'MY_HEADERS': foo})``. - :param quiet: If True, Cython won't print error, warning, or status messages during the - compilation. + :param quiet: If True, Cython won't print error, warning, or status messages during the + compilation. :param force: Forces the recompilation of the Cython modules, even if the timestamps don't indicate that a recompilation is necessary. @@ -944,8 +944,8 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, :param compiler_directives: Allow to set compiler directives in the ``setup.py`` like this: ``compiler_directives={'embedsignature': True}``. See :ref:`compiler-directives`. - - :param depfile: produce depfiles for the sources if True. + + :param depfile: produce depfiles for the sources if True. 
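Editorial aside, not part of the diff: the hunks above touch the depfile support (`-M`/`--depfile` in Cythonize.py, the `depfile` keyword documented in the `cythonize()` docstring). A minimal usage sketch under those assumptions follows; the package and file names are placeholders, and the invocation styles are inferred from the option parser and docstring shown above.

```python
# Illustrative sketch only -- assumes a Cython build that includes the
# --depfile support shown in the hunks above.
#
# Command-line form (Cythonize.py adds -M/--depfile):
#     cythonize -M pkg/module.pyx
# which, per the Dependencies.py hunk, writes a Makefile-style dependency
# file next to the generated C file (e.g. pkg/module.c.dep).
#
# setup.py form via the `depfile` keyword of cythonize():
from setuptools import setup          # any distutils-compatible setup() works
from Cython.Build import cythonize

setup(
    name="pkg",                       # hypothetical package name
    ext_modules=cythonize(
        ["pkg/module.pyx"],           # hypothetical source file
        depfile=True,                 # emit <c_file>.dep alongside each generated C file
    ),
)
```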
""" if exclude is None: exclude = [] @@ -954,8 +954,8 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, if 'common_utility_include_dir' in options: safe_makedirs(options['common_utility_include_dir']) - depfile = options.pop('depfile', None) - + depfile = options.pop('depfile', None) + if pythran is None: pythran_options = None else: @@ -1027,26 +1027,26 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, dir = os.path.dirname(c_file) safe_makedirs_once(dir) - # write out the depfile, if requested - if depfile: - dependencies = deps.all_dependencies(source) - src_base_dir, _ = os.path.split(source) - if not src_base_dir.endswith(os.sep): - src_base_dir += os.sep - # paths below the base_dir are relative, otherwise absolute - paths = [] - for fname in dependencies: - if (fname.startswith(src_base_dir) or - fname.startswith('.' + os.path.sep)): - paths.append(os.path.relpath(fname, src_base_dir)) - else: - paths.append(os.path.abspath(fname)) - - depline = os.path.split(c_file)[1] + ": \\\n " - depline += " \\\n ".join(paths) + "\n" - with open(c_file+'.dep', 'w') as outfile: - outfile.write(depline) - + # write out the depfile, if requested + if depfile: + dependencies = deps.all_dependencies(source) + src_base_dir, _ = os.path.split(source) + if not src_base_dir.endswith(os.sep): + src_base_dir += os.sep + # paths below the base_dir are relative, otherwise absolute + paths = [] + for fname in dependencies: + if (fname.startswith(src_base_dir) or + fname.startswith('.' + os.path.sep)): + paths.append(os.path.relpath(fname, src_base_dir)) + else: + paths.append(os.path.abspath(fname)) + + depline = os.path.split(c_file)[1] + ": \\\n " + depline += " \\\n ".join(paths) + "\n" + with open(c_file+'.dep', 'w') as outfile: + outfile.write(depline) + if os.path.exists(c_file): c_timestamp = os.path.getmtime(c_file) else: diff --git a/contrib/tools/cython/Cython/Build/Inline.py b/contrib/tools/cython/Cython/Build/Inline.py index 80a88c67a7..db6d2640a5 100644 --- a/contrib/tools/cython/Cython/Build/Inline.py +++ b/contrib/tools/cython/Cython/Build/Inline.py @@ -1,31 +1,31 @@ from __future__ import absolute_import -import hashlib -import inspect -import os -import re -import sys +import hashlib +import inspect +import os +import re +import sys from distutils.core import Distribution, Extension from distutils.command.build_ext import build_ext import Cython -from ..Compiler.Main import Context, default_options +from ..Compiler.Main import Context, default_options -from ..Compiler.Visitor import CythonTransform, EnvTransform -from ..Compiler.ParseTreeTransforms import SkipDeclarations +from ..Compiler.Visitor import CythonTransform, EnvTransform +from ..Compiler.ParseTreeTransforms import SkipDeclarations from ..Compiler.TreeFragment import parse_from_strings from ..Compiler.StringEncoding import _unicode from .Dependencies import strip_string_literals, cythonize, cached_function -from ..Compiler import Pipeline +from ..Compiler import Pipeline from ..Utils import get_cython_cache_dir import cython as cython_module -IS_PY3 = sys.version_info >= (3,) - +IS_PY3 = sys.version_info >= (3,) + # A utility function to convert user-supplied ASCII strings to unicode. 
-if not IS_PY3: +if not IS_PY3: def to_unicode(s): if isinstance(s, bytes): return s.decode('ascii') @@ -34,18 +34,18 @@ if not IS_PY3: else: to_unicode = lambda x: x -if sys.version_info < (3, 5): - import imp - def load_dynamic(name, module_path): - return imp.load_dynamic(name, module_path) -else: - import importlib.util as _importlib_util - def load_dynamic(name, module_path): - spec = _importlib_util.spec_from_file_location(name, module_path) - module = _importlib_util.module_from_spec(spec) - # sys.modules[name] = module - spec.loader.exec_module(module) - return module +if sys.version_info < (3, 5): + import imp + def load_dynamic(name, module_path): + return imp.load_dynamic(name, module_path) +else: + import importlib.util as _importlib_util + def load_dynamic(name, module_path): + spec = _importlib_util.spec_from_file_location(name, module_path) + module = _importlib_util.module_from_spec(spec) + # sys.modules[name] = module + spec.loader.exec_module(module) + return module class UnboundSymbols(EnvTransform, SkipDeclarations): def __init__(self): @@ -131,7 +131,7 @@ def _create_context(cython_include_dirs): _cython_inline_cache = {} _cython_inline_default_context = _create_context(('.',)) - + def _populate_unbound(kwds, unbound_symbols, locals=None, globals=None): for symbol in unbound_symbols: if symbol not in kwds: @@ -148,12 +148,12 @@ def _populate_unbound(kwds, unbound_symbols, locals=None, globals=None): else: print("Couldn't find %r" % symbol) - -def _inline_key(orig_code, arg_sigs, language_level): - key = orig_code, arg_sigs, sys.version_info, sys.executable, language_level, Cython.__version__ - return hashlib.sha1(_unicode(key).encode('utf-8')).hexdigest() - - + +def _inline_key(orig_code, arg_sigs, language_level): + key = orig_code, arg_sigs, sys.version_info, sys.executable, language_level, Cython.__version__ + return hashlib.sha1(_unicode(key).encode('utf-8')).hexdigest() + + def cython_inline(code, get_type=unsafe_type, lib_dir=os.path.join(get_cython_cache_dir(), 'inline'), cython_include_dirs=None, cython_compiler_directives=None, @@ -163,20 +163,20 @@ def cython_inline(code, get_type=unsafe_type, get_type = lambda x: 'object' ctx = _create_context(tuple(cython_include_dirs)) if cython_include_dirs else _cython_inline_default_context - cython_compiler_directives = dict(cython_compiler_directives) if cython_compiler_directives else {} - if language_level is None and 'language_level' not in cython_compiler_directives: - language_level = '3str' - if language_level is not None: - cython_compiler_directives['language_level'] = language_level - + cython_compiler_directives = dict(cython_compiler_directives) if cython_compiler_directives else {} + if language_level is None and 'language_level' not in cython_compiler_directives: + language_level = '3str' + if language_level is not None: + cython_compiler_directives['language_level'] = language_level + # Fast path if this has been called in this session. 
_unbound_symbols = _cython_inline_cache.get(code) if _unbound_symbols is not None: _populate_unbound(kwds, _unbound_symbols, locals, globals) args = sorted(kwds.items()) arg_sigs = tuple([(get_type(value, ctx), arg) for arg, value in args]) - key_hash = _inline_key(code, arg_sigs, language_level) - invoke = _cython_inline_cache.get((code, arg_sigs, key_hash)) + key_hash = _inline_key(code, arg_sigs, language_level) + invoke = _cython_inline_cache.get((code, arg_sigs, key_hash)) if invoke is not None: arg_list = [arg[1] for arg in args] return invoke(*arg_list) @@ -204,8 +204,8 @@ def cython_inline(code, get_type=unsafe_type, del kwds[name] arg_names = sorted(kwds) arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names]) - key_hash = _inline_key(orig_code, arg_sigs, language_level) - module_name = "_cython_inline_" + key_hash + key_hash = _inline_key(orig_code, arg_sigs, language_level) + module_name = "_cython_inline_" + key_hash if module_name in sys.modules: module = sys.modules[module_name] @@ -270,13 +270,13 @@ def __invoke(%(params)s): build_extension.build_lib = lib_dir build_extension.run() - module = load_dynamic(module_name, module_path) + module = load_dynamic(module_name, module_path) - _cython_inline_cache[orig_code, arg_sigs, key_hash] = module.__invoke + _cython_inline_cache[orig_code, arg_sigs, key_hash] = module.__invoke arg_list = [kwds[arg] for arg in arg_names] return module.__invoke(*arg_list) - + # Cached suffix used by cython_inline above. None should get # overridden with actual value upon the first cython_inline invocation cython_inline.so_ext = None diff --git a/contrib/tools/cython/Cython/Build/IpythonMagic.py b/contrib/tools/cython/Cython/Build/IpythonMagic.py index a96889dc8a..7abb97ec70 100644 --- a/contrib/tools/cython/Cython/Build/IpythonMagic.py +++ b/contrib/tools/cython/Cython/Build/IpythonMagic.py @@ -56,8 +56,8 @@ import copy import distutils.log import textwrap -IO_ENCODING = sys.getfilesystemencoding() -IS_PY2 = sys.version_info[0] < 3 +IO_ENCODING = sys.getfilesystemencoding() +IS_PY2 = sys.version_info[0] < 3 try: reload @@ -102,14 +102,14 @@ PGO_CONFIG = { PGO_CONFIG['mingw32'] = PGO_CONFIG['gcc'] -if IS_PY2: - def encode_fs(name): - return name if isinstance(name, bytes) else name.encode(IO_ENCODING) -else: - def encode_fs(name): - return name - - +if IS_PY2: + def encode_fs(name): + return name if isinstance(name, bytes) else name.encode(IO_ENCODING) +else: + def encode_fs(name): + return name + + @magics_class class CythonMagics(Magics): @@ -315,7 +315,7 @@ class CythonMagics(Magics): key += (time.time(),) if args.name: - module_name = str(args.name) # no-op in Py3 + module_name = str(args.name) # no-op in Py3 else: module_name = "_cython_magic_" + hashlib.md5(str(key).encode('utf-8')).hexdigest() html_file = os.path.join(lib_dir, module_name + '.html') @@ -331,9 +331,9 @@ class CythonMagics(Magics): extension = None if need_cythonize: extensions = self._cythonize(module_name, code, lib_dir, args, quiet=args.quiet) - if extensions is None: - # Compilation failed and printed error message - return None + if extensions is None: + # Compilation failed and printed error message + return None assert len(extensions) == 1 extension = extensions[0] self._code_cache[key] = module_name @@ -341,12 +341,12 @@ class CythonMagics(Magics): if args.pgo: self._profile_pgo_wrapper(extension, lib_dir) - try: - self._build_extension(extension, lib_dir, pgo_step_name='use' if args.pgo else None, - quiet=args.quiet) - except 
distutils.errors.CompileError: - # Build failed and printed error message - return None + try: + self._build_extension(extension, lib_dir, pgo_step_name='use' if args.pgo else None, + quiet=args.quiet) + except distutils.errors.CompileError: + # Build failed and printed error message + return None module = imp.load_dynamic(module_name, module_path) self._import_all(module) @@ -415,7 +415,7 @@ class CythonMagics(Magics): def _cythonize(self, module_name, code, lib_dir, args, quiet=True): pyx_file = os.path.join(lib_dir, module_name + '.pyx') - pyx_file = encode_fs(pyx_file) + pyx_file = encode_fs(pyx_file) c_include_dirs = args.include c_src_files = list(map(str, args.src)) @@ -535,10 +535,10 @@ class CythonMagics(Magics): build_extension = _build_ext(dist) build_extension.finalize_options() if temp_dir: - temp_dir = encode_fs(temp_dir) + temp_dir = encode_fs(temp_dir) build_extension.build_temp = temp_dir if lib_dir: - lib_dir = encode_fs(lib_dir) + lib_dir = encode_fs(lib_dir) build_extension.build_lib = lib_dir if extension is not None: build_extension.extensions = [extension] diff --git a/contrib/tools/cython/Cython/Build/Tests/TestInline.py b/contrib/tools/cython/Cython/Build/Tests/TestInline.py index 9676e202ae..d209488083 100644 --- a/contrib/tools/cython/Cython/Build/Tests/TestInline.py +++ b/contrib/tools/cython/Cython/Build/Tests/TestInline.py @@ -24,10 +24,10 @@ class TestInline(CythonTest): self.test_kwds['lib_dir'] = lib_dir def test_simple(self): - self.assertEqual(inline("return 1+2", **self.test_kwds), 3) + self.assertEqual(inline("return 1+2", **self.test_kwds), 3) def test_types(self): - self.assertEqual(inline(""" + self.assertEqual(inline(""" cimport cython return cython.typeof(a), cython.typeof(b) """, a=1.0, b=[], **self.test_kwds), ('double', 'list object')) @@ -35,13 +35,13 @@ class TestInline(CythonTest): def test_locals(self): a = 1 b = 2 - self.assertEqual(inline("return a+b", **self.test_kwds), 3) + self.assertEqual(inline("return a+b", **self.test_kwds), 3) def test_globals(self): - self.assertEqual(inline("return global_value + 1", **self.test_kwds), global_value + 1) + self.assertEqual(inline("return global_value + 1", **self.test_kwds), global_value + 1) def test_no_return(self): - self.assertEqual(inline(""" + self.assertEqual(inline(""" a = 1 cdef double b = 2 cdef c = [] @@ -49,7 +49,7 @@ class TestInline(CythonTest): def test_def_node(self): foo = inline("def foo(x): return x * x", **self.test_kwds)['foo'] - self.assertEqual(foo(7), 49) + self.assertEqual(foo(7), 49) def test_class_ref(self): class Type(object): @@ -64,7 +64,7 @@ class TestInline(CythonTest): c = cy.declare(cy.pointer(cy.float), &b) return b """, a=3, **self.test_kwds) - self.assertEqual(type(b), float) + self.assertEqual(type(b), float) def test_compiler_directives(self): self.assertEqual( @@ -74,23 +74,23 @@ class TestInline(CythonTest): 6 ) - def test_lang_version(self): - # GH-3419. Caching for inline code didn't always respect compiler directives. - inline_divcode = "def f(int a, int b): return a/b" - self.assertEqual( - inline(inline_divcode, language_level=2)['f'](5,2), - 2 - ) - self.assertEqual( - inline(inline_divcode, language_level=3)['f'](5,2), - 2.5 - ) - + def test_lang_version(self): + # GH-3419. Caching for inline code didn't always respect compiler directives. 
+ inline_divcode = "def f(int a, int b): return a/b" + self.assertEqual( + inline(inline_divcode, language_level=2)['f'](5,2), + 2 + ) + self.assertEqual( + inline(inline_divcode, language_level=3)['f'](5,2), + 2.5 + ) + if has_numpy: def test_numpy(self): import numpy a = numpy.ndarray((10, 20)) a[0,0] = 10 - self.assertEqual(safe_type(a), 'numpy.ndarray[numpy.float64_t, ndim=2]') - self.assertEqual(inline("return a[0,0]", a=a, **self.test_kwds), 10.0) + self.assertEqual(safe_type(a), 'numpy.ndarray[numpy.float64_t, ndim=2]') + self.assertEqual(inline("return a[0,0]", a=a, **self.test_kwds), 10.0) diff --git a/contrib/tools/cython/Cython/Build/Tests/TestIpythonMagic.py b/contrib/tools/cython/Cython/Build/Tests/TestIpythonMagic.py index d27c5717e6..24213091b2 100644 --- a/contrib/tools/cython/Cython/Build/Tests/TestIpythonMagic.py +++ b/contrib/tools/cython/Cython/Build/Tests/TestIpythonMagic.py @@ -28,24 +28,24 @@ try: except ImportError: pass -code = u"""\ +code = u"""\ def f(x): return 2*x -""" +""" -cython3_code = u"""\ +cython3_code = u"""\ def f(int x): return 2 / x def call(x): return f(*(x,)) -""" +""" -pgo_cython3_code = cython3_code + u"""\ +pgo_cython3_code = cython3_code + u"""\ def main(): for _ in range(100): call(5) main() -""" +""" if sys.platform == 'win32': @@ -154,10 +154,10 @@ class TestIPythonMagic(CythonTest): @skip_win32('Skip on Windows') def test_extlibs(self): ip = self._ip - code = u""" + code = u""" from libc.math cimport sin x = sin(0.0) - """ + """ ip.user_ns['x'] = 1 ip.run_cell_magic('cython', '-l m', code) self.assertEqual(ip.user_ns['x'], 0) @@ -195,11 +195,11 @@ x = sin(0.0) ip.run_cell_magic('cython', '--verbose', code) ip.ex('g = f(10)') self.assertEqual(ip.user_ns['g'], 20.0) - self.assertEqual([verbose_log.INFO, verbose_log.DEBUG, verbose_log.INFO], + self.assertEqual([verbose_log.INFO, verbose_log.DEBUG, verbose_log.INFO], verbose_log.thresholds) with mock_distutils() as normal_log: ip.run_cell_magic('cython', '', code) ip.ex('g = f(10)') self.assertEqual(ip.user_ns['g'], 20.0) - self.assertEqual([normal_log.INFO], normal_log.thresholds) + self.assertEqual([normal_log.INFO], normal_log.thresholds) diff --git a/contrib/tools/cython/Cython/Compiler/Buffer.py b/contrib/tools/cython/Cython/Compiler/Buffer.py index 9603235611..c62a24f568 100644 --- a/contrib/tools/cython/Cython/Compiler/Buffer.py +++ b/contrib/tools/cython/Cython/Compiler/Buffer.py @@ -668,11 +668,11 @@ def get_type_information_cname(code, dtype, maxdepth=None): if dtype.is_simple_buffer_dtype(): structinfo_name = "NULL" elif dtype.is_struct: - struct_scope = dtype.scope - if dtype.is_const: - struct_scope = struct_scope.const_base_type_scope - # Must pre-call all used types in order not to recurse during utility code writing. - fields = struct_scope.var_entries + struct_scope = dtype.scope + if dtype.is_const: + struct_scope = struct_scope.const_base_type_scope + # Must pre-call all used types in order not to recurse during utility code writing. 
+ fields = struct_scope.var_entries assert len(fields) > 0 types = [get_type_information_cname(code, f.type, maxdepth - 1) for f in fields] diff --git a/contrib/tools/cython/Cython/Compiler/Builtin.py b/contrib/tools/cython/Cython/Compiler/Builtin.py index 3dc6698721..5fa717507d 100644 --- a/contrib/tools/cython/Cython/Compiler/Builtin.py +++ b/contrib/tools/cython/Cython/Compiler/Builtin.py @@ -203,7 +203,7 @@ builtin_function_table = [ #('raw_input', "", "", ""), #('reduce', "", "", ""), BuiltinFunction('reload', "O", "O", "PyImport_ReloadModule"), - BuiltinFunction('repr', "O", "O", "PyObject_Repr"), # , builtin_return_type='str'), # add in Cython 3.1 + BuiltinFunction('repr', "O", "O", "PyObject_Repr"), # , builtin_return_type='str'), # add in Cython 3.1 #('round', "", "", ""), BuiltinFunction('setattr', "OOO", "r", "PyObject_SetAttr"), #('sum', "", "", ""), diff --git a/contrib/tools/cython/Cython/Compiler/Code.pxd b/contrib/tools/cython/Cython/Compiler/Code.pxd index c07cc415e1..acad0c1cf4 100644 --- a/contrib/tools/cython/Cython/Compiler/Code.pxd +++ b/contrib/tools/cython/Cython/Compiler/Code.pxd @@ -48,7 +48,7 @@ cdef class FunctionState: cdef public list temps_allocated cdef public dict temps_free cdef public dict temps_used_type - cdef public set zombie_temps + cdef public set zombie_temps cdef public size_t temp_counter cdef public list collect_temps_stack diff --git a/contrib/tools/cython/Cython/Compiler/Code.py b/contrib/tools/cython/Cython/Compiler/Code.py index 3120deb795..f43c4b2b8e 100644 --- a/contrib/tools/cython/Cython/Compiler/Code.py +++ b/contrib/tools/cython/Cython/Compiler/Code.py @@ -280,7 +280,7 @@ class UtilityCodeBase(object): _, ext = os.path.splitext(path) if ext in ('.pyx', '.py', '.pxd', '.pxi'): comment = '#' - strip_comments = partial(re.compile(r'^\s*#(?!\s*cython\s*:).*').sub, '') + strip_comments = partial(re.compile(r'^\s*#(?!\s*cython\s*:).*').sub, '') rstrip = StringEncoding._unicode.rstrip else: comment = '/' @@ -501,11 +501,11 @@ class UtilityCode(UtilityCodeBase): def specialize(self, pyrex_type=None, **data): # Dicts aren't hashable... - name = self.name + name = self.name if pyrex_type is not None: data['type'] = pyrex_type.empty_declaration_code() data['type_name'] = pyrex_type.specialization_name() - name = "%s[%s]" % (name, data['type_name']) + name = "%s[%s]" % (name, data['type_name']) key = tuple(sorted(data.items())) try: return self._cache[key] @@ -521,9 +521,9 @@ class UtilityCode(UtilityCodeBase): self.none_or_sub(self.init, data), self.none_or_sub(self.cleanup, data), requires, - self.proto_block, - name, - ) + self.proto_block, + name, + ) self.specialize_list.append(s) return s @@ -547,7 +547,7 @@ class UtilityCode(UtilityCodeBase): impl = re.sub(r'PY(IDENT|UNICODE)\("([^"]+)"\)', externalise, impl) assert 'PYIDENT(' not in impl and 'PYUNICODE(' not in impl - return True, impl + return True, impl def inject_unbound_methods(self, impl, output): """Replace 'UNBOUND_METHOD(type, "name")' by a constant Python identifier cname. 
@@ -570,7 +570,7 @@ class UtilityCode(UtilityCodeBase): r'\)', externalise, impl) assert 'CALL_UNBOUND_METHOD(' not in impl - return True, impl + return True, impl def wrap_c_strings(self, impl): """Replace CSTRING('''xyz''') by a C compatible string @@ -722,10 +722,10 @@ class FunctionState(object): self.can_trace = False self.gil_owned = True - self.temps_allocated = [] # of (name, type, manage_ref, static) - self.temps_free = {} # (type, manage_ref) -> list of free vars with same type/managed status - self.temps_used_type = {} # name -> (type, manage_ref) - self.zombie_temps = set() # temps that must not be reused after release + self.temps_allocated = [] # of (name, type, manage_ref, static) + self.temps_free = {} # (type, manage_ref) -> list of free vars with same type/managed status + self.temps_used_type = {} # name -> (type, manage_ref) + self.zombie_temps = set() # temps that must not be reused after release self.temp_counter = 0 self.closure_temps = None @@ -740,20 +740,20 @@ class FunctionState(object): self.should_declare_error_indicator = False self.uses_error_indicator = False - # safety checks - - def validate_exit(self): - # validate that all allocated temps have been freed - if self.temps_allocated: - leftovers = self.temps_in_use() - if leftovers: - msg = "TEMPGUARD: Temps left over at end of '%s': %s" % (self.scope.name, ', '.join([ - '%s [%s]' % (name, ctype) - for name, ctype, is_pytemp in sorted(leftovers)]), - ) - #print(msg) - raise RuntimeError(msg) - + # safety checks + + def validate_exit(self): + # validate that all allocated temps have been freed + if self.temps_allocated: + leftovers = self.temps_in_use() + if leftovers: + msg = "TEMPGUARD: Temps left over at end of '%s': %s" % (self.scope.name, ', '.join([ + '%s [%s]' % (name, ctype) + for name, ctype, is_pytemp in sorted(leftovers)]), + ) + #print(msg) + raise RuntimeError(msg) + # labels def new_label(self, name=None): @@ -823,7 +823,7 @@ class FunctionState(object): # temp handling - def allocate_temp(self, type, manage_ref, static=False, reusable=True): + def allocate_temp(self, type, manage_ref, static=False, reusable=True): """ Allocates a temporary (which may create a new one or get a previously allocated and released one of the same type). Type is simply registered @@ -842,24 +842,24 @@ class FunctionState(object): This is only used when allocating backing store for a module-level C array literals. - if reusable=False, the temp will not be reused after release. - + if reusable=False, the temp will not be reused after release. + A C string referring to the variable is returned. """ if type.is_const and not type.is_reference: type = type.const_base_type elif type.is_reference and not type.is_fake_reference: type = type.ref_base_type - elif type.is_cfunction: - from . import PyrexTypes - type = PyrexTypes.c_ptr_type(type) # A function itself isn't an l-value + elif type.is_cfunction: + from . import PyrexTypes + type = PyrexTypes.c_ptr_type(type) # A function itself isn't an l-value if not type.is_pyobject and not type.is_memoryviewslice: # Make manage_ref canonical, so that manage_ref will always mean # a decref is needed. 
manage_ref = False freelist = self.temps_free.get((type, manage_ref)) - if reusable and freelist is not None and freelist[0]: + if reusable and freelist is not None and freelist[0]: result = freelist[0].pop() freelist[1].remove(result) else: @@ -868,11 +868,11 @@ class FunctionState(object): result = "%s%d" % (Naming.codewriter_temp_prefix, self.temp_counter) if result not in self.names_taken: break self.temps_allocated.append((result, type, manage_ref, static)) - if not reusable: - self.zombie_temps.add(result) + if not reusable: + self.zombie_temps.add(result) self.temps_used_type[result] = (type, manage_ref) if DebugFlags.debug_temp_code_comments: - self.owner.putln("/* %s allocated (%s)%s */" % (result, type, "" if reusable else " - zombie")) + self.owner.putln("/* %s allocated (%s)%s */" % (result, type, "" if reusable else " - zombie")) if self.collect_temps_stack: self.collect_temps_stack[-1].add((result, type)) @@ -891,12 +891,12 @@ class FunctionState(object): self.temps_free[(type, manage_ref)] = freelist if name in freelist[1]: raise RuntimeError("Temp %s freed twice!" % name) - if name not in self.zombie_temps: - freelist[0].append(name) + if name not in self.zombie_temps: + freelist[0].append(name) freelist[1].add(name) if DebugFlags.debug_temp_code_comments: - self.owner.putln("/* %s released %s*/" % ( - name, " - zombie" if name in self.zombie_temps else "")) + self.owner.putln("/* %s released %s*/" % ( + name, " - zombie" if name in self.zombie_temps else "")) def temps_in_use(self): """Return a list of (cname,type,manage_ref) tuples of temp names and their type @@ -2366,18 +2366,18 @@ class CCodeWriter(object): self.funcstate.should_declare_error_indicator = True if used: self.funcstate.uses_error_indicator = True - return "__PYX_MARK_ERR_POS(%s, %s)" % ( + return "__PYX_MARK_ERR_POS(%s, %s)" % ( self.lookup_filename(pos[0]), - pos[1]) + pos[1]) - def error_goto(self, pos, used=True): + def error_goto(self, pos, used=True): lbl = self.funcstate.error_label self.funcstate.use_label(lbl) if pos is None: return 'goto %s;' % lbl - self.funcstate.should_declare_error_indicator = True - if used: - self.funcstate.uses_error_indicator = True + self.funcstate.should_declare_error_indicator = True + if used: + self.funcstate.uses_error_indicator = True return "__PYX_ERR(%s, %s, %s)" % ( self.lookup_filename(pos[0]), pos[1], diff --git a/contrib/tools/cython/Cython/Compiler/ExprNodes.py b/contrib/tools/cython/Cython/Compiler/ExprNodes.py index 94f30d7e7a..4a402f8126 100644 --- a/contrib/tools/cython/Cython/Compiler/ExprNodes.py +++ b/contrib/tools/cython/Cython/Compiler/ExprNodes.py @@ -1012,11 +1012,11 @@ class ExprNode(Node): return self elif type.is_pyobject or type.is_int or type.is_ptr or type.is_float: return CoerceToBooleanNode(self, env) - elif type.is_cpp_class and type.scope and type.scope.lookup("operator bool"): + elif type.is_cpp_class and type.scope and type.scope.lookup("operator bool"): return SimpleCallNode( self.pos, function=AttributeNode( - self.pos, obj=self, attribute=StringEncoding.EncodedString('operator bool')), + self.pos, obj=self, attribute=StringEncoding.EncodedString('operator bool')), args=[]).analyse_types(env) elif type.is_ctuple: bool_value = len(type.components) == 0 @@ -1623,23 +1623,23 @@ class UnicodeNode(ConstNode): def generate_evaluation_code(self, code): if self.type.is_pyobject: - # FIXME: this should go away entirely! 
- # Since string_contains_lone_surrogates() returns False for surrogate pairs in Py2/UCS2, - # Py2 can generate different code from Py3 here. Let's hope we get away with claiming that - # the processing of surrogate pairs in code was always ambiguous and lead to different results - # on P16/32bit Unicode platforms. - if StringEncoding.string_contains_lone_surrogates(self.value): - # lone (unpaired) surrogates are not really portable and cannot be + # FIXME: this should go away entirely! + # Since string_contains_lone_surrogates() returns False for surrogate pairs in Py2/UCS2, + # Py2 can generate different code from Py3 here. Let's hope we get away with claiming that + # the processing of surrogate pairs in code was always ambiguous and lead to different results + # on P16/32bit Unicode platforms. + if StringEncoding.string_contains_lone_surrogates(self.value): + # lone (unpaired) surrogates are not really portable and cannot be # decoded by the UTF-8 codec in Py3.3 self.result_code = code.get_py_const(py_object_type, 'ustring') - data_cname = code.get_string_const( - StringEncoding.BytesLiteral(self.value.encode('unicode_escape'))) + data_cname = code.get_string_const( + StringEncoding.BytesLiteral(self.value.encode('unicode_escape'))) const_code = code.get_cached_constants_writer(self.result_code) if const_code is None: return # already initialised const_code.mark_pos(self.pos) const_code.putln( - "%s = PyUnicode_DecodeUnicodeEscape(%s, sizeof(%s) - 1, NULL); %s" % ( + "%s = PyUnicode_DecodeUnicodeEscape(%s, sizeof(%s) - 1, NULL); %s" % ( self.result_code, data_cname, data_cname, @@ -3228,7 +3228,7 @@ class FormattedValueNode(ExprNode): # {}-delimited portions of an f-string # # value ExprNode The expression itself - # conversion_char str or None Type conversion (!s, !r, !a, or none, or 'd' for integer conversion) + # conversion_char str or None Type conversion (!s, !r, !a, or none, or 'd' for integer conversion) # format_spec JoinedStrNode or None Format string passed to __format__ # c_format_spec str or None If not None, formatting can be done at the C level @@ -3242,7 +3242,7 @@ class FormattedValueNode(ExprNode): 's': 'PyObject_Unicode', 'r': 'PyObject_Repr', 'a': 'PyObject_ASCII', # NOTE: mapped to PyObject_Repr() in Py2 - 'd': '__Pyx_PyNumber_IntOrLong', # NOTE: internal mapping for '%d' formatting + 'd': '__Pyx_PyNumber_IntOrLong', # NOTE: internal mapping for '%d' formatting }.get def may_be_none(self): @@ -4207,9 +4207,9 @@ class BufferIndexNode(_IndexingBaseNode): # Whether we're assigning to a buffer (in that case it needs to be writable) writable_needed = False - # Any indexing temp variables that we need to clean up. - index_temps = () - + # Any indexing temp variables that we need to clean up. + index_temps = () + def analyse_target_types(self, env): self.analyse_types(env, getting=False) @@ -4294,7 +4294,7 @@ class BufferIndexNode(_IndexingBaseNode): warning(self.pos, "Use boundscheck(False) for faster access", level=1) # Assign indices to temps of at least (s)size_t to allow further index calculations. - self.index_temps = index_temps = [self.get_index_in_temp(code,ivar) for ivar in self.indices] + self.index_temps = index_temps = [self.get_index_in_temp(code,ivar) for ivar in self.indices] # Generate buffer access code using these temps from . 
import Buffer @@ -4340,7 +4340,7 @@ class BufferIndexNode(_IndexingBaseNode): pythran_indexing_code(self.indices), op, rhs.pythran_result())) - code.funcstate.release_temp(obj) + code.funcstate.release_temp(obj) return # Used from generate_assignment_code and InPlaceAssignmentNode @@ -4381,13 +4381,13 @@ class BufferIndexNode(_IndexingBaseNode): code.putln("%s = (PyObject *) *%s;" % (self.result(), self.buffer_ptr_code)) code.putln("__Pyx_INCREF((PyObject*)%s);" % self.result()) - def free_subexpr_temps(self, code): - for temp in self.index_temps: - code.funcstate.release_temp(temp) - self.index_temps = () - super(BufferIndexNode, self).free_subexpr_temps(code) + def free_subexpr_temps(self, code): + for temp in self.index_temps: + code.funcstate.release_temp(temp) + self.index_temps = () + super(BufferIndexNode, self).free_subexpr_temps(code) + - class MemoryViewIndexNode(BufferIndexNode): is_memview_index = True @@ -4662,7 +4662,7 @@ class MemoryCopyNode(ExprNode): self.dst.generate_evaluation_code(code) self._generate_assignment_code(rhs, code) self.dst.generate_disposal_code(code) - self.dst.free_temps(code) + self.dst.free_temps(code) rhs.generate_disposal_code(code) rhs.free_temps(code) @@ -5479,7 +5479,7 @@ class CallNode(ExprNode): func_type = self.function_type() if func_type.is_pyobject: self.gil_error() - elif not func_type.is_error and not getattr(func_type, 'nogil', False): + elif not func_type.is_error and not getattr(func_type, 'nogil', False): self.gil_error() gil_message = "Calling gil-requiring function" @@ -5566,7 +5566,7 @@ class SimpleCallNode(CallNode): env.add_include_file(pythran_get_func_include_file(function)) return NumPyMethodCallNode.from_node( self, - function_cname=pythran_functor(function), + function_cname=pythran_functor(function), arg_tuple=self.arg_tuple, type=PythranExpr(pythran_func_type(function, self.arg_tuple.args)), ) @@ -5847,17 +5847,17 @@ class SimpleCallNode(CallNode): if function.is_name or function.is_attribute: code.globalstate.use_entry_utility_code(function.entry) - abs_function_cnames = ('abs', 'labs', '__Pyx_abs_longlong') - is_signed_int = self.type.is_int and self.type.signed - if self.overflowcheck and is_signed_int and function.result() in abs_function_cnames: - code.globalstate.use_utility_code(UtilityCode.load_cached("Common", "Overflow.c")) - code.putln('if (unlikely(%s == __PYX_MIN(%s))) {\ - PyErr_SetString(PyExc_OverflowError,\ - "Trying to take the absolute value of the most negative integer is not defined."); %s; }' % ( - self.args[0].result(), - self.args[0].type.empty_declaration_code(), - code.error_goto(self.pos))) - + abs_function_cnames = ('abs', 'labs', '__Pyx_abs_longlong') + is_signed_int = self.type.is_int and self.type.signed + if self.overflowcheck and is_signed_int and function.result() in abs_function_cnames: + code.globalstate.use_utility_code(UtilityCode.load_cached("Common", "Overflow.c")) + code.putln('if (unlikely(%s == __PYX_MIN(%s))) {\ + PyErr_SetString(PyExc_OverflowError,\ + "Trying to take the absolute value of the most negative integer is not defined."); %s; }' % ( + self.args[0].result(), + self.args[0].type.empty_declaration_code(), + code.error_goto(self.pos))) + if not function.type.is_pyobject or len(self.arg_tuple.args) > 1 or ( self.arg_tuple.args and self.arg_tuple.is_literal): super(SimpleCallNode, self).generate_evaluation_code(code) @@ -5960,7 +5960,7 @@ class SimpleCallNode(CallNode): self.result() if self.type.is_pyobject else None, func_type.exception_value, self.nogil) else: - if 
exc_checks: + if exc_checks: goto_error = code.error_goto_if(" && ".join(exc_checks), self.pos) else: goto_error = "" @@ -5971,13 +5971,13 @@ class SimpleCallNode(CallNode): code.funcstate.release_temp(self.opt_arg_struct) -class NumPyMethodCallNode(ExprNode): +class NumPyMethodCallNode(ExprNode): # Pythran call to a NumPy function or method. # - # function_cname string the function/method to call - # arg_tuple TupleNode the arguments as an args tuple + # function_cname string the function/method to call + # arg_tuple TupleNode the arguments as an args tuple - subexprs = ['arg_tuple'] + subexprs = ['arg_tuple'] is_temp = True may_return_none = True @@ -5995,7 +5995,7 @@ class NumPyMethodCallNode(ExprNode): code.putln("new (&%s) decltype(%s){%s{}(%s)};" % ( self.result(), self.result(), - self.function_cname, + self.function_cname, ", ".join(a.pythran_result() for a in args))) @@ -6049,7 +6049,7 @@ class PyMethodCallNode(SimpleCallNode): # not an attribute itself, but might have been assigned from one (e.g. bound method) for assignment in self.function.cf_state: value = assignment.rhs - if value and value.is_attribute and value.obj.type and value.obj.type.is_pyobject: + if value and value.is_attribute and value.obj.type and value.obj.type.is_pyobject: if attribute_is_likely_method(value): likely_method = 'likely' break @@ -6669,7 +6669,7 @@ class MergedDictNode(ExprNode): return dict_type def analyse_types(self, env): - self.keyword_args = [ + self.keyword_args = [ arg.analyse_types(env).coerce_to_pyobject(env).as_none_safe_node( # FIXME: CPython's error message starts with the runtime function name 'argument after ** must be a mapping, not NoneType') @@ -6842,11 +6842,11 @@ class AttributeNode(ExprNode): # FIXME: this is way too redundant with analyse_types() node = self.analyse_as_cimported_attribute_node(env, target=False) if node is not None: - if node.entry.type and node.entry.type.is_cfunction: - # special-case - function converted to pointer - return PyrexTypes.CPtrType(node.entry.type) - else: - return node.entry.type + if node.entry.type and node.entry.type.is_cfunction: + # special-case - function converted to pointer + return PyrexTypes.CPtrType(node.entry.type) + else: + return node.entry.type node = self.analyse_as_type_attribute(env) if node is not None: return node.entry.type @@ -7279,8 +7279,8 @@ class AttributeNode(ExprNode): self.member.upper(), self.obj.result_as(self.obj.type), rhs.result_as(self.ctype()))) - rhs.generate_disposal_code(code) - rhs.free_temps(code) + rhs.generate_disposal_code(code) + rhs.free_temps(code) else: select_code = self.result() if self.type.is_pyobject and self.use_managed_ref: @@ -8131,16 +8131,16 @@ class ListNode(SequenceNode): return t def allocate_temp_result(self, code): - if self.type.is_array: - if self.in_module_scope: - self.temp_code = code.funcstate.allocate_temp( - self.type, manage_ref=False, static=True, reusable=False) - else: - # To be valid C++, we must allocate the memory on the stack - # manually and be sure not to reuse it for something else. - # Yes, this means that we leak a temp array variable. - self.temp_code = code.funcstate.allocate_temp( - self.type, manage_ref=False, reusable=False) + if self.type.is_array: + if self.in_module_scope: + self.temp_code = code.funcstate.allocate_temp( + self.type, manage_ref=False, static=True, reusable=False) + else: + # To be valid C++, we must allocate the memory on the stack + # manually and be sure not to reuse it for something else. 
+ # Yes, this means that we leak a temp array variable. + self.temp_code = code.funcstate.allocate_temp( + self.type, manage_ref=False, reusable=False) else: SequenceNode.allocate_temp_result(self, code) @@ -8955,11 +8955,11 @@ class ClassNode(ExprNode, ModuleNameMixin): # a name, tuple of bases and class dictionary. # # name EncodedString Name of the class - # class_def_node PyClassDefNode PyClassDefNode defining this class + # class_def_node PyClassDefNode PyClassDefNode defining this class # doc ExprNode or None Doc string # module_name EncodedString Name of defining module - subexprs = ['doc'] + subexprs = ['doc'] type = py_object_type is_temp = True @@ -8980,13 +8980,13 @@ class ClassNode(ExprNode, ModuleNameMixin): gil_message = "Constructing Python class" def generate_result_code(self, code): - class_def_node = self.class_def_node + class_def_node = self.class_def_node cname = code.intern_identifier(self.name) if self.doc: code.put_error_if_neg(self.pos, 'PyDict_SetItem(%s, %s, %s)' % ( - class_def_node.dict.py_result(), + class_def_node.dict.py_result(), code.intern_identifier( StringEncoding.EncodedString("__doc__")), self.doc.py_result())) @@ -8995,8 +8995,8 @@ class ClassNode(ExprNode, ModuleNameMixin): code.putln( '%s = __Pyx_CreateClass(%s, %s, %s, %s, %s); %s' % ( self.result(), - class_def_node.bases.py_result(), - class_def_node.dict.py_result(), + class_def_node.bases.py_result(), + class_def_node.dict.py_result(), cname, qualname, py_mod_name, @@ -9011,7 +9011,7 @@ class Py3ClassNode(ExprNode): # # name EncodedString Name of the class # module_name EncodedString Name of defining module - # class_def_node PyClassDefNode PyClassDefNode defining this class + # class_def_node PyClassDefNode PyClassDefNode defining this class # calculate_metaclass bool should call CalculateMetaclass() # allow_py2_metaclass bool should look for Py2 metaclass @@ -9034,10 +9034,10 @@ class Py3ClassNode(ExprNode): def generate_result_code(self, code): code.globalstate.use_utility_code(UtilityCode.load_cached("Py3ClassCreate", "ObjectHandling.c")) cname = code.intern_identifier(self.name) - class_def_node = self.class_def_node - mkw = class_def_node.mkw.py_result() if class_def_node.mkw else 'NULL' - if class_def_node.metaclass: - metaclass = class_def_node.metaclass.py_result() + class_def_node = self.class_def_node + mkw = class_def_node.mkw.py_result() if class_def_node.mkw else 'NULL' + if class_def_node.metaclass: + metaclass = class_def_node.metaclass.py_result() else: metaclass = "((PyObject*)&__Pyx_DefaultClassType)" code.putln( @@ -9045,8 +9045,8 @@ class Py3ClassNode(ExprNode): self.result(), metaclass, cname, - class_def_node.bases.py_result(), - class_def_node.dict.py_result(), + class_def_node.bases.py_result(), + class_def_node.dict.py_result(), mkw, self.calculate_metaclass, self.allow_py2_metaclass, @@ -9057,7 +9057,7 @@ class Py3ClassNode(ExprNode): class PyClassMetaclassNode(ExprNode): # Helper class holds Python3 metaclass object # - # class_def_node PyClassDefNode PyClassDefNode defining this class + # class_def_node PyClassDefNode PyClassDefNode defining this class subexprs = [] @@ -9070,38 +9070,38 @@ class PyClassMetaclassNode(ExprNode): return True def generate_result_code(self, code): - bases = self.class_def_node.bases - mkw = self.class_def_node.mkw - if mkw: + bases = self.class_def_node.bases + mkw = self.class_def_node.mkw + if mkw: code.globalstate.use_utility_code( UtilityCode.load_cached("Py3MetaclassGet", "ObjectHandling.c")) call = "__Pyx_Py3MetaclassGet(%s, %s)" 
% ( - bases.result(), - mkw.result()) + bases.result(), + mkw.result()) else: code.globalstate.use_utility_code( UtilityCode.load_cached("CalculateMetaclass", "ObjectHandling.c")) call = "__Pyx_CalculateMetaclass(NULL, %s)" % ( - bases.result()) + bases.result()) code.putln( "%s = %s; %s" % ( self.result(), call, code.error_goto_if_null(self.result(), self.pos))) code.put_gotref(self.py_result()) - + class PyClassNamespaceNode(ExprNode, ModuleNameMixin): # Helper class holds Python3 namespace object # # All this are not owned by this node - # class_def_node PyClassDefNode PyClassDefNode defining this class + # class_def_node PyClassDefNode PyClassDefNode defining this class # doc ExprNode or None Doc string (owned) subexprs = ['doc'] def analyse_types(self, env): if self.doc: - self.doc = self.doc.analyse_types(env).coerce_to_pyobject(env) + self.doc = self.doc.analyse_types(env).coerce_to_pyobject(env) self.type = py_object_type self.is_temp = 1 return self @@ -9113,16 +9113,16 @@ class PyClassNamespaceNode(ExprNode, ModuleNameMixin): cname = code.intern_identifier(self.name) py_mod_name = self.get_py_mod_name(code) qualname = self.get_py_qualified_name(code) - class_def_node = self.class_def_node - null = "(PyObject *) NULL" - doc_code = self.doc.result() if self.doc else null - mkw = class_def_node.mkw.py_result() if class_def_node.mkw else null - metaclass = class_def_node.metaclass.py_result() if class_def_node.metaclass else null + class_def_node = self.class_def_node + null = "(PyObject *) NULL" + doc_code = self.doc.result() if self.doc else null + mkw = class_def_node.mkw.py_result() if class_def_node.mkw else null + metaclass = class_def_node.metaclass.py_result() if class_def_node.metaclass else null code.putln( "%s = __Pyx_Py3MetaclassPrepare(%s, %s, %s, %s, %s, %s, %s); %s" % ( self.result(), metaclass, - class_def_node.bases.result(), + class_def_node.bases.result(), cname, qualname, mkw, @@ -9142,20 +9142,20 @@ class ClassCellInjectorNode(ExprNode): def analyse_expressions(self, env): return self - def generate_result_code(self, code): - assert self.is_active - code.putln( - '%s = PyList_New(0); %s' % ( - self.result(), - code.error_goto_if_null(self.result(), self.pos))) - code.put_gotref(self.result()) + def generate_result_code(self, code): + assert self.is_active + code.putln( + '%s = PyList_New(0); %s' % ( + self.result(), + code.error_goto_if_null(self.result(), self.pos))) + code.put_gotref(self.result()) def generate_injection_code(self, code, classobj_cname): - assert self.is_active - code.globalstate.use_utility_code( - UtilityCode.load_cached("CyFunctionClassCell", "CythonFunction.c")) - code.put_error_if_neg(self.pos, '__Pyx_CyFunction_InitClassCell(%s, %s)' % ( - self.result(), classobj_cname)) + assert self.is_active + code.globalstate.use_utility_code( + UtilityCode.load_cached("CyFunctionClassCell", "CythonFunction.c")) + code.put_error_if_neg(self.pos, '__Pyx_CyFunction_InitClassCell(%s, %s)' % ( + self.result(), classobj_cname)) class ClassCellNode(ExprNode): @@ -9404,11 +9404,11 @@ class PyCFunctionNode(ExprNode, ModuleNameMixin): if self.specialized_cpdefs or self.is_specialization: code.globalstate.use_utility_code( UtilityCode.load_cached("FusedFunction", "CythonFunction.c")) - constructor = "__pyx_FusedFunction_New" + constructor = "__pyx_FusedFunction_New" else: code.globalstate.use_utility_code( UtilityCode.load_cached("CythonFunction", "CythonFunction.c")) - constructor = "__Pyx_CyFunction_New" + constructor = "__Pyx_CyFunction_New" if 
self.code_object: code_object_result = self.code_object.py_result() @@ -10707,20 +10707,20 @@ class CythonArrayNode(ExprNode): code.putln(code.error_goto(self.operand.pos)) code.putln("}") - code.putln("%s = __pyx_format_from_typeinfo(&%s); %s" % ( - format_temp, - type_info, - code.error_goto_if_null(format_temp, self.pos), - )) - code.put_gotref(format_temp) - + code.putln("%s = __pyx_format_from_typeinfo(&%s); %s" % ( + format_temp, + type_info, + code.error_goto_if_null(format_temp, self.pos), + )) + code.put_gotref(format_temp) + buildvalue_fmt = " __PYX_BUILD_PY_SSIZE_T " * len(shapes) - code.putln('%s = Py_BuildValue((char*) "(" %s ")", %s); %s' % ( - shapes_temp, - buildvalue_fmt, - ", ".join(shapes), - code.error_goto_if_null(shapes_temp, self.pos), - )) + code.putln('%s = Py_BuildValue((char*) "(" %s ")", %s); %s' % ( + shapes_temp, + buildvalue_fmt, + ", ".join(shapes), + code.error_goto_if_null(shapes_temp, self.pos), + )) code.put_gotref(shapes_temp) tup = (self.result(), shapes_temp, itemsize, format_temp, @@ -10875,10 +10875,10 @@ class TypeidNode(ExprNode): typeinfo_entry = typeinfo_module.lookup('type_info') return PyrexTypes.CFakeReferenceType(PyrexTypes.c_const_type(typeinfo_entry.type)) - cpp_message = 'typeid operator' - + cpp_message = 'typeid operator' + def analyse_types(self, env): - self.cpp_check(env) + self.cpp_check(env) type_info = self.get_type_info_type(env) if not type_info: self.error("The 'libcpp.typeinfo' module must be cimported to use the typeid() operator") @@ -11415,24 +11415,24 @@ class AddNode(NumBinopNode): self, type1, type2) def py_operation_function(self, code): - type1, type2 = self.operand1.type, self.operand2.type - - if type1 is unicode_type or type2 is unicode_type: - if type1 in (unicode_type, str_type) and type2 in (unicode_type, str_type): - is_unicode_concat = True - elif isinstance(self.operand1, FormattedValueNode) or isinstance(self.operand2, FormattedValueNode): - # Assume that even if we don't know the second type, it's going to be a string. - is_unicode_concat = True + type1, type2 = self.operand1.type, self.operand2.type + + if type1 is unicode_type or type2 is unicode_type: + if type1 in (unicode_type, str_type) and type2 in (unicode_type, str_type): + is_unicode_concat = True + elif isinstance(self.operand1, FormattedValueNode) or isinstance(self.operand2, FormattedValueNode): + # Assume that even if we don't know the second type, it's going to be a string. + is_unicode_concat = True else: - # Operation depends on the second type. - is_unicode_concat = False - - if is_unicode_concat: - if self.operand1.may_be_none() or self.operand2.may_be_none(): - return '__Pyx_PyUnicode_ConcatSafe' - else: - return '__Pyx_PyUnicode_Concat' - + # Operation depends on the second type. 
+ is_unicode_concat = False + + if is_unicode_concat: + if self.operand1.may_be_none() or self.operand2.may_be_none(): + return '__Pyx_PyUnicode_ConcatSafe' + else: + return '__Pyx_PyUnicode_Concat' + return super(AddNode, self).py_operation_function(code) @@ -12515,8 +12515,8 @@ class CmpNode(object): result_code if self.type.is_pyobject else None, self.exception_value, self.in_nogil_context) - else: - code.putln(statement) + else: + code.putln(statement) def c_operator(self, op): if op == 'is': @@ -12976,7 +12976,7 @@ class CoerceToMemViewSliceNode(CoercionNode): self.is_temp = 1 self.use_managed_ref = True self.arg = arg - self.type.create_from_py_utility_code(env) + self.type.create_from_py_utility_code(env) def generate_result_code(self, code): code.putln(self.type.from_py_call_code( @@ -13079,19 +13079,19 @@ class PyTypeTestNode(CoercionNode): def generate_post_assignment_code(self, code): self.arg.generate_post_assignment_code(code) - def allocate_temp_result(self, code): - pass - - def release_temp_result(self, code): - pass - + def allocate_temp_result(self, code): + pass + + def release_temp_result(self, code): + pass + def free_temps(self, code): self.arg.free_temps(code) - def free_subexpr_temps(self, code): - self.arg.free_subexpr_temps(code) + def free_subexpr_temps(self, code): + self.arg.free_subexpr_temps(code) + - class NoneCheckNode(CoercionNode): # This node is used to check that a Python object is not None and # raises an appropriate exception (as specified by the creating diff --git a/contrib/tools/cython/Cython/Compiler/FlowControl.py b/contrib/tools/cython/Cython/Compiler/FlowControl.py index 2f5002d14d..df04471f90 100644 --- a/contrib/tools/cython/Cython/Compiler/FlowControl.py +++ b/contrib/tools/cython/Cython/Compiler/FlowControl.py @@ -884,12 +884,12 @@ class ControlFlowAnalysis(CythonTransform): self.mark_position(node) return node - def visit_SizeofVarNode(self, node): - return node - - def visit_TypeidNode(self, node): - return node - + def visit_SizeofVarNode(self, node): + return node + + def visit_TypeidNode(self, node): + return node + def visit_IfStatNode(self, node): next_block = self.flow.newblock() parent = self.flow.block @@ -1232,18 +1232,18 @@ class ControlFlowAnalysis(CythonTransform): self.mark_position(node) self.visitchildren(node) - outer_exception_handlers = iter(self.flow.exceptions[::-1]) - for handler in outer_exception_handlers: - if handler.finally_enter: - self.flow.block.add_child(handler.finally_enter) - if handler.finally_exit: - # 'return' goes to function exit, or to the next outer 'finally' clause - exit_point = self.flow.exit_point - for next_handler in outer_exception_handlers: - if next_handler.finally_enter: - exit_point = next_handler.finally_enter - break - handler.finally_exit.add_child(exit_point) + outer_exception_handlers = iter(self.flow.exceptions[::-1]) + for handler in outer_exception_handlers: + if handler.finally_enter: + self.flow.block.add_child(handler.finally_enter) + if handler.finally_exit: + # 'return' goes to function exit, or to the next outer 'finally' clause + exit_point = self.flow.exit_point + for next_handler in outer_exception_handlers: + if next_handler.finally_enter: + exit_point = next_handler.finally_enter + break + handler.finally_exit.add_child(exit_point) break else: if self.flow.block: diff --git a/contrib/tools/cython/Cython/Compiler/FusedNode.py b/contrib/tools/cython/Cython/Compiler/FusedNode.py index f31b74e7a6..26d6ffd3d6 100644 --- a/contrib/tools/cython/Cython/Compiler/FusedNode.py +++ 
b/contrib/tools/cython/Cython/Compiler/FusedNode.py @@ -507,22 +507,22 @@ class FusedCFuncDefNode(StatListNode): ndarray = __Pyx_ImportNumPyArrayTypeIfAvailable() """) - seen_typedefs = set() + seen_typedefs = set() seen_int_dtypes = set() for buffer_type in all_buffer_types: dtype = buffer_type.dtype - dtype_name = self._dtype_name(dtype) + dtype_name = self._dtype_name(dtype) if dtype.is_typedef: - if dtype_name not in seen_typedefs: - seen_typedefs.add(dtype_name) - decl_code.putln( - 'ctypedef %s %s "%s"' % (dtype.resolve(), dtype_name, - dtype.empty_declaration_code())) + if dtype_name not in seen_typedefs: + seen_typedefs.add(dtype_name) + decl_code.putln( + 'ctypedef %s %s "%s"' % (dtype.resolve(), dtype_name, + dtype.empty_declaration_code())) if buffer_type.dtype.is_int: if str(dtype) not in seen_int_dtypes: seen_int_dtypes.add(str(dtype)) - pyx_code.context.update(dtype_name=dtype_name, + pyx_code.context.update(dtype_name=dtype_name, dtype_type=self._dtype_type(dtype)) pyx_code.local_variable_declarations.put_chunk( u""" @@ -878,23 +878,23 @@ class FusedCFuncDefNode(StatListNode): (self.resulting_fused_function.result(), self.__signatures__.result())) code.put_giveref(self.__signatures__.result()) - self.__signatures__.generate_post_assignment_code(code) - self.__signatures__.free_temps(code) + self.__signatures__.generate_post_assignment_code(code) + self.__signatures__.free_temps(code) self.fused_func_assignment.generate_execution_code(code) # Dispose of results self.resulting_fused_function.generate_disposal_code(code) - self.resulting_fused_function.free_temps(code) + self.resulting_fused_function.free_temps(code) self.defaults_tuple.generate_disposal_code(code) - self.defaults_tuple.free_temps(code) + self.defaults_tuple.free_temps(code) self.code_object.generate_disposal_code(code) - self.code_object.free_temps(code) + self.code_object.free_temps(code) for default in self.defaults: if default is not None: default.generate_disposal_code(code) - default.free_temps(code) + default.free_temps(code) def annotate(self, code): for stat in self.stats: diff --git a/contrib/tools/cython/Cython/Compiler/MemoryView.py b/contrib/tools/cython/Cython/Compiler/MemoryView.py index fc46861dc1..0406d6c716 100644 --- a/contrib/tools/cython/Cython/Compiler/MemoryView.py +++ b/contrib/tools/cython/Cython/Compiler/MemoryView.py @@ -307,7 +307,7 @@ class MemoryViewSliceBufferEntry(Buffer.BufferEntry): util_name = "SimpleSlice" else: util_name = "ToughSlice" - d['error_goto'] = code.error_goto(index.pos) + d['error_goto'] = code.error_goto(index.pos) new_ndim += 1 else: @@ -325,10 +325,10 @@ class MemoryViewSliceBufferEntry(Buffer.BufferEntry): d = dict( locals(), wraparound=int(directives['wraparound']), - boundscheck=int(directives['boundscheck']), + boundscheck=int(directives['boundscheck']), ) - if d['boundscheck']: - d['error_goto'] = code.error_goto(index.pos) + if d['boundscheck']: + d['error_goto'] = code.error_goto(index.pos) util_name = "SliceIndex" _, impl = TempitaUtilityCode.load_as_string(util_name, "MemoryView_C.c", context=d) diff --git a/contrib/tools/cython/Cython/Compiler/ModuleNode.py b/contrib/tools/cython/Cython/Compiler/ModuleNode.py index b30be60dfe..cd7166408e 100644 --- a/contrib/tools/cython/Cython/Compiler/ModuleNode.py +++ b/contrib/tools/cython/Cython/Compiler/ModuleNode.py @@ -176,7 +176,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): h_guard = Naming.h_guard_prefix + self.api_name(env) h_code.put_h_guard(h_guard) h_code.putln("") - h_code.putln('#include 
"Python.h"') + h_code.putln('#include "Python.h"') self.generate_type_header_code(h_types, h_code) if options.capi_reexport_cincludes: self.generate_includes(env, [], h_code) @@ -430,11 +430,11 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): except ImportError: import xml.etree.ElementTree as ET coverage_xml = ET.parse(coverage_xml_filename).getroot() - if hasattr(coverage_xml, 'iter'): - iterator = coverage_xml.iter() # Python 2.7 & 3.2+ - else: - iterator = coverage_xml.getiterator() - for el in iterator: + if hasattr(coverage_xml, 'iter'): + iterator = coverage_xml.iter() # Python 2.7 & 3.2+ + else: + iterator = coverage_xml.getiterator() + for el in iterator: el.tail = None # save some memory else: coverage_xml = None @@ -580,17 +580,17 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): for entry in vtabslot_list: self.generate_objstruct_predeclaration(entry.type, code) vtabslot_entries = set(vtabslot_list) - ctuple_names = set() + ctuple_names = set() for module in modules: definition = module is env - type_entries = [] - for entry in module.type_entries: - if entry.type.is_ctuple and entry.used: - if entry.name not in ctuple_names: - ctuple_names.add(entry.name) + type_entries = [] + for entry in module.type_entries: + if entry.type.is_ctuple and entry.used: + if entry.name not in ctuple_names: + ctuple_names.add(entry.name) type_entries.append(entry) - elif definition or entry.defined_in_pxd: - type_entries.append(entry) + elif definition or entry.defined_in_pxd: + type_entries.append(entry) type_entries = [t for t in type_entries if t not in vtabslot_entries] self.generate_type_header_code(type_entries, code) for entry in vtabslot_list: @@ -635,10 +635,10 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): code.putln(json.dumps(metadata, indent=4, sort_keys=True)) code.putln("END: Cython Metadata */") code.putln("") - - code.putln("#ifndef PY_SSIZE_T_CLEAN") + + code.putln("#ifndef PY_SSIZE_T_CLEAN") code.putln("#define PY_SSIZE_T_CLEAN") - code.putln("#endif /* PY_SSIZE_T_CLEAN */") + code.putln("#endif /* PY_SSIZE_T_CLEAN */") for inc in sorted(env.c_includes.values(), key=IncludeCode.sortkey): if inc.location == inc.INITIAL: @@ -666,19 +666,19 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): self._put_setup_code(code, "PythonCompatibility") self._put_setup_code(code, "MathInitCode") - # Using "(void)cname" to prevent "unused" warnings. + # Using "(void)cname" to prevent "unused" warnings. 
if options.c_line_in_traceback: - cinfo = "%s = %s; (void)%s; " % (Naming.clineno_cname, Naming.line_c_macro, Naming.clineno_cname) + cinfo = "%s = %s; (void)%s; " % (Naming.clineno_cname, Naming.line_c_macro, Naming.clineno_cname) else: cinfo = "" - code.putln("#define __PYX_MARK_ERR_POS(f_index, lineno) \\") - code.putln(" { %s = %s[f_index]; (void)%s; %s = lineno; (void)%s; %s}" % ( - Naming.filename_cname, Naming.filetable_cname, Naming.filename_cname, - Naming.lineno_cname, Naming.lineno_cname, - cinfo - )) - code.putln("#define __PYX_ERR(f_index, lineno, Ln_error) \\") - code.putln(" { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; }") + code.putln("#define __PYX_MARK_ERR_POS(f_index, lineno) \\") + code.putln(" { %s = %s[f_index]; (void)%s; %s = lineno; (void)%s; %s}" % ( + Naming.filename_cname, Naming.filetable_cname, Naming.filename_cname, + Naming.lineno_cname, Naming.lineno_cname, + cinfo + )) + code.putln("#define __PYX_ERR(f_index, lineno, Ln_error) \\") + code.putln(" { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; }") code.putln("") self.generate_extern_c_macro_definition(code) @@ -1236,10 +1236,10 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): type = entry.type scope = type.scope if scope: # could be None if there was an error - if not scope.directives['c_api_binop_methods']: - error(self.pos, - "The 'c_api_binop_methods' directive is only supported for forward compatibility" - " and must be True.") + if not scope.directives['c_api_binop_methods']: + error(self.pos, + "The 'c_api_binop_methods' directive is only supported for forward compatibility" + " and must be True.") self.generate_exttype_vtable(scope, code) self.generate_new_function(scope, code, entry) self.generate_dealloc_function(scope, code) @@ -1571,11 +1571,11 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): code.putln("{") code.putln("PyObject *etype, *eval, *etb;") code.putln("PyErr_Fetch(&etype, &eval, &etb);") - # increase the refcount while we are calling into user code - # to prevent recursive deallocation - code.putln("__Pyx_SET_REFCNT(o, Py_REFCNT(o) + 1);") + # increase the refcount while we are calling into user code + # to prevent recursive deallocation + code.putln("__Pyx_SET_REFCNT(o, Py_REFCNT(o) + 1);") code.putln("%s(o);" % entry.func_cname) - code.putln("__Pyx_SET_REFCNT(o, Py_REFCNT(o) - 1);") + code.putln("__Pyx_SET_REFCNT(o, Py_REFCNT(o) - 1);") code.putln("PyErr_Restore(etype, eval, etb);") code.putln("}") @@ -2315,7 +2315,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): code.exit_cfunc_scope() # done with labels def generate_module_init_func(self, imported_modules, env, options, code): - subfunction = self.mod_init_subfunction(self.pos, self.scope, code) + subfunction = self.mod_init_subfunction(self.pos, self.scope, code) code.enter_cfunc_scope(self.scope) code.putln("") @@ -2413,8 +2413,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): code.put_error_if_neg(self.pos, "_import_array()") code.putln("/*--- Threads initialization code ---*/") - code.putln("#if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 " - "&& defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS") + code.putln("#if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 " + "&& defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS") code.putln("PyEval_InitThreads();") code.putln("#endif") @@ -2441,10 +2441,10 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): if Options.cache_builtins: code.putln("/*--- Builtin init code ---*/") - code.put_error_if_neg(self.pos, 
"__Pyx_InitCachedBuiltins()") + code.put_error_if_neg(self.pos, "__Pyx_InitCachedBuiltins()") code.putln("/*--- Constants init code ---*/") - code.put_error_if_neg(self.pos, "__Pyx_InitCachedConstants()") + code.put_error_if_neg(self.pos, "__Pyx_InitCachedConstants()") code.putln("/*--- Global type/function init code ---*/") @@ -2535,7 +2535,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): code.exit_cfunc_scope() - def mod_init_subfunction(self, pos, scope, orig_code): + def mod_init_subfunction(self, pos, scope, orig_code): """ Return a context manager that allows deviating the module init code generation into a separate function and instead inserts a call to it. @@ -2591,8 +2591,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): code.putln("") if needs_error_handling: - self.call_code.putln( - self.call_code.error_goto_if_neg("%s()" % self.cfunc_name, pos)) + self.call_code.putln( + self.call_code.error_goto_if_neg("%s()" % self.cfunc_name, pos)) else: self.call_code.putln("(void)%s();" % self.cfunc_name) self.call_code = None @@ -2671,8 +2671,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): code.putln('static void %s(CYTHON_UNUSED PyObject *self) {' % Naming.cleanup_cname) - code.enter_cfunc_scope(env) - + code.enter_cfunc_scope(env) + if Options.generate_cleanup_code >= 2: code.putln("/*--- Global cleanup code ---*/") rev_entries = list(env.var_entries) @@ -2962,7 +2962,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): module.qualified_name, temp, code.error_goto(self.pos))) - code.put_gotref(temp) + code.put_gotref(temp) for entry in entries: if env is module: cname = entry.cname @@ -2973,8 +2973,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): 'if (__Pyx_ImportVoidPtr(%s, "%s", (void **)&%s, "%s") < 0) %s' % ( temp, entry.name, cname, signature, code.error_goto(self.pos))) - code.put_decref_clear(temp, py_object_type) - code.funcstate.release_temp(temp) + code.put_decref_clear(temp, py_object_type) + code.funcstate.release_temp(temp) def generate_c_function_import_code_for_module(self, module, env, code): # Generate import code for all exported C functions in a cimported module. 
@@ -2992,7 +2992,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): module.qualified_name, temp, code.error_goto(self.pos))) - code.put_gotref(temp) + code.put_gotref(temp) for entry in entries: code.putln( 'if (__Pyx_ImportFunction(%s, "%s", (void (**)(void))&%s, "%s") < 0) %s' % ( @@ -3001,8 +3001,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode): entry.cname, entry.type.signature_string(), code.error_goto(self.pos))) - code.put_decref_clear(temp, py_object_type) - code.funcstate.release_temp(temp) + code.put_decref_clear(temp, py_object_type) + code.funcstate.release_temp(temp) def generate_type_init_code(self, env, code): # Generate type import code for extern extension types diff --git a/contrib/tools/cython/Cython/Compiler/Nodes.py b/contrib/tools/cython/Cython/Compiler/Nodes.py index 8c7952e804..6436c5002d 100644 --- a/contrib/tools/cython/Cython/Compiler/Nodes.py +++ b/contrib/tools/cython/Cython/Compiler/Nodes.py @@ -1048,8 +1048,8 @@ class CSimpleBaseTypeNode(CBaseTypeNode): type = PyrexTypes.TemplatePlaceholderType(self.name) else: error(self.pos, "'%s' is not a type identifier" % self.name) - if type and type.is_fused and env.fused_to_specific: - type = type.specialize(env.fused_to_specific) + if type and type.is_fused and env.fused_to_specific: + type = type.specialize(env.fused_to_specific) if self.complex: if not type.is_numeric or type.is_complex: error(self.pos, "can only complexify c numeric types") @@ -1382,9 +1382,9 @@ class CVarDefNode(StatNode): self.entry.type.create_to_py_utility_code(env) self.entry.create_wrapper = True else: - if self.overridable: - warning(self.pos, "cpdef variables will not be supported in Cython 3; " - "currently they are no different from cdef variables", 2) + if self.overridable: + warning(self.pos, "cpdef variables will not be supported in Cython 3; " + "currently they are no different from cdef variables", 2) if self.directive_locals: error(self.pos, "Decorators can only be followed by functions") self.entry = dest_scope.declare_var( @@ -3235,14 +3235,14 @@ class DefNode(FuncDefNode): def put_into_closure(entry): if entry.in_closure: code.putln('%s = %s;' % (entry.cname, entry.original_cname)) - if entry.xdecref_cleanup: - # mostly applies to the starstar arg - this can sometimes be NULL - # so must be xincrefed instead - code.put_var_xincref(entry) - code.put_var_xgiveref(entry) - else: - code.put_var_incref(entry) - code.put_var_giveref(entry) + if entry.xdecref_cleanup: + # mostly applies to the starstar arg - this can sometimes be NULL + # so must be xincrefed instead + code.put_var_xincref(entry) + code.put_var_xgiveref(entry) + else: + code.put_var_incref(entry) + code.put_var_giveref(entry) for arg in self.args: put_into_closure(arg.entry) for arg in self.star_arg, self.starstar_arg: @@ -4156,10 +4156,10 @@ class GeneratorBodyDefNode(DefNode): cname=cname, visibility='private') entry.func_cname = cname entry.qualified_name = EncodedString(self.name) - # Work-around for https://github.com/cython/cython/issues/1699 - # We don't currently determine whether the generator entry is used or not, - # so mark it as used to avoid false warnings. - entry.used = True + # Work-around for https://github.com/cython/cython/issues/1699 + # We don't currently determine whether the generator entry is used or not, + # so mark it as used to avoid false warnings. 
+ entry.used = True self.entry = entry def analyse_declarations(self, env): @@ -4511,22 +4511,22 @@ class PyClassDefNode(ClassDefNode): pass # no base classes => no inherited metaclass else: self.metaclass = ExprNodes.PyClassMetaclassNode( - pos, class_def_node=self) + pos, class_def_node=self) needs_metaclass_calculation = False else: needs_metaclass_calculation = True self.dict = ExprNodes.PyClassNamespaceNode( - pos, name=name, doc=doc_node, class_def_node=self) + pos, name=name, doc=doc_node, class_def_node=self) self.classobj = ExprNodes.Py3ClassNode( - pos, name=name, class_def_node=self, doc=doc_node, + pos, name=name, class_def_node=self, doc=doc_node, calculate_metaclass=needs_metaclass_calculation, allow_py2_metaclass=allow_py2_metaclass) else: # no bases, no metaclass => old style class creation self.dict = ExprNodes.DictNode(pos, key_value_pairs=[]) self.classobj = ExprNodes.ClassNode( - pos, name=name, class_def_node=self, doc=doc_node) + pos, name=name, class_def_node=self, doc=doc_node) self.target = ExprNodes.NameNode(pos, name=name) self.class_cell = ExprNodes.ClassCellInjectorNode(self.pos) @@ -4544,7 +4544,7 @@ class PyClassDefNode(ClassDefNode): visibility='private', module_name=None, class_name=self.name, - bases=self.bases or ExprNodes.TupleNode(self.pos, args=[]), + bases=self.bases or ExprNodes.TupleNode(self.pos, args=[]), decorators=self.decorators, body=self.body, in_pxd=False, @@ -4568,10 +4568,10 @@ class PyClassDefNode(ClassDefNode): args=[class_result]) self.decorators = None self.class_result = class_result - if self.bases: - self.bases.analyse_declarations(env) - if self.mkw: - self.mkw.analyse_declarations(env) + if self.bases: + self.bases.analyse_declarations(env) + if self.mkw: + self.mkw.analyse_declarations(env) self.class_result.analyse_declarations(env) self.target.analyse_target_declaration(env) cenv = self.create_scope(env) @@ -4582,8 +4582,8 @@ class PyClassDefNode(ClassDefNode): def analyse_expressions(self, env): if self.bases: self.bases = self.bases.analyse_expressions(env) - if self.mkw: - self.mkw = self.mkw.analyse_expressions(env) + if self.mkw: + self.mkw = self.mkw.analyse_expressions(env) if self.metaclass: self.metaclass = self.metaclass.analyse_expressions(env) self.dict = self.dict.analyse_expressions(env) @@ -4610,22 +4610,22 @@ class PyClassDefNode(ClassDefNode): self.metaclass.generate_evaluation_code(code) self.dict.generate_evaluation_code(code) cenv.namespace_cname = cenv.class_obj_cname = self.dict.result() - - class_cell = self.class_cell - if class_cell is not None and not class_cell.is_active: - class_cell = None - - if class_cell is not None: - class_cell.generate_evaluation_code(code) + + class_cell = self.class_cell + if class_cell is not None and not class_cell.is_active: + class_cell = None + + if class_cell is not None: + class_cell.generate_evaluation_code(code) self.body.generate_execution_code(code) self.class_result.generate_evaluation_code(code) - if class_cell is not None: - class_cell.generate_injection_code( - code, self.class_result.result()) - if class_cell is not None: - class_cell.generate_disposal_code(code) - class_cell.free_temps(code) - + if class_cell is not None: + class_cell.generate_injection_code( + code, self.class_result.result()) + if class_cell is not None: + class_cell.generate_disposal_code(code) + class_cell.free_temps(code) + cenv.namespace_cname = cenv.class_obj_cname = self.classobj.result() self.target.generate_assignment_code(self.class_result, code) 
self.dict.generate_disposal_code(code) @@ -5876,7 +5876,7 @@ class DelStatNode(StatNode): arg.generate_evaluation_code(code) code.putln("delete %s;" % arg.result()) arg.generate_disposal_code(code) - arg.free_temps(code) + arg.free_temps(code) # else error reported earlier def annotate(self, code): @@ -6005,7 +6005,7 @@ class ReturnStatNode(StatNode): rhs=value, code=code, have_gil=self.in_nogil_context) - value.generate_post_assignment_code(code) + value.generate_post_assignment_code(code) elif self.in_generator: # return value == raise StopIteration(value), but uncatchable code.globalstate.use_utility_code( @@ -6019,7 +6019,7 @@ class ReturnStatNode(StatNode): code.putln("%s = %s;" % ( Naming.retval_cname, value.result_as(self.return_type))) - value.generate_post_assignment_code(code) + value.generate_post_assignment_code(code) value.free_temps(code) else: if self.return_type.is_pyobject: @@ -6421,8 +6421,8 @@ class SwitchStatNode(StatNode): # generate the switch statement, so shouldn't be bothered). code.putln("default: break;") code.putln("}") - self.test.generate_disposal_code(code) - self.test.free_temps(code) + self.test.generate_disposal_code(code) + self.test.free_temps(code) def generate_function_definitions(self, env, code): self.test.generate_function_definitions(env, code) @@ -7688,8 +7688,8 @@ class TryFinallyStatNode(StatNode): code.funcstate.release_temp(ret_temp) if self.in_generator: self.put_error_uncatcher(code, exc_vars) - for cname in exc_vars: - code.funcstate.release_temp(cname) + for cname in exc_vars: + code.funcstate.release_temp(cname) if not self.finally_clause.is_terminator: code.put_goto(old_label) @@ -8786,11 +8786,11 @@ class ParallelStatNode(StatNode, ParallelNode): self.begin_of_parallel_control_block_point = None self.begin_of_parallel_control_block_point_after_decls = None - if self.num_threads is not None: - # FIXME: is it the right place? should not normally produce code. - self.num_threads.generate_disposal_code(code) - self.num_threads.free_temps(code) - + if self.num_threads is not None: + # FIXME: is it the right place? should not normally produce code. + self.num_threads.generate_disposal_code(code) + self.num_threads.free_temps(code) + # Firstly, always prefer errors over returning, continue or break if self.error_label_used: c.putln("const char *%s = NULL; int %s = 0, %s = 0;" % self.parallel_pos_info) @@ -9132,7 +9132,7 @@ class ParallelRangeNode(ParallelStatNode): # TODO: check if the step is 0 and if so, raise an exception in a # 'with gil' block. 
For now, just abort - code.putln("if ((%(step)s == 0)) abort();" % fmt_dict) + code.putln("if ((%(step)s == 0)) abort();" % fmt_dict) self.setup_parallel_control_flow_block(code) # parallel control flow block @@ -9166,7 +9166,7 @@ class ParallelRangeNode(ParallelStatNode): # And finally, release our privates and write back any closure # variables - for temp in start_stop_step + (self.chunksize,): + for temp in start_stop_step + (self.chunksize,): if temp is not None: temp.generate_disposal_code(code) temp.free_temps(code) @@ -9253,15 +9253,15 @@ class ParallelRangeNode(ParallelStatNode): code.putln("%(target)s = (%(target_type)s)(%(start)s + %(step)s * %(i)s);" % fmt_dict) self.initialize_privates_to_nan(code, exclude=self.target.entry) - if self.is_parallel and not self.is_nested_prange: - # nested pranges are not omp'ified, temps go to outer loops + if self.is_parallel and not self.is_nested_prange: + # nested pranges are not omp'ified, temps go to outer loops code.funcstate.start_collecting_temps() self.body.generate_execution_code(code) self.trap_parallel_exit(code, should_flush=True) - if self.is_parallel and not self.is_nested_prange: - # nested pranges are not omp'ified, temps go to outer loops - self.privatize_temps(code) + if self.is_parallel and not self.is_nested_prange: + # nested pranges are not omp'ified, temps go to outer loops + self.privatize_temps(code) if self.breaking_label_used: # Put a guard around the loop body in case return, break or diff --git a/contrib/tools/cython/Cython/Compiler/Optimize.py b/contrib/tools/cython/Cython/Compiler/Optimize.py index 65924b4a51..3cb77efe2c 100644 --- a/contrib/tools/cython/Cython/Compiler/Optimize.py +++ b/contrib/tools/cython/Cython/Compiler/Optimize.py @@ -285,7 +285,7 @@ class IterationTransform(Visitor.EnvTransform): return self._transform_reversed_iteration(node, iterable) # range() iteration? - if Options.convert_range and 1 <= arg_count <= 3 and ( + if Options.convert_range and 1 <= arg_count <= 3 and ( iterable.self is None and function.is_name and function.name in ('range', 'xrange') and function.entry and function.entry.is_builtin): @@ -1347,10 +1347,10 @@ class FlattenInListTransform(Visitor.VisitorTransform, SkipDeclarations): # note: lhs may have side effects return node - if any([arg.is_starred for arg in args]): - # Starred arguments do not directly translate to comparisons or "in" tests. - return node - + if any([arg.is_starred for arg in args]): + # Starred arguments do not directly translate to comparisons or "in" tests. 
+ return node + lhs = UtilNodes.ResultRefNode(node.operand1) conds = [] @@ -4255,7 +4255,7 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations): string_node.unicode_value = encoded_string( string_node.unicode_value * multiplier, string_node.unicode_value.encoding) - build_string = encoded_string if string_node.value.is_unicode else bytes_literal + build_string = encoded_string if string_node.value.is_unicode else bytes_literal elif isinstance(string_node, ExprNodes.UnicodeNode): if string_node.bytes_value is not None: string_node.bytes_value = bytes_literal( @@ -4263,14 +4263,14 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations): string_node.bytes_value.encoding) else: assert False, "unknown string node type: %s" % type(string_node) - string_node.value = build_string( + string_node.value = build_string( string_node.value * multiplier, string_node.value.encoding) - # follow constant-folding and use unicode_value in preference - if isinstance(string_node, ExprNodes.StringNode) and string_node.unicode_value is not None: - string_node.constant_result = string_node.unicode_value - else: - string_node.constant_result = string_node.value + # follow constant-folding and use unicode_value in preference + if isinstance(string_node, ExprNodes.StringNode) and string_node.unicode_value is not None: + string_node.constant_result = string_node.unicode_value + else: + string_node.constant_result = string_node.value return string_node def _calculate_constant_seq(self, node, sequence_node, factor): @@ -4302,10 +4302,10 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations): return self.visit_BinopNode(node) _parse_string_format_regex = ( - u'(%(?:' # %... - u'(?:[-0-9]+|[ ])?' # width (optional) or space prefix fill character (optional) - u'(?:[.][0-9]+)?' # precision (optional) - u')?.)' # format type (or something different for unsupported formats) + u'(%(?:' # %... + u'(?:[-0-9]+|[ ])?' # width (optional) or space prefix fill character (optional) + u'(?:[.][0-9]+)?' # precision (optional) + u')?.)' # format type (or something different for unsupported formats) ) def _build_fstring(self, pos, ustring, format_args): @@ -4337,25 +4337,25 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations): break if format_type in u'asrfdoxX': format_spec = s[1:] - conversion_char = None + conversion_char = None if format_type in u'doxX' and u'.' in format_spec: # Precision is not allowed for integers in format(), but ok in %-formatting. can_be_optimised = False elif format_type in u'ars': format_spec = format_spec[:-1] - conversion_char = format_type - if format_spec.startswith('0'): - format_spec = '>' + format_spec[1:] # right-alignment '%05s' spells '{:>5}' - elif format_type == u'd': - # '%d' formatting supports float, but '{obj:d}' does not => convert to int first. - conversion_char = 'd' - - if format_spec.startswith('-'): - format_spec = '<' + format_spec[1:] # left-alignment '%-5s' spells '{:<5}' - + conversion_char = format_type + if format_spec.startswith('0'): + format_spec = '>' + format_spec[1:] # right-alignment '%05s' spells '{:>5}' + elif format_type == u'd': + # '%d' formatting supports float, but '{obj:d}' does not => convert to int first. 
+ conversion_char = 'd' + + if format_spec.startswith('-'): + format_spec = '<' + format_spec[1:] # left-alignment '%-5s' spells '{:<5}' + substrings.append(ExprNodes.FormattedValueNode( arg.pos, value=arg, - conversion_char=conversion_char, + conversion_char=conversion_char, format_spec=ExprNodes.UnicodeNode( pos, value=EncodedString(format_spec), constant_result=format_spec) if format_spec else None, diff --git a/contrib/tools/cython/Cython/Compiler/Options.py b/contrib/tools/cython/Cython/Compiler/Options.py index 4bd586ee41..b3ffbcd927 100644 --- a/contrib/tools/cython/Cython/Compiler/Options.py +++ b/contrib/tools/cython/Cython/Compiler/Options.py @@ -181,7 +181,7 @@ _directive_defaults = { 'auto_pickle': None, 'cdivision': False, # was True before 0.12 'cdivision_warnings': False, - 'c_api_binop_methods': True, + 'c_api_binop_methods': True, 'overflowcheck': False, 'overflowcheck.fold': True, 'always_allow_keywords': False, diff --git a/contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.py b/contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.py index 18dfb49aa1..0da3670cae 100644 --- a/contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.py +++ b/contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.py @@ -1161,7 +1161,7 @@ class ParallelRangeTransform(CythonTransform, SkipDeclarations): def visit_CallNode(self, node): self.visit(node.function) if not self.parallel_directive: - self.visitchildren(node, exclude=('function',)) + self.visitchildren(node, exclude=('function',)) return node # We are a parallel directive, replace this node with the @@ -1764,9 +1764,9 @@ if VALUE is not None: }, level='c_class', pipeline=[NormalizeTree(None)]).substitute({}) pickle_func.analyse_declarations(node.scope) - self.enter_scope(node, node.scope) # functions should be visited in the class scope + self.enter_scope(node, node.scope) # functions should be visited in the class scope self.visit(pickle_func) - self.exit_scope() + self.exit_scope() node.body.stats.append(pickle_func) def _handle_fused_def_decorators(self, old_decorators, env, node): @@ -2877,7 +2877,7 @@ class GilCheck(VisitorTransform): self.visitchildren(node, outer_attrs) self.nogil = gil_state - self.visitchildren(node, attrs=None, exclude=outer_attrs) + self.visitchildren(node, attrs=None, exclude=outer_attrs) self.nogil = was_nogil def visit_FuncDefNode(self, node): diff --git a/contrib/tools/cython/Cython/Compiler/Parsing.pxd b/contrib/tools/cython/Cython/Compiler/Parsing.pxd index ca9a3e85f4..25453b39ab 100644 --- a/contrib/tools/cython/Cython/Compiler/Parsing.pxd +++ b/contrib/tools/cython/Cython/Compiler/Parsing.pxd @@ -69,8 +69,8 @@ cdef bint check_for_non_ascii_characters(unicode string) @cython.locals(systr=unicode, is_python3_source=bint, is_raw=bint) cdef p_string_literal(PyrexScanner s, kind_override=*) cdef _append_escape_sequence(kind, builder, unicode escape_sequence, PyrexScanner s) -cdef tuple _f_string_error_pos(pos, string, Py_ssize_t i) -@cython.locals(i=Py_ssize_t, size=Py_ssize_t, c=Py_UCS4, next_start=Py_ssize_t) +cdef tuple _f_string_error_pos(pos, string, Py_ssize_t i) +@cython.locals(i=Py_ssize_t, size=Py_ssize_t, c=Py_UCS4, next_start=Py_ssize_t) cdef list p_f_string(PyrexScanner s, unicode_value, pos, bint is_raw) @cython.locals(i=Py_ssize_t, size=Py_ssize_t, c=Py_UCS4, quote_char=Py_UCS4, NO_CHAR=Py_UCS4) cdef tuple p_f_string_expr(PyrexScanner s, unicode_value, pos, Py_ssize_t starting_index, bint is_raw) diff --git a/contrib/tools/cython/Cython/Compiler/Parsing.py 
b/contrib/tools/cython/Cython/Compiler/Parsing.py index 40862bcee6..4d2f12a24a 100644 --- a/contrib/tools/cython/Cython/Compiler/Parsing.py +++ b/contrib/tools/cython/Cython/Compiler/Parsing.py @@ -882,7 +882,7 @@ def p_string_literal(s, kind_override=None): pos = s.position() is_python3_source = s.context.language_level >= 3 has_non_ascii_literal_characters = False - string_start_pos = (pos[0], pos[1], pos[2] + len(s.systring)) + string_start_pos = (pos[0], pos[1], pos[2] + len(s.systring)) kind_string = s.systring.rstrip('"\'').lower() if len(kind_string) > 1: if len(set(kind_string)) != len(kind_string): @@ -966,7 +966,7 @@ def p_string_literal(s, kind_override=None): s.error("bytes can only contain ASCII literal characters.", pos=pos) bytes_value = None if kind == 'f': - unicode_value = p_f_string(s, unicode_value, string_start_pos, is_raw='r' in kind_string) + unicode_value = p_f_string(s, unicode_value, string_start_pos, is_raw='r' in kind_string) s.next() return (kind, bytes_value, unicode_value) @@ -1038,10 +1038,10 @@ _parse_escape_sequences_raw, _parse_escape_sequences = [re.compile(( for is_raw in (True, False)] -def _f_string_error_pos(pos, string, i): - return (pos[0], pos[1], pos[2] + i + 1) # FIXME: handle newlines in string - - +def _f_string_error_pos(pos, string, i): + return (pos[0], pos[1], pos[2] + i + 1) # FIXME: handle newlines in string + + def p_f_string(s, unicode_value, pos, is_raw): # Parses a PEP 498 f-string literal into a list of nodes. Nodes are either UnicodeNodes # or FormattedValueNodes. @@ -1055,7 +1055,7 @@ def p_f_string(s, unicode_value, pos, is_raw): end = next_start match = _parse_seq(unicode_value, next_start) if match is None: - error(_f_string_error_pos(pos, unicode_value, next_start), "Invalid escape sequence") + error(_f_string_error_pos(pos, unicode_value, next_start), "Invalid escape sequence") next_start = match.end() part = match.group() @@ -1079,8 +1079,8 @@ def p_f_string(s, unicode_value, pos, is_raw): if part == '}}': builder.append('}') else: - error(_f_string_error_pos(pos, unicode_value, end), - "f-string: single '}' is not allowed") + error(_f_string_error_pos(pos, unicode_value, end), + "f-string: single '}' is not allowed") else: builder.append(part) @@ -1101,20 +1101,20 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw): nested_depth = 0 quote_char = NO_CHAR in_triple_quotes = False - backslash_reported = False + backslash_reported = False while True: if i >= size: - break # error will be reported below + break # error will be reported below c = unicode_value[i] if quote_char != NO_CHAR: if c == '\\': - # avoid redundant error reports along '\' sequences - if not backslash_reported: - error(_f_string_error_pos(pos, unicode_value, i), - "backslashes not allowed in f-strings") - backslash_reported = True + # avoid redundant error reports along '\' sequences + if not backslash_reported: + error(_f_string_error_pos(pos, unicode_value, i), + "backslashes not allowed in f-strings") + backslash_reported = True elif c == quote_char: if in_triple_quotes: if i + 2 < size and unicode_value[i + 1] == c and unicode_value[i + 2] == c: @@ -1133,8 +1133,8 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw): elif nested_depth != 0 and c in '}])': nested_depth -= 1 elif c == '#': - error(_f_string_error_pos(pos, unicode_value, i), - "format string cannot include #") + error(_f_string_error_pos(pos, unicode_value, i), + "format string cannot include #") elif nested_depth == 0 and c in '!:}': # allow != as a 
special case if c == '!' and i + 1 < size and unicode_value[i + 1] == '=': @@ -1150,13 +1150,13 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw): expr_pos = (pos[0], pos[1], pos[2] + starting_index + 2) # TODO: find exact code position (concat, multi-line, ...) if not expr_str.strip(): - error(_f_string_error_pos(pos, unicode_value, starting_index), - "empty expression not allowed in f-string") + error(_f_string_error_pos(pos, unicode_value, starting_index), + "empty expression not allowed in f-string") if terminal_char == '!': i += 1 if i + 2 > size: - pass # error will be reported below + pass # error will be reported below else: conversion_char = unicode_value[i] i += 1 @@ -1169,7 +1169,7 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw): start_format_spec = i + 1 while True: if i >= size: - break # error will be reported below + break # error will be reported below c = unicode_value[i] if not in_triple_quotes and not in_string: if c == '{': @@ -1191,9 +1191,9 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw): format_spec_str = unicode_value[start_format_spec:i] if terminal_char != '}': - error(_f_string_error_pos(pos, unicode_value, i), - "missing '}' in format string expression" + ( - ", found '%s'" % terminal_char if terminal_char else "")) + error(_f_string_error_pos(pos, unicode_value, i), + "missing '}' in format string expression" + ( + ", found '%s'" % terminal_char if terminal_char else "")) # parse the expression as if it was surrounded by parentheses buf = StringIO('(%s)' % expr_str) @@ -1202,7 +1202,7 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw): # validate the conversion char if conversion_char is not None and not ExprNodes.FormattedValueNode.find_conversion_func(conversion_char): - error(expr_pos, "invalid conversion character '%s'" % conversion_char) + error(expr_pos, "invalid conversion character '%s'" % conversion_char) # the format spec is itself treated like an f-string if format_spec_str: @@ -2254,7 +2254,7 @@ def p_statement(s, ctx, first_statement = 0): s.error('decorator not allowed here') s.level = ctx.level decorators = p_decorators(s) - if not ctx.allow_struct_enum_decorator and s.sy not in ('def', 'cdef', 'cpdef', 'class', 'async'): + if not ctx.allow_struct_enum_decorator and s.sy not in ('def', 'cdef', 'cpdef', 'class', 'async'): if s.sy == 'IDENT' and s.systring == 'async': pass # handled below else: @@ -2683,7 +2683,7 @@ def looking_at_expr(s): s.put_back(*saved) elif s.sy == '[': s.next() - is_type = s.sy == ']' or not looking_at_expr(s) # could be a nested template type + is_type = s.sy == ']' or not looking_at_expr(s) # could be a nested template type s.put_back(*saved) dotted_path.reverse() diff --git a/contrib/tools/cython/Cython/Compiler/PyrexTypes.py b/contrib/tools/cython/Cython/Compiler/PyrexTypes.py index eebd0306a6..3d4931cea6 100644 --- a/contrib/tools/cython/Cython/Compiler/PyrexTypes.py +++ b/contrib/tools/cython/Cython/Compiler/PyrexTypes.py @@ -5,7 +5,7 @@ from __future__ import absolute_import import copy -import hashlib +import hashlib import re try: @@ -4043,10 +4043,10 @@ class CTupleType(CType): env.use_utility_code(self._convert_from_py_code) return True - def cast_code(self, expr_code): - return expr_code + def cast_code(self, expr_code): + return expr_code + - def c_tuple_type(components): components = tuple(components) cname = Naming.ctuple_type_prefix + type_list_identifier(components) @@ -4740,5 +4740,5 @@ def type_identifier(type): def 
cap_length(s, max_prefix=63, max_len=1024): if len(s) <= max_prefix: return s - hash_prefix = hashlib.sha256(s.encode('ascii')).hexdigest()[:6] - return '%s__%s__etc' % (hash_prefix, s[:max_len-17]) + hash_prefix = hashlib.sha256(s.encode('ascii')).hexdigest()[:6] + return '%s__%s__etc' % (hash_prefix, s[:max_len-17]) diff --git a/contrib/tools/cython/Cython/Compiler/Pythran.py b/contrib/tools/cython/Cython/Compiler/Pythran.py index 7fa3e0cbab..c02704a918 100644 --- a/contrib/tools/cython/Cython/Compiler/Pythran.py +++ b/contrib/tools/cython/Cython/Compiler/Pythran.py @@ -9,18 +9,18 @@ import cython try: import pythran pythran_is_pre_0_9 = tuple(map(int, pythran.__version__.split('.')[0:2])) < (0, 9) - pythran_is_pre_0_9_6 = tuple(map(int, pythran.__version__.split('.')[0:3])) < (0, 9, 6) + pythran_is_pre_0_9_6 = tuple(map(int, pythran.__version__.split('.')[0:3])) < (0, 9, 6) except ImportError: pythran = None pythran_is_pre_0_9 = True - pythran_is_pre_0_9_6 = True + pythran_is_pre_0_9_6 = True + +if pythran_is_pre_0_9_6: + pythran_builtins = '__builtin__' +else: + pythran_builtins = 'builtins' -if pythran_is_pre_0_9_6: - pythran_builtins = '__builtin__' -else: - pythran_builtins = 'builtins' - # Pythran/Numpy specific operations def has_np_pythran(env): @@ -54,7 +54,7 @@ def pythran_type(Ty, ptype="ndarray"): if Ty.is_pythran_expr: return Ty.pythran_type #if Ty.is_none: - # return "decltype(pythonic::builtins::None)" + # return "decltype(pythonic::builtins::None)" if Ty.is_numeric: return Ty.sign_and_name() raise ValueError("unsupported pythran type %s (%s)" % (Ty, type(Ty))) @@ -89,9 +89,9 @@ def _index_type_code(index_with_type): idx, index_type = index_with_type if idx.is_slice: n = 2 + int(not idx.step.is_none) - return "pythonic::%s::functor::slice{}(%s)" % ( - pythran_builtins, - ",".join(["0"]*n)) + return "pythonic::%s::functor::slice{}(%s)" % ( + pythran_builtins, + ",".join(["0"]*n)) elif index_type.is_int: return "std::declval<%s>()" % index_type.sign_and_name() elif index_type.is_pythran_expr: @@ -163,7 +163,7 @@ def to_pythran(op, ptype=None): if is_type(op_type, ["is_pythran_expr", "is_numeric", "is_float", "is_complex"]): return op.result() if op.is_none: - return "pythonic::%s::None" % pythran_builtins + return "pythonic::%s::None" % pythran_builtins if ptype is None: ptype = pythran_type(op_type) @@ -216,7 +216,7 @@ def include_pythran_generic(env): env.add_include_file("pythonic/types/bool.hpp") env.add_include_file("pythonic/types/ndarray.hpp") env.add_include_file("pythonic/numpy/power.hpp") - env.add_include_file("pythonic/%s/slice.hpp" % pythran_builtins) + env.add_include_file("pythonic/%s/slice.hpp" % pythran_builtins) env.add_include_file("<new>") # for placement new for i in (8, 16, 32, 64): diff --git a/contrib/tools/cython/Cython/Compiler/Scanning.pxd b/contrib/tools/cython/Cython/Compiler/Scanning.pxd index 20cd54b52a..59593f88a2 100644 --- a/contrib/tools/cython/Cython/Compiler/Scanning.pxd +++ b/contrib/tools/cython/Cython/Compiler/Scanning.pxd @@ -38,7 +38,7 @@ cdef class PyrexScanner(Scanner): cdef public list indentation_stack cdef public indentation_char cdef public int bracket_nesting_level - cdef readonly bint async_enabled + cdef readonly bint async_enabled cdef public sy cdef public systring diff --git a/contrib/tools/cython/Cython/Compiler/Scanning.py b/contrib/tools/cython/Cython/Compiler/Scanning.py index ea33eee7a2..c721bba69b 100644 --- a/contrib/tools/cython/Cython/Compiler/Scanning.py +++ b/contrib/tools/cython/Cython/Compiler/Scanning.py @@ 
-41,8 +41,8 @@ py_reserved_words = [ "global", "nonlocal", "def", "class", "print", "del", "pass", "break", "continue", "return", "raise", "import", "exec", "try", "except", "finally", "while", "if", "elif", "else", "for", - "in", "assert", "and", "or", "not", "is", "lambda", - "from", "yield", "with", + "in", "assert", "and", "or", "not", "is", "lambda", + "from", "yield", "with", ] pyx_reserved_words = py_reserved_words + [ @@ -324,25 +324,25 @@ class PyrexScanner(Scanner): def __init__(self, file, filename, parent_scanner=None, scope=None, context=None, source_encoding=None, parse_comments=True, initial_pos=None): Scanner.__init__(self, get_lexicon(), file, filename, initial_pos) - - if filename.is_python_file(): - self.in_python_file = True - self.keywords = set(py_reserved_words) - else: - self.in_python_file = False - self.keywords = set(pyx_reserved_words) - - self.async_enabled = 0 - + + if filename.is_python_file(): + self.in_python_file = True + self.keywords = set(py_reserved_words) + else: + self.in_python_file = False + self.keywords = set(pyx_reserved_words) + + self.async_enabled = 0 + if parent_scanner: self.context = parent_scanner.context self.included_files = parent_scanner.included_files self.compile_time_env = parent_scanner.compile_time_env self.compile_time_eval = parent_scanner.compile_time_eval self.compile_time_expr = parent_scanner.compile_time_expr - - if parent_scanner.async_enabled: - self.enter_async() + + if parent_scanner.async_enabled: + self.enter_async() else: self.context = context self.included_files = scope.included_files @@ -357,7 +357,7 @@ class PyrexScanner(Scanner): self.indentation_stack = [0] self.indentation_char = None self.bracket_nesting_level = 0 - + self.begin('INDENT') self.sy = '' self.next() diff --git a/contrib/tools/cython/Cython/Compiler/StringEncoding.py b/contrib/tools/cython/Cython/Compiler/StringEncoding.py index 4bbcd8a3d6..c37e8aab79 100644 --- a/contrib/tools/cython/Cython/Compiler/StringEncoding.py +++ b/contrib/tools/cython/Cython/Compiler/StringEncoding.py @@ -154,34 +154,34 @@ def string_contains_surrogates(ustring): return False -def string_contains_lone_surrogates(ustring): - """ - Check if the unicode string contains lone surrogate code points - on a CPython platform with wide (UCS-4) or narrow (UTF-16) - Unicode, i.e. characters that would be spelled as two - separate code units on a narrow platform, but that do not form a pair. - """ - last_was_start = False - unicode_uses_surrogate_encoding = sys.maxunicode == 65535 - for c in map(ord, ustring): - # surrogates tend to be rare - if c < 0xD800 or c > 0xDFFF: - if last_was_start: - return True - elif not unicode_uses_surrogate_encoding: - # on 32bit Unicode platforms, there is never a pair - return True - elif c <= 0xDBFF: - if last_was_start: - return True # lone start - last_was_start = True - else: - if not last_was_start: - return True # lone end - last_was_start = False - return last_was_start - - +def string_contains_lone_surrogates(ustring): + """ + Check if the unicode string contains lone surrogate code points + on a CPython platform with wide (UCS-4) or narrow (UTF-16) + Unicode, i.e. characters that would be spelled as two + separate code units on a narrow platform, but that do not form a pair. 
+ """ + last_was_start = False + unicode_uses_surrogate_encoding = sys.maxunicode == 65535 + for c in map(ord, ustring): + # surrogates tend to be rare + if c < 0xD800 or c > 0xDFFF: + if last_was_start: + return True + elif not unicode_uses_surrogate_encoding: + # on 32bit Unicode platforms, there is never a pair + return True + elif c <= 0xDBFF: + if last_was_start: + return True # lone start + last_was_start = True + else: + if not last_was_start: + return True # lone end + last_was_start = False + return last_was_start + + class BytesLiteral(_bytes): # bytes subclass that is compatible with EncodedString encoding = None diff --git a/contrib/tools/cython/Cython/Compiler/Symtab.py b/contrib/tools/cython/Cython/Compiler/Symtab.py index bbedbd8c41..7361a55aea 100644 --- a/contrib/tools/cython/Cython/Compiler/Symtab.py +++ b/contrib/tools/cython/Cython/Compiler/Symtab.py @@ -822,7 +822,7 @@ class Scope(object): if overridable: # names of cpdef functions can be used as variables and can be assigned to var_entry = Entry(name, cname, py_object_type) # FIXME: cname? - var_entry.qualified_name = self.qualify_name(name) + var_entry.qualified_name = self.qualify_name(name) var_entry.is_variable = 1 var_entry.is_pyglobal = 1 var_entry.scope = entry.scope @@ -1035,7 +1035,7 @@ class BuiltinScope(Scope): else: python_equiv = EncodedString(python_equiv) var_entry = Entry(python_equiv, python_equiv, py_object_type) - var_entry.qualified_name = self.qualify_name(name) + var_entry.qualified_name = self.qualify_name(name) var_entry.is_variable = 1 var_entry.is_builtin = 1 var_entry.utility_code = utility_code @@ -1059,7 +1059,7 @@ class BuiltinScope(Scope): type = self.lookup('type').type, # make sure "type" is the first type declared... pos = entry.pos, cname = entry.type.typeptr_cname) - var_entry.qualified_name = self.qualify_name(name) + var_entry.qualified_name = self.qualify_name(name) var_entry.is_variable = 1 var_entry.is_cglobal = 1 var_entry.is_readonly = 1 @@ -1247,7 +1247,7 @@ class ModuleScope(Scope): else: entry.is_builtin = 1 entry.name = name - entry.qualified_name = self.builtin_scope().qualify_name(name) + entry.qualified_name = self.builtin_scope().qualify_name(name) return entry def find_module(self, module_name, pos, relative_level=-1): @@ -1711,7 +1711,7 @@ class ModuleScope(Scope): type = Builtin.type_type, pos = entry.pos, cname = entry.type.typeptr_cname) - var_entry.qualified_name = entry.qualified_name + var_entry.qualified_name = entry.qualified_name var_entry.is_variable = 1 var_entry.is_cglobal = 1 var_entry.is_readonly = 1 @@ -2295,7 +2295,7 @@ class CClassScope(ClassScope): entry = self.declare_cfunction(name, type, None, cname, visibility='extern', utility_code=utility_code) var_entry = Entry(name, name, py_object_type) - var_entry.qualified_name = name + var_entry.qualified_name = name var_entry.is_variable = 1 var_entry.is_builtin = 1 var_entry.utility_code = utility_code diff --git a/contrib/tools/cython/Cython/Compiler/Tests/TestBuffer.py b/contrib/tools/cython/Cython/Compiler/Tests/TestBuffer.py index 45f8c6b74f..1f69d96524 100644 --- a/contrib/tools/cython/Cython/Compiler/Tests/TestBuffer.py +++ b/contrib/tools/cython/Cython/Compiler/Tests/TestBuffer.py @@ -21,7 +21,7 @@ class TestBufferParsing(CythonTest): def test_basic(self): t = self.parse(u"cdef object[float, 4, ndim=2, foo=foo] x") bufnode = t.stats[0].base_type - self.assertTrue(isinstance(bufnode, TemplatedTypeNode)) + self.assertTrue(isinstance(bufnode, TemplatedTypeNode)) self.assertEqual(2, 
len(bufnode.positional_args)) # print bufnode.dump() # should put more here... @@ -46,7 +46,7 @@ class TestBufferOptions(CythonTest): def nonfatal_error(self, error): # We're passing self as context to transform to trap this self.error = error - self.assertTrue(self.expect_error) + self.assertTrue(self.expect_error) def parse_opts(self, opts, expect_error=False): assert opts != "" @@ -57,12 +57,12 @@ class TestBufferOptions(CythonTest): vardef = root.stats[0].body.stats[0] assert isinstance(vardef, CVarDefNode) # use normal assert as this is to validate the test code buftype = vardef.base_type - self.assertTrue(isinstance(buftype, TemplatedTypeNode)) - self.assertTrue(isinstance(buftype.base_type_node, CSimpleBaseTypeNode)) + self.assertTrue(isinstance(buftype, TemplatedTypeNode)) + self.assertTrue(isinstance(buftype.base_type_node, CSimpleBaseTypeNode)) self.assertEqual(u"object", buftype.base_type_node.name) return buftype else: - self.assertTrue(len(root.stats[0].body.stats) == 0) + self.assertTrue(len(root.stats[0].body.stats) == 0) def non_parse(self, expected_err, opts): self.parse_opts(opts, expect_error=True) @@ -71,14 +71,14 @@ class TestBufferOptions(CythonTest): def __test_basic(self): buf = self.parse_opts(u"unsigned short int, 3") - self.assertTrue(isinstance(buf.dtype_node, CSimpleBaseTypeNode)) - self.assertTrue(buf.dtype_node.signed == 0 and buf.dtype_node.longness == -1) + self.assertTrue(isinstance(buf.dtype_node, CSimpleBaseTypeNode)) + self.assertTrue(buf.dtype_node.signed == 0 and buf.dtype_node.longness == -1) self.assertEqual(3, buf.ndim) def __test_dict(self): buf = self.parse_opts(u"ndim=3, dtype=unsigned short int") - self.assertTrue(isinstance(buf.dtype_node, CSimpleBaseTypeNode)) - self.assertTrue(buf.dtype_node.signed == 0 and buf.dtype_node.longness == -1) + self.assertTrue(isinstance(buf.dtype_node, CSimpleBaseTypeNode)) + self.assertTrue(buf.dtype_node.signed == 0 and buf.dtype_node.longness == -1) self.assertEqual(3, buf.ndim) def __test_ndim(self): @@ -94,8 +94,8 @@ class TestBufferOptions(CythonTest): cdef object[ndim=ndim, dtype=int] y """, pipeline=[NormalizeTree(self), PostParse(self)]).root stats = t.stats[0].body.stats - self.assertTrue(stats[0].base_type.ndim == 3) - self.assertTrue(stats[1].base_type.ndim == 3) + self.assertTrue(stats[0].base_type.ndim == 3) + self.assertTrue(stats[1].base_type.ndim == 3) # add exotic and impossible combinations as they come along... diff --git a/contrib/tools/cython/Cython/Compiler/Tests/TestMemView.py b/contrib/tools/cython/Cython/Compiler/Tests/TestMemView.py index 237943d217..3792f26e99 100644 --- a/contrib/tools/cython/Cython/Compiler/Tests/TestMemView.py +++ b/contrib/tools/cython/Cython/Compiler/Tests/TestMemView.py @@ -48,7 +48,7 @@ class TestMemviewParsing(CythonTest): def test_basic(self): t = self.parse(u"cdef int[:] x") memv_node = t.stats[0].base_type - self.assertTrue(isinstance(memv_node, MemoryViewSliceTypeNode)) + self.assertTrue(isinstance(memv_node, MemoryViewSliceTypeNode)) # we also test other similar declarations (buffers, anonymous C arrays) # since the parsing has to distinguish between them. 
diff --git a/contrib/tools/cython/Cython/Compiler/Tests/TestParseTreeTransforms.py b/contrib/tools/cython/Cython/Compiler/Tests/TestParseTreeTransforms.py index 5917605ae1..234b45db5b 100644 --- a/contrib/tools/cython/Cython/Compiler/Tests/TestParseTreeTransforms.py +++ b/contrib/tools/cython/Cython/Compiler/Tests/TestParseTreeTransforms.py @@ -87,7 +87,7 @@ class TestNormalizeTree(TransformTest): def test_pass_eliminated(self): t = self.run_pipeline([NormalizeTree(None)], u"pass") - self.assertTrue(len(t.stats) == 0) + self.assertTrue(len(t.stats) == 0) class TestWithTransform(object): # (TransformTest): # Disabled! diff --git a/contrib/tools/cython/Cython/Compiler/Tests/TestStringEncoding.py b/contrib/tools/cython/Cython/Compiler/Tests/TestStringEncoding.py index de79469cfb..91d099333a 100644 --- a/contrib/tools/cython/Cython/Compiler/Tests/TestStringEncoding.py +++ b/contrib/tools/cython/Cython/Compiler/Tests/TestStringEncoding.py @@ -1,44 +1,44 @@ -# -*- coding: utf-8 -*- - -import sys -import unittest - -import Cython.Compiler.StringEncoding as StringEncoding - - -class StringEncodingTest(unittest.TestCase): - """ - Test the StringEncoding module. - """ - def test_string_contains_lone_surrogates(self): - self.assertFalse(StringEncoding.string_contains_lone_surrogates(u"abc")) - self.assertFalse(StringEncoding.string_contains_lone_surrogates(u"\uABCD")) - self.assertFalse(StringEncoding.string_contains_lone_surrogates(u"\N{SNOWMAN}")) - - # This behaves differently in Py2 when freshly parsed and read from a .pyc file, - # but it seems to be a marshalling bug in Py2, which doesn't hurt us in Cython. - if sys.version_info[0] != 2: - self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uD800\uDFFF")) - - # In Py2 with 16bit Unicode, the following is indistinguishable from the 32bit character. - obfuscated_surrogate_pair = (u"\uDFFF" + "\uD800")[::-1] - if sys.version_info[0] == 2 and sys.maxunicode == 65565: - self.assertFalse(StringEncoding.string_contains_lone_surrogates(obfuscated_surrogate_pair)) - else: - self.assertTrue(StringEncoding.string_contains_lone_surrogates(obfuscated_surrogate_pair)) - - self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uD800")) - self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uDFFF")) - self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uDFFF\uD800")) - self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uD800x\uDFFF")) - - def test_string_contains_surrogates(self): - self.assertFalse(StringEncoding.string_contains_surrogates(u"abc")) - self.assertFalse(StringEncoding.string_contains_surrogates(u"\uABCD")) - self.assertFalse(StringEncoding.string_contains_surrogates(u"\N{SNOWMAN}")) - - self.assertTrue(StringEncoding.string_contains_surrogates(u"\uD800")) - self.assertTrue(StringEncoding.string_contains_surrogates(u"\uDFFF")) - self.assertTrue(StringEncoding.string_contains_surrogates(u"\uD800\uDFFF")) - self.assertTrue(StringEncoding.string_contains_surrogates(u"\uDFFF\uD800")) - self.assertTrue(StringEncoding.string_contains_surrogates(u"\uD800x\uDFFF")) +# -*- coding: utf-8 -*- + +import sys +import unittest + +import Cython.Compiler.StringEncoding as StringEncoding + + +class StringEncodingTest(unittest.TestCase): + """ + Test the StringEncoding module. 
+ """ + def test_string_contains_lone_surrogates(self): + self.assertFalse(StringEncoding.string_contains_lone_surrogates(u"abc")) + self.assertFalse(StringEncoding.string_contains_lone_surrogates(u"\uABCD")) + self.assertFalse(StringEncoding.string_contains_lone_surrogates(u"\N{SNOWMAN}")) + + # This behaves differently in Py2 when freshly parsed and read from a .pyc file, + # but it seems to be a marshalling bug in Py2, which doesn't hurt us in Cython. + if sys.version_info[0] != 2: + self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uD800\uDFFF")) + + # In Py2 with 16bit Unicode, the following is indistinguishable from the 32bit character. + obfuscated_surrogate_pair = (u"\uDFFF" + "\uD800")[::-1] + if sys.version_info[0] == 2 and sys.maxunicode == 65565: + self.assertFalse(StringEncoding.string_contains_lone_surrogates(obfuscated_surrogate_pair)) + else: + self.assertTrue(StringEncoding.string_contains_lone_surrogates(obfuscated_surrogate_pair)) + + self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uD800")) + self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uDFFF")) + self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uDFFF\uD800")) + self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uD800x\uDFFF")) + + def test_string_contains_surrogates(self): + self.assertFalse(StringEncoding.string_contains_surrogates(u"abc")) + self.assertFalse(StringEncoding.string_contains_surrogates(u"\uABCD")) + self.assertFalse(StringEncoding.string_contains_surrogates(u"\N{SNOWMAN}")) + + self.assertTrue(StringEncoding.string_contains_surrogates(u"\uD800")) + self.assertTrue(StringEncoding.string_contains_surrogates(u"\uDFFF")) + self.assertTrue(StringEncoding.string_contains_surrogates(u"\uD800\uDFFF")) + self.assertTrue(StringEncoding.string_contains_surrogates(u"\uDFFF\uD800")) + self.assertTrue(StringEncoding.string_contains_surrogates(u"\uD800x\uDFFF")) diff --git a/contrib/tools/cython/Cython/Compiler/Tests/TestTreeFragment.py b/contrib/tools/cython/Cython/Compiler/Tests/TestTreeFragment.py index 7f5a91bccf..9ee8da5478 100644 --- a/contrib/tools/cython/Cython/Compiler/Tests/TestTreeFragment.py +++ b/contrib/tools/cython/Cython/Compiler/Tests/TestTreeFragment.py @@ -23,7 +23,7 @@ class TestTreeFragments(CythonTest): T = self.fragment(u"y + y").substitute({"y": NameNode(pos=None, name="x")}) self.assertEqual("x", T.stats[0].expr.operand1.name) self.assertEqual("x", T.stats[0].expr.operand2.name) - self.assertTrue(T.stats[0].expr.operand1 is not T.stats[0].expr.operand2) + self.assertTrue(T.stats[0].expr.operand1 is not T.stats[0].expr.operand2) def test_substitution(self): F = self.fragment(u"x = 4") @@ -35,7 +35,7 @@ class TestTreeFragments(CythonTest): F = self.fragment(u"PASS") pass_stat = PassStatNode(pos=None) T = F.substitute({"PASS" : pass_stat}) - self.assertTrue(isinstance(T.stats[0], PassStatNode), T) + self.assertTrue(isinstance(T.stats[0], PassStatNode), T) def test_pos_is_transferred(self): F = self.fragment(u""" @@ -55,9 +55,9 @@ class TestTreeFragments(CythonTest): """) T = F.substitute(temps=[u"TMP"]) s = T.body.stats - self.assertTrue(isinstance(s[0].expr, TempRefNode)) - self.assertTrue(isinstance(s[1].rhs, TempRefNode)) - self.assertTrue(s[0].expr.handle is s[1].rhs.handle) + self.assertTrue(isinstance(s[0].expr, TempRefNode)) + self.assertTrue(isinstance(s[1].rhs, TempRefNode)) + self.assertTrue(s[0].expr.handle is s[1].rhs.handle) if __name__ == "__main__": import unittest diff --git 
a/contrib/tools/cython/Cython/Compiler/TreePath.py b/contrib/tools/cython/Cython/Compiler/TreePath.py index 978f2f6c5c..8585905557 100644 --- a/contrib/tools/cython/Cython/Compiler/TreePath.py +++ b/contrib/tools/cython/Cython/Compiler/TreePath.py @@ -10,13 +10,13 @@ from __future__ import absolute_import import re import operator -import sys +import sys + +if sys.version_info[0] >= 3: + _unicode = str +else: + _unicode = unicode -if sys.version_info[0] >= 3: - _unicode = str -else: - _unicode = unicode - path_tokenizer = re.compile( r"(" r"'[^']*'|\"[^\"]*\"|" @@ -173,11 +173,11 @@ def handle_attribute(next, token): continue if attr_value == value: yield attr_value - elif (isinstance(attr_value, bytes) and isinstance(value, _unicode) and - attr_value == value.encode()): - # allow a bytes-to-string comparison too - yield attr_value - + elif (isinstance(attr_value, bytes) and isinstance(value, _unicode) and + attr_value == value.encode()): + # allow a bytes-to-string comparison too + yield attr_value + return select diff --git a/contrib/tools/cython/Cython/Compiler/TypeSlots.py b/contrib/tools/cython/Cython/Compiler/TypeSlots.py index 2a0bccbe33..0b4ff67042 100644 --- a/contrib/tools/cython/Cython/Compiler/TypeSlots.py +++ b/contrib/tools/cython/Cython/Compiler/TypeSlots.py @@ -813,7 +813,7 @@ PyAsyncMethods = ( MethodSlot(unaryfunc, "am_await", "__await__"), MethodSlot(unaryfunc, "am_aiter", "__aiter__"), MethodSlot(unaryfunc, "am_anext", "__anext__"), - EmptySlot("am_send", ifdef="PY_VERSION_HEX >= 0x030A00A3"), + EmptySlot("am_send", ifdef="PY_VERSION_HEX >= 0x030A00A3"), ) #------------------------------------------------------------------------------------------ @@ -889,10 +889,10 @@ slot_table = ( EmptySlot("tp_del"), EmptySlot("tp_version_tag"), EmptySlot("tp_finalize", ifdef="PY_VERSION_HEX >= 0x030400a1"), - EmptySlot("tp_vectorcall", ifdef="PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)"), - EmptySlot("tp_print", ifdef="PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000"), - # PyPy specific extension - only here to avoid C compiler warnings. - EmptySlot("tp_pypy_flags", ifdef="CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000"), + EmptySlot("tp_vectorcall", ifdef="PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)"), + EmptySlot("tp_print", ifdef="PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000"), + # PyPy specific extension - only here to avoid C compiler warnings. + EmptySlot("tp_pypy_flags", ifdef="CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000"), ) #------------------------------------------------------------------------------------------ diff --git a/contrib/tools/cython/Cython/Compiler/Visitor.py b/contrib/tools/cython/Cython/Compiler/Visitor.py index 4cfa368b02..a35d13e1d0 100644 --- a/contrib/tools/cython/Cython/Compiler/Visitor.py +++ b/contrib/tools/cython/Cython/Compiler/Visitor.py @@ -198,7 +198,7 @@ class TreeVisitor(object): return self._visitchildren(parent, attrs) @cython.final - @cython.locals(idx=cython.Py_ssize_t) + @cython.locals(idx=cython.Py_ssize_t) def _visitchildren(self, parent, attrs): """ Visits the children of the given parent. 
If parent is None, returns diff --git a/contrib/tools/cython/Cython/Coverage.py b/contrib/tools/cython/Cython/Coverage.py index e699cfe4f4..5aa9df2ce0 100644 --- a/contrib/tools/cython/Cython/Coverage.py +++ b/contrib/tools/cython/Cython/Coverage.py @@ -33,14 +33,14 @@ def _find_c_source(base_path): return None -def _find_dep_file_path(main_file, file_path, relative_path_search=False): +def _find_dep_file_path(main_file, file_path, relative_path_search=False): abs_path = os.path.abspath(file_path) - if not os.path.exists(abs_path) and (file_path.endswith('.pxi') or - relative_path_search): - # files are looked up relative to the main source file - rel_file_path = os.path.join(os.path.dirname(main_file), file_path) - if os.path.exists(rel_file_path): - abs_path = os.path.abspath(rel_file_path) + if not os.path.exists(abs_path) and (file_path.endswith('.pxi') or + relative_path_search): + # files are looked up relative to the main source file + rel_file_path = os.path.join(os.path.dirname(main_file), file_path) + if os.path.exists(rel_file_path): + abs_path = os.path.abspath(rel_file_path) # search sys.path for external locations if a valid file hasn't been found if not os.path.exists(abs_path): for sys_path in sys.path: @@ -221,8 +221,8 @@ class Plugin(CoveragePlugin): self._c_files_map = {} for filename, code in code_lines.items(): - abs_path = _find_dep_file_path(c_file, filename, - relative_path_search=True) + abs_path = _find_dep_file_path(c_file, filename, + relative_path_search=True) self._c_files_map[abs_path] = (c_file, filename, code) if sourcefile not in self._c_files_map: diff --git a/contrib/tools/cython/Cython/Distutils/old_build_ext.py b/contrib/tools/cython/Cython/Distutils/old_build_ext.py index 2560ef2449..aa2a1cf229 100644 --- a/contrib/tools/cython/Cython/Distutils/old_build_ext.py +++ b/contrib/tools/cython/Cython/Distutils/old_build_ext.py @@ -84,9 +84,9 @@ class old_build_ext(_build_ext.build_ext): description = "build C/C++ and Cython extensions (compile/link to build directory)" sep_by = _build_ext.build_ext.sep_by - user_options = _build_ext.build_ext.user_options[:] - boolean_options = _build_ext.build_ext.boolean_options[:] - help_options = _build_ext.build_ext.help_options[:] + user_options = _build_ext.build_ext.user_options[:] + boolean_options = _build_ext.build_ext.boolean_options[:] + help_options = _build_ext.build_ext.help_options[:] # Add the pyrex specific data. user_options.extend([ @@ -191,8 +191,8 @@ class old_build_ext(_build_ext.build_ext): for ext in self.extensions: ext.sources = self.cython_sources(ext.sources, ext) - # Call original build_extensions - _build_ext.build_ext.build_extensions(self) + # Call original build_extensions + _build_ext.build_ext.build_extensions(self) def cython_sources(self, sources, extension): """ diff --git a/contrib/tools/cython/Cython/Includes/cpython/array.pxd b/contrib/tools/cython/Cython/Includes/cpython/array.pxd index f19264624a..19230a0a82 100644 --- a/contrib/tools/cython/Cython/Includes/cpython/array.pxd +++ b/contrib/tools/cython/Cython/Includes/cpython/array.pxd @@ -131,14 +131,14 @@ cdef inline array clone(array template, Py_ssize_t length, bint zero): """ fast creation of a new array, given a template array. type will be same as template. 
if zero is true, new array will be initialized with zeroes.""" - cdef array op = newarrayobject(Py_TYPE(template), length, template.ob_descr) + cdef array op = newarrayobject(Py_TYPE(template), length, template.ob_descr) if zero and op is not None: memset(op.data.as_chars, 0, length * op.ob_descr.itemsize) return op cdef inline array copy(array self): """ make a copy of an array. """ - cdef array op = newarrayobject(Py_TYPE(self), Py_SIZE(self), self.ob_descr) + cdef array op = newarrayobject(Py_TYPE(self), Py_SIZE(self), self.ob_descr) memcpy(op.data.as_chars, self.data.as_chars, Py_SIZE(op) * op.ob_descr.itemsize) return op diff --git a/contrib/tools/cython/Cython/Includes/cpython/buffer.pxd b/contrib/tools/cython/Cython/Includes/cpython/buffer.pxd index e9b4cefc45..3f1ada774a 100644 --- a/contrib/tools/cython/Cython/Includes/cpython/buffer.pxd +++ b/contrib/tools/cython/Cython/Includes/cpython/buffer.pxd @@ -3,9 +3,9 @@ cdef extern from "Python.h": cdef enum: - PyBUF_MAX_NDIM - - cdef enum: + PyBUF_MAX_NDIM + + cdef enum: PyBUF_SIMPLE, PyBUF_WRITABLE, PyBUF_WRITEABLE, # backwards compatibility diff --git a/contrib/tools/cython/Cython/Includes/cpython/cellobject.pxd b/contrib/tools/cython/Cython/Includes/cpython/cellobject.pxd index fc3605ca4d..5e3dd3d63c 100644 --- a/contrib/tools/cython/Cython/Includes/cpython/cellobject.pxd +++ b/contrib/tools/cython/Cython/Includes/cpython/cellobject.pxd @@ -1,35 +1,35 @@ -from .object cimport PyObject - -cdef extern from "Python.h": - - ############################################################################ - # Cell Objects - ############################################################################ - - bint PyCell_Check(object ob) - # Return true if ob is a cell object; ob must not be NULL. - - object PyCell_New(PyObject* ob) - # Return value: New reference. - # Create and return a new cell object containing the value ob. The - # parameter may be NULL. - - object PyCell_Get(object cell) - # Return value: New reference. - # Return the contents of the cell object cell. - - object PyCell_GET(object cell) - # Return value: Borrowed reference. - # Return the contents of the cell object cell, but without checking that - # cell is non-NULL and is a cell object. - - int PyCell_Set(object cell, PyObject* value) except? -1 - # Set the contents of the cell object cell to value. This releases the - # reference to any current content of the cell. value may be NULL. cell - # must be non-NULL; if it is not a cell object, -1 will be returned. On - # success, 0 will be returned. - - void PyCell_SET(object cell, PyObject* value) - # Sets the value of the cell object cell to value. No reference counts are - # adjusted, and no checks are made for safety; cell must be non-NULL and - # must be a cell object. +from .object cimport PyObject + +cdef extern from "Python.h": + + ############################################################################ + # Cell Objects + ############################################################################ + + bint PyCell_Check(object ob) + # Return true if ob is a cell object; ob must not be NULL. + + object PyCell_New(PyObject* ob) + # Return value: New reference. + # Create and return a new cell object containing the value ob. The + # parameter may be NULL. + + object PyCell_Get(object cell) + # Return value: New reference. + # Return the contents of the cell object cell. + + object PyCell_GET(object cell) + # Return value: Borrowed reference. 
+ # Return the contents of the cell object cell, but without checking that + # cell is non-NULL and is a cell object. + + int PyCell_Set(object cell, PyObject* value) except? -1 + # Set the contents of the cell object cell to value. This releases the + # reference to any current content of the cell. value may be NULL. cell + # must be non-NULL; if it is not a cell object, -1 will be returned. On + # success, 0 will be returned. + + void PyCell_SET(object cell, PyObject* value) + # Sets the value of the cell object cell to value. No reference counts are + # adjusted, and no checks are made for safety; cell must be non-NULL and + # must be a cell object. diff --git a/contrib/tools/cython/Cython/Includes/cpython/codecs.pxd b/contrib/tools/cython/Cython/Includes/cpython/codecs.pxd index dd57020bc1..f2ca7d2444 100644 --- a/contrib/tools/cython/Cython/Includes/cpython/codecs.pxd +++ b/contrib/tools/cython/Cython/Includes/cpython/codecs.pxd @@ -1,121 +1,121 @@ -cdef extern from "Python.h": - - ########################################################################### - # Codec registry and support functions - ########################################################################### - - int PyCodec_Register(object search_function) - # Register a new codec search function. - - # As side effect, this tries to load the encodings package, if not yet - # done, to make sure that it is always first in the list of search - # functions. - - int PyCodec_KnownEncoding(const char *encoding) - # Return 1 or 0 depending on whether there is a registered codec for the - # given encoding. This function always succeeds. - - object PyCodec_Encode(object o, const char *encoding, const char *errors) - # Return value: New reference. - # Generic codec based encoding API. - - # o is passed through the encoder function found for the given encoding - # using the error handling method defined by errors. errors may be NULL - # to use the default method defined for the codec. Raises a LookupError - # if no encoder can be found. - - object PyCodec_Decode(object o, const char *encoding, const char *errors) - # Return value: New reference. - # Generic codec based decoding API. - - # o is passed through the decoder function found for the given encoding - # using the error handling method defined by errors. errors may be NULL - # to use the default method defined for the codec. Raises a LookupError - # if no encoder can be found. - - - # Codec lookup API - - # In the following functions, the encoding string is looked up converted - # to all lower-case characters, which makes encodings looked up through - # this mechanism effectively case-insensitive. If no codec is found, a - # KeyError is set and NULL returned. - - object PyCodec_Encoder(const char *encoding) - # Return value: New reference. - # Get an encoder function for the given encoding. - - object PyCodec_Decoder(const char *encoding) - # Return value: New reference. - # Get a decoder function for the given encoding. - - object PyCodec_IncrementalEncoder(const char *encoding, const char *errors) - # Return value: New reference. - # Get an IncrementalEncoder object for the given encoding. - - object PyCodec_IncrementalDecoder(const char *encoding, const char *errors) - # Return value: New reference. - # Get an IncrementalDecoder object for the given encoding. - - object PyCodec_StreamReader(const char *encoding, object stream, const char *errors) - # Return value: New reference. - # Get a StreamReader factory function for the given encoding. 
- - object PyCodec_StreamWriter(const char *encoding, object stream, const char *errors) - # Return value: New reference. - # Get a StreamWriter factory function for the given encoding. - - - # Registry API for Unicode encoding error handlers - - int PyCodec_RegisterError(const char *name, object error) except? -1 - # Register the error handling callback function error under the given - # name. This callback function will be called by a codec when it - # encounters unencodable characters/undecodable bytes and name is - # specified as the error parameter in the call to the encode/decode - # function. - - # The callback gets a single argument, an instance of - # UnicodeEncodeError, UnicodeDecodeError or UnicodeTranslateError that - # holds information about the problematic sequence of characters or bytes - # and their offset in the original string (see Unicode Exception Objects - # for functions to extract this information). The callback must either - # raise the given exception, or return a two-item tuple containing the - # replacement for the problematic sequence, and an integer giving the - # offset in the original string at which encoding/decoding should be - # resumed. - - # Return 0 on success, -1 on error. - - object PyCodec_LookupError(const char *name) - # Return value: New reference. - # Lookup the error handling callback function registered under name. As a - # special case NULL can be passed, in which case the error handling - # callback for "strict" will be returned. - - object PyCodec_StrictErrors(object exc) - # Return value: Always NULL. - # Raise exc as an exception. - - object PyCodec_IgnoreErrors(object exc) - # Return value: New reference. - # Ignore the unicode error, skipping the faulty input. - - object PyCodec_ReplaceErrors(object exc) - # Return value: New reference. - # Replace the unicode encode error with "?" or "U+FFFD". - - object PyCodec_XMLCharRefReplaceErrors(object exc) - # Return value: New reference. - # Replace the unicode encode error with XML character references. - - object PyCodec_BackslashReplaceErrors(object exc) - # Return value: New reference. - # Replace the unicode encode error with backslash escapes ("\x", "\u" - # and "\U"). - - object PyCodec_NameReplaceErrors(object exc) - # Return value: New reference. - # Replace the unicode encode error with "\N{...}" escapes. - - # New in version 3.5. +cdef extern from "Python.h": + + ########################################################################### + # Codec registry and support functions + ########################################################################### + + int PyCodec_Register(object search_function) + # Register a new codec search function. + + # As side effect, this tries to load the encodings package, if not yet + # done, to make sure that it is always first in the list of search + # functions. + + int PyCodec_KnownEncoding(const char *encoding) + # Return 1 or 0 depending on whether there is a registered codec for the + # given encoding. This function always succeeds. + + object PyCodec_Encode(object o, const char *encoding, const char *errors) + # Return value: New reference. + # Generic codec based encoding API. + + # o is passed through the encoder function found for the given encoding + # using the error handling method defined by errors. errors may be NULL + # to use the default method defined for the codec. Raises a LookupError + # if no encoder can be found. + + object PyCodec_Decode(object o, const char *encoding, const char *errors) + # Return value: New reference. 
+ # Generic codec based decoding API. + + # o is passed through the decoder function found for the given encoding + # using the error handling method defined by errors. errors may be NULL + # to use the default method defined for the codec. Raises a LookupError + # if no encoder can be found. + + + # Codec lookup API + + # In the following functions, the encoding string is looked up converted + # to all lower-case characters, which makes encodings looked up through + # this mechanism effectively case-insensitive. If no codec is found, a + # KeyError is set and NULL returned. + + object PyCodec_Encoder(const char *encoding) + # Return value: New reference. + # Get an encoder function for the given encoding. + + object PyCodec_Decoder(const char *encoding) + # Return value: New reference. + # Get a decoder function for the given encoding. + + object PyCodec_IncrementalEncoder(const char *encoding, const char *errors) + # Return value: New reference. + # Get an IncrementalEncoder object for the given encoding. + + object PyCodec_IncrementalDecoder(const char *encoding, const char *errors) + # Return value: New reference. + # Get an IncrementalDecoder object for the given encoding. + + object PyCodec_StreamReader(const char *encoding, object stream, const char *errors) + # Return value: New reference. + # Get a StreamReader factory function for the given encoding. + + object PyCodec_StreamWriter(const char *encoding, object stream, const char *errors) + # Return value: New reference. + # Get a StreamWriter factory function for the given encoding. + + + # Registry API for Unicode encoding error handlers + + int PyCodec_RegisterError(const char *name, object error) except? -1 + # Register the error handling callback function error under the given + # name. This callback function will be called by a codec when it + # encounters unencodable characters/undecodable bytes and name is + # specified as the error parameter in the call to the encode/decode + # function. + + # The callback gets a single argument, an instance of + # UnicodeEncodeError, UnicodeDecodeError or UnicodeTranslateError that + # holds information about the problematic sequence of characters or bytes + # and their offset in the original string (see Unicode Exception Objects + # for functions to extract this information). The callback must either + # raise the given exception, or return a two-item tuple containing the + # replacement for the problematic sequence, and an integer giving the + # offset in the original string at which encoding/decoding should be + # resumed. + + # Return 0 on success, -1 on error. + + object PyCodec_LookupError(const char *name) + # Return value: New reference. + # Lookup the error handling callback function registered under name. As a + # special case NULL can be passed, in which case the error handling + # callback for "strict" will be returned. + + object PyCodec_StrictErrors(object exc) + # Return value: Always NULL. + # Raise exc as an exception. + + object PyCodec_IgnoreErrors(object exc) + # Return value: New reference. + # Ignore the unicode error, skipping the faulty input. + + object PyCodec_ReplaceErrors(object exc) + # Return value: New reference. + # Replace the unicode encode error with "?" or "U+FFFD". + + object PyCodec_XMLCharRefReplaceErrors(object exc) + # Return value: New reference. + # Replace the unicode encode error with XML character references. + + object PyCodec_BackslashReplaceErrors(object exc) + # Return value: New reference. 
+ # Replace the unicode encode error with backslash escapes ("\x", "\u" + # and "\U"). + + object PyCodec_NameReplaceErrors(object exc) + # Return value: New reference. + # Replace the unicode encode error with "\N{...}" escapes. + + # New in version 3.5. diff --git a/contrib/tools/cython/Cython/Includes/cpython/conversion.pxd b/contrib/tools/cython/Cython/Includes/cpython/conversion.pxd index f779f52673..18e2c3d1a6 100644 --- a/contrib/tools/cython/Cython/Includes/cpython/conversion.pxd +++ b/contrib/tools/cython/Cython/Includes/cpython/conversion.pxd @@ -1,36 +1,36 @@ -# From https://docs.python.org/3/c-api/conversion.html - -from .object cimport PyObject - -cdef extern from "Python.h": - ctypedef struct va_list - - int PyOS_snprintf(char *str, size_t size, const char *format, ...) - # Output not more than size bytes to str according to the format - # string format and the extra arguments. See the Unix man page snprintf(2). - - int PyOS_vsnprintf(char *str, size_t size, const char *format, va_list va) - # Output not more than size bytes to str according to the format - # string format and the variable argument list va. Unix man page vsnprintf(2). - - double PyOS_string_to_double(const char *s, char **endptr, PyObject *overflow_exception) except? -1.0 - # Convert a string s to a double, raising a Python exception on failure. The set of - # accepted strings corresponds to the set of strings accepted by Python’s float() - # constructor, except that s must not have leading or trailing whitespace. - # The conversion is independent of the current locale. - - enum: - Py_DTSF_SIGN - Py_DTSF_ADD_DOT_0 - Py_DTSF_ALT - - char* PyOS_double_to_string(double val, char format_code, int precision, int flags, int *ptype) except NULL - # Convert a double val to a string using supplied format_code, precision, and flags. - - int PyOS_stricmp(const char *s1, const char *s2) - # Case insensitive comparison of strings. The function works almost identically - # to strcmp() except that it ignores the case. - - int PyOS_strnicmp(const char *s1, const char *s2, Py_ssize_t size) - # Case insensitive comparison of strings. The function works almost identically - # to strncmp() except that it ignores the case. +# From https://docs.python.org/3/c-api/conversion.html + +from .object cimport PyObject + +cdef extern from "Python.h": + ctypedef struct va_list + + int PyOS_snprintf(char *str, size_t size, const char *format, ...) + # Output not more than size bytes to str according to the format + # string format and the extra arguments. See the Unix man page snprintf(2). + + int PyOS_vsnprintf(char *str, size_t size, const char *format, va_list va) + # Output not more than size bytes to str according to the format + # string format and the variable argument list va. Unix man page vsnprintf(2). + + double PyOS_string_to_double(const char *s, char **endptr, PyObject *overflow_exception) except? -1.0 + # Convert a string s to a double, raising a Python exception on failure. The set of + # accepted strings corresponds to the set of strings accepted by Python’s float() + # constructor, except that s must not have leading or trailing whitespace. + # The conversion is independent of the current locale. + + enum: + Py_DTSF_SIGN + Py_DTSF_ADD_DOT_0 + Py_DTSF_ALT + + char* PyOS_double_to_string(double val, char format_code, int precision, int flags, int *ptype) except NULL + # Convert a double val to a string using supplied format_code, precision, and flags. 
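[Annotation] A Python-level aside on the conversion helpers declared just above: in CPython, float() and repr() rest on the same locale-independent machinery as PyOS_string_to_double and PyOS_double_to_string, so a short uncompiled sketch (standard library only, no Cython involved) shows the guaranteed behaviour:

    # '.' is always the decimal separator, independent of the process locale.
    print(float("2.5"))       # 2.5
    print(repr(1.0 / 3.0))    # '0.3333333333333333' -- shortest round-trippable form
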
+ + int PyOS_stricmp(const char *s1, const char *s2) + # Case insensitive comparison of strings. The function works almost identically + # to strcmp() except that it ignores the case. + + int PyOS_strnicmp(const char *s1, const char *s2, Py_ssize_t size) + # Case insensitive comparison of strings. The function works almost identically + # to strncmp() except that it ignores the case. diff --git a/contrib/tools/cython/Cython/Includes/cpython/datetime.pxd b/contrib/tools/cython/Cython/Includes/cpython/datetime.pxd index d3c80b07df..cd0f90719b 100644 --- a/contrib/tools/cython/Cython/Includes/cpython/datetime.pxd +++ b/contrib/tools/cython/Cython/Includes/cpython/datetime.pxd @@ -90,9 +90,9 @@ cdef extern from "datetime.h": int PyDateTime_TIME_GET_MICROSECOND(object o) # Getters for timedelta (C macros). - int PyDateTime_DELTA_GET_DAYS(object o) - int PyDateTime_DELTA_GET_SECONDS(object o) - int PyDateTime_DELTA_GET_MICROSECONDS(object o) + int PyDateTime_DELTA_GET_DAYS(object o) + int PyDateTime_DELTA_GET_SECONDS(object o) + int PyDateTime_DELTA_GET_MICROSECONDS(object o) # PyDateTime CAPI object. PyDateTime_CAPI *PyDateTimeAPI diff --git a/contrib/tools/cython/Cython/Includes/cpython/genobject.pxd b/contrib/tools/cython/Cython/Includes/cpython/genobject.pxd index 0dd63fbbf0..337b3cc0ad 100644 --- a/contrib/tools/cython/Cython/Includes/cpython/genobject.pxd +++ b/contrib/tools/cython/Cython/Includes/cpython/genobject.pxd @@ -1,25 +1,25 @@ -from .pystate cimport PyFrameObject - -cdef extern from "Python.h": - - ########################################################################### - # Generator Objects - ########################################################################### - - bint PyGen_Check(object ob) - # Return true if ob is a generator object; ob must not be NULL. - - bint PyGen_CheckExact(object ob) - # Return true if ob's type is PyGen_Type; ob must not be NULL. - - object PyGen_New(PyFrameObject *frame) - # Return value: New reference. - # Create and return a new generator object based on the frame object. A - # reference to frame is stolen by this function. The argument must not be - # NULL. - - object PyGen_NewWithQualName(PyFrameObject *frame, object name, object qualname) - # Return value: New reference. - # Create and return a new generator object based on the frame object, with - # __name__ and __qualname__ set to name and qualname. A reference to frame - # is stolen by this function. The frame argument must not be NULL. +from .pystate cimport PyFrameObject + +cdef extern from "Python.h": + + ########################################################################### + # Generator Objects + ########################################################################### + + bint PyGen_Check(object ob) + # Return true if ob is a generator object; ob must not be NULL. + + bint PyGen_CheckExact(object ob) + # Return true if ob's type is PyGen_Type; ob must not be NULL. + + object PyGen_New(PyFrameObject *frame) + # Return value: New reference. + # Create and return a new generator object based on the frame object. A + # reference to frame is stolen by this function. The argument must not be + # NULL. + + object PyGen_NewWithQualName(PyFrameObject *frame, object name, object qualname) + # Return value: New reference. + # Create and return a new generator object based on the frame object, with + # __name__ and __qualname__ set to name and qualname. A reference to frame + # is stolen by this function. The frame argument must not be NULL. 
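[Annotation] For orientation, the generator slots declared in genobject.pxd above describe objects that are fully visible from Python; a minimal uncompiled sketch (standard library only, the function name is invented for illustration):

    import types

    def countdown(n):
        while n:
            yield n
            n -= 1

    gen = countdown(3)
    print(isinstance(gen, types.GeneratorType))  # True -- the type PyGen_Check tests for
    print(gen.__name__, gen.__qualname__)        # names as set by PyGen_NewWithQualName
    print(gen.gi_frame is not None)              # the frame object the generator wraps
    print(list(gen))                             # [3, 2, 1]
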
diff --git a/contrib/tools/cython/Cython/Includes/cpython/iterobject.pxd b/contrib/tools/cython/Cython/Includes/cpython/iterobject.pxd index 8aa0387ca9..a70aeccb09 100644 --- a/contrib/tools/cython/Cython/Includes/cpython/iterobject.pxd +++ b/contrib/tools/cython/Cython/Includes/cpython/iterobject.pxd @@ -1,24 +1,24 @@ -cdef extern from "Python.h": - - ########################################################################### - # Iterator Objects - ########################################################################### - - bint PySeqIter_Check(object op) - # Return true if the type of op is PySeqIter_Type. - - object PySeqIter_New(object seq) - # Return value: New reference. - # Return an iterator that works with a general sequence object, seq. The - # iteration ends when the sequence raises IndexError for the subscripting - # operation. - - bint PyCallIter_Check(object op) - # Return true if the type of op is PyCallIter_Type. - - object PyCallIter_New(object callable, object sentinel) - # Return value: New reference. - # Return a new iterator. The first parameter, callable, can be any Python - # callable object that can be called with no parameters; each call to it - # should return the next item in the iteration. When callable returns a - # value equal to sentinel, the iteration will be terminated. +cdef extern from "Python.h": + + ########################################################################### + # Iterator Objects + ########################################################################### + + bint PySeqIter_Check(object op) + # Return true if the type of op is PySeqIter_Type. + + object PySeqIter_New(object seq) + # Return value: New reference. + # Return an iterator that works with a general sequence object, seq. The + # iteration ends when the sequence raises IndexError for the subscripting + # operation. + + bint PyCallIter_Check(object op) + # Return true if the type of op is PyCallIter_Type. + + object PyCallIter_New(object callable, object sentinel) + # Return value: New reference. + # Return a new iterator. The first parameter, callable, can be any Python + # callable object that can be called with no parameters; each call to it + # should return the next item in the iteration. When callable returns a + # value equal to sentinel, the iteration will be terminated. diff --git a/contrib/tools/cython/Cython/Includes/cpython/longintrepr.pxd b/contrib/tools/cython/Cython/Includes/cpython/longintrepr.pxd index efae540d05..c38c1bff88 100644 --- a/contrib/tools/cython/Cython/Includes/cpython/longintrepr.pxd +++ b/contrib/tools/cython/Cython/Includes/cpython/longintrepr.pxd @@ -1,11 +1,11 @@ # Internals of the "long" type (Python 2) or "int" type (Python 3). 
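[Annotation] The callable iterator declared in iterobject.pxd above is the object the two-argument form of iter() returns, so its behaviour can be sketched without any C code (the list name is invented for illustration):

    values = [1, 2, 0, 3]
    # iter(callable, sentinel) wraps the callable in a PyCallIter-style iterator:
    # it is called repeatedly until it returns the sentinel value.
    it = iter(values.pop, 0)
    print(list(it))   # [3] -- pop() returned 3, then 0, which is the sentinel
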
-cdef extern from "Python.h": - """ - #if PY_MAJOR_VERSION < 3 - #include "longintrepr.h" - #endif - """ +cdef extern from "Python.h": + """ + #if PY_MAJOR_VERSION < 3 + #include "longintrepr.h" + #endif + """ ctypedef unsigned int digit ctypedef int sdigit # Python >= 2.7 only diff --git a/contrib/tools/cython/Cython/Includes/cpython/memoryview.pxd b/contrib/tools/cython/Cython/Includes/cpython/memoryview.pxd index 8274f42af9..83a84e6f91 100644 --- a/contrib/tools/cython/Cython/Includes/cpython/memoryview.pxd +++ b/contrib/tools/cython/Cython/Includes/cpython/memoryview.pxd @@ -1,50 +1,50 @@ -cdef extern from "Python.h": - - ########################################################################### - # MemoryView Objects - ########################################################################### - # A memoryview object exposes the C level buffer interface as a Python - # object which can then be passed around like any other object - - object PyMemoryView_FromObject(object obj) - # Return value: New reference. - # Create a memoryview object from an object that provides the buffer - # interface. If obj supports writable buffer exports, the memoryview object - # will be read/write, otherwise it may be either read-only or read/write at - # the discretion of the exporter. - - object PyMemoryView_FromMemory(char *mem, Py_ssize_t size, int flags) - # Return value: New reference. - # Create a memoryview object using mem as the underlying buffer. flags can - # be one of PyBUF_READ or PyBUF_WRITE. - # New in version 3.3. - - object PyMemoryView_FromBuffer(Py_buffer *view) - # Return value: New reference. - # Create a memoryview object wrapping the given buffer structure view. For - # simple byte buffers, PyMemoryView_FromMemory() is the preferred function. - - object PyMemoryView_GetContiguous(object obj, - int buffertype, - char order) - # Return value: New reference. - # Create a memoryview object to a contiguous chunk of memory (in either ‘C’ - # or ‘F’ortran order) from an object that defines the buffer interface. If - # memory is contiguous, the memoryview object points to the original - # memory. Otherwise, a copy is made and the memoryview points to a new - # bytes object. - - bint PyMemoryView_Check(object obj) - # Return true if the object obj is a memoryview object. It is not currently - # allowed to create subclasses of memoryview. - - Py_buffer *PyMemoryView_GET_BUFFER(object mview) - # Return a pointer to the memoryview’s private copy of the exporter’s - # buffer. mview must be a memoryview instance; this macro doesn’t check its - # type, you must do it yourself or you will risk crashes. - - Py_buffer *PyMemoryView_GET_BASE(object mview) - # Return either a pointer to the exporting object that the memoryview is - # based on or NULL if the memoryview has been created by one of the - # functions PyMemoryView_FromMemory() or PyMemoryView_FromBuffer(). mview - # must be a memoryview instance. +cdef extern from "Python.h": + + ########################################################################### + # MemoryView Objects + ########################################################################### + # A memoryview object exposes the C level buffer interface as a Python + # object which can then be passed around like any other object + + object PyMemoryView_FromObject(object obj) + # Return value: New reference. + # Create a memoryview object from an object that provides the buffer + # interface. 
If obj supports writable buffer exports, the memoryview object + # will be read/write, otherwise it may be either read-only or read/write at + # the discretion of the exporter. + + object PyMemoryView_FromMemory(char *mem, Py_ssize_t size, int flags) + # Return value: New reference. + # Create a memoryview object using mem as the underlying buffer. flags can + # be one of PyBUF_READ or PyBUF_WRITE. + # New in version 3.3. + + object PyMemoryView_FromBuffer(Py_buffer *view) + # Return value: New reference. + # Create a memoryview object wrapping the given buffer structure view. For + # simple byte buffers, PyMemoryView_FromMemory() is the preferred function. + + object PyMemoryView_GetContiguous(object obj, + int buffertype, + char order) + # Return value: New reference. + # Create a memoryview object to a contiguous chunk of memory (in either ‘C’ + # or ‘F’ortran order) from an object that defines the buffer interface. If + # memory is contiguous, the memoryview object points to the original + # memory. Otherwise, a copy is made and the memoryview points to a new + # bytes object. + + bint PyMemoryView_Check(object obj) + # Return true if the object obj is a memoryview object. It is not currently + # allowed to create subclasses of memoryview. + + Py_buffer *PyMemoryView_GET_BUFFER(object mview) + # Return a pointer to the memoryview’s private copy of the exporter’s + # buffer. mview must be a memoryview instance; this macro doesn’t check its + # type, you must do it yourself or you will risk crashes. + + Py_buffer *PyMemoryView_GET_BASE(object mview) + # Return either a pointer to the exporting object that the memoryview is + # based on or NULL if the memoryview has been created by one of the + # functions PyMemoryView_FromMemory() or PyMemoryView_FromBuffer(). mview + # must be a memoryview instance. diff --git a/contrib/tools/cython/Cython/Includes/cpython/number.pxd b/contrib/tools/cython/Cython/Includes/cpython/number.pxd index df6c637c33..ded35c292a 100644 --- a/contrib/tools/cython/Cython/Includes/cpython/number.pxd +++ b/contrib/tools/cython/Cython/Includes/cpython/number.pxd @@ -27,13 +27,13 @@ cdef extern from "Python.h": # failure. This is the equivalent of the Python expression "o1 * # o2". - object PyNumber_MatrixMultiply(object o1, object o2) - # Return value: New reference. - # Returns the result of matrix multiplication on o1 and o2, or - # NULL on failure. This is the equivalent of the Python - # expression "o1 @ o2". - # New in version 3.5. - + object PyNumber_MatrixMultiply(object o1, object o2) + # Return value: New reference. + # Returns the result of matrix multiplication on o1 and o2, or + # NULL on failure. This is the equivalent of the Python + # expression "o1 @ o2". + # New in version 3.5. + object PyNumber_Divide(object o1, object o2) # Return value: New reference. # Returns the result of dividing o1 by o2, or NULL on @@ -140,13 +140,13 @@ cdef extern from "Python.h": # failure. The operation is done in-place when o1 supports # it. This is the equivalent of the Python statement "o1 *= o2". - object PyNumber_InPlaceMatrixMultiply(object o1, object o2) - # Return value: New reference. - # Returns the result of matrix multiplication on o1 and o2, or - # NULL on failure. The operation is done in-place when o1 supports - # it. This is the equivalent of the Python statement "o1 @= o2". - # New in version 3.5. - + object PyNumber_InPlaceMatrixMultiply(object o1, object o2) + # Return value: New reference. 
+ # Returns the result of matrix multiplication on o1 and o2, or + # NULL on failure. The operation is done in-place when o1 supports + # it. This is the equivalent of the Python statement "o1 @= o2". + # New in version 3.5. + object PyNumber_InPlaceDivide(object o1, object o2) # Return value: New reference. # Returns the result of dividing o1 by o2, or NULL on failure. The diff --git a/contrib/tools/cython/Cython/Includes/cpython/pycapsule.pxd b/contrib/tools/cython/Cython/Includes/cpython/pycapsule.pxd index 31aa5f5b7f..c3d12c7490 100644 --- a/contrib/tools/cython/Cython/Includes/cpython/pycapsule.pxd +++ b/contrib/tools/cython/Cython/Includes/cpython/pycapsule.pxd @@ -24,7 +24,7 @@ cdef extern from "Python.h": # Return true if its argument is a PyCapsule. - object PyCapsule_New(void *pointer, const char *name, + object PyCapsule_New(void *pointer, const char *name, PyCapsule_Destructor destructor) # Return value: New reference. # @@ -47,7 +47,7 @@ cdef extern from "Python.h": # PyCapsule_Import(). - void* PyCapsule_GetPointer(object capsule, const char *name) except? NULL + void* PyCapsule_GetPointer(object capsule, const char *name) except? NULL # Retrieve the pointer stored in the capsule. On failure, set an # exception and return NULL. # @@ -66,7 +66,7 @@ cdef extern from "Python.h": # or PyErr_Occurred() to disambiguate. - const char* PyCapsule_GetName(object capsule) except? NULL + const char* PyCapsule_GetName(object capsule) except? NULL # Return the current name stored in the capsule. On failure, set # an exception and return NULL. # @@ -84,7 +84,7 @@ cdef extern from "Python.h": # PyErr_Occurred() to disambiguate. - bint PyCapsule_IsValid(object capsule, const char *name) + bint PyCapsule_IsValid(object capsule, const char *name) # Determines whether or not capsule is a valid capsule. A valid # capsule is non-NULL, passes PyCapsule_CheckExact(), has a # non-NULL pointer stored in it, and its internal name matches the @@ -114,7 +114,7 @@ cdef extern from "Python.h": # failure. - int PyCapsule_SetName(object capsule, const char *name) except -1 + int PyCapsule_SetName(object capsule, const char *name) except -1 # Set the name inside capsule to name. If non-NULL, the name must # outlive the capsule. If the previous name stored in the capsule # was not NULL, no attempt is made to free it. @@ -128,7 +128,7 @@ cdef extern from "Python.h": # success. Return nonzero and set an exception on failure. - void* PyCapsule_Import(const char *name, int no_block) except? NULL + void* PyCapsule_Import(const char *name, int no_block) except? NULL # Import a pointer to a C object from a capsule attribute in a # module. The name parameter should specify the full name to the # attribute, as in module.attribute. The name stored in the diff --git a/contrib/tools/cython/Cython/Includes/cpython/pylifecycle.pxd b/contrib/tools/cython/Cython/Includes/cpython/pylifecycle.pxd index 919c18c487..2c71e37163 100644 --- a/contrib/tools/cython/Cython/Includes/cpython/pylifecycle.pxd +++ b/contrib/tools/cython/Cython/Includes/cpython/pylifecycle.pxd @@ -27,9 +27,9 @@ cdef extern from "Python.h": void Py_EndInterpreter(PyThreadState *) - # _Py_PyAtExit is for the atexit module, Py_AtExit is for low-level + # _Py_PyAtExit is for the atexit module, Py_AtExit is for low-level # exit functions. 
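[Annotation] The capsule functions documented in pycapsule.pxd above can be exercised from pure Python via ctypes; a hedged sketch using the capsule that the standard-library datetime module exports (assumes CPython, where ctypes.pythonapi is available):

    import ctypes
    import datetime

    capsule = datetime.datetime_CAPI  # a real PyCapsule shipped with the stdlib

    PyCapsule_GetName = ctypes.pythonapi.PyCapsule_GetName
    PyCapsule_GetName.argtypes = [ctypes.py_object]
    PyCapsule_GetName.restype = ctypes.c_char_p

    PyCapsule_IsValid = ctypes.pythonapi.PyCapsule_IsValid
    PyCapsule_IsValid.argtypes = [ctypes.py_object, ctypes.c_char_p]
    PyCapsule_IsValid.restype = ctypes.c_int

    name = PyCapsule_GetName(capsule)
    print(name)                                    # b'datetime.datetime_CAPI'
    print(bool(PyCapsule_IsValid(capsule, name)))  # True
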
- void _Py_PyAtExit(void (*func)(object), object) + void _Py_PyAtExit(void (*func)(object), object) int Py_AtExit(void (*func)()) void Py_Exit(int) diff --git a/contrib/tools/cython/Cython/Includes/cpython/slice.pxd b/contrib/tools/cython/Cython/Includes/cpython/slice.pxd index ea81cbc3fe..202dea716c 100644 --- a/contrib/tools/cython/Cython/Includes/cpython/slice.pxd +++ b/contrib/tools/cython/Cython/Includes/cpython/slice.pxd @@ -45,26 +45,26 @@ cdef extern from "Python.h": # # Changed in version 3.2: The parameter type for the slice parameter was # PySliceObject* before. - - int PySlice_Unpack(object slice, Py_ssize_t *start, Py_ssize_t *stop, - Py_ssize_t *step) except -1 - # Extract the start, stop and step data members from a slice object as C - # integers. Silently reduce values larger than PY_SSIZE_T_MAX to - # PY_SSIZE_T_MAX, silently boost the start and stop values less than - # PY_SSIZE_T_MIN to PY_SSIZE_T_MIN, and silently boost the step values - # less than -PY_SSIZE_T_MAX to -PY_SSIZE_T_MAX. - - # Return -1 on error, 0 on success. - - # New in version 3.6.1. - - Py_ssize_t PySlice_AdjustIndices(Py_ssize_t length, Py_ssize_t *start, - Py_ssize_t *stop, Py_ssize_t step) - # Adjust start/end slice indices assuming a sequence of the specified - # length. Out of bounds indices are clipped in a manner consistent with - # the handling of normal slices. - - # Return the length of the slice. Always successful. Doesn’t call Python - # code. - - # New in version 3.6.1. + + int PySlice_Unpack(object slice, Py_ssize_t *start, Py_ssize_t *stop, + Py_ssize_t *step) except -1 + # Extract the start, stop and step data members from a slice object as C + # integers. Silently reduce values larger than PY_SSIZE_T_MAX to + # PY_SSIZE_T_MAX, silently boost the start and stop values less than + # PY_SSIZE_T_MIN to PY_SSIZE_T_MIN, and silently boost the step values + # less than -PY_SSIZE_T_MAX to -PY_SSIZE_T_MAX. + + # Return -1 on error, 0 on success. + + # New in version 3.6.1. + + Py_ssize_t PySlice_AdjustIndices(Py_ssize_t length, Py_ssize_t *start, + Py_ssize_t *stop, Py_ssize_t step) + # Adjust start/end slice indices assuming a sequence of the specified + # length. Out of bounds indices are clipped in a manner consistent with + # the handling of normal slices. + + # Return the length of the slice. Always successful. Doesn’t call Python + # code. + + # New in version 3.6.1. diff --git a/contrib/tools/cython/Cython/Includes/cpython/unicode.pxd b/contrib/tools/cython/Cython/Includes/cpython/unicode.pxd index 6e412e3324..ad01ed64df 100644 --- a/contrib/tools/cython/Cython/Includes/cpython/unicode.pxd +++ b/contrib/tools/cython/Cython/Includes/cpython/unicode.pxd @@ -10,19 +10,19 @@ cdef extern from *: # Return the size of the object. o has to be a PyUnicodeObject # (not checked). - # - # Deprecated since version 3.3, will be removed in version 3.10: - # Part of the old-style Unicode API, please migrate to using - # PyUnicode_GET_LENGTH(). + # + # Deprecated since version 3.3, will be removed in version 3.10: + # Part of the old-style Unicode API, please migrate to using + # PyUnicode_GET_LENGTH(). Py_ssize_t PyUnicode_GET_SIZE(object o) - # Return the length of the Unicode string, in code points. o has - # to be a Unicode object in the “canonical” representation (not - # checked). - # - # New in version 3.3. - Py_ssize_t PyUnicode_GET_LENGTH(object o) - + # Return the length of the Unicode string, in code points. o has + # to be a Unicode object in the “canonical” representation (not + # checked). 
+ # + # New in version 3.3. + Py_ssize_t PyUnicode_GET_LENGTH(object o) + # Return the size of the object's internal buffer in bytes. o has # to be a PyUnicodeObject (not checked). Py_ssize_t PyUnicode_GET_DATA_SIZE(object o) @@ -226,7 +226,7 @@ cdef extern from *: # equal, and greater than, respectively. It is best to pass only ASCII-encoded # strings, but the function interprets the input string as ISO-8859-1 if it # contains non-ASCII characters. - int PyUnicode_CompareWithASCIIString(object uni, const char *string) + int PyUnicode_CompareWithASCIIString(object uni, const char *string) # Rich compare two unicode strings and return one of the following: # diff --git a/contrib/tools/cython/Cython/Includes/libc/math.pxd b/contrib/tools/cython/Cython/Includes/libc/math.pxd index eef987c73e..b002670b22 100644 --- a/contrib/tools/cython/Cython/Includes/libc/math.pxd +++ b/contrib/tools/cython/Cython/Includes/libc/math.pxd @@ -104,9 +104,9 @@ cdef extern from "<math.h>" nogil: bint isnan(long double) bint isnormal(long double) bint signbit(long double) - int fpclassify(long double) - const int FP_NAN - const int FP_INFINITE - const int FP_ZERO - const int FP_SUBNORMAL - const int FP_NORMAL + int fpclassify(long double) + const int FP_NAN + const int FP_INFINITE + const int FP_ZERO + const int FP_SUBNORMAL + const int FP_NORMAL diff --git a/contrib/tools/cython/Cython/Includes/libcpp/string.pxd b/contrib/tools/cython/Cython/Includes/libcpp/string.pxd index 14fe5ede4b..a894144f1f 100644 --- a/contrib/tools/cython/Cython/Includes/libcpp/string.pxd +++ b/contrib/tools/cython/Cython/Includes/libcpp/string.pxd @@ -2,8 +2,8 @@ # deprecated cimport for backwards compatibility: from libc.string cimport const_char -cdef extern from "<string>" namespace "std::string" nogil: - const size_t npos +cdef extern from "<string>" namespace "std::string" nogil: + const size_t npos cdef extern from "<string>" namespace "std" nogil: cdef cppclass string: @@ -11,12 +11,12 @@ cdef extern from "<string>" namespace "std" nogil: cppclass iterator: iterator() char& operator*() - iterator(iterator&) + iterator(iterator&) iterator operator++() iterator operator--() bint operator==(iterator) bint operator!=(iterator) - + cppclass reverse_iterator: char& operator*() iterator operator++() @@ -29,22 +29,22 @@ cdef extern from "<string>" namespace "std" nogil: bint operator>(reverse_iterator) bint operator<=(reverse_iterator) bint operator>=(reverse_iterator) - + cppclass const_iterator(iterator): pass - + cppclass const_reverse_iterator(reverse_iterator): pass - string() except + - string(const string& s) except + - string(const string& s, size_t pos) except + - string(const string& s, size_t pos, size_t len) except + - string(const char* s) except + - string(const char* s, size_t n) except + - string(size_t n, char c) except + - string(iterator first, iterator last) except + - + string() except + + string(const string& s) except + + string(const string& s, size_t pos) except + + string(const string& s, size_t pos, size_t len) except + + string(const char* s) except + + string(const char* s, size_t n) except + + string(size_t n, char c) except + + string(iterator first, iterator last) except + + iterator begin() const_iterator const_begin "begin"() iterator end() @@ -59,123 +59,123 @@ cdef extern from "<string>" namespace "std" nogil: size_t size() size_t max_size() size_t length() - void resize(size_t) except + - void resize(size_t, char) except + - void shrink_to_fit() except + + void resize(size_t) except + + void 
resize(size_t, char) except + + void shrink_to_fit() except + size_t capacity() - void reserve(size_t) except + + void reserve(size_t) except + void clear() bint empty() - iterator erase(iterator first, iterator last) - iterator erase(iterator p) - iterator erase(const_iterator first, const_iterator last) - iterator erase(const_iterator p) - string& erase(size_t pos, size_t len) except + - string& erase(size_t pos) except + - string& erase() except + - - char& at(size_t pos) except + - char& operator[](size_t pos) - char& front() - char& back() - int compare(const string& s) - int compare(size_t pos, size_t len, const string& s) except + - int compare(size_t pos, size_t len, const string& s, size_t subpos, size_t sublen) except + - int compare(const char* s) except + - int compare(size_t pos, size_t len, const char* s) except + - int compare(size_t pos, size_t len, const char* s , size_t n) except + - - string& append(const string& s) except + - string& append(const string& s, size_t subpos, size_t sublen) except + - string& append(const char* s) except + - string& append(const char* s, size_t n) except + - string& append(size_t n, char c) except + - - void push_back(char c) except + - void pop_back() - - string& assign(const string& s) except + - string& assign(const string& s, size_t subpos, size_t sublen) except + - string& assign(const char* s, size_t n) except + - string& assign(const char* s) except + - string& assign(size_t n, char c) except + - - string& insert(size_t pos, const string& s, size_t subpos, size_t sublen) except + - string& insert(size_t pos, const string& s) except + - string& insert(size_t pos, const char* s, size_t n) except + - string& insert(size_t pos, const char* s) except + - string& insert(size_t pos, size_t n, char c) except + - void insert(iterator p, size_t n, char c) except + - iterator insert(iterator p, char c) except + - - size_t copy(char* s, size_t len, size_t pos) except + - size_t copy(char* s, size_t len) except + - - size_t find(const string& s, size_t pos) - size_t find(const string& s) - size_t find(const char* s, size_t pos, size_t n) - size_t find(const char* s, size_t pos) - size_t find(const char* s) - size_t find(char c, size_t pos) - size_t find(char c) - - size_t rfind(const string&, size_t pos) - size_t rfind(const string&) - size_t rfind(const char* s, size_t pos, size_t n) - size_t rfind(const char* s, size_t pos) - size_t rfind(const char* s) - size_t rfind(char c, size_t pos) + iterator erase(iterator first, iterator last) + iterator erase(iterator p) + iterator erase(const_iterator first, const_iterator last) + iterator erase(const_iterator p) + string& erase(size_t pos, size_t len) except + + string& erase(size_t pos) except + + string& erase() except + + + char& at(size_t pos) except + + char& operator[](size_t pos) + char& front() + char& back() + int compare(const string& s) + int compare(size_t pos, size_t len, const string& s) except + + int compare(size_t pos, size_t len, const string& s, size_t subpos, size_t sublen) except + + int compare(const char* s) except + + int compare(size_t pos, size_t len, const char* s) except + + int compare(size_t pos, size_t len, const char* s , size_t n) except + + + string& append(const string& s) except + + string& append(const string& s, size_t subpos, size_t sublen) except + + string& append(const char* s) except + + string& append(const char* s, size_t n) except + + string& append(size_t n, char c) except + + + void push_back(char c) except + + void pop_back() + + string& assign(const 
string& s) except + + string& assign(const string& s, size_t subpos, size_t sublen) except + + string& assign(const char* s, size_t n) except + + string& assign(const char* s) except + + string& assign(size_t n, char c) except + + + string& insert(size_t pos, const string& s, size_t subpos, size_t sublen) except + + string& insert(size_t pos, const string& s) except + + string& insert(size_t pos, const char* s, size_t n) except + + string& insert(size_t pos, const char* s) except + + string& insert(size_t pos, size_t n, char c) except + + void insert(iterator p, size_t n, char c) except + + iterator insert(iterator p, char c) except + + + size_t copy(char* s, size_t len, size_t pos) except + + size_t copy(char* s, size_t len) except + + + size_t find(const string& s, size_t pos) + size_t find(const string& s) + size_t find(const char* s, size_t pos, size_t n) + size_t find(const char* s, size_t pos) + size_t find(const char* s) + size_t find(char c, size_t pos) + size_t find(char c) + + size_t rfind(const string&, size_t pos) + size_t rfind(const string&) + size_t rfind(const char* s, size_t pos, size_t n) + size_t rfind(const char* s, size_t pos) + size_t rfind(const char* s) + size_t rfind(char c, size_t pos) size_t rfind(char c) - size_t find_first_of(const string&, size_t pos) - size_t find_first_of(const string&) - size_t find_first_of(const char* s, size_t pos, size_t n) - size_t find_first_of(const char* s, size_t pos) - size_t find_first_of(const char* s) - size_t find_first_of(char c, size_t pos) + size_t find_first_of(const string&, size_t pos) + size_t find_first_of(const string&) + size_t find_first_of(const char* s, size_t pos, size_t n) + size_t find_first_of(const char* s, size_t pos) + size_t find_first_of(const char* s) + size_t find_first_of(char c, size_t pos) size_t find_first_of(char c) - size_t find_first_not_of(const string& s, size_t pos) - size_t find_first_not_of(const string& s) - size_t find_first_not_of(const char* s, size_t pos, size_t n) - size_t find_first_not_of(const char* s, size_t pos) - size_t find_first_not_of(const char*) - size_t find_first_not_of(char c, size_t pos) + size_t find_first_not_of(const string& s, size_t pos) + size_t find_first_not_of(const string& s) + size_t find_first_not_of(const char* s, size_t pos, size_t n) + size_t find_first_not_of(const char* s, size_t pos) + size_t find_first_not_of(const char*) + size_t find_first_not_of(char c, size_t pos) size_t find_first_not_of(char c) - size_t find_last_of(const string& s, size_t pos) - size_t find_last_of(const string& s) - size_t find_last_of(const char* s, size_t pos, size_t n) - size_t find_last_of(const char* s, size_t pos) - size_t find_last_of(const char* s) - size_t find_last_of(char c, size_t pos) + size_t find_last_of(const string& s, size_t pos) + size_t find_last_of(const string& s) + size_t find_last_of(const char* s, size_t pos, size_t n) + size_t find_last_of(const char* s, size_t pos) + size_t find_last_of(const char* s) + size_t find_last_of(char c, size_t pos) size_t find_last_of(char c) - size_t find_last_not_of(const string& s, size_t pos) - size_t find_last_not_of(const string& s) - size_t find_last_not_of(const char* s, size_t pos, size_t n) - size_t find_last_not_of(const char* s, size_t pos) - size_t find_last_not_of(const char* s) - size_t find_last_not_of(char c, size_t pos) - size_t find_last_not_of(char c) + size_t find_last_not_of(const string& s, size_t pos) + size_t find_last_not_of(const string& s) + size_t find_last_not_of(const char* s, size_t pos, 
size_t n) + size_t find_last_not_of(const char* s, size_t pos) + size_t find_last_not_of(const char* s) + size_t find_last_not_of(char c, size_t pos) + size_t find_last_not_of(char c) - string substr(size_t pos, size_t len) except + - string substr(size_t pos) except + + string substr(size_t pos, size_t len) except + + string substr(size_t pos) except + string substr() #string& operator= (const string&) #string& operator= (const char*) #string& operator= (char) - string operator+ (const string&) except + - string operator+ (const char*) except + + string operator+ (const string&) except + + string operator+ (const char*) except + bint operator==(const string&) bint operator==(const char*) - bint operator!= (const string&) - bint operator!= (const char*) + bint operator!= (const string&) + bint operator!= (const char*) bint operator< (const string&) bint operator< (const char*) @@ -188,40 +188,40 @@ cdef extern from "<string>" namespace "std" nogil: bint operator>= (const string&) bint operator>= (const char*) - - - string to_string(int val) except + - string to_string(long val) except + - string to_string(long long val) except + - string to_string(unsigned val) except + - string to_string(size_t val) except + - string to_string(ssize_t val) except + - string to_string(unsigned long val) except + - string to_string(unsigned long long val) except + - string to_string(float val) except + - string to_string(double val) except + - string to_string(long double val) except + - - int stoi(const string& s, size_t* idx, int base) except + - int stoi(const string& s, size_t* idx) except + - int stoi(const string& s) except + - long stol(const string& s, size_t* idx, int base) except + - long stol(const string& s, size_t* idx) except + - long stol(const string& s) except + - long long stoll(const string& s, size_t* idx, int base) except + - long long stoll(const string& s, size_t* idx) except + - long long stoll(const string& s) except + - - unsigned long stoul(const string& s, size_t* idx, int base) except + - unsigned long stoul(const string& s, size_t* idx) except + - unsigned long stoul(const string& s) except + - unsigned long long stoull(const string& s, size_t* idx, int base) except + - unsigned long long stoull(const string& s, size_t* idx) except + - unsigned long long stoull(const string& s) except + - - float stof(const string& s, size_t* idx) except + - float stof(const string& s) except + - double stod(const string& s, size_t* idx) except + - double stod(const string& s) except + - long double stold(const string& s, size_t* idx) except + - long double stold(const string& s) except + + + + string to_string(int val) except + + string to_string(long val) except + + string to_string(long long val) except + + string to_string(unsigned val) except + + string to_string(size_t val) except + + string to_string(ssize_t val) except + + string to_string(unsigned long val) except + + string to_string(unsigned long long val) except + + string to_string(float val) except + + string to_string(double val) except + + string to_string(long double val) except + + + int stoi(const string& s, size_t* idx, int base) except + + int stoi(const string& s, size_t* idx) except + + int stoi(const string& s) except + + long stol(const string& s, size_t* idx, int base) except + + long stol(const string& s, size_t* idx) except + + long stol(const string& s) except + + long long stoll(const string& s, size_t* idx, int base) except + + long long stoll(const string& s, size_t* idx) except + + long long stoll(const string& 
s) except + + + unsigned long stoul(const string& s, size_t* idx, int base) except + + unsigned long stoul(const string& s, size_t* idx) except + + unsigned long stoul(const string& s) except + + unsigned long long stoull(const string& s, size_t* idx, int base) except + + unsigned long long stoull(const string& s, size_t* idx) except + + unsigned long long stoull(const string& s) except + + + float stof(const string& s, size_t* idx) except + + float stof(const string& s) except + + double stod(const string& s, size_t* idx) except + + double stod(const string& s) except + + long double stold(const string& s, size_t* idx) except + + long double stold(const string& s) except + diff --git a/contrib/tools/cython/Cython/Includes/libcpp/unordered_map.pxd b/contrib/tools/cython/Cython/Includes/libcpp/unordered_map.pxd index 6b1e3793e7..a00fbbed28 100644 --- a/contrib/tools/cython/Cython/Includes/libcpp/unordered_map.pxd +++ b/contrib/tools/cython/Cython/Includes/libcpp/unordered_map.pxd @@ -1,7 +1,7 @@ from .utility cimport pair cdef extern from "<unordered_map>" namespace "std" nogil: - cdef cppclass unordered_map[T, U, HASH=*, PRED=*, ALLOCATOR=*]: + cdef cppclass unordered_map[T, U, HASH=*, PRED=*, ALLOCATOR=*]: ctypedef T key_type ctypedef U mapped_type ctypedef pair[const T, U] value_type diff --git a/contrib/tools/cython/Cython/Includes/libcpp/utility.pxd b/contrib/tools/cython/Cython/Includes/libcpp/utility.pxd index 3dc02e9380..e0df69b166 100644 --- a/contrib/tools/cython/Cython/Includes/libcpp/utility.pxd +++ b/contrib/tools/cython/Cython/Includes/libcpp/utility.pxd @@ -13,18 +13,18 @@ cdef extern from "<utility>" namespace "std" nogil: bint operator>(pair&, pair&) bint operator<=(pair&, pair&) bint operator>=(pair&, pair&) - -cdef extern from * namespace "cython_std" nogil: - """ - #if __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600) - // move should be defined for these versions of MSVC, but __cplusplus isn't set usefully - #include <type_traits> - - namespace cython_std { - template <typename T> typename std::remove_reference<T>::type&& move(T& t) noexcept { return std::move(t); } - template <typename T> typename std::remove_reference<T>::type&& move(T&& t) noexcept { return std::move(t); } - } - - #endif - """ - cdef T move[T](T) + +cdef extern from * namespace "cython_std" nogil: + """ + #if __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600) + // move should be defined for these versions of MSVC, but __cplusplus isn't set usefully + #include <type_traits> + + namespace cython_std { + template <typename T> typename std::remove_reference<T>::type&& move(T& t) noexcept { return std::move(t); } + template <typename T> typename std::remove_reference<T>::type&& move(T&& t) noexcept { return std::move(t); } + } + + #endif + """ + cdef T move[T](T) diff --git a/contrib/tools/cython/Cython/Includes/numpy/__init__.pxd b/contrib/tools/cython/Cython/Includes/numpy/__init__.pxd index 4231e6cbd4..15700c05ef 100644 --- a/contrib/tools/cython/Cython/Includes/numpy/__init__.pxd +++ b/contrib/tools/cython/Cython/Includes/numpy/__init__.pxd @@ -226,11 +226,11 @@ cdef extern from "numpy/arrayobject.h": # this field via the inline helper method PyDataType_SHAPE. 
cdef PyArray_ArrayDescr* subarray - ctypedef class numpy.flatiter [object PyArrayIterObject, check_size ignore]: + ctypedef class numpy.flatiter [object PyArrayIterObject, check_size ignore]: # Use through macros pass - ctypedef class numpy.broadcast [object PyArrayMultiIterObject, check_size ignore]: + ctypedef class numpy.broadcast [object PyArrayMultiIterObject, check_size ignore]: # Use through macros pass @@ -685,7 +685,7 @@ cdef extern from "numpy/arrayobject.h": object PyArray_Choose (ndarray, object, ndarray, NPY_CLIPMODE) int PyArray_Sort (ndarray, int, NPY_SORTKIND) object PyArray_ArgSort (ndarray, int, NPY_SORTKIND) - object PyArray_SearchSorted (ndarray, object, NPY_SEARCHSIDE, PyObject*) + object PyArray_SearchSorted (ndarray, object, NPY_SEARCHSIDE, PyObject*) object PyArray_ArgMax (ndarray, int, ndarray) object PyArray_ArgMin (ndarray, int, ndarray) object PyArray_Reshape (ndarray, object) @@ -914,7 +914,7 @@ cdef extern from "numpy/ufuncobject.h": ctypedef void (*PyUFuncGenericFunction) (char **, npy_intp *, npy_intp *, void *) - ctypedef class numpy.ufunc [object PyUFuncObject, check_size ignore]: + ctypedef class numpy.ufunc [object PyUFuncObject, check_size ignore]: cdef: int nin, nout, nargs int identity diff --git a/contrib/tools/cython/Cython/Includes/posix/fcntl.pxd b/contrib/tools/cython/Cython/Includes/posix/fcntl.pxd index fe9fc5daf4..9afc33a368 100644 --- a/contrib/tools/cython/Cython/Includes/posix/fcntl.pxd +++ b/contrib/tools/cython/Cython/Includes/posix/fcntl.pxd @@ -24,7 +24,7 @@ cdef extern from "<fcntl.h>" nogil: enum: SEEK_END enum: O_CREAT - enum: O_DIRECT + enum: O_DIRECT enum: O_EXCL enum: O_NOCTTY enum: O_TRUNC diff --git a/contrib/tools/cython/Cython/Shadow.py b/contrib/tools/cython/Cython/Shadow.py index c76909db28..e7b9e4f612 100644 --- a/contrib/tools/cython/Cython/Shadow.py +++ b/contrib/tools/cython/Cython/Shadow.py @@ -1,7 +1,7 @@ # cython.* namespace for pure mode. 
from __future__ import absolute_import -__version__ = "0.29.27" +__version__ = "0.29.27" try: from __builtin__ import basestring @@ -123,9 +123,9 @@ overflowcheck.fold = optimization.use_switch = \ final = internal = type_version_tag = no_gc_clear = no_gc = _empty_decorator -binding = lambda _: _empty_decorator +binding = lambda _: _empty_decorator + - _cython_inline = None def inline(f, *args, **kwds): if isinstance(f, basestring): diff --git a/contrib/tools/cython/Cython/Tests/xmlrunner.py b/contrib/tools/cython/Cython/Tests/xmlrunner.py index ee4dddb373..d6838aa22e 100644 --- a/contrib/tools/cython/Cython/Tests/xmlrunner.py +++ b/contrib/tools/cython/Cython/Tests/xmlrunner.py @@ -27,12 +27,12 @@ class TestSequenceFunctions(unittest.TestCase): def test_choice(self): element = random.choice(self.seq) - self.assertTrue(element in self.seq) + self.assertTrue(element in self.seq) def test_sample(self): self.assertRaises(ValueError, random.sample, self.seq, 20) for element in random.sample(self.seq, 5): - self.assertTrue(element in self.seq) + self.assertTrue(element in self.seq) if __name__ == '__main__': unittest.main(testRunner=xmlrunner.XMLTestRunner(output='test-reports')) @@ -43,7 +43,7 @@ from __future__ import absolute_import import os import sys import time -from unittest import TestResult, TextTestResult, TextTestRunner +from unittest import TestResult, TextTestResult, TextTestRunner import xml.dom.minidom try: from StringIO import StringIO @@ -95,7 +95,7 @@ class _TestInfo(object): self.err, self.test_method) -class _XMLTestResult(TextTestResult): +class _XMLTestResult(TextTestResult): """A test result class that can express test results in a XML report. Used by XMLTestRunner. @@ -103,7 +103,7 @@ class _XMLTestResult(TextTestResult): def __init__(self, stream=sys.stderr, descriptions=1, verbosity=1, elapsed_times=True): "Create a new instance of _XMLTestResult." - TextTestResult.__init__(self, stream, descriptions, verbosity) + TextTestResult.__init__(self, stream, descriptions, verbosity) self.successes = [] self.callback = None self.elapsed_times = elapsed_times @@ -159,7 +159,7 @@ class _XMLTestResult(TextTestResult): def stopTest(self, test): "Called after execute each test method." 
self._restore_standard_output() - TextTestResult.stopTest(self, test) + TextTestResult.stopTest(self, test) self.stop_time = time.time() if self.callback and callable(self.callback): diff --git a/contrib/tools/cython/Cython/Utility/AsyncGen.c b/contrib/tools/cython/Cython/Utility/AsyncGen.c index 80017a8d77..9a11d6a129 100644 --- a/contrib/tools/cython/Cython/Utility/AsyncGen.c +++ b/contrib/tools/cython/Cython/Utility/AsyncGen.c @@ -350,10 +350,10 @@ static PyMethodDef __Pyx_async_gen_methods[] = { static __Pyx_PyAsyncMethodsStruct __Pyx_async_gen_as_async = { 0, /* am_await */ PyObject_SelfIter, /* am_aiter */ - (unaryfunc)__Pyx_async_gen_anext, /* am_anext */ -#if PY_VERSION_HEX >= 0x030A00A3 - 0, /*am_send*/ -#endif + (unaryfunc)__Pyx_async_gen_anext, /* am_anext */ +#if PY_VERSION_HEX >= 0x030A00A3 + 0, /*am_send*/ +#endif }; #endif @@ -424,15 +424,15 @@ static PyTypeObject __pyx_AsyncGenType_type = { #elif PY_VERSION_HEX >= 0x030400a1 0, /* tp_finalize */ #endif -#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) +#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) 0, /*tp_vectorcall*/ #endif -#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ #endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 - 0, /*tp_pypy_flags*/ -#endif }; @@ -596,10 +596,10 @@ static PyMethodDef __Pyx_async_gen_asend_methods[] = { static __Pyx_PyAsyncMethodsStruct __Pyx_async_gen_asend_as_async = { PyObject_SelfIter, /* am_await */ 0, /* am_aiter */ - 0, /* am_anext */ -#if PY_VERSION_HEX >= 0x030A00A3 - 0, /*am_send*/ -#endif + 0, /* am_anext */ +#if PY_VERSION_HEX >= 0x030A00A3 + 0, /*am_send*/ +#endif }; #endif @@ -665,15 +665,15 @@ static PyTypeObject __pyx__PyAsyncGenASendType_type = { #if PY_VERSION_HEX >= 0x030400a1 0, /* tp_finalize */ #endif -#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) +#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) 0, /*tp_vectorcall*/ #endif -#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ #endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 - 0, /*tp_pypy_flags*/ -#endif }; @@ -783,15 +783,15 @@ static PyTypeObject __pyx__PyAsyncGenWrappedValueType_type = { #if PY_VERSION_HEX >= 0x030400a1 0, /* tp_finalize */ #endif -#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) +#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) 0, /*tp_vectorcall*/ #endif -#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ #endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 - 0, /*tp_pypy_flags*/ -#endif }; @@ -1006,10 +1006,10 @@ static PyMethodDef __Pyx_async_gen_athrow_methods[] = { static __Pyx_PyAsyncMethodsStruct __Pyx_async_gen_athrow_as_async = { 
PyObject_SelfIter, /* am_await */ 0, /* am_aiter */ - 0, /* am_anext */ -#if PY_VERSION_HEX >= 0x030A00A3 - 0, /*am_send*/ -#endif + 0, /* am_anext */ +#if PY_VERSION_HEX >= 0x030A00A3 + 0, /*am_send*/ +#endif }; #endif @@ -1074,15 +1074,15 @@ static PyTypeObject __pyx__PyAsyncGenAThrowType_type = { #if PY_VERSION_HEX >= 0x030400a1 0, /* tp_finalize */ #endif -#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) +#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) 0, /*tp_vectorcall*/ #endif -#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ #endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 - 0, /*tp_pypy_flags*/ -#endif }; diff --git a/contrib/tools/cython/Cython/Utility/Buffer.c b/contrib/tools/cython/Cython/Utility/Buffer.c index 18398e5233..3c7105fa35 100644 --- a/contrib/tools/cython/Cython/Utility/Buffer.c +++ b/contrib/tools/cython/Cython/Utility/Buffer.c @@ -298,7 +298,7 @@ static void __Pyx_BufFmt_RaiseUnexpectedChar(char ch) { static const char* __Pyx_BufFmt_DescribeTypeChar(char ch, int is_complex) { switch (ch) { - case '?': return "'bool'"; + case '?': return "'bool'"; case 'c': return "'char'"; case 'b': return "'signed char'"; case 'B': return "'unsigned char'"; @@ -343,7 +343,7 @@ static size_t __Pyx_BufFmt_TypeCharToStandardSize(char ch, int is_complex) { static size_t __Pyx_BufFmt_TypeCharToNativeSize(char ch, int is_complex) { switch (ch) { - case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; case 'h': case 'H': return sizeof(short); case 'i': case 'I': return sizeof(int); case 'l': case 'L': return sizeof(long); @@ -432,7 +432,7 @@ static char __Pyx_BufFmt_TypeCharToGroup(char ch, int is_complex) { case 'b': case 'h': case 'i': case 'l': case 'q': case 's': case 'p': return 'I'; - case '?': case 'B': case 'H': case 'I': case 'L': case 'Q': + case '?': case 'B': case 'H': case 'I': case 'L': case 'Q': return 'U'; case 'f': case 'd': case 'g': return (is_complex ? 
'C' : 'R'); @@ -602,8 +602,8 @@ static PyObject * __pyx_buffmt_parse_array(__Pyx_BufFmt_Context* ctx, const char** tsp) { const char *ts = *tsp; - int i = 0, number, ndim; - + int i = 0, number, ndim; + ++ts; if (ctx->new_count != 1) { PyErr_SetString(PyExc_ValueError, @@ -614,9 +614,9 @@ __pyx_buffmt_parse_array(__Pyx_BufFmt_Context* ctx, const char** tsp) /* Process the previous element */ if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; - // store ndim now, as field advanced by __Pyx_BufFmt_ProcessTypeChunk call - ndim = ctx->head->field->type->ndim; - + // store ndim now, as field advanced by __Pyx_BufFmt_ProcessTypeChunk call + ndim = ctx->head->field->type->ndim; + /* Parse all numbers in the format string */ while (*ts && *ts != ')') { // ignore space characters (not using isspace() due to C/C++ problem on MacOS-X) @@ -755,12 +755,12 @@ static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const cha return NULL; } CYTHON_FALLTHROUGH; - case '?': case 'c': case 'b': case 'B': case 'h': case 'H': case 'i': case 'I': + case '?': case 'c': case 'b': case 'B': case 'h': case 'H': case 'i': case 'I': case 'l': case 'L': case 'q': case 'Q': case 'f': case 'd': case 'g': case 'O': case 'p': - if ((ctx->enc_type == *ts) && (got_Z == ctx->is_complex) && - (ctx->enc_packmode == ctx->new_packmode) && (!ctx->is_valid_array)) { + if ((ctx->enc_type == *ts) && (got_Z == ctx->is_complex) && + (ctx->enc_packmode == ctx->new_packmode) && (!ctx->is_valid_array)) { /* Continue pooling same type */ ctx->enc_count += ctx->new_count; ctx->new_count = 1; diff --git a/contrib/tools/cython/Cython/Utility/Builtins.c b/contrib/tools/cython/Cython/Utility/Builtins.c index d47ad1d083..1ffb3bcebd 100644 --- a/contrib/tools/cython/Cython/Utility/Builtins.c +++ b/contrib/tools/cython/Cython/Utility/Builtins.c @@ -128,9 +128,9 @@ static PyObject* __Pyx_PyExec3(PyObject* o, PyObject* globals, PyObject* locals) } else { PyCompilerFlags cf; cf.cf_flags = 0; -#if PY_VERSION_HEX >= 0x030800A3 - cf.cf_feature_version = PY_MINOR_VERSION; -#endif +#if PY_VERSION_HEX >= 0x030800A3 + cf.cf_feature_version = PY_MINOR_VERSION; +#endif if (PyUnicode_Check(o)) { cf.cf_flags = PyCF_SOURCE_IS_UTF8; s = PyUnicode_AsUTF8String(o); @@ -282,8 +282,8 @@ static PyObject *__Pyx_PyLong_AbsNeg(PyObject *n) { { PyObject *copy = _PyLong_Copy((PyLongObject*)n); if (likely(copy)) { - // negate the size to swap the sign - __Pyx_SET_SIZE(copy, -Py_SIZE(copy)); + // negate the size to swap the sign + __Pyx_SET_SIZE(copy, -Py_SIZE(copy)); } return copy; } @@ -333,7 +333,7 @@ static long __Pyx__PyObject_Ord(PyObject* c) { } else { // FIXME: support character buffers - but CPython doesn't support them either PyErr_Format(PyExc_TypeError, - "ord() expected string of length 1, but %.200s found", Py_TYPE(c)->tp_name); + "ord() expected string of length 1, but %.200s found", Py_TYPE(c)->tp_name); return (long)(Py_UCS4)-1; } PyErr_Format(PyExc_TypeError, @@ -496,9 +496,9 @@ static CYTHON_INLINE PyObject* __Pyx_PyFrozenSet_New(PyObject* it) { result = PyFrozenSet_New(it); if (unlikely(!result)) return NULL; - if ((PY_VERSION_HEX >= 0x031000A1) || likely(PySet_GET_SIZE(result))) + if ((PY_VERSION_HEX >= 0x031000A1) || likely(PySet_GET_SIZE(result))) return result; - // empty frozenset is a singleton (on Python <3.10) + // empty frozenset is a singleton (on Python <3.10) // seems wasteful, but CPython does the same Py_DECREF(result); #endif diff --git a/contrib/tools/cython/Cython/Utility/Coroutine.c 
b/contrib/tools/cython/Cython/Utility/Coroutine.c index 82c00716d2..d26314083b 100644 --- a/contrib/tools/cython/Cython/Utility/Coroutine.c +++ b/contrib/tools/cython/Cython/Utility/Coroutine.c @@ -388,7 +388,7 @@ typedef struct { PyObject *gi_qualname; PyObject *gi_modulename; PyObject *gi_code; - PyObject *gi_frame; + PyObject *gi_frame; int resume_label; // using T_BOOL for property below requires char value char is_running; @@ -714,15 +714,15 @@ PyObject *__Pyx_Coroutine_SendEx(__pyx_CoroutineObject *self, PyObject *value, i PyTracebackObject *tb = (PyTracebackObject *) exc_state->exc_traceback; PyFrameObject *f = tb->tb_frame; - assert(f->f_back == NULL); - #if PY_VERSION_HEX >= 0x030B00A1 - // PyThreadState_GetFrame returns NULL if there isn't a current frame - // which is a valid state so no need to check - f->f_back = PyThreadState_GetFrame(tstate); - #else + assert(f->f_back == NULL); + #if PY_VERSION_HEX >= 0x030B00A1 + // PyThreadState_GetFrame returns NULL if there isn't a current frame + // which is a valid state so no need to check + f->f_back = PyThreadState_GetFrame(tstate); + #else Py_XINCREF(tstate->frame); f->f_back = tstate->frame; - #endif + #endif } #endif } @@ -796,33 +796,33 @@ PyObject *__Pyx_Coroutine_MethodReturn(CYTHON_UNUSED PyObject* gen, PyObject *re return retval; } -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) +static CYTHON_INLINE +PyObject *__Pyx_PyGen_Send(PyGenObject *gen, PyObject *arg) { +#if PY_VERSION_HEX <= 0x030A00A1 + return _PyGen_Send(gen, arg); +#else + PyObject *result; + // PyIter_Send() asserts non-NULL arg + if (PyIter_Send((PyObject*)gen, arg ? arg : Py_None, &result) == PYGEN_RETURN) { + if (PyAsyncGen_CheckExact(gen)) { + assert(result == Py_None); + PyErr_SetNone(PyExc_StopAsyncIteration); + } + else if (result == Py_None) { + PyErr_SetNone(PyExc_StopIteration); + } + else { + _PyGen_SetStopIterationValue(result); + } + Py_CLEAR(result); + } + return result; +#endif +} +#endif + static CYTHON_INLINE -PyObject *__Pyx_PyGen_Send(PyGenObject *gen, PyObject *arg) { -#if PY_VERSION_HEX <= 0x030A00A1 - return _PyGen_Send(gen, arg); -#else - PyObject *result; - // PyIter_Send() asserts non-NULL arg - if (PyIter_Send((PyObject*)gen, arg ? arg : Py_None, &result) == PYGEN_RETURN) { - if (PyAsyncGen_CheckExact(gen)) { - assert(result == Py_None); - PyErr_SetNone(PyExc_StopAsyncIteration); - } - else if (result == Py_None) { - PyErr_SetNone(PyExc_StopIteration); - } - else { - _PyGen_SetStopIterationValue(result); - } - Py_CLEAR(result); - } - return result; -#endif -} -#endif - -static CYTHON_INLINE PyObject *__Pyx_Coroutine_FinishDelegation(__pyx_CoroutineObject *gen) { PyObject *ret; PyObject *val = NULL; @@ -863,13 +863,13 @@ static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value) { #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) // _PyGen_Send() is not exported before Py3.6 if (PyGen_CheckExact(yf)) { - ret = __Pyx_PyGen_Send((PyGenObject*)yf, value == Py_None ? NULL : value); + ret = __Pyx_PyGen_Send((PyGenObject*)yf, value == Py_None ? 
NULL : value); } else #endif #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03050000 && defined(PyCoro_CheckExact) && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) // _PyGen_Send() is not exported before Py3.6 if (PyCoro_CheckExact(yf)) { - ret = __Pyx_PyGen_Send((PyGenObject*)yf, value == Py_None ? NULL : value); + ret = __Pyx_PyGen_Send((PyGenObject*)yf, value == Py_None ? NULL : value); } else #endif { @@ -965,7 +965,7 @@ static PyObject *__Pyx_Generator_Next(PyObject *self) { #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) // _PyGen_Send() is not exported before Py3.6 if (PyGen_CheckExact(yf)) { - ret = __Pyx_PyGen_Send((PyGenObject*)yf, NULL); + ret = __Pyx_PyGen_Send((PyGenObject*)yf, NULL); } else #endif #ifdef __Pyx_Coroutine_USED @@ -1145,7 +1145,7 @@ static int __Pyx_Coroutine_clear(PyObject *self) { } #endif Py_CLEAR(gen->gi_code); - Py_CLEAR(gen->gi_frame); + Py_CLEAR(gen->gi_frame); Py_CLEAR(gen->gi_name); Py_CLEAR(gen->gi_qualname); Py_CLEAR(gen->gi_modulename); @@ -1166,7 +1166,7 @@ static void __Pyx_Coroutine_dealloc(PyObject *self) { if (PyObject_CallFinalizerFromDealloc(self)) #else Py_TYPE(gen)->tp_del(self); - if (Py_REFCNT(self) > 0) + if (Py_REFCNT(self) > 0) #endif { // resurrected. :( @@ -1200,7 +1200,7 @@ static void __Pyx_Coroutine_del(PyObject *self) { #if !CYTHON_USE_TP_FINALIZE // Temporarily resurrect the object. assert(self->ob_refcnt == 0); - __Pyx_SET_REFCNT(self, 1); + __Pyx_SET_REFCNT(self, 1); #endif __Pyx_PyThreadState_assign @@ -1281,7 +1281,7 @@ static void __Pyx_Coroutine_del(PyObject *self) { #if !CYTHON_USE_TP_FINALIZE // Undo the temporary resurrection; can't use DECREF here, it would // cause a recursive call. - assert(Py_REFCNT(self) > 0); + assert(Py_REFCNT(self) > 0); if (--self->ob_refcnt == 0) { // this is the normal path out return; @@ -1290,12 +1290,12 @@ static void __Pyx_Coroutine_del(PyObject *self) { // close() resurrected it! Make it look like the original Py_DECREF // never happened. { - Py_ssize_t refcnt = Py_REFCNT(self); + Py_ssize_t refcnt = Py_REFCNT(self); _Py_NewReference(self); - __Pyx_SET_REFCNT(self, refcnt); + __Pyx_SET_REFCNT(self, refcnt); } #if CYTHON_COMPILING_IN_CPYTHON - assert(PyType_IS_GC(Py_TYPE(self)) && + assert(PyType_IS_GC(Py_TYPE(self)) && _Py_AS_GC(self)->gc.gc_refs != _PyGC_REFS_UNTRACKED); // If Py_REF_DEBUG, _Py_NewReference bumped _Py_RefTotal, so @@ -1378,31 +1378,31 @@ __Pyx_Coroutine_set_qualname(__pyx_CoroutineObject *self, PyObject *value, CYTHO return 0; } - -static PyObject * -__Pyx_Coroutine_get_frame(__pyx_CoroutineObject *self, CYTHON_UNUSED void *context) -{ - PyObject *frame = self->gi_frame; - if (!frame) { - if (unlikely(!self->gi_code)) { - // Avoid doing something stupid, e.g. during garbage collection. - Py_RETURN_NONE; - } - frame = (PyObject *) PyFrame_New( - PyThreadState_Get(), /*PyThreadState *tstate,*/ - (PyCodeObject*) self->gi_code, /*PyCodeObject *code,*/ - $moddict_cname, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (unlikely(!frame)) - return NULL; - // keep the frame cached once it's created - self->gi_frame = frame; - } - Py_INCREF(frame); - return frame; -} - + +static PyObject * +__Pyx_Coroutine_get_frame(__pyx_CoroutineObject *self, CYTHON_UNUSED void *context) +{ + PyObject *frame = self->gi_frame; + if (!frame) { + if (unlikely(!self->gi_code)) { + // Avoid doing something stupid, e.g. during garbage collection. 
+ Py_RETURN_NONE; + } + frame = (PyObject *) PyFrame_New( + PyThreadState_Get(), /*PyThreadState *tstate,*/ + (PyCodeObject*) self->gi_code, /*PyCodeObject *code,*/ + $moddict_cname, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (unlikely(!frame)) + return NULL; + // keep the frame cached once it's created + self->gi_frame = frame; + } + Py_INCREF(frame); + return frame; +} + static __pyx_CoroutineObject *__Pyx__Coroutine_New( PyTypeObject* type, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, PyObject *name, PyObject *qualname, PyObject *module_name) { @@ -1437,7 +1437,7 @@ static __pyx_CoroutineObject *__Pyx__Coroutine_NewInit( gen->gi_modulename = module_name; Py_XINCREF(code); gen->gi_code = code; - gen->gi_frame = NULL; + gen->gi_frame = NULL; PyObject_GC_Track(gen); return gen; @@ -1558,15 +1558,15 @@ static PyTypeObject __pyx_CoroutineAwaitType_type = { #if PY_VERSION_HEX >= 0x030400a1 0, /*tp_finalize*/ #endif -#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) +#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) 0, /*tp_vectorcall*/ #endif -#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ #endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 - 0, /*tp_pypy_flags*/ -#endif }; #if PY_VERSION_HEX < 0x030500B1 || defined(__Pyx_IterableCoroutine_USED) || CYTHON_USE_ASYNC_SLOTS @@ -1648,9 +1648,9 @@ static __Pyx_PyAsyncMethodsStruct __pyx_Coroutine_as_async = { __Pyx_Coroutine_await, /*am_await*/ 0, /*am_aiter*/ 0, /*am_anext*/ -#if PY_VERSION_HEX >= 0x030A00A3 - 0, /*am_send*/ -#endif +#if PY_VERSION_HEX >= 0x030A00A3 + 0, /*am_send*/ +#endif }; #endif @@ -1721,15 +1721,15 @@ static PyTypeObject __pyx_CoroutineType_type = { #elif PY_VERSION_HEX >= 0x030400a1 0, /*tp_finalize*/ #endif -#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) +#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) 0, /*tp_vectorcall*/ #endif -#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ #endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 - 0, /*tp_pypy_flags*/ -#endif }; static int __pyx_Coroutine_init(void) { @@ -1835,15 +1835,15 @@ static PyTypeObject __pyx_IterableCoroutineType_type = { #if PY_VERSION_HEX >= 0x030400a1 __Pyx_Coroutine_del, /*tp_finalize*/ #endif -#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) +#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) 0, /*tp_vectorcall*/ #endif -#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ #endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 - 0, /*tp_pypy_flags*/ -#endif }; @@ -1884,8 +1884,8 @@ static PyGetSetDef __pyx_Generator_getsets[] = { (char*) PyDoc_STR("name of the generator"), 0}, 
{(char *) "__qualname__", (getter)__Pyx_Coroutine_get_qualname, (setter)__Pyx_Coroutine_set_qualname, (char*) PyDoc_STR("qualified name of the generator"), 0}, - {(char *) "gi_frame", (getter)__Pyx_Coroutine_get_frame, NULL, - (char*) PyDoc_STR("Frame of the generator"), 0}, + {(char *) "gi_frame", (getter)__Pyx_Coroutine_get_frame, NULL, + (char*) PyDoc_STR("Frame of the generator"), 0}, {0, 0, 0, 0, 0} }; @@ -1946,15 +1946,15 @@ static PyTypeObject __pyx_GeneratorType_type = { #elif PY_VERSION_HEX >= 0x030400a1 0, /*tp_finalize*/ #endif -#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) +#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) 0, /*tp_vectorcall*/ #endif -#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ #endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 - 0, /*tp_pypy_flags*/ -#endif }; static int __pyx_Generator_init(void) { @@ -2351,9 +2351,9 @@ static PyTypeObject __Pyx__PyExc_StopAsyncIteration_type = { #if PY_VERSION_HEX >= 0x030400a1 0, /*tp_finalize*/ #endif -#if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM+0 >= 0x06000000 - 0, /*tp_pypy_flags*/ -#endif +#if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM+0 >= 0x06000000 + 0, /*tp_pypy_flags*/ +#endif }; #endif diff --git a/contrib/tools/cython/Cython/Utility/CppConvert.pyx b/contrib/tools/cython/Cython/Utility/CppConvert.pyx index a5c6edd483..5f7859dd0e 100644 --- a/contrib/tools/cython/Cython/Utility/CppConvert.pyx +++ b/contrib/tools/cython/Cython/Utility/CppConvert.pyx @@ -11,7 +11,7 @@ cdef extern from *: @cname("{{cname}}") cdef string {{cname}}(object o) except *: - cdef Py_ssize_t length = 0 + cdef Py_ssize_t length = 0 cdef const char* data = __Pyx_PyObject_AsStringAndSize(o, &length) return string(data, length) diff --git a/contrib/tools/cython/Cython/Utility/CythonFunction.c b/contrib/tools/cython/Cython/Utility/CythonFunction.c index f2bd802e27..d51b308a8d 100644 --- a/contrib/tools/cython/Cython/Utility/CythonFunction.c +++ b/contrib/tools/cython/Cython/Utility/CythonFunction.c @@ -1,5 +1,5 @@ -//////////////////// CythonFunctionShared.proto //////////////////// +//////////////////// CythonFunctionShared.proto //////////////////// #define __Pyx_CyFunction_USED 1 @@ -36,7 +36,7 @@ typedef struct { // Dynamic default args and annotations void *defaults; int defaults_pyobjects; - size_t defaults_size; // used by FusedFunction for copying defaults + size_t defaults_size; // used by FusedFunction for copying defaults int flags; // Defaults info @@ -50,7 +50,7 @@ static PyTypeObject *__pyx_CyFunctionType = 0; #define __Pyx_CyFunction_Check(obj) (__Pyx_TypeCheck(obj, __pyx_CyFunctionType)) -static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject* op, PyMethodDef *ml, +static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject* op, PyMethodDef *ml, int flags, PyObject* qualname, PyObject *self, PyObject *module, PyObject *globals, @@ -69,8 +69,8 @@ static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m, static int __pyx_CyFunction_init(void); - -//////////////////// CythonFunctionShared //////////////////// + +//////////////////// CythonFunctionShared //////////////////// //@substitute: naming //@requires: CommonStructures.c::FetchCommonType ////@requires: 
ObjectHandling.c::PyObjectGetAttrStr @@ -426,8 +426,8 @@ static PyObject * __Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, CYTHON_UNUSED PyObject *args) { #if PY_MAJOR_VERSION >= 3 - Py_INCREF(m->func_qualname); - return m->func_qualname; + Py_INCREF(m->func_qualname); + return m->func_qualname; #else return PyString_FromString(m->func.m_ml->ml_name); #endif @@ -445,9 +445,9 @@ static PyMethodDef __pyx_CyFunction_methods[] = { #define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func.m_weakreflist) #endif -static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int flags, PyObject* qualname, - PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { - if (unlikely(op == NULL)) +static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef *ml, int flags, PyObject* qualname, + PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { + if (unlikely(op == NULL)) return NULL; op->flags = flags; __Pyx_CyFunction_weakreflist(op) = NULL; @@ -469,7 +469,7 @@ static PyObject *__Pyx_CyFunction_Init(__pyx_CyFunctionObject *op, PyMethodDef * op->func_code = code; // Dynamic Default args op->defaults_pyobjects = 0; - op->defaults_size = 0; + op->defaults_size = 0; op->defaults = NULL; op->defaults_tuple = NULL; op->defaults_kwdict = NULL; @@ -550,7 +550,7 @@ static int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, static PyObject *__Pyx_CyFunction_descr_get(PyObject *func, PyObject *obj, PyObject *type) { -#if PY_MAJOR_VERSION < 3 +#if PY_MAJOR_VERSION < 3 __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func; if (m->flags & __Pyx_CYFUNCTION_STATICMETHOD) { @@ -566,7 +566,7 @@ static PyObject *__Pyx_CyFunction_descr_get(PyObject *func, PyObject *obj, PyObj if (obj == Py_None) obj = NULL; -#endif +#endif return __Pyx_PyMethod_New(func, obj, type); } @@ -731,15 +731,15 @@ static PyTypeObject __pyx_CyFunctionType_type = { #if PY_VERSION_HEX >= 0x030400a1 0, /*tp_finalize*/ #endif -#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) +#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) 0, /*tp_vectorcall*/ #endif -#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 0, /*tp_print*/ #endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 - 0, /*tp_pypy_flags*/ -#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ +#endif }; @@ -759,7 +759,7 @@ static CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t return PyErr_NoMemory(); memset(m->defaults, 0, size); m->defaults_pyobjects = pyobjects; - m->defaults_size = size; + m->defaults_size = size; return m->defaults; } @@ -781,36 +781,36 @@ static CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, Py Py_INCREF(dict); } - -//////////////////// CythonFunction.proto //////////////////// - -static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, - int flags, PyObject* qualname, - PyObject *closure, - PyObject *module, PyObject *globals, - PyObject* code); - -//////////////////// CythonFunction //////////////////// -//@requires: CythonFunctionShared - -static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, - PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { - PyObject *op = __Pyx_CyFunction_Init( - PyObject_GC_New(__pyx_CyFunctionObject, 
__pyx_CyFunctionType), - ml, flags, qualname, closure, module, globals, code - ); - if (likely(op)) { - PyObject_GC_Track(op); - } - return op; -} - - + +//////////////////// CythonFunction.proto //////////////////// + +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, + int flags, PyObject* qualname, + PyObject *closure, + PyObject *module, PyObject *globals, + PyObject* code); + +//////////////////// CythonFunction //////////////////// +//@requires: CythonFunctionShared + +static PyObject *__Pyx_CyFunction_New(PyMethodDef *ml, int flags, PyObject* qualname, + PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) { + PyObject *op = __Pyx_CyFunction_Init( + PyObject_GC_New(__pyx_CyFunctionObject, __pyx_CyFunctionType), + ml, flags, qualname, closure, module, globals, code + ); + if (likely(op)) { + PyObject_GC_Track(op); + } + return op; +} + + //////////////////// CyFunctionClassCell.proto //////////////////// static int __Pyx_CyFunction_InitClassCell(PyObject *cyfunctions, PyObject *classobj);/*proto*/ //////////////////// CyFunctionClassCell //////////////////// -//@requires: CythonFunctionShared +//@requires: CythonFunctionShared static int __Pyx_CyFunction_InitClassCell(PyObject *cyfunctions, PyObject *classobj) { Py_ssize_t i, count = PyList_GET_SIZE(cyfunctions); @@ -833,9 +833,9 @@ static int __Pyx_CyFunction_InitClassCell(PyObject *cyfunctions, PyObject *class return 0; } - + //////////////////// FusedFunction.proto //////////////////// - + typedef struct { __pyx_CyFunctionObject func; PyObject *__signatures__; @@ -843,8 +843,8 @@ typedef struct { PyObject *self; } __pyx_FusedFunctionObject; -static PyObject *__pyx_FusedFunction_New(PyMethodDef *ml, int flags, - PyObject *qualname, PyObject *closure, +static PyObject *__pyx_FusedFunction_New(PyMethodDef *ml, int flags, + PyObject *qualname, PyObject *closure, PyObject *module, PyObject *globals, PyObject *code); @@ -855,27 +855,27 @@ static int __pyx_FusedFunction_init(void); #define __Pyx_FusedFunction_USED //////////////////// FusedFunction //////////////////// -//@requires: CythonFunctionShared +//@requires: CythonFunctionShared static PyObject * -__pyx_FusedFunction_New(PyMethodDef *ml, int flags, - PyObject *qualname, PyObject *closure, +__pyx_FusedFunction_New(PyMethodDef *ml, int flags, + PyObject *qualname, PyObject *closure, PyObject *module, PyObject *globals, PyObject *code) { - PyObject *op = __Pyx_CyFunction_Init( - // __pyx_CyFunctionObject is correct below since that's the cast that we want. - PyObject_GC_New(__pyx_CyFunctionObject, __pyx_FusedFunctionType), - ml, flags, qualname, closure, module, globals, code - ); - if (likely(op)) { - __pyx_FusedFunctionObject *fusedfunc = (__pyx_FusedFunctionObject *) op; - fusedfunc->__signatures__ = NULL; - fusedfunc->type = NULL; - fusedfunc->self = NULL; - PyObject_GC_Track(op); - } - return op; + PyObject *op = __Pyx_CyFunction_Init( + // __pyx_CyFunctionObject is correct below since that's the cast that we want. 
+ PyObject_GC_New(__pyx_CyFunctionObject, __pyx_FusedFunctionType), + ml, flags, qualname, closure, module, globals, code + ); + if (likely(op)) { + __pyx_FusedFunctionObject *fusedfunc = (__pyx_FusedFunctionObject *) op; + fusedfunc->__signatures__ = NULL; + fusedfunc->type = NULL; + fusedfunc->self = NULL; + PyObject_GC_Track(op); + } + return op; } static void @@ -925,7 +925,7 @@ __pyx_FusedFunction_descr_get(PyObject *self, PyObject *obj, PyObject *type) if (obj == Py_None) obj = NULL; - meth = (__pyx_FusedFunctionObject *) __pyx_FusedFunction_New( + meth = (__pyx_FusedFunctionObject *) __pyx_FusedFunction_New( ((PyCFunctionObject *) func)->m_ml, ((__pyx_CyFunctionObject *) func)->flags, ((__pyx_CyFunctionObject *) func)->func_qualname, @@ -936,26 +936,26 @@ __pyx_FusedFunction_descr_get(PyObject *self, PyObject *obj, PyObject *type) if (!meth) return NULL; - // defaults needs copying fully rather than just copying the pointer - // since otherwise it will be freed on destruction of meth despite - // belonging to func rather than meth - if (func->func.defaults) { - PyObject **pydefaults; - int i; - - if (!__Pyx_CyFunction_InitDefaults((PyObject*)meth, - func->func.defaults_size, - func->func.defaults_pyobjects)) { - Py_XDECREF((PyObject*)meth); - return NULL; - } - memcpy(meth->func.defaults, func->func.defaults, func->func.defaults_size); - - pydefaults = __Pyx_CyFunction_Defaults(PyObject *, meth); - for (i = 0; i < meth->func.defaults_pyobjects; i++) - Py_XINCREF(pydefaults[i]); - } - + // defaults needs copying fully rather than just copying the pointer + // since otherwise it will be freed on destruction of meth despite + // belonging to func rather than meth + if (func->func.defaults) { + PyObject **pydefaults; + int i; + + if (!__Pyx_CyFunction_InitDefaults((PyObject*)meth, + func->func.defaults_size, + func->func.defaults_pyobjects)) { + Py_XDECREF((PyObject*)meth); + return NULL; + } + memcpy(meth->func.defaults, func->func.defaults, func->func.defaults_size); + + pydefaults = __Pyx_CyFunction_Defaults(PyObject *, meth); + for (i = 0; i < meth->func.defaults_pyobjects; i++) + Py_XINCREF(pydefaults[i]); + } + Py_XINCREF(func->func.func_classobj); meth->func.func_classobj = func->func.func_classobj; @@ -1004,29 +1004,29 @@ __pyx_FusedFunction_getitem(__pyx_FusedFunctionObject *self, PyObject *idx) PyObject *sep = NULL; int i; - if (unlikely(!list)) + if (unlikely(!list)) return NULL; for (i = 0; i < n; i++) { - int ret; - PyObject *string; + int ret; + PyObject *string; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS PyObject *item = PyTuple_GET_ITEM(idx, i); #else - PyObject *item = PySequence_ITEM(idx, i); if (unlikely(!item)) goto __pyx_err; + PyObject *item = PySequence_ITEM(idx, i); if (unlikely(!item)) goto __pyx_err; #endif string = _obj_to_str(item); #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS) Py_DECREF(item); #endif - if (unlikely(!string)) goto __pyx_err; - ret = PyList_Append(list, string); + if (unlikely(!string)) goto __pyx_err; + ret = PyList_Append(list, string); Py_DECREF(string); - if (unlikely(ret < 0)) goto __pyx_err; + if (unlikely(ret < 0)) goto __pyx_err; } sep = PyUnicode_FromString("|"); - if (likely(sep)) + if (likely(sep)) signature = PyUnicode_Join(sep, list); __pyx_err: ; @@ -1140,7 +1140,7 @@ __pyx_FusedFunction_call(PyObject *func, PyObject *args, PyObject *kw) PyErr_Format(PyExc_TypeError, "First argument should be of type %.200s, got %.200s.", ((PyTypeObject *) binding_func->type)->tp_name, - 
Py_TYPE(self)->tp_name); + Py_TYPE(self)->tp_name); goto bad; } else if (unlikely(is_instance == -1)) { goto bad; @@ -1242,7 +1242,7 @@ static PyTypeObject __pyx_FusedFunctionType_type = { // __doc__ is None for the fused function type, but we need it to be // a descriptor for the instance's __doc__, so rebuild descriptors in our subclass __pyx_CyFunction_getsets, /*tp_getset*/ - // NOTE: tp_base may be changed later during module initialisation when importing CyFunction across modules. + // NOTE: tp_base may be changed later during module initialisation when importing CyFunction across modules. &__pyx_CyFunctionType_type, /*tp_base*/ 0, /*tp_dict*/ __pyx_FusedFunction_descr_get, /*tp_descr_get*/ @@ -1263,20 +1263,20 @@ static PyTypeObject __pyx_FusedFunctionType_type = { #if PY_VERSION_HEX >= 0x030400a1 0, /*tp_finalize*/ #endif -#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) +#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) 0, /*tp_vectorcall*/ #endif -#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ #endif -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 - 0, /*tp_pypy_flags*/ -#endif }; static int __pyx_FusedFunction_init(void) { - // Set base from __Pyx_FetchCommonTypeFromSpec, in case it's different from the local static value. - __pyx_FusedFunctionType_type.tp_base = __pyx_CyFunctionType; + // Set base from __Pyx_FetchCommonTypeFromSpec, in case it's different from the local static value. + __pyx_FusedFunctionType_type.tp_base = __pyx_CyFunctionType; __pyx_FusedFunctionType = __Pyx_FetchCommonType(&__pyx_FusedFunctionType_type); if (__pyx_FusedFunctionType == NULL) { return -1; @@ -1302,8 +1302,8 @@ static PyObject* __Pyx_Method_ClassMethod(PyObject *method) { // special C-API function only in Pyston and PyPy >= 5.9 if (PyMethodDescr_Check(method)) #else - #if PY_MAJOR_VERSION == 2 - // PyMethodDescr_Type is not exposed in the CPython C-API in Py2. + #if PY_MAJOR_VERSION == 2 + // PyMethodDescr_Type is not exposed in the CPython C-API in Py2. 
static PyTypeObject *methoddescr_type = NULL; if (methoddescr_type == NULL) { PyObject *meth = PyObject_GetAttrString((PyObject*)&PyList_Type, "append"); @@ -1311,9 +1311,9 @@ static PyObject* __Pyx_Method_ClassMethod(PyObject *method) { methoddescr_type = Py_TYPE(meth); Py_DECREF(meth); } - #else - PyTypeObject *methoddescr_type = &PyMethodDescr_Type; - #endif + #else + PyTypeObject *methoddescr_type = &PyMethodDescr_Type; + #endif if (__Pyx_TypeCheck(method, methoddescr_type)) #endif { @@ -1331,7 +1331,7 @@ static PyObject* __Pyx_Method_ClassMethod(PyObject *method) { // python classes return PyClassMethod_New(PyMethod_GET_FUNCTION(method)); } - else { + else { return PyClassMethod_New(method); } } diff --git a/contrib/tools/cython/Cython/Utility/Embed.c b/contrib/tools/cython/Cython/Utility/Embed.c index 73ae274eaf..60da8f2330 100644 --- a/contrib/tools/cython/Cython/Utility/Embed.c +++ b/contrib/tools/cython/Cython/Utility/Embed.c @@ -66,12 +66,12 @@ static int __Pyx_main(int argc, wchar_t **argv) { } Py_XDECREF(m); } -#if PY_VERSION_HEX < 0x03060000 +#if PY_VERSION_HEX < 0x03060000 Py_Finalize(); -#else - if (Py_FinalizeEx() < 0) - return 2; -#endif +#else + if (Py_FinalizeEx() < 0) + return 2; +#endif return 0; } @@ -213,11 +213,11 @@ int if (res == 0) res = __Pyx_main(argc, argv_copy); for (i = 0; i < argc; i++) { -#if PY_VERSION_HEX < 0x03050000 +#if PY_VERSION_HEX < 0x03050000 free(argv_copy2[i]); -#else - PyMem_RawFree(argv_copy2[i]); -#endif +#else + PyMem_RawFree(argv_copy2[i]); +#endif } free(argv_copy); free(argv_copy2); diff --git a/contrib/tools/cython/Cython/Utility/Exceptions.c b/contrib/tools/cython/Cython/Utility/Exceptions.c index c550e38332..b0411f6956 100644 --- a/contrib/tools/cython/Cython/Utility/Exceptions.c +++ b/contrib/tools/cython/Cython/Utility/Exceptions.c @@ -647,7 +647,7 @@ static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);/*proto*/ //@substitute: naming #ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) { +static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) { PyObject *use_cline; PyObject *ptype, *pvalue, *ptraceback; #if CYTHON_COMPILING_IN_CPYTHON @@ -681,8 +681,8 @@ static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int } if (!use_cline) { c_line = 0; - // No need to handle errors here when we reset the exception state just afterwards. - (void) PyObject_SetAttr(${cython_runtime_cname}, PYIDENT("cline_in_traceback"), Py_False); + // No need to handle errors here when we reset the exception state just afterwards. 
+ (void) PyObject_SetAttr(${cython_runtime_cname}, PYIDENT("cline_in_traceback"), Py_False); } else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { c_line = 0; @@ -709,33 +709,33 @@ static void __Pyx_AddTraceback(const char *funcname, int c_line, static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( const char *funcname, int c_line, int py_line, const char *filename) { - PyCodeObject *py_code = NULL; - PyObject *py_funcname = NULL; - #if PY_MAJOR_VERSION < 3 - PyObject *py_srcfile = NULL; + PyCodeObject *py_code = NULL; + PyObject *py_funcname = NULL; + #if PY_MAJOR_VERSION < 3 + PyObject *py_srcfile = NULL; py_srcfile = PyString_FromString(filename); - if (!py_srcfile) goto bad; + if (!py_srcfile) goto bad; #endif - + if (c_line) { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, $cfilenm_cname, c_line); - if (!py_funcname) goto bad; + if (!py_funcname) goto bad; #else py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, $cfilenm_cname, c_line); - if (!py_funcname) goto bad; - funcname = PyUnicode_AsUTF8(py_funcname); - if (!funcname) goto bad; + if (!py_funcname) goto bad; + funcname = PyUnicode_AsUTF8(py_funcname); + if (!funcname) goto bad; #endif } else { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromString(funcname); - if (!py_funcname) goto bad; + if (!py_funcname) goto bad; #endif } - #if PY_MAJOR_VERSION < 3 + #if PY_MAJOR_VERSION < 3 py_code = __Pyx_PyCode_New( 0, /*int argcount,*/ 0, /*int kwonlyargcount,*/ @@ -754,16 +754,16 @@ static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( $empty_bytes /*PyObject *lnotab*/ ); Py_DECREF(py_srcfile); - #else - py_code = PyCode_NewEmpty(filename, funcname, py_line); - #endif - Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline + #else + py_code = PyCode_NewEmpty(filename, funcname, py_line); + #endif + Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline return py_code; bad: - Py_XDECREF(py_funcname); - #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_funcname); + #if PY_MAJOR_VERSION < 3 Py_XDECREF(py_srcfile); - #endif + #endif return NULL; } diff --git a/contrib/tools/cython/Cython/Utility/ExtensionTypes.c b/contrib/tools/cython/Cython/Utility/ExtensionTypes.c index e5f16bcb33..0d8c41dee1 100644 --- a/contrib/tools/cython/Cython/Utility/ExtensionTypes.c +++ b/contrib/tools/cython/Cython/Utility/ExtensionTypes.c @@ -54,66 +54,66 @@ static int __Pyx_PyType_Ready(PyTypeObject *t) { } } -#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) - { - // Make sure GC does not pick up our non-heap type as heap type with this hack! - // For details, see https://github.com/cython/cython/issues/3603 - PyObject *ret, *py_status; - int gc_was_enabled; - PyObject *gc = PyImport_Import(PYUNICODE("gc")); - if (unlikely(!gc)) return -1; - py_status = PyObject_CallMethodObjArgs(gc, PYUNICODE("isenabled"), NULL); - if (unlikely(!py_status)) { - Py_DECREF(gc); - return -1; - } - gc_was_enabled = __Pyx_PyObject_IsTrue(py_status); - Py_DECREF(py_status); - if (gc_was_enabled > 0) { - ret = PyObject_CallMethodObjArgs(gc, PYUNICODE("disable"), NULL); - if (unlikely(!ret)) { - Py_DECREF(gc); - return -1; - } - Py_DECREF(ret); - } else if (unlikely(gc_was_enabled == -1)) { - Py_DECREF(gc); - return -1; - } - - // As of https://bugs.python.org/issue22079 - // PyType_Ready enforces that all bases of a non-heap type are - // non-heap. 
We know that this is the case for the solid base but - // other bases are heap allocated and are kept alive through the - // tp_bases reference. - // Other than this check, the Py_TPFLAGS_HEAPTYPE flag is unused - // in PyType_Ready(). - t->tp_flags |= Py_TPFLAGS_HEAPTYPE; +#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) + { + // Make sure GC does not pick up our non-heap type as heap type with this hack! + // For details, see https://github.com/cython/cython/issues/3603 + PyObject *ret, *py_status; + int gc_was_enabled; + PyObject *gc = PyImport_Import(PYUNICODE("gc")); + if (unlikely(!gc)) return -1; + py_status = PyObject_CallMethodObjArgs(gc, PYUNICODE("isenabled"), NULL); + if (unlikely(!py_status)) { + Py_DECREF(gc); + return -1; + } + gc_was_enabled = __Pyx_PyObject_IsTrue(py_status); + Py_DECREF(py_status); + if (gc_was_enabled > 0) { + ret = PyObject_CallMethodObjArgs(gc, PYUNICODE("disable"), NULL); + if (unlikely(!ret)) { + Py_DECREF(gc); + return -1; + } + Py_DECREF(ret); + } else if (unlikely(gc_was_enabled == -1)) { + Py_DECREF(gc); + return -1; + } + + // As of https://bugs.python.org/issue22079 + // PyType_Ready enforces that all bases of a non-heap type are + // non-heap. We know that this is the case for the solid base but + // other bases are heap allocated and are kept alive through the + // tp_bases reference. + // Other than this check, the Py_TPFLAGS_HEAPTYPE flag is unused + // in PyType_Ready(). + t->tp_flags |= Py_TPFLAGS_HEAPTYPE; #endif r = PyType_Ready(t); -#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) - t->tp_flags &= ~Py_TPFLAGS_HEAPTYPE; - - if (gc_was_enabled) { - PyObject *t, *v, *tb; - PyErr_Fetch(&t, &v, &tb); - ret = PyObject_CallMethodObjArgs(gc, PYUNICODE("enable"), NULL); - if (likely(ret || r == -1)) { - Py_XDECREF(ret); - // do not overwrite exceptions raised by PyType_Ready() above - PyErr_Restore(t, v, tb); - } else { - // PyType_Ready() succeeded, but gc.enable() failed. - Py_XDECREF(t); - Py_XDECREF(v); - Py_XDECREF(tb); - r = -1; - } - } - Py_DECREF(gc); - } +#if PY_VERSION_HEX >= 0x03050000 && !defined(PYSTON_MAJOR_VERSION) + t->tp_flags &= ~Py_TPFLAGS_HEAPTYPE; + + if (gc_was_enabled) { + PyObject *t, *v, *tb; + PyErr_Fetch(&t, &v, &tb); + ret = PyObject_CallMethodObjArgs(gc, PYUNICODE("enable"), NULL); + if (likely(ret || r == -1)) { + Py_XDECREF(ret); + // do not overwrite exceptions raised by PyType_Ready() above + PyErr_Restore(t, v, tb); + } else { + // PyType_Ready() succeeded, but gc.enable() failed. 
+ Py_XDECREF(t); + Py_XDECREF(v); + Py_XDECREF(tb); + r = -1; + } + } + Py_DECREF(gc); + } #endif return r; @@ -177,7 +177,7 @@ static void __Pyx_call_next_tp_clear(PyObject* obj, inquiry current_tp_clear) { static int __Pyx_setup_reduce(PyObject* type_obj); /////////////// SetupReduce /////////////// -//@requires: ObjectHandling.c::PyObjectGetAttrStrNoError +//@requires: ObjectHandling.c::PyObjectGetAttrStrNoError //@requires: ObjectHandling.c::PyObjectGetAttrStr //@substitute: naming @@ -212,61 +212,61 @@ static int __Pyx_setup_reduce(PyObject* type_obj) { PyObject *setstate_cython = NULL; #if CYTHON_USE_PYTYPE_LOOKUP - if (_PyType_Lookup((PyTypeObject*)type_obj, PYIDENT("__getstate__"))) goto __PYX_GOOD; + if (_PyType_Lookup((PyTypeObject*)type_obj, PYIDENT("__getstate__"))) goto __PYX_GOOD; #else - if (PyObject_HasAttr(type_obj, PYIDENT("__getstate__"))) goto __PYX_GOOD; + if (PyObject_HasAttr(type_obj, PYIDENT("__getstate__"))) goto __PYX_GOOD; #endif #if CYTHON_USE_PYTYPE_LOOKUP - object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, PYIDENT("__reduce_ex__")); if (!object_reduce_ex) goto __PYX_BAD; + object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, PYIDENT("__reduce_ex__")); if (!object_reduce_ex) goto __PYX_BAD; #else - object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, PYIDENT("__reduce_ex__")); if (!object_reduce_ex) goto __PYX_BAD; + object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, PYIDENT("__reduce_ex__")); if (!object_reduce_ex) goto __PYX_BAD; #endif - reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__reduce_ex__")); if (unlikely(!reduce_ex)) goto __PYX_BAD; + reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__reduce_ex__")); if (unlikely(!reduce_ex)) goto __PYX_BAD; if (reduce_ex == object_reduce_ex) { #if CYTHON_USE_PYTYPE_LOOKUP - object_reduce = _PyType_Lookup(&PyBaseObject_Type, PYIDENT("__reduce__")); if (!object_reduce) goto __PYX_BAD; + object_reduce = _PyType_Lookup(&PyBaseObject_Type, PYIDENT("__reduce__")); if (!object_reduce) goto __PYX_BAD; #else - object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, PYIDENT("__reduce__")); if (!object_reduce) goto __PYX_BAD; + object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, PYIDENT("__reduce__")); if (!object_reduce) goto __PYX_BAD; #endif - reduce = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__reduce__")); if (unlikely(!reduce)) goto __PYX_BAD; + reduce = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__reduce__")); if (unlikely(!reduce)) goto __PYX_BAD; if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, PYIDENT("__reduce_cython__"))) { - reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, PYIDENT("__reduce_cython__")); - if (likely(reduce_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__reduce__"), reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__reduce_cython__")); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (reduce == object_reduce || PyErr_Occurred()) { - // Ignore if we're done, i.e. if 'reduce' already has the right name and the original is gone. - // Otherwise: error. 
- goto __PYX_BAD; - } + reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, PYIDENT("__reduce_cython__")); + if (likely(reduce_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__reduce__"), reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__reduce_cython__")); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (reduce == object_reduce || PyErr_Occurred()) { + // Ignore if we're done, i.e. if 'reduce' already has the right name and the original is gone. + // Otherwise: error. + goto __PYX_BAD; + } setstate = __Pyx_PyObject_GetAttrStr(type_obj, PYIDENT("__setstate__")); if (!setstate) PyErr_Clear(); if (!setstate || __Pyx_setup_reduce_is_named(setstate, PYIDENT("__setstate_cython__"))) { - setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, PYIDENT("__setstate_cython__")); - if (likely(setstate_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__setstate__"), setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__setstate_cython__")); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (!setstate || PyErr_Occurred()) { - // Ignore if we're done, i.e. if 'setstate' already has the right name and the original is gone. - // Otherwise: error. - goto __PYX_BAD; - } + setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, PYIDENT("__setstate_cython__")); + if (likely(setstate_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__setstate__"), setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, PYIDENT("__setstate_cython__")); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (!setstate || PyErr_Occurred()) { + // Ignore if we're done, i.e. if 'setstate' already has the right name and the original is gone. + // Otherwise: error. + goto __PYX_BAD; + } } PyType_Modified((PyTypeObject*)type_obj); } } - goto __PYX_GOOD; + goto __PYX_GOOD; -__PYX_BAD: +__PYX_BAD: if (!PyErr_Occurred()) PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); ret = -1; -__PYX_GOOD: +__PYX_GOOD: #if !CYTHON_USE_PYTYPE_LOOKUP Py_XDECREF(object_reduce); Py_XDECREF(object_reduce_ex); diff --git a/contrib/tools/cython/Cython/Utility/FunctionArguments.c b/contrib/tools/cython/Cython/Utility/FunctionArguments.c index e738a91e1c..8333d93666 100644 --- a/contrib/tools/cython/Cython/Utility/FunctionArguments.c +++ b/contrib/tools/cython/Cython/Utility/FunctionArguments.c @@ -211,7 +211,7 @@ static int __Pyx_ParseOptionalKeywords( name = first_kw_arg; #if PY_MAJOR_VERSION < 3 - if (likely(PyString_Check(key))) { + if (likely(PyString_Check(key))) { while (*name) { if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) && _PyString_Eq(**name, key)) { @@ -239,9 +239,9 @@ static int __Pyx_ParseOptionalKeywords( while (*name) { int cmp = (**name == key) ? 0 : #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : #endif - // In Py2, we may need to convert the argument name from str to unicode for comparison. + // In Py2, we may need to convert the argument name from str to unicode for comparison. 
PyUnicode_Compare(**name, key); if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; if (cmp == 0) { @@ -257,7 +257,7 @@ static int __Pyx_ParseOptionalKeywords( while (argname != first_kw_arg) { int cmp = (**argname == key) ? 0 : #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : #endif // need to convert argument name from bytes to unicode for comparison PyUnicode_Compare(**argname, key); diff --git a/contrib/tools/cython/Cython/Utility/ImportExport.c b/contrib/tools/cython/Cython/Utility/ImportExport.c index 5633bda97f..532ec326f6 100644 --- a/contrib/tools/cython/Cython/Utility/ImportExport.c +++ b/contrib/tools/cython/Cython/Utility/ImportExport.c @@ -46,8 +46,8 @@ static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { { #if PY_MAJOR_VERSION >= 3 if (level == -1) { - // Avoid C compiler warning if strchr() evaluates to false at compile time. - if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) { + // Avoid C compiler warning if strchr() evaluates to false at compile time. + if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) { /* try package relative import first */ module = PyImport_ImportModuleLevelObject( name, global_dict, empty_dict, list, 1); @@ -152,12 +152,12 @@ __Pyx_import_all_from(PyObject *locals, PyObject *v) } if (skip_leading_underscores && #if PY_MAJOR_VERSION < 3 - likely(PyString_Check(name)) && + likely(PyString_Check(name)) && PyString_AS_STRING(name)[0] == '_') #else - likely(PyUnicode_Check(name)) && - likely(__Pyx_PyUnicode_GET_LENGTH(name)) && - __Pyx_PyUnicode_READ_CHAR(name, 0) == '_') + likely(PyUnicode_Check(name)) && + likely(__Pyx_PyUnicode_GET_LENGTH(name)) && + __Pyx_PyUnicode_READ_CHAR(name, 0) == '_') #endif { Py_DECREF(name); @@ -658,7 +658,7 @@ static int __Pyx_MergeVtables(PyTypeObject *type) { base = base->tp_base; } } - base_vtables = (void**) malloc(sizeof(void*) * (size_t)(base_depth + 1)); + base_vtables = (void**) malloc(sizeof(void*) * (size_t)(base_depth + 1)); base_vtables[0] = unknown; // Could do MRO resolution of individual methods in the future, assuming // compatible vtables, but for now simply require a common vtable base. 
diff --git a/contrib/tools/cython/Cython/Utility/MemoryView.pyx b/contrib/tools/cython/Cython/Utility/MemoryView.pyx index d6a388c7e4..6ca5fab9ba 100644 --- a/contrib/tools/cython/Cython/Utility/MemoryView.pyx +++ b/contrib/tools/cython/Cython/Utility/MemoryView.pyx @@ -1050,7 +1050,7 @@ cdef memoryview_fromslice({{memviewslice_name}} memviewslice, @cname('__pyx_memoryview_get_slice_from_memoryview') cdef {{memviewslice_name}} *get_slice_from_memview(memoryview memview, - {{memviewslice_name}} *mslice) except NULL: + {{memviewslice_name}} *mslice) except NULL: cdef _memoryviewslice obj if isinstance(memview, _memoryviewslice): obj = memview @@ -1176,10 +1176,10 @@ cdef void copy_strided_to_strided({{memviewslice_name}} *src, @cname('__pyx_memoryview_slice_get_size') cdef Py_ssize_t slice_get_size({{memviewslice_name}} *src, int ndim) nogil: "Return the size of the memory occupied by the slice in number of bytes" - cdef Py_ssize_t shape, size = src.memview.view.itemsize + cdef Py_ssize_t shape, size = src.memview.view.itemsize - for shape in src.shape[:ndim]: - size *= shape + for shape in src.shape[:ndim]: + size *= shape return size @@ -1196,11 +1196,11 @@ cdef Py_ssize_t fill_contig_strides_array( if order == 'F': for idx in range(ndim): strides[idx] = stride - stride *= shape[idx] + stride *= shape[idx] else: for idx in range(ndim - 1, -1, -1): strides[idx] = stride - stride *= shape[idx] + stride *= shape[idx] return stride @@ -1466,8 +1466,8 @@ cdef bytes format_from_typeinfo(__Pyx_TypeInfo *type): cdef bytes part, result if type.typegroup == 'S': - assert type.fields != NULL - assert type.fields.type != NULL + assert type.fields != NULL + assert type.fields.type != NULL if type.flags & __PYX_BUF_FLAGS_PACKED_STRUCT: alignment = b'^' diff --git a/contrib/tools/cython/Cython/Utility/MemoryView_C.c b/contrib/tools/cython/Cython/Utility/MemoryView_C.c index c50819f4c8..0a5d8ee2c2 100644 --- a/contrib/tools/cython/Cython/Utility/MemoryView_C.c +++ b/contrib/tools/cython/Cython/Utility/MemoryView_C.c @@ -181,13 +181,13 @@ __pyx_check_strides(Py_buffer *buf, int dim, int ndim, int spec) if (buf->strides) { if (spec & __Pyx_MEMVIEW_CONTIG) { if (spec & (__Pyx_MEMVIEW_PTR|__Pyx_MEMVIEW_FULL)) { - if (unlikely(buf->strides[dim] != sizeof(void *))) { + if (unlikely(buf->strides[dim] != sizeof(void *))) { PyErr_Format(PyExc_ValueError, "Buffer is not indirectly contiguous " "in dimension %d.", dim); goto fail; } - } else if (unlikely(buf->strides[dim] != buf->itemsize)) { + } else if (unlikely(buf->strides[dim] != buf->itemsize)) { PyErr_SetString(PyExc_ValueError, "Buffer and memoryview are not contiguous " "in the same dimension."); @@ -199,7 +199,7 @@ __pyx_check_strides(Py_buffer *buf, int dim, int ndim, int spec) Py_ssize_t stride = buf->strides[dim]; if (stride < 0) stride = -stride; - if (unlikely(stride < buf->itemsize)) { + if (unlikely(stride < buf->itemsize)) { PyErr_SetString(PyExc_ValueError, "Buffer and memoryview are not contiguous " "in the same dimension."); @@ -207,17 +207,17 @@ __pyx_check_strides(Py_buffer *buf, int dim, int ndim, int spec) } } } else { - if (unlikely(spec & __Pyx_MEMVIEW_CONTIG && dim != ndim - 1)) { + if (unlikely(spec & __Pyx_MEMVIEW_CONTIG && dim != ndim - 1)) { PyErr_Format(PyExc_ValueError, "C-contiguous buffer is not contiguous in " "dimension %d", dim); goto fail; - } else if (unlikely(spec & (__Pyx_MEMVIEW_PTR))) { + } else if (unlikely(spec & (__Pyx_MEMVIEW_PTR))) { PyErr_Format(PyExc_ValueError, "C-contiguous buffer is not indirect in " "dimension 
%d", dim); goto fail; - } else if (unlikely(buf->suboffsets)) { + } else if (unlikely(buf->suboffsets)) { PyErr_SetString(PyExc_ValueError, "Buffer exposes suboffsets but no strides"); goto fail; @@ -235,7 +235,7 @@ __pyx_check_suboffsets(Py_buffer *buf, int dim, CYTHON_UNUSED int ndim, int spec // Todo: without PyBUF_INDIRECT we may not have suboffset information, i.e., the // ptr may not be set to NULL but may be uninitialized? if (spec & __Pyx_MEMVIEW_DIRECT) { - if (unlikely(buf->suboffsets && buf->suboffsets[dim] >= 0)) { + if (unlikely(buf->suboffsets && buf->suboffsets[dim] >= 0)) { PyErr_Format(PyExc_ValueError, "Buffer not compatible with direct access " "in dimension %d.", dim); @@ -244,7 +244,7 @@ __pyx_check_suboffsets(Py_buffer *buf, int dim, CYTHON_UNUSED int ndim, int spec } if (spec & __Pyx_MEMVIEW_PTR) { - if (unlikely(!buf->suboffsets || (buf->suboffsets[dim] < 0))) { + if (unlikely(!buf->suboffsets || (buf->suboffsets[dim] < 0))) { PyErr_Format(PyExc_ValueError, "Buffer is not indirectly accessible " "in dimension %d.", dim); @@ -265,7 +265,7 @@ __pyx_verify_contig(Py_buffer *buf, int ndim, int c_or_f_flag) if (c_or_f_flag & __Pyx_IS_F_CONTIG) { Py_ssize_t stride = 1; for (i = 0; i < ndim; i++) { - if (unlikely(stride * buf->itemsize != buf->strides[i] && buf->shape[i] > 1)) { + if (unlikely(stride * buf->itemsize != buf->strides[i] && buf->shape[i] > 1)) { PyErr_SetString(PyExc_ValueError, "Buffer not fortran contiguous."); goto fail; @@ -275,7 +275,7 @@ __pyx_verify_contig(Py_buffer *buf, int ndim, int c_or_f_flag) } else if (c_or_f_flag & __Pyx_IS_C_CONTIG) { Py_ssize_t stride = 1; for (i = ndim - 1; i >- 1; i--) { - if (unlikely(stride * buf->itemsize != buf->strides[i] && buf->shape[i] > 1)) { + if (unlikely(stride * buf->itemsize != buf->strides[i] && buf->shape[i] > 1)) { PyErr_SetString(PyExc_ValueError, "Buffer not C contiguous."); goto fail; @@ -322,7 +322,7 @@ static int __Pyx_ValidateAndInit_memviewslice( } buf = &memview->view; - if (unlikely(buf->ndim != ndim)) { + if (unlikely(buf->ndim != ndim)) { PyErr_Format(PyExc_ValueError, "Buffer has wrong number of dimensions (expected %d, got %d)", ndim, buf->ndim); @@ -331,10 +331,10 @@ static int __Pyx_ValidateAndInit_memviewslice( if (new_memview) { __Pyx_BufFmt_Init(&ctx, stack, dtype); - if (unlikely(!__Pyx_BufFmt_CheckString(&ctx, buf->format))) goto fail; + if (unlikely(!__Pyx_BufFmt_CheckString(&ctx, buf->format))) goto fail; } - if (unlikely((unsigned) buf->itemsize != dtype->size)) { + if (unlikely((unsigned) buf->itemsize != dtype->size)) { PyErr_Format(PyExc_ValueError, "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "u byte%s) " "does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "u byte%s)", @@ -347,19 +347,19 @@ static int __Pyx_ValidateAndInit_memviewslice( } /* Check axes */ - if (buf->len > 0) { - // 0-sized arrays do not undergo these checks since their strides are - // irrelevant and they are always both C- and F-contiguous. - for (i = 0; i < ndim; i++) { - spec = axes_specs[i]; - if (unlikely(!__pyx_check_strides(buf, i, ndim, spec))) - goto fail; - if (unlikely(!__pyx_check_suboffsets(buf, i, ndim, spec))) - goto fail; - } - - /* Check contiguity */ - if (unlikely(buf->strides && !__pyx_verify_contig(buf, ndim, c_or_f_flag))) + if (buf->len > 0) { + // 0-sized arrays do not undergo these checks since their strides are + // irrelevant and they are always both C- and F-contiguous. 
+ for (i = 0; i < ndim; i++) { + spec = axes_specs[i]; + if (unlikely(!__pyx_check_strides(buf, i, ndim, spec))) + goto fail; + if (unlikely(!__pyx_check_suboffsets(buf, i, ndim, spec))) + goto fail; + } + + /* Check contiguity */ + if (unlikely(buf->strides && !__pyx_verify_contig(buf, ndim, c_or_f_flag))) goto fail; } @@ -395,7 +395,7 @@ __Pyx_init_memviewslice(struct __pyx_memoryview_obj *memview, Py_buffer *buf = &memview->view; __Pyx_RefNannySetupContext("init_memviewslice", 0); - if (unlikely(memviewslice->memview || memviewslice->data)) { + if (unlikely(memviewslice->memview || memviewslice->data)) { PyErr_SetString(PyExc_ValueError, "memviewslice is already initialized!"); goto fail; @@ -489,16 +489,16 @@ __Pyx_INC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil, int lineno) { int first_time; struct {{memview_struct_name}} *memview = memslice->memview; - if (unlikely(!memview || (PyObject *) memview == Py_None)) + if (unlikely(!memview || (PyObject *) memview == Py_None)) return; /* allow uninitialized memoryview assignment */ - if (unlikely(__pyx_get_slice_count(memview) < 0)) + if (unlikely(__pyx_get_slice_count(memview) < 0)) __pyx_fatalerror("Acquisition count is %d (line %d)", __pyx_get_slice_count(memview), lineno); first_time = __pyx_add_acquisition_count(memview) == 0; - if (unlikely(first_time)) { + if (unlikely(first_time)) { if (have_gil) { Py_INCREF((PyObject *) memview); } else { @@ -514,20 +514,20 @@ static CYTHON_INLINE void __Pyx_XDEC_MEMVIEW({{memviewslice_name}} *memslice, int last_time; struct {{memview_struct_name}} *memview = memslice->memview; - if (unlikely(!memview || (PyObject *) memview == Py_None)) { - // we do not ref-count None + if (unlikely(!memview || (PyObject *) memview == Py_None)) { + // we do not ref-count None memslice->memview = NULL; return; } - if (unlikely(__pyx_get_slice_count(memview) <= 0)) + if (unlikely(__pyx_get_slice_count(memview) <= 0)) __pyx_fatalerror("Acquisition count is %d (line %d)", __pyx_get_slice_count(memview), lineno); last_time = __pyx_sub_acquisition_count(memview) == 1; memslice->data = NULL; - - if (unlikely(last_time)) { + + if (unlikely(last_time)) { if (have_gil) { Py_CLEAR(memslice->memview); } else { @@ -571,7 +571,7 @@ __pyx_memoryview_copy_new_contig(const __Pyx_memviewslice *from_mvs, __Pyx_RefNannySetupContext("__pyx_memoryview_copy_new_contig", 0); for (i = 0; i < ndim; i++) { - if (unlikely(from_mvs->suboffsets[i] >= 0)) { + if (unlikely(from_mvs->suboffsets[i] >= 0)) { PyErr_Format(PyExc_ValueError, "Cannot copy memoryview slice with " "indirect dimensions (axis %d)", i); goto fail; @@ -861,7 +861,7 @@ if (unlikely(__pyx_memoryview_slice_memviewslice( {{endif}} {{if boundscheck}} - if (unlikely(!__Pyx_is_valid_index(__pyx_tmp_idx, __pyx_tmp_shape))) { + if (unlikely(!__Pyx_is_valid_index(__pyx_tmp_idx, __pyx_tmp_shape))) { {{if not have_gil}} #ifdef WITH_THREAD PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure(); diff --git a/contrib/tools/cython/Cython/Utility/ModuleSetupCode.c b/contrib/tools/cython/Cython/Utility/ModuleSetupCode.c index 8b524f8447..0c7059b354 100644 --- a/contrib/tools/cython/Cython/Utility/ModuleSetupCode.c +++ b/contrib/tools/cython/Cython/Utility/ModuleSetupCode.c @@ -4,11 +4,11 @@ #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wshadow" #pragma GCC diagnostic ignored "-Wunused-function" -#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 -// Ignore tp_print initializer. 
Need for ya make -DUSE_SYSTEM_PYTHON=3.8 -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" +#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 +// Ignore tp_print initializer. Need for ya make -DUSE_SYSTEM_PYTHON=3.8 +#pragma GCC diagnostic ignored "-Wdeprecated-declarations" +#endif #endif -#endif #include <stddef.h> /* For offsetof */ #ifndef offsetof @@ -170,9 +170,9 @@ #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif - #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 - // Python 3.11a2 hid _PyLong_FormatAdvancedWriter and _PyFloat_FormatAdvancedWriter - // therefore disable unicode writer until a better alternative appears + #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 + // Python 3.11a2 hid _PyLong_FormatAdvancedWriter and _PyFloat_FormatAdvancedWriter + // therefore disable unicode writer until a better alternative appears #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #elif !defined(CYTHON_USE_UNICODE_WRITER) @@ -191,9 +191,9 @@ #define CYTHON_FAST_THREAD_STATE 1 #endif #ifndef CYTHON_FAST_PYCALL - // Python 3.11 deleted localplus argument from frame object, which is used in our - // fast_pycall code - #define CYTHON_FAST_PYCALL (PY_VERSION_HEX < 0x030B00A1) + // Python 3.11 deleted localplus argument from frame object, which is used in our + // fast_pycall code + #define CYTHON_FAST_PYCALL (PY_VERSION_HEX < 0x030B00A1) #endif #ifndef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) @@ -214,9 +214,9 @@ #endif #if CYTHON_USE_PYLONG_INTERNALS - #if PY_MAJOR_VERSION < 3 - #include "longintrepr.h" - #endif + #if PY_MAJOR_VERSION < 3 + #include "longintrepr.h" + #endif /* These short defines can easily conflict with other code */ #undef SHIFT #undef BASE @@ -397,77 +397,77 @@ class __Pyx_FakeReference { #define __Pyx_DefaultClassType PyClass_Type #else #define __Pyx_BUILTIN_MODULE_NAME "builtins" - #define __Pyx_DefaultClassType PyType_Type -#if PY_VERSION_HEX >= 0x030B00A1 - static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int k, int l, int s, int f, - PyObject *code, PyObject *c, PyObject* n, PyObject *v, - PyObject *fv, PyObject *cell, PyObject* fn, - PyObject *name, int fline, PyObject *lnos) { - // TODO - currently written to be simple and work in limited API etc. 
- // A more optimized version would be good - PyObject *kwds=NULL, *argcount=NULL, *posonlyargcount=NULL, *kwonlyargcount=NULL; - PyObject *nlocals=NULL, *stacksize=NULL, *flags=NULL, *replace=NULL, *call_result=NULL, *empty=NULL; - const char *fn_cstr=NULL; - const char *name_cstr=NULL; - PyCodeObject* co=NULL; - PyObject *type, *value, *traceback; - - // we must be able to call this while an exception is happening - thus clear then restore the state - PyErr_Fetch(&type, &value, &traceback); - - if (!(kwds=PyDict_New())) goto end; - if (!(argcount=PyLong_FromLong(a))) goto end; - if (PyDict_SetItemString(kwds, "co_argcount", argcount) != 0) goto end; - if (!(posonlyargcount=PyLong_FromLong(0))) goto end; - if (PyDict_SetItemString(kwds, "co_posonlyargcount", posonlyargcount) != 0) goto end; - if (!(kwonlyargcount=PyLong_FromLong(k))) goto end; - if (PyDict_SetItemString(kwds, "co_kwonlyargcount", kwonlyargcount) != 0) goto end; - if (!(nlocals=PyLong_FromLong(l))) goto end; - if (PyDict_SetItemString(kwds, "co_nlocals", nlocals) != 0) goto end; - if (!(stacksize=PyLong_FromLong(s))) goto end; - if (PyDict_SetItemString(kwds, "co_stacksize", stacksize) != 0) goto end; - if (!(flags=PyLong_FromLong(f))) goto end; - if (PyDict_SetItemString(kwds, "co_flags", flags) != 0) goto end; - if (PyDict_SetItemString(kwds, "co_code", code) != 0) goto end; - if (PyDict_SetItemString(kwds, "co_consts", c) != 0) goto end; - if (PyDict_SetItemString(kwds, "co_names", n) != 0) goto end; - if (PyDict_SetItemString(kwds, "co_varnames", v) != 0) goto end; - if (PyDict_SetItemString(kwds, "co_freevars", fv) != 0) goto end; - if (PyDict_SetItemString(kwds, "co_cellvars", cell) != 0) goto end; - if (PyDict_SetItemString(kwds, "co_linetable", lnos) != 0) goto end; - - if (!(fn_cstr=PyUnicode_AsUTF8AndSize(fn, NULL))) goto end; - if (!(name_cstr=PyUnicode_AsUTF8AndSize(name, NULL))) goto end; - if (!(co = PyCode_NewEmpty(fn_cstr, name_cstr, fline))) goto end; - - if (!(replace = PyObject_GetAttrString((PyObject*)co, "replace"))) goto cleanup_code_too; - if (!(empty = PyTuple_New(0))) goto cleanup_code_too; // unfortunately __pyx_empty_tuple isn't available here - if (!(call_result = PyObject_Call(replace, empty, kwds))) goto cleanup_code_too; - - Py_XDECREF((PyObject*)co); - co = (PyCodeObject*)call_result; - call_result = NULL; - - if (0) { - cleanup_code_too: - Py_XDECREF((PyObject*)co); - co = NULL; - } - end: - Py_XDECREF(kwds); - Py_XDECREF(argcount); - Py_XDECREF(posonlyargcount); - Py_XDECREF(kwonlyargcount); - Py_XDECREF(nlocals); - Py_XDECREF(stacksize); - Py_XDECREF(replace); - Py_XDECREF(call_result); - Py_XDECREF(empty); - if (type) { - PyErr_Restore(type, value, traceback); - } - return co; - } + #define __Pyx_DefaultClassType PyType_Type +#if PY_VERSION_HEX >= 0x030B00A1 + static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + // TODO - currently written to be simple and work in limited API etc. 
+ // A more optimized version would be good + PyObject *kwds=NULL, *argcount=NULL, *posonlyargcount=NULL, *kwonlyargcount=NULL; + PyObject *nlocals=NULL, *stacksize=NULL, *flags=NULL, *replace=NULL, *call_result=NULL, *empty=NULL; + const char *fn_cstr=NULL; + const char *name_cstr=NULL; + PyCodeObject* co=NULL; + PyObject *type, *value, *traceback; + + // we must be able to call this while an exception is happening - thus clear then restore the state + PyErr_Fetch(&type, &value, &traceback); + + if (!(kwds=PyDict_New())) goto end; + if (!(argcount=PyLong_FromLong(a))) goto end; + if (PyDict_SetItemString(kwds, "co_argcount", argcount) != 0) goto end; + if (!(posonlyargcount=PyLong_FromLong(0))) goto end; + if (PyDict_SetItemString(kwds, "co_posonlyargcount", posonlyargcount) != 0) goto end; + if (!(kwonlyargcount=PyLong_FromLong(k))) goto end; + if (PyDict_SetItemString(kwds, "co_kwonlyargcount", kwonlyargcount) != 0) goto end; + if (!(nlocals=PyLong_FromLong(l))) goto end; + if (PyDict_SetItemString(kwds, "co_nlocals", nlocals) != 0) goto end; + if (!(stacksize=PyLong_FromLong(s))) goto end; + if (PyDict_SetItemString(kwds, "co_stacksize", stacksize) != 0) goto end; + if (!(flags=PyLong_FromLong(f))) goto end; + if (PyDict_SetItemString(kwds, "co_flags", flags) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_code", code) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_consts", c) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_names", n) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_varnames", v) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_freevars", fv) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_cellvars", cell) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_linetable", lnos) != 0) goto end; + + if (!(fn_cstr=PyUnicode_AsUTF8AndSize(fn, NULL))) goto end; + if (!(name_cstr=PyUnicode_AsUTF8AndSize(name, NULL))) goto end; + if (!(co = PyCode_NewEmpty(fn_cstr, name_cstr, fline))) goto end; + + if (!(replace = PyObject_GetAttrString((PyObject*)co, "replace"))) goto cleanup_code_too; + if (!(empty = PyTuple_New(0))) goto cleanup_code_too; // unfortunately __pyx_empty_tuple isn't available here + if (!(call_result = PyObject_Call(replace, empty, kwds))) goto cleanup_code_too; + + Py_XDECREF((PyObject*)co); + co = (PyCodeObject*)call_result; + call_result = NULL; + + if (0) { + cleanup_code_too: + Py_XDECREF((PyObject*)co); + co = NULL; + } + end: + Py_XDECREF(kwds); + Py_XDECREF(argcount); + Py_XDECREF(posonlyargcount); + Py_XDECREF(kwonlyargcount); + Py_XDECREF(nlocals); + Py_XDECREF(stacksize); + Py_XDECREF(replace); + Py_XDECREF(call_result); + Py_XDECREF(empty); + if (type) { + PyErr_Restore(type, value, traceback); + } + return co; + } #else #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) @@ -607,15 +607,15 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { /* new Py3.3 unicode type (PEP 393) */ #if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) #define CYTHON_PEP393_ENABLED 1 - - #if defined(PyUnicode_IS_READY) + + #if defined(PyUnicode_IS_READY) #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ? \ 0 : _PyUnicode_Ready((PyObject *)(op))) - #else - // Py3.12 / PEP-623 will remove wstr type unicode strings and all of the PyUnicode_READY() machinery. 
- #define __Pyx_PyUnicode_READY(op) (0) - #endif - + #else + // Py3.12 / PEP-623 will remove wstr type unicode strings and all of the PyUnicode_READY() machinery. + #define __Pyx_PyUnicode_READY(op) (0) + #endif + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) @@ -623,17 +623,17 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) - #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE) - #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 - // Avoid calling deprecated C-API functions in Py3.9+ that PEP-623 schedules for removal in Py3.12. - // https://www.python.org/dev/peps/pep-0623/ - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) - #else + #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE) + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 + // Avoid calling deprecated C-API functions in Py3.9+ that PEP-623 schedules for removal in Py3.12. + // https://www.python.org/dev/peps/pep-0623/ + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) + #else #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) - #endif - #else - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) - #endif + #endif + #else + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) + #endif #else #define CYTHON_PEP393_ENABLED 0 #define PyUnicode_1BYTE_KIND 1 @@ -692,11 +692,11 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact - // PyPy3 used to define "PyObject_Unicode" -#ifndef PyObject_Unicode + // PyPy3 used to define "PyObject_Unicode" +#ifndef PyObject_Unicode #define PyObject_Unicode PyObject_Str #endif -#endif +#endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) @@ -710,15 +710,15 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) #endif - -#if PY_VERSION_HEX >= 0x030900A4 - #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) -#else - #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) -#endif - + +#if PY_VERSION_HEX >= 0x030900A4 + #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) +#else + #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) +#endif + #if CYTHON_ASSUME_SAFE_MACROS #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) #else @@ -757,14 +757,14 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { #if PY_VERSION_HEX < 0x030200A4 typedef long Py_hash_t; #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t + #define __Pyx_PyInt_AsHash_t 
__Pyx_PyIndex_AsHash_t #else #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t #endif #if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) #else #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) #endif @@ -805,22 +805,22 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { /////////////// PyModInitFuncType.proto /////////////// -#ifndef CYTHON_NO_PYINIT_EXPORT -#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#ifndef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC -#elif PY_MAJOR_VERSION < 3 -// Py2: define this to void manually because PyMODINIT_FUNC adds __declspec(dllexport) to it's definition. -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" void -#else +#elif PY_MAJOR_VERSION < 3 +// Py2: define this to void manually because PyMODINIT_FUNC adds __declspec(dllexport) to it's definition. +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" void +#else #define __Pyx_PyMODINIT_FUNC void #endif #else -// Py3+: define this to PyObject * manually because PyMODINIT_FUNC adds __declspec(dllexport) to it's definition. -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * -#else +// Py3+: define this to PyObject * manually because PyMODINIT_FUNC adds __declspec(dllexport) to it's definition. +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * +#else #define __Pyx_PyMODINIT_FUNC PyObject * #endif #endif @@ -1006,7 +1006,7 @@ typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* enc /////////////// InitThreads.init /////////////// -#if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 +#if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 PyEval_InitThreads(); #endif @@ -1178,7 +1178,7 @@ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { if (__pyx_code_cache.count == __pyx_code_cache.max_count) { int new_max = __pyx_code_cache.max_count + 64; entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( - __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); + __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); if (unlikely(!entries)) { return; } diff --git a/contrib/tools/cython/Cython/Utility/ObjectHandling.c b/contrib/tools/cython/Cython/Utility/ObjectHandling.c index 4888444085..c1b1c60bda 100644 --- a/contrib/tools/cython/Cython/Utility/ObjectHandling.c +++ b/contrib/tools/cython/Cython/Utility/ObjectHandling.c @@ -1040,7 +1040,7 @@ static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { /////////////// CallableCheck.proto /////////////// #if CYTHON_USE_TYPE_SLOTS && PY_MAJOR_VERSION >= 3 -#define __Pyx_PyCallable_Check(obj) (Py_TYPE(obj)->tp_call != NULL) +#define __Pyx_PyCallable_Check(obj) (Py_TYPE(obj)->tp_call != NULL) #else #define __Pyx_PyCallable_Check(obj) PyCallable_Check(obj) #endif @@ -1361,41 +1361,41 @@ static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_nam #endif -/////////////// PyObjectGetAttrStrNoError.proto /////////////// - -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name);/*proto*/ - -/////////////// PyObjectGetAttrStrNoError 
/////////////// -//@requires: PyObjectGetAttrStr -//@requires: Exceptions.c::PyThreadStateGet -//@requires: Exceptions.c::PyErrFetchRestore -//@requires: Exceptions.c::PyErrExceptionMatches - -static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) - __Pyx_PyErr_Clear(); -} - -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { - PyObject *result; -#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 - // _PyObject_GenericGetAttrWithDict() in CPython 3.7+ can avoid raising the AttributeError. - // See https://bugs.python.org/issue32544 - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { - return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); - } -#endif - result = __Pyx_PyObject_GetAttrStr(obj, attr_name); - if (unlikely(!result)) { - __Pyx_PyObject_GetAttrStr_ClearAttributeError(); - } - return result; -} - - +/////////////// PyObjectGetAttrStrNoError.proto /////////////// + +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name);/*proto*/ + +/////////////// PyObjectGetAttrStrNoError /////////////// +//@requires: PyObjectGetAttrStr +//@requires: Exceptions.c::PyThreadStateGet +//@requires: Exceptions.c::PyErrFetchRestore +//@requires: Exceptions.c::PyErrExceptionMatches + +static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + __Pyx_PyErr_Clear(); +} + +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { + PyObject *result; +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 + // _PyObject_GenericGetAttrWithDict() in CPython 3.7+ can avoid raising the AttributeError. + // See https://bugs.python.org/issue32544 + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { + return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); + } +#endif + result = __Pyx_PyObject_GetAttrStr(obj, attr_name); + if (unlikely(!result)) { + __Pyx_PyObject_GetAttrStr_ClearAttributeError(); + } + return result; +} + + /////////////// PyObjectGetAttrStr.proto /////////////// #if CYTHON_USE_TYPE_SLOTS @@ -1913,7 +1913,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { PyObject *result; - ternaryfunc call = Py_TYPE(func)->tp_call; + ternaryfunc call = Py_TYPE(func)->tp_call; if (unlikely(!call)) return PyObject_Call(func, arg, kw); @@ -1991,7 +1991,7 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, #define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) #endif -#if CYTHON_FAST_PYCALL +#if CYTHON_FAST_PYCALL // Initialised by module init code. 
static size_t __pyx_pyframe_localsplus_offset = 0; @@ -2006,7 +2006,7 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) #define __Pyx_PyFrame_GetLocalsplus(frame) \ (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) -#endif // CYTHON_FAST_PYCALL +#endif // CYTHON_FAST_PYCALL #endif @@ -2273,7 +2273,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObjec // fast and simple case that we are optimising for return __Pyx_PyObject_CallMethO(func, arg); #if CYTHON_FAST_PYCCALL - } else if (__Pyx_PyFastCFunction_Check(func)) { + } else if (__Pyx_PyFastCFunction_Check(func)) { return __Pyx_PyCFunction_FastCall(func, &arg, 1); #endif } diff --git a/contrib/tools/cython/Cython/Utility/Optimize.c b/contrib/tools/cython/Cython/Utility/Optimize.c index b49144169d..d18c9b78ec 100644 --- a/contrib/tools/cython/Cython/Utility/Optimize.c +++ b/contrib/tools/cython/Cython/Utility/Optimize.c @@ -35,7 +35,7 @@ static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) { if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) { Py_INCREF(x); PyList_SET_ITEM(list, len, x); - __Pyx_SET_SIZE(list, len + 1); + __Pyx_SET_SIZE(list, len + 1); return 0; } return PyList_Append(list, x); @@ -53,7 +53,7 @@ static CYTHON_INLINE int __Pyx_ListComp_Append(PyObject* list, PyObject* x) { if (likely(L->allocated > len)) { Py_INCREF(x); PyList_SET_ITEM(list, len, x); - __Pyx_SET_SIZE(list, len + 1); + __Pyx_SET_SIZE(list, len + 1); return 0; } return PyList_Append(list, x); @@ -104,7 +104,7 @@ static CYTHON_INLINE PyObject* __Pyx__PyObject_Pop(PyObject* L) { static CYTHON_INLINE PyObject* __Pyx_PyList_Pop(PyObject* L) { /* Check that both the size is positive and no reallocation shrinking needs to be done. */ if (likely(PyList_GET_SIZE(L) > (((PyListObject*)L)->allocated >> 1))) { - __Pyx_SET_SIZE(L, Py_SIZE(L) - 1); + __Pyx_SET_SIZE(L, Py_SIZE(L) - 1); return PyList_GET_ITEM(L, PyList_GET_SIZE(L)); } return CALL_UNBOUND_METHOD(PyList_Type, "pop", L); @@ -167,7 +167,7 @@ static PyObject* __Pyx__PyList_PopIndex(PyObject* L, PyObject* py_ix, Py_ssize_t } if (likely(__Pyx_is_valid_index(cix, size))) { PyObject* v = PyList_GET_ITEM(L, cix); - __Pyx_SET_SIZE(L, Py_SIZE(L) - 1); + __Pyx_SET_SIZE(L, Py_SIZE(L) - 1); size -= 1; memmove(&PyList_GET_ITEM(L, cix), &PyList_GET_ITEM(L, cix+1), (size_t)(size-cix)*sizeof(PyObject*)); return v; diff --git a/contrib/tools/cython/Cython/Utility/Overflow.c b/contrib/tools/cython/Cython/Utility/Overflow.c index 02e41b84fc..0259c58f01 100644 --- a/contrib/tools/cython/Cython/Utility/Overflow.c +++ b/contrib/tools/cython/Cython/Utility/Overflow.c @@ -20,10 +20,10 @@ TODO: Conditionally support 128-bit with intmax_t? 
/////////////// Common.proto /////////////// static int __Pyx_check_twos_complement(void) { - if ((-1 != ~0)) { + if ((-1 != ~0)) { PyErr_SetString(PyExc_RuntimeError, "Two's complement required for overflow checks."); return 1; - } else if ((sizeof(short) == sizeof(int))) { + } else if ((sizeof(short) == sizeof(int))) { PyErr_SetString(PyExc_RuntimeError, "sizeof(short) < sizeof(int) required for overflow checks."); return 1; } else { @@ -31,11 +31,11 @@ static int __Pyx_check_twos_complement(void) { } } -#define __PYX_IS_UNSIGNED(type) ((((type) -1) > 0)) -#define __PYX_SIGN_BIT(type) ((((unsigned type) 1) << (sizeof(type) * 8 - 1))) -#define __PYX_HALF_MAX(type) ((((type) 1) << (sizeof(type) * 8 - 2))) -#define __PYX_MIN(type) ((__PYX_IS_UNSIGNED(type) ? (type) 0 : 0 - __PYX_HALF_MAX(type) - __PYX_HALF_MAX(type))) -#define __PYX_MAX(type) ((~__PYX_MIN(type))) +#define __PYX_IS_UNSIGNED(type) ((((type) -1) > 0)) +#define __PYX_SIGN_BIT(type) ((((unsigned type) 1) << (sizeof(type) * 8 - 1))) +#define __PYX_HALF_MAX(type) ((((type) 1) << (sizeof(type) * 8 - 2))) +#define __PYX_MIN(type) ((__PYX_IS_UNSIGNED(type) ? (type) 0 : 0 - __PYX_HALF_MAX(type) - __PYX_HALF_MAX(type))) +#define __PYX_MAX(type) ((~__PYX_MIN(type))) #define __Pyx_add_no_overflow(a, b, overflow) ((a) + (b)) #define __Pyx_add_const_no_overflow(a, b, overflow) ((a) + (b)) @@ -82,13 +82,13 @@ static CYTHON_INLINE {{UINT}} __Pyx_sub_{{NAME}}_checking_overflow({{UINT}} a, { } static CYTHON_INLINE {{UINT}} __Pyx_mul_{{NAME}}_checking_overflow({{UINT}} a, {{UINT}} b, int *overflow) { - if ((sizeof({{UINT}}) < sizeof(unsigned long))) { + if ((sizeof({{UINT}}) < sizeof(unsigned long))) { unsigned long big_r = ((unsigned long) a) * ((unsigned long) b); {{UINT}} r = ({{UINT}}) big_r; *overflow |= big_r != r; return r; #ifdef HAVE_LONG_LONG - } else if ((sizeof({{UINT}}) < sizeof(unsigned PY_LONG_LONG))) { + } else if ((sizeof({{UINT}}) < sizeof(unsigned PY_LONG_LONG))) { unsigned PY_LONG_LONG big_r = ((unsigned PY_LONG_LONG) a) * ((unsigned PY_LONG_LONG) b); {{UINT}} r = ({{UINT}}) big_r; *overflow |= big_r != r; @@ -138,13 +138,13 @@ static CYTHON_INLINE {{INT}} __Pyx_mul_const_{{NAME}}_checking_overflow({{INT}} /////////////// BaseCaseSigned /////////////// static CYTHON_INLINE {{INT}} __Pyx_add_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { - if ((sizeof({{INT}}) < sizeof(long))) { + if ((sizeof({{INT}}) < sizeof(long))) { long big_r = ((long) a) + ((long) b); {{INT}} r = ({{INT}}) big_r; *overflow |= big_r != r; return r; #ifdef HAVE_LONG_LONG - } else if ((sizeof({{INT}}) < sizeof(PY_LONG_LONG))) { + } else if ((sizeof({{INT}}) < sizeof(PY_LONG_LONG))) { PY_LONG_LONG big_r = ((PY_LONG_LONG) a) + ((PY_LONG_LONG) b); {{INT}} r = ({{INT}}) big_r; *overflow |= big_r != r; @@ -184,13 +184,13 @@ static CYTHON_INLINE {{INT}} __Pyx_sub_const_{{NAME}}_checking_overflow({{INT}} } static CYTHON_INLINE {{INT}} __Pyx_mul_{{NAME}}_checking_overflow({{INT}} a, {{INT}} b, int *overflow) { - if ((sizeof({{INT}}) < sizeof(long))) { + if ((sizeof({{INT}}) < sizeof(long))) { long big_r = ((long) a) * ((long) b); {{INT}} r = ({{INT}}) big_r; *overflow |= big_r != r; return ({{INT}}) r; #ifdef HAVE_LONG_LONG - } else if ((sizeof({{INT}}) < sizeof(PY_LONG_LONG))) { + } else if ((sizeof({{INT}}) < sizeof(PY_LONG_LONG))) { PY_LONG_LONG big_r = ((PY_LONG_LONG) a) * ((PY_LONG_LONG) b); {{INT}} r = ({{INT}}) big_r; *overflow |= big_r != r; @@ -240,11 +240,11 @@ if (unlikely(__Pyx_check_sane_{{NAME}}())) { /////////////// 
SizeCheck.proto /////////////// static int __Pyx_check_sane_{{NAME}}(void) { - if (((sizeof({{TYPE}}) <= sizeof(int)) || + if (((sizeof({{TYPE}}) <= sizeof(int)) || #ifdef HAVE_LONG_LONG - (sizeof({{TYPE}}) == sizeof(PY_LONG_LONG)) || + (sizeof({{TYPE}}) == sizeof(PY_LONG_LONG)) || #endif - (sizeof({{TYPE}}) == sizeof(long)))) { + (sizeof({{TYPE}}) == sizeof(long)))) { return 0; } else { PyErr_Format(PyExc_RuntimeError, \ @@ -261,28 +261,28 @@ static CYTHON_INLINE {{TYPE}} __Pyx_{{BINOP}}_{{NAME}}_checking_overflow({{TYPE} /////////////// Binop /////////////// static CYTHON_INLINE {{TYPE}} __Pyx_{{BINOP}}_{{NAME}}_checking_overflow({{TYPE}} a, {{TYPE}} b, int *overflow) { - if ((sizeof({{TYPE}}) < sizeof(int))) { + if ((sizeof({{TYPE}}) < sizeof(int))) { return __Pyx_{{BINOP}}_no_overflow(a, b, overflow); } else if (__PYX_IS_UNSIGNED({{TYPE}})) { - if ((sizeof({{TYPE}}) == sizeof(unsigned int))) { - return ({{TYPE}}) __Pyx_{{BINOP}}_unsigned_int_checking_overflow(a, b, overflow); - } else if ((sizeof({{TYPE}}) == sizeof(unsigned long))) { - return ({{TYPE}}) __Pyx_{{BINOP}}_unsigned_long_checking_overflow(a, b, overflow); + if ((sizeof({{TYPE}}) == sizeof(unsigned int))) { + return ({{TYPE}}) __Pyx_{{BINOP}}_unsigned_int_checking_overflow(a, b, overflow); + } else if ((sizeof({{TYPE}}) == sizeof(unsigned long))) { + return ({{TYPE}}) __Pyx_{{BINOP}}_unsigned_long_checking_overflow(a, b, overflow); #ifdef HAVE_LONG_LONG - } else if ((sizeof({{TYPE}}) == sizeof(unsigned PY_LONG_LONG))) { - return ({{TYPE}}) __Pyx_{{BINOP}}_unsigned_long_long_checking_overflow(a, b, overflow); + } else if ((sizeof({{TYPE}}) == sizeof(unsigned PY_LONG_LONG))) { + return ({{TYPE}}) __Pyx_{{BINOP}}_unsigned_long_long_checking_overflow(a, b, overflow); #endif } else { abort(); return 0; /* handled elsewhere */ } } else { - if ((sizeof({{TYPE}}) == sizeof(int))) { - return ({{TYPE}}) __Pyx_{{BINOP}}_int_checking_overflow(a, b, overflow); - } else if ((sizeof({{TYPE}}) == sizeof(long))) { - return ({{TYPE}}) __Pyx_{{BINOP}}_long_checking_overflow(a, b, overflow); + if ((sizeof({{TYPE}}) == sizeof(int))) { + return ({{TYPE}}) __Pyx_{{BINOP}}_int_checking_overflow(a, b, overflow); + } else if ((sizeof({{TYPE}}) == sizeof(long))) { + return ({{TYPE}}) __Pyx_{{BINOP}}_long_checking_overflow(a, b, overflow); #ifdef HAVE_LONG_LONG - } else if ((sizeof({{TYPE}}) == sizeof(PY_LONG_LONG))) { - return ({{TYPE}}) __Pyx_{{BINOP}}_long_long_checking_overflow(a, b, overflow); + } else if ((sizeof({{TYPE}}) == sizeof(PY_LONG_LONG))) { + return ({{TYPE}}) __Pyx_{{BINOP}}_long_long_checking_overflow(a, b, overflow); #endif } else { abort(); return 0; /* handled elsewhere */ diff --git a/contrib/tools/cython/Cython/Utility/Profile.c b/contrib/tools/cython/Cython/Utility/Profile.c index 57e2f0244c..921eb67529 100644 --- a/contrib/tools/cython/Cython/Utility/Profile.c +++ b/contrib/tools/cython/Cython/Utility/Profile.c @@ -47,58 +47,58 @@ #define CYTHON_FRAME_DEL(frame) Py_CLEAR(frame) #endif - #define __Pyx_TraceDeclarations \ - static PyCodeObject *$frame_code_cname = NULL; \ - CYTHON_FRAME_MODIFIER PyFrameObject *$frame_cname = NULL; \ - int __Pyx_use_tracing = 0; - - #define __Pyx_TraceFrameInit(codeobj) \ - if (codeobj) $frame_code_cname = (PyCodeObject*) codeobj; - -#if PY_VERSION_HEX >= 0x030b00a2 - #define __Pyx_IsTracing(tstate, check_tracing, check_funcs) \ - (unlikely((tstate)->cframe->use_tracing) && \ - (!(check_tracing) || !(tstate)->tracing) && \ - (!(check_funcs) || (tstate)->c_profilefunc || (CYTHON_TRACE && 
(tstate)->c_tracefunc))) - - #define __Pyx_EnterTracing(tstate) PyThreadState_EnterTracing(tstate) - - #define __Pyx_LeaveTracing(tstate) PyThreadState_LeaveTracing(tstate) - -#elif PY_VERSION_HEX >= 0x030a00b1 - #define __Pyx_IsTracing(tstate, check_tracing, check_funcs) \ - (unlikely((tstate)->cframe->use_tracing) && \ - (!(check_tracing) || !(tstate)->tracing) && \ - (!(check_funcs) || (tstate)->c_profilefunc || (CYTHON_TRACE && (tstate)->c_tracefunc))) - - #define __Pyx_EnterTracing(tstate) \ - do { tstate->tracing++; tstate->cframe->use_tracing = 0; } while (0) - - #define __Pyx_LeaveTracing(tstate) \ - do { \ - tstate->tracing--; \ - tstate->cframe->use_tracing = ((CYTHON_TRACE && tstate->c_tracefunc != NULL) \ - || tstate->c_profilefunc != NULL); \ - } while (0) - -#else - #define __Pyx_IsTracing(tstate, check_tracing, check_funcs) \ - (unlikely((tstate)->use_tracing) && \ - (!(check_tracing) || !(tstate)->tracing) && \ - (!(check_funcs) || (tstate)->c_profilefunc || (CYTHON_TRACE && (tstate)->c_tracefunc))) - - #define __Pyx_EnterTracing(tstate) \ - do { tstate->tracing++; tstate->use_tracing = 0; } while (0) - - #define __Pyx_LeaveTracing(tstate) \ - do { \ - tstate->tracing--; \ - tstate->use_tracing = ((CYTHON_TRACE && tstate->c_tracefunc != NULL) \ - || tstate->c_profilefunc != NULL); \ - } while (0) - -#endif - + #define __Pyx_TraceDeclarations \ + static PyCodeObject *$frame_code_cname = NULL; \ + CYTHON_FRAME_MODIFIER PyFrameObject *$frame_cname = NULL; \ + int __Pyx_use_tracing = 0; + + #define __Pyx_TraceFrameInit(codeobj) \ + if (codeobj) $frame_code_cname = (PyCodeObject*) codeobj; + +#if PY_VERSION_HEX >= 0x030b00a2 + #define __Pyx_IsTracing(tstate, check_tracing, check_funcs) \ + (unlikely((tstate)->cframe->use_tracing) && \ + (!(check_tracing) || !(tstate)->tracing) && \ + (!(check_funcs) || (tstate)->c_profilefunc || (CYTHON_TRACE && (tstate)->c_tracefunc))) + + #define __Pyx_EnterTracing(tstate) PyThreadState_EnterTracing(tstate) + + #define __Pyx_LeaveTracing(tstate) PyThreadState_LeaveTracing(tstate) + +#elif PY_VERSION_HEX >= 0x030a00b1 + #define __Pyx_IsTracing(tstate, check_tracing, check_funcs) \ + (unlikely((tstate)->cframe->use_tracing) && \ + (!(check_tracing) || !(tstate)->tracing) && \ + (!(check_funcs) || (tstate)->c_profilefunc || (CYTHON_TRACE && (tstate)->c_tracefunc))) + + #define __Pyx_EnterTracing(tstate) \ + do { tstate->tracing++; tstate->cframe->use_tracing = 0; } while (0) + + #define __Pyx_LeaveTracing(tstate) \ + do { \ + tstate->tracing--; \ + tstate->cframe->use_tracing = ((CYTHON_TRACE && tstate->c_tracefunc != NULL) \ + || tstate->c_profilefunc != NULL); \ + } while (0) + +#else + #define __Pyx_IsTracing(tstate, check_tracing, check_funcs) \ + (unlikely((tstate)->use_tracing) && \ + (!(check_tracing) || !(tstate)->tracing) && \ + (!(check_funcs) || (tstate)->c_profilefunc || (CYTHON_TRACE && (tstate)->c_tracefunc))) + + #define __Pyx_EnterTracing(tstate) \ + do { tstate->tracing++; tstate->use_tracing = 0; } while (0) + + #define __Pyx_LeaveTracing(tstate) \ + do { \ + tstate->tracing--; \ + tstate->use_tracing = ((CYTHON_TRACE && tstate->c_tracefunc != NULL) \ + || tstate->c_profilefunc != NULL); \ + } while (0) + +#endif + #ifdef WITH_THREAD #define __Pyx_TraceCall(funcname, srcfile, firstlineno, nogil, goto_error) \ if (nogil) { \ @@ -106,7 +106,7 @@ PyThreadState *tstate; \ PyGILState_STATE state = PyGILState_Ensure(); \ tstate = __Pyx_PyThreadState_Current; \ - if (__Pyx_IsTracing(tstate, 1, 1)) { \ + if (__Pyx_IsTracing(tstate, 1, 
1)) { \ __Pyx_use_tracing = __Pyx_TraceSetupAndCall(&$frame_code_cname, &$frame_cname, tstate, funcname, srcfile, firstlineno); \ } \ PyGILState_Release(state); \ @@ -114,7 +114,7 @@ } \ } else { \ PyThreadState* tstate = PyThreadState_GET(); \ - if (__Pyx_IsTracing(tstate, 1, 1)) { \ + if (__Pyx_IsTracing(tstate, 1, 1)) { \ __Pyx_use_tracing = __Pyx_TraceSetupAndCall(&$frame_code_cname, &$frame_cname, tstate, funcname, srcfile, firstlineno); \ if (unlikely(__Pyx_use_tracing < 0)) goto_error; \ } \ @@ -122,7 +122,7 @@ #else #define __Pyx_TraceCall(funcname, srcfile, firstlineno, nogil, goto_error) \ { PyThreadState* tstate = PyThreadState_GET(); \ - if (__Pyx_IsTracing(tstate, 1, 1)) { \ + if (__Pyx_IsTracing(tstate, 1, 1)) { \ __Pyx_use_tracing = __Pyx_TraceSetupAndCall(&$frame_code_cname, &$frame_cname, tstate, funcname, srcfile, firstlineno); \ if (unlikely(__Pyx_use_tracing < 0)) goto_error; \ } \ @@ -132,8 +132,8 @@ #define __Pyx_TraceException() \ if (likely(!__Pyx_use_tracing)); else { \ PyThreadState* tstate = __Pyx_PyThreadState_Current; \ - if (__Pyx_IsTracing(tstate, 0, 1)) { \ - __Pyx_EnterTracing(tstate); \ + if (__Pyx_IsTracing(tstate, 0, 1)) { \ + __Pyx_EnterTracing(tstate); \ PyObject *exc_info = __Pyx_GetExceptionTuple(tstate); \ if (exc_info) { \ if (CYTHON_TRACE && tstate->c_tracefunc) \ @@ -143,20 +143,20 @@ tstate->c_profileobj, $frame_cname, PyTrace_EXCEPTION, exc_info); \ Py_DECREF(exc_info); \ } \ - __Pyx_LeaveTracing(tstate); \ + __Pyx_LeaveTracing(tstate); \ } \ } static void __Pyx_call_return_trace_func(PyThreadState *tstate, PyFrameObject *frame, PyObject *result) { PyObject *type, *value, *traceback; __Pyx_ErrFetchInState(tstate, &type, &value, &traceback); - __Pyx_EnterTracing(tstate); + __Pyx_EnterTracing(tstate); if (CYTHON_TRACE && tstate->c_tracefunc) tstate->c_tracefunc(tstate->c_traceobj, frame, PyTrace_RETURN, result); if (tstate->c_profilefunc) tstate->c_profilefunc(tstate->c_profileobj, frame, PyTrace_RETURN, result); CYTHON_FRAME_DEL(frame); - __Pyx_LeaveTracing(tstate); + __Pyx_LeaveTracing(tstate); __Pyx_ErrRestoreInState(tstate, type, value, traceback); } @@ -168,14 +168,14 @@ PyThreadState *tstate; \ PyGILState_STATE state = PyGILState_Ensure(); \ tstate = __Pyx_PyThreadState_Current; \ - if (__Pyx_IsTracing(tstate, 0, 0)) { \ + if (__Pyx_IsTracing(tstate, 0, 0)) { \ __Pyx_call_return_trace_func(tstate, $frame_cname, (PyObject*)result); \ } \ PyGILState_Release(state); \ } \ } else { \ PyThreadState* tstate = __Pyx_PyThreadState_Current; \ - if (__Pyx_IsTracing(tstate, 0, 0)) { \ + if (__Pyx_IsTracing(tstate, 0, 0)) { \ __Pyx_call_return_trace_func(tstate, $frame_cname, (PyObject*)result); \ } \ } \ @@ -184,7 +184,7 @@ #define __Pyx_TraceReturn(result, nogil) \ if (likely(!__Pyx_use_tracing)); else { \ PyThreadState* tstate = __Pyx_PyThreadState_Current; \ - if (__Pyx_IsTracing(tstate, 0, 0)) { \ + if (__Pyx_IsTracing(tstate, 0, 0)) { \ __Pyx_call_return_trace_func(tstate, $frame_cname, (PyObject*)result); \ } \ } @@ -211,11 +211,11 @@ PyObject *type, *value, *traceback; __Pyx_ErrFetchInState(tstate, &type, &value, &traceback); __Pyx_PyFrame_SetLineNumber(frame, lineno); - __Pyx_EnterTracing(tstate); - + __Pyx_EnterTracing(tstate); + ret = tstate->c_tracefunc(tstate->c_traceobj, frame, PyTrace_LINE, NULL); - - __Pyx_LeaveTracing(tstate); + + __Pyx_LeaveTracing(tstate); if (likely(!ret)) { __Pyx_ErrRestoreInState(tstate, type, value, traceback); } else { @@ -237,7 +237,7 @@ PyThreadState *tstate; \ PyGILState_STATE state = PyGILState_Ensure(); \ 
tstate = __Pyx_PyThreadState_Current; \ - if (__Pyx_IsTracing(tstate, 0, 0) && tstate->c_tracefunc && $frame_cname->f_trace) { \ + if (__Pyx_IsTracing(tstate, 0, 0) && tstate->c_tracefunc && $frame_cname->f_trace) { \ ret = __Pyx_call_line_trace_func(tstate, $frame_cname, lineno); \ } \ PyGILState_Release(state); \ @@ -246,7 +246,7 @@ } \ } else { \ PyThreadState* tstate = __Pyx_PyThreadState_Current; \ - if (__Pyx_IsTracing(tstate, 0, 0) && tstate->c_tracefunc && $frame_cname->f_trace) { \ + if (__Pyx_IsTracing(tstate, 0, 0) && tstate->c_tracefunc && $frame_cname->f_trace) { \ int ret = __Pyx_call_line_trace_func(tstate, $frame_cname, lineno); \ // XXX https://github.com/cython/cython/issues/2274 \ if (unlikely(ret)) { fprintf(stderr, "cython: line_trace_func returned %d\n", ret); } \ @@ -259,7 +259,7 @@ // mark error label as used to avoid compiler warnings \ if ((1)); else goto_error; \ PyThreadState* tstate = __Pyx_PyThreadState_Current; \ - if (__Pyx_IsTracing(tstate, 0, 0) && tstate->c_tracefunc && $frame_cname->f_trace) { \ + if (__Pyx_IsTracing(tstate, 0, 0) && tstate->c_tracefunc && $frame_cname->f_trace) { \ int ret = __Pyx_call_line_trace_func(tstate, $frame_cname, lineno); \ // XXX https://github.com/cython/cython/issues/2274 \ if (unlikely(ret)) { fprintf(stderr, "cython: line_trace_func returned %d\n", ret); } \ @@ -306,23 +306,23 @@ static int __Pyx_TraceSetupAndCall(PyCodeObject** code, (*frame)->f_tstate = tstate; #endif } - __Pyx_PyFrame_SetLineNumber(*frame, firstlineno); - + __Pyx_PyFrame_SetLineNumber(*frame, firstlineno); + retval = 1; - __Pyx_EnterTracing(tstate); + __Pyx_EnterTracing(tstate); __Pyx_ErrFetchInState(tstate, &type, &value, &traceback); - + #if CYTHON_TRACE if (tstate->c_tracefunc) retval = tstate->c_tracefunc(tstate->c_traceobj, *frame, PyTrace_CALL, NULL) == 0; if (retval && tstate->c_profilefunc) #endif retval = tstate->c_profilefunc(tstate->c_profileobj, *frame, PyTrace_CALL, NULL) == 0; - - __Pyx_LeaveTracing(tstate); + + __Pyx_LeaveTracing(tstate); if (retval) { __Pyx_ErrRestoreInState(tstate, type, value, traceback); - return __Pyx_IsTracing(tstate, 0, 0) && retval; + return __Pyx_IsTracing(tstate, 0, 0) && retval; } else { Py_XDECREF(type); Py_XDECREF(value); diff --git a/contrib/tools/cython/Cython/Utility/StringTools.c b/contrib/tools/cython/Cython/Utility/StringTools.c index 205bb7204f..2fdae812a0 100644 --- a/contrib/tools/cython/Cython/Utility/StringTools.c +++ b/contrib/tools/cython/Cython/Utility/StringTools.c @@ -66,19 +66,19 @@ static CYTHON_INLINE int __Pyx_UnicodeContainsUCS4(PyObject* unicode, Py_UCS4 ch //////////////////// PyUCS4InUnicode //////////////////// -#if PY_VERSION_HEX < 0x03090000 || (defined(PyUnicode_WCHAR_KIND) && defined(PyUnicode_AS_UNICODE)) - -#if PY_VERSION_HEX < 0x03090000 -#define __Pyx_PyUnicode_AS_UNICODE(op) PyUnicode_AS_UNICODE(op) -#define __Pyx_PyUnicode_GET_SIZE(op) PyUnicode_GET_SIZE(op) -#else -// Avoid calling deprecated C-API functions in Py3.9+ that PEP-623 schedules for removal in Py3.12. 
-// https://www.python.org/dev/peps/pep-0623/ -#define __Pyx_PyUnicode_AS_UNICODE(op) (((PyASCIIObject *)(op))->wstr) -#define __Pyx_PyUnicode_GET_SIZE(op) ((PyCompactUnicodeObject *)(op))->wstr_length -#endif - -#if !defined(Py_UNICODE_SIZE) || Py_UNICODE_SIZE == 2 +#if PY_VERSION_HEX < 0x03090000 || (defined(PyUnicode_WCHAR_KIND) && defined(PyUnicode_AS_UNICODE)) + +#if PY_VERSION_HEX < 0x03090000 +#define __Pyx_PyUnicode_AS_UNICODE(op) PyUnicode_AS_UNICODE(op) +#define __Pyx_PyUnicode_GET_SIZE(op) PyUnicode_GET_SIZE(op) +#else +// Avoid calling deprecated C-API functions in Py3.9+ that PEP-623 schedules for removal in Py3.12. +// https://www.python.org/dev/peps/pep-0623/ +#define __Pyx_PyUnicode_AS_UNICODE(op) (((PyASCIIObject *)(op))->wstr) +#define __Pyx_PyUnicode_GET_SIZE(op) ((PyCompactUnicodeObject *)(op))->wstr_length +#endif + +#if !defined(Py_UNICODE_SIZE) || Py_UNICODE_SIZE == 2 static int __Pyx_PyUnicodeBufferContainsUCS4_SP(Py_UNICODE* buffer, Py_ssize_t length, Py_UCS4 character) { /* handle surrogate pairs for Py_UNICODE buffers in 16bit Unicode builds */ Py_UNICODE high_val, low_val; @@ -90,7 +90,7 @@ static int __Pyx_PyUnicodeBufferContainsUCS4_SP(Py_UNICODE* buffer, Py_ssize_t l } return 0; } -#endif +#endif static int __Pyx_PyUnicodeBufferContainsUCS4_BMP(Py_UNICODE* buffer, Py_ssize_t length, Py_UCS4 character) { Py_UNICODE uchar; @@ -101,15 +101,15 @@ static int __Pyx_PyUnicodeBufferContainsUCS4_BMP(Py_UNICODE* buffer, Py_ssize_t } return 0; } -#endif +#endif static CYTHON_INLINE int __Pyx_UnicodeContainsUCS4(PyObject* unicode, Py_UCS4 character) { #if CYTHON_PEP393_ENABLED const int kind = PyUnicode_KIND(unicode); - #ifdef PyUnicode_WCHAR_KIND - if (likely(kind != PyUnicode_WCHAR_KIND)) - #endif - { + #ifdef PyUnicode_WCHAR_KIND + if (likely(kind != PyUnicode_WCHAR_KIND)) + #endif + { Py_ssize_t i; const void* udata = PyUnicode_DATA(unicode); const Py_ssize_t length = PyUnicode_GET_LENGTH(unicode); @@ -118,29 +118,29 @@ static CYTHON_INLINE int __Pyx_UnicodeContainsUCS4(PyObject* unicode, Py_UCS4 ch } return 0; } -#elif PY_VERSION_HEX >= 0x03090000 - #error Cannot use "UChar in Unicode" in Python 3.9 without PEP-393 unicode strings. -#elif !defined(PyUnicode_AS_UNICODE) - #error Cannot use "UChar in Unicode" in Python < 3.9 without Py_UNICODE support. +#elif PY_VERSION_HEX >= 0x03090000 + #error Cannot use "UChar in Unicode" in Python 3.9 without PEP-393 unicode strings. +#elif !defined(PyUnicode_AS_UNICODE) + #error Cannot use "UChar in Unicode" in Python < 3.9 without Py_UNICODE support. 
#endif - -#if PY_VERSION_HEX < 0x03090000 || (defined(PyUnicode_WCHAR_KIND) && defined(PyUnicode_AS_UNICODE)) -#if !defined(Py_UNICODE_SIZE) || Py_UNICODE_SIZE == 2 - if ((sizeof(Py_UNICODE) == 2) && unlikely(character > 65535)) { + +#if PY_VERSION_HEX < 0x03090000 || (defined(PyUnicode_WCHAR_KIND) && defined(PyUnicode_AS_UNICODE)) +#if !defined(Py_UNICODE_SIZE) || Py_UNICODE_SIZE == 2 + if ((sizeof(Py_UNICODE) == 2) && unlikely(character > 65535)) { return __Pyx_PyUnicodeBufferContainsUCS4_SP( - __Pyx_PyUnicode_AS_UNICODE(unicode), - __Pyx_PyUnicode_GET_SIZE(unicode), + __Pyx_PyUnicode_AS_UNICODE(unicode), + __Pyx_PyUnicode_GET_SIZE(unicode), character); - } else -#endif - { + } else +#endif + { return __Pyx_PyUnicodeBufferContainsUCS4_BMP( - __Pyx_PyUnicode_AS_UNICODE(unicode), - __Pyx_PyUnicode_GET_SIZE(unicode), + __Pyx_PyUnicode_AS_UNICODE(unicode), + __Pyx_PyUnicode_GET_SIZE(unicode), character); } -#endif +#endif } @@ -471,7 +471,7 @@ static CYTHON_INLINE PyObject* __Pyx_decode_c_string( /////////////// decode_c_string /////////////// //@requires: IncludeStringH //@requires: decode_c_string_utf16 -//@substitute: naming +//@substitute: naming /* duplicate code to avoid calling strlen() if start >= 0 and stop >= 0 */ static CYTHON_INLINE PyObject* __Pyx_decode_c_string( @@ -495,8 +495,8 @@ static CYTHON_INLINE PyObject* __Pyx_decode_c_string( if (stop < 0) stop += length; } - if (unlikely(stop <= start)) - return __Pyx_NewRef($empty_unicode); + if (unlikely(stop <= start)) + return __Pyx_NewRef($empty_unicode); length = stop - start; cstring += start; if (decode_func) { @@ -515,7 +515,7 @@ static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes( /////////////// decode_c_bytes /////////////// //@requires: decode_c_string_utf16 -//@substitute: naming +//@substitute: naming static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes( const char* cstring, Py_ssize_t length, Py_ssize_t start, Py_ssize_t stop, @@ -532,8 +532,8 @@ static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes( } if (stop > length) stop = length; - if (unlikely(stop <= start)) - return __Pyx_NewRef($empty_unicode); + if (unlikely(stop <= start)) + return __Pyx_NewRef($empty_unicode); length = stop - start; cstring += start; if (decode_func) { @@ -573,7 +573,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Substring( PyObject* text, Py_ssize_t start, Py_ssize_t stop); /////////////// PyUnicode_Substring /////////////// -//@substitute: naming +//@substitute: naming static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Substring( PyObject* text, Py_ssize_t start, Py_ssize_t stop) { @@ -589,8 +589,8 @@ static CYTHON_INLINE PyObject* __Pyx_PyUnicode_Substring( stop += length; else if (stop > length) stop = length; - if (stop <= start) - return __Pyx_NewRef($empty_unicode); + if (stop <= start) + return __Pyx_NewRef($empty_unicode); #if CYTHON_PEP393_ENABLED return PyUnicode_FromKindAndData(PyUnicode_KIND(text), PyUnicode_1BYTE_DATA(text) + start*PyUnicode_KIND(text), stop-start); diff --git a/contrib/tools/cython/Cython/Utility/TypeConversion.c b/contrib/tools/cython/Cython/Utility/TypeConversion.c index 759ac78274..7a7bf0f799 100644 --- a/contrib/tools/cython/Cython/Utility/TypeConversion.c +++ b/contrib/tools/cython/Cython/Utility/TypeConversion.c @@ -102,7 +102,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); +static 
CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); #if CYTHON_ASSUME_SAFE_MACROS #define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) @@ -421,25 +421,25 @@ static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { } -static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { - if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { - return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); -#if PY_MAJOR_VERSION < 3 - } else if (likely(PyInt_CheckExact(o))) { - return PyInt_AS_LONG(o); -#endif - } else { - Py_ssize_t ival; - PyObject *x; - x = PyNumber_Index(o); - if (!x) return -1; - ival = PyInt_AsLong(x); - Py_DECREF(x); - return ival; - } -} - - +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { + if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { + return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); +#if PY_MAJOR_VERSION < 3 + } else if (likely(PyInt_CheckExact(o))) { + return PyInt_AS_LONG(o); +#endif + } else { + Py_ssize_t ival; + PyObject *x; + x = PyNumber_Index(o); + if (!x) return -1; + ival = PyInt_AsLong(x); + Py_DECREF(x); + return ival; + } +} + + static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); } @@ -450,15 +450,15 @@ static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { } -/////////////// GCCDiagnostics.proto /////////////// - -// GCC diagnostic pragmas were introduced in GCC 4.6 -// Used to silence conversion warnings that are ok but cannot be avoided. -#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) -#define __Pyx_HAS_GCC_DIAGNOSTIC -#endif - - +/////////////// GCCDiagnostics.proto /////////////// + +// GCC diagnostic pragmas were introduced in GCC 4.6 +// Used to silence conversion warnings that are ok but cannot be avoided. 
+#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
+#define __Pyx_HAS_GCC_DIAGNOSTIC
+#endif
+
+
 /////////////// ToPyCTupleUtility.proto ///////////////
 
 static PyObject* {{funcname}}({{struct_type_decl}});
@@ -645,17 +645,17 @@ static CYTHON_INLINE Py_UNICODE __Pyx_PyObject_AsPy_UNICODE(PyObject* x) {
 static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value);
 
 /////////////// CIntToPy ///////////////
-//@requires: GCCDiagnostics
+//@requires: GCCDiagnostics
 
 static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value) {
-#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
-#pragma GCC diagnostic push
-#pragma GCC diagnostic ignored "-Wconversion"
-#endif
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wconversion"
+#endif
     const {{TYPE}} neg_one = ({{TYPE}}) -1, const_zero = ({{TYPE}}) 0;
-#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
-#pragma GCC diagnostic pop
-#endif
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic pop
+#endif
     const int is_unsigned = neg_one > const_zero;
     if (is_unsigned) {
         if (sizeof({{TYPE}}) < sizeof(long)) {
@@ -722,10 +722,10 @@ static const char DIGITS_HEX[2*16+1] = {
 static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value, Py_ssize_t width, char padding_char, char format_char);
 
 /////////////// CIntToPyUnicode ///////////////
-//@requires: StringTools.c::IncludeStringH
+//@requires: StringTools.c::IncludeStringH
 //@requires: StringTools.c::BuildPyUnicode
 //@requires: CIntToDigits
-//@requires: GCCDiagnostics
+//@requires: GCCDiagnostics
 
 // NOTE: inlining because most arguments are constant, which collapses lots of code below
@@ -738,12 +738,12 @@ static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value, Py_ssize_t wid
     Py_ssize_t length, ulength;
    int prepend_sign, last_one_off;
     {{TYPE}} remaining;
-#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
 #pragma GCC diagnostic push
 #pragma GCC diagnostic ignored "-Wconversion"
 #endif
     const {{TYPE}} neg_one = ({{TYPE}}) -1, const_zero = ({{TYPE}}) 0;
-#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
 #pragma GCC diagnostic pop
 #endif
     const int is_unsigned = neg_one > const_zero;
@@ -764,14 +764,14 @@ static CYTHON_INLINE PyObject* {{TO_PY_FUNCTION}}({{TYPE}} value, Py_ssize_t wid
             digit_pos = abs((int)(remaining % (8*8)));
             remaining = ({{TYPE}}) (remaining / (8*8));
             dpos -= 2;
-            memcpy(dpos, DIGIT_PAIRS_8 + digit_pos * 2, 2); /* copy 2 digits at a time, unaligned */
+            memcpy(dpos, DIGIT_PAIRS_8 + digit_pos * 2, 2); /* copy 2 digits at a time, unaligned */
             last_one_off = (digit_pos < 8);
             break;
         case 'd':
             digit_pos = abs((int)(remaining % (10*10)));
             remaining = ({{TYPE}}) (remaining / (10*10));
             dpos -= 2;
-            memcpy(dpos, DIGIT_PAIRS_10 + digit_pos * 2, 2); /* copy 2 digits at a time, unaligned */
+            memcpy(dpos, DIGIT_PAIRS_10 + digit_pos * 2, 2); /* copy 2 digits at a time, unaligned */
             last_one_off = (digit_pos < 10);
             break;
         case 'x':
@@ -870,19 +870,19 @@ static CYTHON_INLINE {{TYPE}} {{FROM_PY_FUNCTION}}(PyObject *);
 
 /////////////// CIntFromPy ///////////////
 //@requires: CIntFromPyVerify
-//@requires: GCCDiagnostics
+//@requires: GCCDiagnostics
 
 {{py:
 from Cython.Utility import pylong_join
 }}
 
 static CYTHON_INLINE {{TYPE}} {{FROM_PY_FUNCTION}}(PyObject *x) {
-#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
-#pragma GCC diagnostic push
-#pragma GCC diagnostic ignored "-Wconversion"
-#endif
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wconversion"
+#endif
     const {{TYPE}} neg_one = ({{TYPE}}) -1, const_zero = ({{TYPE}}) 0;
-#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
-#pragma GCC diagnostic pop
-#endif
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic pop
+#endif
     const int is_unsigned = neg_one > const_zero;
 #if PY_MAJOR_VERSION < 3
     if (likely(PyInt_Check(x))) {
diff --git a/contrib/tools/cython/Cython/Utility/arrayarray.h b/contrib/tools/cython/Cython/Utility/arrayarray.h
index 1eb1db1ec3..a9e4923785 100644
--- a/contrib/tools/cython/Cython/Utility/arrayarray.h
+++ b/contrib/tools/cython/Cython/Utility/arrayarray.h
@@ -88,7 +88,7 @@ static CYTHON_INLINE PyObject * newarrayobject(PyTypeObject *type, Py_ssize_t si
     op->ob_descr = descr;
     op->allocated = size;
     op->weakreflist = NULL;
-    __Pyx_SET_SIZE(op, size);
+    __Pyx_SET_SIZE(op, size);
     if (size <= 0) {
         op->data.ob_item = NULL;
     }
@@ -116,7 +116,7 @@ static CYTHON_INLINE int resize(arrayobject *self, Py_ssize_t n) {
         return -1;
     }
     self->data.ob_item = (char*) items;
-    __Pyx_SET_SIZE(self, n);
+    __Pyx_SET_SIZE(self, n);
     self->allocated = n;
     return 0;
 }
@@ -126,7 +126,7 @@ static CYTHON_INLINE int resize_smart(arrayobject *self, Py_ssize_t n) {
     void *items = (void*) self->data.ob_item;
     Py_ssize_t newsize;
     if (n < self->allocated && n*4 > self->allocated) {
-        __Pyx_SET_SIZE(self, n);
+        __Pyx_SET_SIZE(self, n);
         return 0;
     }
     newsize = n + (n / 2) + 1;
@@ -140,7 +140,7 @@ static CYTHON_INLINE int resize_smart(arrayobject *self, Py_ssize_t n) {
         return -1;
     }
     self->data.ob_item = (char*) items;
-    __Pyx_SET_SIZE(self, n);
+    __Pyx_SET_SIZE(self, n);
     self->allocated = newsize;
     return 0;
 }
diff --git a/contrib/tools/cython/Cython/Utils.py b/contrib/tools/cython/Cython/Utils.py
index c1159a3381..d59d67d78b 100644
--- a/contrib/tools/cython/Cython/Utils.py
+++ b/contrib/tools/cython/Cython/Utils.py
@@ -21,7 +21,7 @@ import re
 import io
 import codecs
 import shutil
-import tempfile
+import tempfile
 from contextlib import contextmanager
 
 modification_time = os.path.getmtime
@@ -337,23 +337,23 @@ def get_cython_cache_dir():
 def captured_fd(stream=2, encoding=None):
     orig_stream = os.dup(stream)  # keep copy of original stream
     try:
-        with tempfile.TemporaryFile(mode="a+b") as temp_file:
-            def read_output(_output=[b'']):
-                if not temp_file.closed:
-                    temp_file.seek(0)
-                    _output[0] = temp_file.read()
-                return _output[0]
-
-            os.dup2(temp_file.fileno(), stream)  # replace stream by copy of pipe
-            try:
-                def get_output():
-                    result = read_output()
-                    return result.decode(encoding) if encoding else result
-
-                yield get_output
-            finally:
-                os.dup2(orig_stream, stream)  # restore original stream
-                read_output()  # keep the output in case it's used after closing the context manager
+        with tempfile.TemporaryFile(mode="a+b") as temp_file:
+            def read_output(_output=[b'']):
+                if not temp_file.closed:
+                    temp_file.seek(0)
+                    _output[0] = temp_file.read()
+                return _output[0]
+
+            os.dup2(temp_file.fileno(), stream)  # replace stream by copy of pipe
+            try:
+                def get_output():
+                    result = read_output()
+                    return result.decode(encoding) if encoding else result
+
+                yield get_output
+            finally:
+                os.dup2(orig_stream, stream)  # restore original stream
+                read_output()  # keep the output in case it's used after closing the context manager
     finally:
         os.close(orig_stream)
@@ -427,7 +427,7 @@ def raise_error_if_module_name_forbidden(full_module_name):
 
 def build_hex_version(version_string):
     """
-    Parse and translate '4.3a1' into the readable hex representation '0x040300A1' (like PY_VERSION_HEX).
+    Parse and translate '4.3a1' into the readable hex representation '0x040300A1' (like PY_VERSION_HEX).
     """
     # First, parse '4.12a1' into [4, 12, 0, 0xA01].
     digits = []
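
Note on the last hunk: the build_hex_version() docstring above describes a PY_VERSION_HEX-style packing of a version string ('4.3a1' becomes 0x040300A1). The following is an illustrative sketch only, not the code from the patched Utils.py; the helper name and parameters are hypothetical, and it assumes the usual CPython release-level letters. It shows the byte layout implied by the docstring example.

    # Hypothetical sketch of a PY_VERSION_HEX-style layout; not Cython's implementation.
    RELEASE_LEVELS = {'a': 0xA, 'b': 0xB, 'rc': 0xC, '': 0xF}  # 0xF marks a final release

    def hex_version_sketch(major, minor, micro=0, level='', serial=0):
        # One byte per version part; release level and serial share the last byte
        # (high and low nibble respectively).
        return (major << 24) | (minor << 16) | (micro << 8) | (RELEASE_LEVELS[level] << 4) | serial

    assert hex_version_sketch(4, 3, 0, 'a', 1) == 0x040300A1  # the '4.3a1' example from the docstring

The real helper parses the version string and handles multi-digit components; the sketch only illustrates how the final hex value is laid out.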