author    | shadchin <shadchin@yandex-team.ru>          | 2022-02-10 16:44:30 +0300
committer | Daniil Cherednik <dcherednik@yandex-team.ru> | 2022-02-10 16:44:30 +0300
commit    | 2598ef1d0aee359b4b6d5fdd1758916d5907d04f (patch)
tree      | 012bb94d777798f1f56ac1cec429509766d05181 /contrib/tools/cython/Cython/Compiler
parent    | 6751af0b0c1b952fede40b19b71da8025b5d8bcf (diff)
download  | ydb-2598ef1d0aee359b4b6d5fdd1758916d5907d04f.tar.gz
Restoring authorship annotation for <shadchin@yandex-team.ru>. Commit 1 of 2.
Diffstat (limited to 'contrib/tools/cython/Cython/Compiler')
29 files changed, 565 insertions, 565 deletions
Every hunk in this diff removes lines and re-adds them with identical content apart from whitespace, restoring per-line authorship without functional changes. Affected files:

diff --git a/contrib/tools/cython/Cython/Compiler/Buffer.py b/contrib/tools/cython/Cython/Compiler/Buffer.py
index c62a24f568..9603235611 100644
diff --git a/contrib/tools/cython/Cython/Compiler/Builtin.py b/contrib/tools/cython/Cython/Compiler/Builtin.py
index 5fa717507d..3dc6698721 100644
diff --git a/contrib/tools/cython/Cython/Compiler/Code.pxd b/contrib/tools/cython/Cython/Compiler/Code.pxd
index acad0c1cf4..c07cc415e1 100644
diff --git a/contrib/tools/cython/Cython/Compiler/Code.py b/contrib/tools/cython/Cython/Compiler/Code.py
index f43c4b2b8e..3120deb795 100644
diff --git a/contrib/tools/cython/Cython/Compiler/ExprNodes.py b/contrib/tools/cython/Cython/Compiler/ExprNodes.py
index 4a402f8126..94f30d7e7a 100644
diff --git a/contrib/tools/cython/Cython/Compiler/FlowControl.py b/contrib/tools/cython/Cython/Compiler/FlowControl.py
index df04471f90..2f5002d14d 100644
diff --git a/contrib/tools/cython/Cython/Compiler/FusedNode.py b/contrib/tools/cython/Cython/Compiler/FusedNode.py
index 26d6ffd3d6..f31b74e7a6 100644
diff --git a/contrib/tools/cython/Cython/Compiler/MemoryView.py b/contrib/tools/cython/Cython/Compiler/MemoryView.py
index 0406d6c716..fc46861dc1 100644
diff --git a/contrib/tools/cython/Cython/Compiler/ModuleNode.py b/contrib/tools/cython/Cython/Compiler/ModuleNode.py
index cd7166408e..b30be60dfe 100644
diff --git a/contrib/tools/cython/Cython/Compiler/Nodes.py b/contrib/tools/cython/Cython/Compiler/Nodes.py
index 6436c5002d..8c7952e804 100644
diff --git a/contrib/tools/cython/Cython/Compiler/Optimize.py b/contrib/tools/cython/Cython/Compiler/Optimize.py
index 3cb77efe2c..65924b4a51 100644
diff --git a/contrib/tools/cython/Cython/Compiler/Options.py b/contrib/tools/cython/Cython/Compiler/Options.py
index b3ffbcd927..4bd586ee41 100644
diff --git a/contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.py b/contrib/tools/cython/Cython/Compiler/ParseTreeTransforms.py
index 0da3670cae..18dfb49aa1 100644
diff --git a/contrib/tools/cython/Cython/Compiler/Parsing.pxd b/contrib/tools/cython/Cython/Compiler/Parsing.pxd
index 25453b39ab..ca9a3e85f4 100644
diff --git a/contrib/tools/cython/Cython/Compiler/Parsing.py
b/contrib/tools/cython/Cython/Compiler/Parsing.py index 4d2f12a24a..40862bcee6 100644 --- a/contrib/tools/cython/Cython/Compiler/Parsing.py +++ b/contrib/tools/cython/Cython/Compiler/Parsing.py @@ -882,7 +882,7 @@ def p_string_literal(s, kind_override=None): pos = s.position() is_python3_source = s.context.language_level >= 3 has_non_ascii_literal_characters = False - string_start_pos = (pos[0], pos[1], pos[2] + len(s.systring)) + string_start_pos = (pos[0], pos[1], pos[2] + len(s.systring)) kind_string = s.systring.rstrip('"\'').lower() if len(kind_string) > 1: if len(set(kind_string)) != len(kind_string): @@ -966,7 +966,7 @@ def p_string_literal(s, kind_override=None): s.error("bytes can only contain ASCII literal characters.", pos=pos) bytes_value = None if kind == 'f': - unicode_value = p_f_string(s, unicode_value, string_start_pos, is_raw='r' in kind_string) + unicode_value = p_f_string(s, unicode_value, string_start_pos, is_raw='r' in kind_string) s.next() return (kind, bytes_value, unicode_value) @@ -1038,10 +1038,10 @@ _parse_escape_sequences_raw, _parse_escape_sequences = [re.compile(( for is_raw in (True, False)] -def _f_string_error_pos(pos, string, i): - return (pos[0], pos[1], pos[2] + i + 1) # FIXME: handle newlines in string - - +def _f_string_error_pos(pos, string, i): + return (pos[0], pos[1], pos[2] + i + 1) # FIXME: handle newlines in string + + def p_f_string(s, unicode_value, pos, is_raw): # Parses a PEP 498 f-string literal into a list of nodes. Nodes are either UnicodeNodes # or FormattedValueNodes. @@ -1055,7 +1055,7 @@ def p_f_string(s, unicode_value, pos, is_raw): end = next_start match = _parse_seq(unicode_value, next_start) if match is None: - error(_f_string_error_pos(pos, unicode_value, next_start), "Invalid escape sequence") + error(_f_string_error_pos(pos, unicode_value, next_start), "Invalid escape sequence") next_start = match.end() part = match.group() @@ -1079,8 +1079,8 @@ def p_f_string(s, unicode_value, pos, is_raw): if part == '}}': builder.append('}') else: - error(_f_string_error_pos(pos, unicode_value, end), - "f-string: single '}' is not allowed") + error(_f_string_error_pos(pos, unicode_value, end), + "f-string: single '}' is not allowed") else: builder.append(part) @@ -1101,20 +1101,20 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw): nested_depth = 0 quote_char = NO_CHAR in_triple_quotes = False - backslash_reported = False + backslash_reported = False while True: if i >= size: - break # error will be reported below + break # error will be reported below c = unicode_value[i] if quote_char != NO_CHAR: if c == '\\': - # avoid redundant error reports along '\' sequences - if not backslash_reported: - error(_f_string_error_pos(pos, unicode_value, i), - "backslashes not allowed in f-strings") - backslash_reported = True + # avoid redundant error reports along '\' sequences + if not backslash_reported: + error(_f_string_error_pos(pos, unicode_value, i), + "backslashes not allowed in f-strings") + backslash_reported = True elif c == quote_char: if in_triple_quotes: if i + 2 < size and unicode_value[i + 1] == c and unicode_value[i + 2] == c: @@ -1133,8 +1133,8 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw): elif nested_depth != 0 and c in '}])': nested_depth -= 1 elif c == '#': - error(_f_string_error_pos(pos, unicode_value, i), - "format string cannot include #") + error(_f_string_error_pos(pos, unicode_value, i), + "format string cannot include #") elif nested_depth == 0 and c in '!:}': # allow != as a 
special case if c == '!' and i + 1 < size and unicode_value[i + 1] == '=': @@ -1150,13 +1150,13 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw): expr_pos = (pos[0], pos[1], pos[2] + starting_index + 2) # TODO: find exact code position (concat, multi-line, ...) if not expr_str.strip(): - error(_f_string_error_pos(pos, unicode_value, starting_index), - "empty expression not allowed in f-string") + error(_f_string_error_pos(pos, unicode_value, starting_index), + "empty expression not allowed in f-string") if terminal_char == '!': i += 1 if i + 2 > size: - pass # error will be reported below + pass # error will be reported below else: conversion_char = unicode_value[i] i += 1 @@ -1169,7 +1169,7 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw): start_format_spec = i + 1 while True: if i >= size: - break # error will be reported below + break # error will be reported below c = unicode_value[i] if not in_triple_quotes and not in_string: if c == '{': @@ -1191,9 +1191,9 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw): format_spec_str = unicode_value[start_format_spec:i] if terminal_char != '}': - error(_f_string_error_pos(pos, unicode_value, i), - "missing '}' in format string expression" + ( - ", found '%s'" % terminal_char if terminal_char else "")) + error(_f_string_error_pos(pos, unicode_value, i), + "missing '}' in format string expression" + ( + ", found '%s'" % terminal_char if terminal_char else "")) # parse the expression as if it was surrounded by parentheses buf = StringIO('(%s)' % expr_str) @@ -1202,7 +1202,7 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw): # validate the conversion char if conversion_char is not None and not ExprNodes.FormattedValueNode.find_conversion_func(conversion_char): - error(expr_pos, "invalid conversion character '%s'" % conversion_char) + error(expr_pos, "invalid conversion character '%s'" % conversion_char) # the format spec is itself treated like an f-string if format_spec_str: @@ -2254,7 +2254,7 @@ def p_statement(s, ctx, first_statement = 0): s.error('decorator not allowed here') s.level = ctx.level decorators = p_decorators(s) - if not ctx.allow_struct_enum_decorator and s.sy not in ('def', 'cdef', 'cpdef', 'class', 'async'): + if not ctx.allow_struct_enum_decorator and s.sy not in ('def', 'cdef', 'cpdef', 'class', 'async'): if s.sy == 'IDENT' and s.systring == 'async': pass # handled below else: @@ -2683,7 +2683,7 @@ def looking_at_expr(s): s.put_back(*saved) elif s.sy == '[': s.next() - is_type = s.sy == ']' or not looking_at_expr(s) # could be a nested template type + is_type = s.sy == ']' or not looking_at_expr(s) # could be a nested template type s.put_back(*saved) dotted_path.reverse() diff --git a/contrib/tools/cython/Cython/Compiler/PyrexTypes.py b/contrib/tools/cython/Cython/Compiler/PyrexTypes.py index 3d4931cea6..eebd0306a6 100644 --- a/contrib/tools/cython/Cython/Compiler/PyrexTypes.py +++ b/contrib/tools/cython/Cython/Compiler/PyrexTypes.py @@ -5,7 +5,7 @@ from __future__ import absolute_import import copy -import hashlib +import hashlib import re try: @@ -4043,10 +4043,10 @@ class CTupleType(CType): env.use_utility_code(self._convert_from_py_code) return True - def cast_code(self, expr_code): - return expr_code - + def cast_code(self, expr_code): + return expr_code + def c_tuple_type(components): components = tuple(components) cname = Naming.ctuple_type_prefix + type_list_identifier(components) @@ -4740,5 +4740,5 @@ def type_identifier(type): def 
cap_length(s, max_prefix=63, max_len=1024): if len(s) <= max_prefix: return s - hash_prefix = hashlib.sha256(s.encode('ascii')).hexdigest()[:6] - return '%s__%s__etc' % (hash_prefix, s[:max_len-17]) + hash_prefix = hashlib.sha256(s.encode('ascii')).hexdigest()[:6] + return '%s__%s__etc' % (hash_prefix, s[:max_len-17]) diff --git a/contrib/tools/cython/Cython/Compiler/Pythran.py b/contrib/tools/cython/Cython/Compiler/Pythran.py index c02704a918..7fa3e0cbab 100644 --- a/contrib/tools/cython/Cython/Compiler/Pythran.py +++ b/contrib/tools/cython/Cython/Compiler/Pythran.py @@ -9,18 +9,18 @@ import cython try: import pythran pythran_is_pre_0_9 = tuple(map(int, pythran.__version__.split('.')[0:2])) < (0, 9) - pythran_is_pre_0_9_6 = tuple(map(int, pythran.__version__.split('.')[0:3])) < (0, 9, 6) + pythran_is_pre_0_9_6 = tuple(map(int, pythran.__version__.split('.')[0:3])) < (0, 9, 6) except ImportError: pythran = None pythran_is_pre_0_9 = True - pythran_is_pre_0_9_6 = True - -if pythran_is_pre_0_9_6: - pythran_builtins = '__builtin__' -else: - pythran_builtins = 'builtins' + pythran_is_pre_0_9_6 = True +if pythran_is_pre_0_9_6: + pythran_builtins = '__builtin__' +else: + pythran_builtins = 'builtins' + # Pythran/Numpy specific operations def has_np_pythran(env): @@ -54,7 +54,7 @@ def pythran_type(Ty, ptype="ndarray"): if Ty.is_pythran_expr: return Ty.pythran_type #if Ty.is_none: - # return "decltype(pythonic::builtins::None)" + # return "decltype(pythonic::builtins::None)" if Ty.is_numeric: return Ty.sign_and_name() raise ValueError("unsupported pythran type %s (%s)" % (Ty, type(Ty))) @@ -89,9 +89,9 @@ def _index_type_code(index_with_type): idx, index_type = index_with_type if idx.is_slice: n = 2 + int(not idx.step.is_none) - return "pythonic::%s::functor::slice{}(%s)" % ( - pythran_builtins, - ",".join(["0"]*n)) + return "pythonic::%s::functor::slice{}(%s)" % ( + pythran_builtins, + ",".join(["0"]*n)) elif index_type.is_int: return "std::declval<%s>()" % index_type.sign_and_name() elif index_type.is_pythran_expr: @@ -163,7 +163,7 @@ def to_pythran(op, ptype=None): if is_type(op_type, ["is_pythran_expr", "is_numeric", "is_float", "is_complex"]): return op.result() if op.is_none: - return "pythonic::%s::None" % pythran_builtins + return "pythonic::%s::None" % pythran_builtins if ptype is None: ptype = pythran_type(op_type) @@ -216,7 +216,7 @@ def include_pythran_generic(env): env.add_include_file("pythonic/types/bool.hpp") env.add_include_file("pythonic/types/ndarray.hpp") env.add_include_file("pythonic/numpy/power.hpp") - env.add_include_file("pythonic/%s/slice.hpp" % pythran_builtins) + env.add_include_file("pythonic/%s/slice.hpp" % pythran_builtins) env.add_include_file("<new>") # for placement new for i in (8, 16, 32, 64): diff --git a/contrib/tools/cython/Cython/Compiler/Scanning.pxd b/contrib/tools/cython/Cython/Compiler/Scanning.pxd index 59593f88a2..20cd54b52a 100644 --- a/contrib/tools/cython/Cython/Compiler/Scanning.pxd +++ b/contrib/tools/cython/Cython/Compiler/Scanning.pxd @@ -38,7 +38,7 @@ cdef class PyrexScanner(Scanner): cdef public list indentation_stack cdef public indentation_char cdef public int bracket_nesting_level - cdef readonly bint async_enabled + cdef readonly bint async_enabled cdef public sy cdef public systring diff --git a/contrib/tools/cython/Cython/Compiler/Scanning.py b/contrib/tools/cython/Cython/Compiler/Scanning.py index c721bba69b..ea33eee7a2 100644 --- a/contrib/tools/cython/Cython/Compiler/Scanning.py +++ b/contrib/tools/cython/Cython/Compiler/Scanning.py @@ 
-41,8 +41,8 @@ py_reserved_words = [ "global", "nonlocal", "def", "class", "print", "del", "pass", "break", "continue", "return", "raise", "import", "exec", "try", "except", "finally", "while", "if", "elif", "else", "for", - "in", "assert", "and", "or", "not", "is", "lambda", - "from", "yield", "with", + "in", "assert", "and", "or", "not", "is", "lambda", + "from", "yield", "with", ] pyx_reserved_words = py_reserved_words + [ @@ -324,25 +324,25 @@ class PyrexScanner(Scanner): def __init__(self, file, filename, parent_scanner=None, scope=None, context=None, source_encoding=None, parse_comments=True, initial_pos=None): Scanner.__init__(self, get_lexicon(), file, filename, initial_pos) - - if filename.is_python_file(): - self.in_python_file = True - self.keywords = set(py_reserved_words) - else: - self.in_python_file = False - self.keywords = set(pyx_reserved_words) - - self.async_enabled = 0 - + + if filename.is_python_file(): + self.in_python_file = True + self.keywords = set(py_reserved_words) + else: + self.in_python_file = False + self.keywords = set(pyx_reserved_words) + + self.async_enabled = 0 + if parent_scanner: self.context = parent_scanner.context self.included_files = parent_scanner.included_files self.compile_time_env = parent_scanner.compile_time_env self.compile_time_eval = parent_scanner.compile_time_eval self.compile_time_expr = parent_scanner.compile_time_expr - - if parent_scanner.async_enabled: - self.enter_async() + + if parent_scanner.async_enabled: + self.enter_async() else: self.context = context self.included_files = scope.included_files @@ -357,7 +357,7 @@ class PyrexScanner(Scanner): self.indentation_stack = [0] self.indentation_char = None self.bracket_nesting_level = 0 - + self.begin('INDENT') self.sy = '' self.next() diff --git a/contrib/tools/cython/Cython/Compiler/StringEncoding.py b/contrib/tools/cython/Cython/Compiler/StringEncoding.py index c37e8aab79..4bbcd8a3d6 100644 --- a/contrib/tools/cython/Cython/Compiler/StringEncoding.py +++ b/contrib/tools/cython/Cython/Compiler/StringEncoding.py @@ -154,34 +154,34 @@ def string_contains_surrogates(ustring): return False -def string_contains_lone_surrogates(ustring): - """ - Check if the unicode string contains lone surrogate code points - on a CPython platform with wide (UCS-4) or narrow (UTF-16) - Unicode, i.e. characters that would be spelled as two - separate code units on a narrow platform, but that do not form a pair. - """ - last_was_start = False - unicode_uses_surrogate_encoding = sys.maxunicode == 65535 - for c in map(ord, ustring): - # surrogates tend to be rare - if c < 0xD800 or c > 0xDFFF: - if last_was_start: - return True - elif not unicode_uses_surrogate_encoding: - # on 32bit Unicode platforms, there is never a pair - return True - elif c <= 0xDBFF: - if last_was_start: - return True # lone start - last_was_start = True - else: - if not last_was_start: - return True # lone end - last_was_start = False - return last_was_start - - +def string_contains_lone_surrogates(ustring): + """ + Check if the unicode string contains lone surrogate code points + on a CPython platform with wide (UCS-4) or narrow (UTF-16) + Unicode, i.e. characters that would be spelled as two + separate code units on a narrow platform, but that do not form a pair. 
+ """ + last_was_start = False + unicode_uses_surrogate_encoding = sys.maxunicode == 65535 + for c in map(ord, ustring): + # surrogates tend to be rare + if c < 0xD800 or c > 0xDFFF: + if last_was_start: + return True + elif not unicode_uses_surrogate_encoding: + # on 32bit Unicode platforms, there is never a pair + return True + elif c <= 0xDBFF: + if last_was_start: + return True # lone start + last_was_start = True + else: + if not last_was_start: + return True # lone end + last_was_start = False + return last_was_start + + class BytesLiteral(_bytes): # bytes subclass that is compatible with EncodedString encoding = None diff --git a/contrib/tools/cython/Cython/Compiler/Symtab.py b/contrib/tools/cython/Cython/Compiler/Symtab.py index 7361a55aea..bbedbd8c41 100644 --- a/contrib/tools/cython/Cython/Compiler/Symtab.py +++ b/contrib/tools/cython/Cython/Compiler/Symtab.py @@ -822,7 +822,7 @@ class Scope(object): if overridable: # names of cpdef functions can be used as variables and can be assigned to var_entry = Entry(name, cname, py_object_type) # FIXME: cname? - var_entry.qualified_name = self.qualify_name(name) + var_entry.qualified_name = self.qualify_name(name) var_entry.is_variable = 1 var_entry.is_pyglobal = 1 var_entry.scope = entry.scope @@ -1035,7 +1035,7 @@ class BuiltinScope(Scope): else: python_equiv = EncodedString(python_equiv) var_entry = Entry(python_equiv, python_equiv, py_object_type) - var_entry.qualified_name = self.qualify_name(name) + var_entry.qualified_name = self.qualify_name(name) var_entry.is_variable = 1 var_entry.is_builtin = 1 var_entry.utility_code = utility_code @@ -1059,7 +1059,7 @@ class BuiltinScope(Scope): type = self.lookup('type').type, # make sure "type" is the first type declared... pos = entry.pos, cname = entry.type.typeptr_cname) - var_entry.qualified_name = self.qualify_name(name) + var_entry.qualified_name = self.qualify_name(name) var_entry.is_variable = 1 var_entry.is_cglobal = 1 var_entry.is_readonly = 1 @@ -1247,7 +1247,7 @@ class ModuleScope(Scope): else: entry.is_builtin = 1 entry.name = name - entry.qualified_name = self.builtin_scope().qualify_name(name) + entry.qualified_name = self.builtin_scope().qualify_name(name) return entry def find_module(self, module_name, pos, relative_level=-1): @@ -1711,7 +1711,7 @@ class ModuleScope(Scope): type = Builtin.type_type, pos = entry.pos, cname = entry.type.typeptr_cname) - var_entry.qualified_name = entry.qualified_name + var_entry.qualified_name = entry.qualified_name var_entry.is_variable = 1 var_entry.is_cglobal = 1 var_entry.is_readonly = 1 @@ -2295,7 +2295,7 @@ class CClassScope(ClassScope): entry = self.declare_cfunction(name, type, None, cname, visibility='extern', utility_code=utility_code) var_entry = Entry(name, name, py_object_type) - var_entry.qualified_name = name + var_entry.qualified_name = name var_entry.is_variable = 1 var_entry.is_builtin = 1 var_entry.utility_code = utility_code diff --git a/contrib/tools/cython/Cython/Compiler/Tests/TestBuffer.py b/contrib/tools/cython/Cython/Compiler/Tests/TestBuffer.py index 1f69d96524..45f8c6b74f 100644 --- a/contrib/tools/cython/Cython/Compiler/Tests/TestBuffer.py +++ b/contrib/tools/cython/Cython/Compiler/Tests/TestBuffer.py @@ -21,7 +21,7 @@ class TestBufferParsing(CythonTest): def test_basic(self): t = self.parse(u"cdef object[float, 4, ndim=2, foo=foo] x") bufnode = t.stats[0].base_type - self.assertTrue(isinstance(bufnode, TemplatedTypeNode)) + self.assertTrue(isinstance(bufnode, TemplatedTypeNode)) self.assertEqual(2, 
len(bufnode.positional_args)) # print bufnode.dump() # should put more here... @@ -46,7 +46,7 @@ class TestBufferOptions(CythonTest): def nonfatal_error(self, error): # We're passing self as context to transform to trap this self.error = error - self.assertTrue(self.expect_error) + self.assertTrue(self.expect_error) def parse_opts(self, opts, expect_error=False): assert opts != "" @@ -57,12 +57,12 @@ class TestBufferOptions(CythonTest): vardef = root.stats[0].body.stats[0] assert isinstance(vardef, CVarDefNode) # use normal assert as this is to validate the test code buftype = vardef.base_type - self.assertTrue(isinstance(buftype, TemplatedTypeNode)) - self.assertTrue(isinstance(buftype.base_type_node, CSimpleBaseTypeNode)) + self.assertTrue(isinstance(buftype, TemplatedTypeNode)) + self.assertTrue(isinstance(buftype.base_type_node, CSimpleBaseTypeNode)) self.assertEqual(u"object", buftype.base_type_node.name) return buftype else: - self.assertTrue(len(root.stats[0].body.stats) == 0) + self.assertTrue(len(root.stats[0].body.stats) == 0) def non_parse(self, expected_err, opts): self.parse_opts(opts, expect_error=True) @@ -71,14 +71,14 @@ class TestBufferOptions(CythonTest): def __test_basic(self): buf = self.parse_opts(u"unsigned short int, 3") - self.assertTrue(isinstance(buf.dtype_node, CSimpleBaseTypeNode)) - self.assertTrue(buf.dtype_node.signed == 0 and buf.dtype_node.longness == -1) + self.assertTrue(isinstance(buf.dtype_node, CSimpleBaseTypeNode)) + self.assertTrue(buf.dtype_node.signed == 0 and buf.dtype_node.longness == -1) self.assertEqual(3, buf.ndim) def __test_dict(self): buf = self.parse_opts(u"ndim=3, dtype=unsigned short int") - self.assertTrue(isinstance(buf.dtype_node, CSimpleBaseTypeNode)) - self.assertTrue(buf.dtype_node.signed == 0 and buf.dtype_node.longness == -1) + self.assertTrue(isinstance(buf.dtype_node, CSimpleBaseTypeNode)) + self.assertTrue(buf.dtype_node.signed == 0 and buf.dtype_node.longness == -1) self.assertEqual(3, buf.ndim) def __test_ndim(self): @@ -94,8 +94,8 @@ class TestBufferOptions(CythonTest): cdef object[ndim=ndim, dtype=int] y """, pipeline=[NormalizeTree(self), PostParse(self)]).root stats = t.stats[0].body.stats - self.assertTrue(stats[0].base_type.ndim == 3) - self.assertTrue(stats[1].base_type.ndim == 3) + self.assertTrue(stats[0].base_type.ndim == 3) + self.assertTrue(stats[1].base_type.ndim == 3) # add exotic and impossible combinations as they come along... diff --git a/contrib/tools/cython/Cython/Compiler/Tests/TestMemView.py b/contrib/tools/cython/Cython/Compiler/Tests/TestMemView.py index 3792f26e99..237943d217 100644 --- a/contrib/tools/cython/Cython/Compiler/Tests/TestMemView.py +++ b/contrib/tools/cython/Cython/Compiler/Tests/TestMemView.py @@ -48,7 +48,7 @@ class TestMemviewParsing(CythonTest): def test_basic(self): t = self.parse(u"cdef int[:] x") memv_node = t.stats[0].base_type - self.assertTrue(isinstance(memv_node, MemoryViewSliceTypeNode)) + self.assertTrue(isinstance(memv_node, MemoryViewSliceTypeNode)) # we also test other similar declarations (buffers, anonymous C arrays) # since the parsing has to distinguish between them. 
diff --git a/contrib/tools/cython/Cython/Compiler/Tests/TestParseTreeTransforms.py b/contrib/tools/cython/Cython/Compiler/Tests/TestParseTreeTransforms.py index 234b45db5b..5917605ae1 100644 --- a/contrib/tools/cython/Cython/Compiler/Tests/TestParseTreeTransforms.py +++ b/contrib/tools/cython/Cython/Compiler/Tests/TestParseTreeTransforms.py @@ -87,7 +87,7 @@ class TestNormalizeTree(TransformTest): def test_pass_eliminated(self): t = self.run_pipeline([NormalizeTree(None)], u"pass") - self.assertTrue(len(t.stats) == 0) + self.assertTrue(len(t.stats) == 0) class TestWithTransform(object): # (TransformTest): # Disabled! diff --git a/contrib/tools/cython/Cython/Compiler/Tests/TestStringEncoding.py b/contrib/tools/cython/Cython/Compiler/Tests/TestStringEncoding.py index 91d099333a..de79469cfb 100644 --- a/contrib/tools/cython/Cython/Compiler/Tests/TestStringEncoding.py +++ b/contrib/tools/cython/Cython/Compiler/Tests/TestStringEncoding.py @@ -1,44 +1,44 @@ -# -*- coding: utf-8 -*- - -import sys -import unittest - -import Cython.Compiler.StringEncoding as StringEncoding - - -class StringEncodingTest(unittest.TestCase): - """ - Test the StringEncoding module. - """ - def test_string_contains_lone_surrogates(self): - self.assertFalse(StringEncoding.string_contains_lone_surrogates(u"abc")) - self.assertFalse(StringEncoding.string_contains_lone_surrogates(u"\uABCD")) - self.assertFalse(StringEncoding.string_contains_lone_surrogates(u"\N{SNOWMAN}")) - - # This behaves differently in Py2 when freshly parsed and read from a .pyc file, - # but it seems to be a marshalling bug in Py2, which doesn't hurt us in Cython. - if sys.version_info[0] != 2: - self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uD800\uDFFF")) - - # In Py2 with 16bit Unicode, the following is indistinguishable from the 32bit character. - obfuscated_surrogate_pair = (u"\uDFFF" + "\uD800")[::-1] - if sys.version_info[0] == 2 and sys.maxunicode == 65565: - self.assertFalse(StringEncoding.string_contains_lone_surrogates(obfuscated_surrogate_pair)) - else: - self.assertTrue(StringEncoding.string_contains_lone_surrogates(obfuscated_surrogate_pair)) - - self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uD800")) - self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uDFFF")) - self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uDFFF\uD800")) - self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uD800x\uDFFF")) - - def test_string_contains_surrogates(self): - self.assertFalse(StringEncoding.string_contains_surrogates(u"abc")) - self.assertFalse(StringEncoding.string_contains_surrogates(u"\uABCD")) - self.assertFalse(StringEncoding.string_contains_surrogates(u"\N{SNOWMAN}")) - - self.assertTrue(StringEncoding.string_contains_surrogates(u"\uD800")) - self.assertTrue(StringEncoding.string_contains_surrogates(u"\uDFFF")) - self.assertTrue(StringEncoding.string_contains_surrogates(u"\uD800\uDFFF")) - self.assertTrue(StringEncoding.string_contains_surrogates(u"\uDFFF\uD800")) - self.assertTrue(StringEncoding.string_contains_surrogates(u"\uD800x\uDFFF")) +# -*- coding: utf-8 -*- + +import sys +import unittest + +import Cython.Compiler.StringEncoding as StringEncoding + + +class StringEncodingTest(unittest.TestCase): + """ + Test the StringEncoding module. 
+ """ + def test_string_contains_lone_surrogates(self): + self.assertFalse(StringEncoding.string_contains_lone_surrogates(u"abc")) + self.assertFalse(StringEncoding.string_contains_lone_surrogates(u"\uABCD")) + self.assertFalse(StringEncoding.string_contains_lone_surrogates(u"\N{SNOWMAN}")) + + # This behaves differently in Py2 when freshly parsed and read from a .pyc file, + # but it seems to be a marshalling bug in Py2, which doesn't hurt us in Cython. + if sys.version_info[0] != 2: + self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uD800\uDFFF")) + + # In Py2 with 16bit Unicode, the following is indistinguishable from the 32bit character. + obfuscated_surrogate_pair = (u"\uDFFF" + "\uD800")[::-1] + if sys.version_info[0] == 2 and sys.maxunicode == 65565: + self.assertFalse(StringEncoding.string_contains_lone_surrogates(obfuscated_surrogate_pair)) + else: + self.assertTrue(StringEncoding.string_contains_lone_surrogates(obfuscated_surrogate_pair)) + + self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uD800")) + self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uDFFF")) + self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uDFFF\uD800")) + self.assertTrue(StringEncoding.string_contains_lone_surrogates(u"\uD800x\uDFFF")) + + def test_string_contains_surrogates(self): + self.assertFalse(StringEncoding.string_contains_surrogates(u"abc")) + self.assertFalse(StringEncoding.string_contains_surrogates(u"\uABCD")) + self.assertFalse(StringEncoding.string_contains_surrogates(u"\N{SNOWMAN}")) + + self.assertTrue(StringEncoding.string_contains_surrogates(u"\uD800")) + self.assertTrue(StringEncoding.string_contains_surrogates(u"\uDFFF")) + self.assertTrue(StringEncoding.string_contains_surrogates(u"\uD800\uDFFF")) + self.assertTrue(StringEncoding.string_contains_surrogates(u"\uDFFF\uD800")) + self.assertTrue(StringEncoding.string_contains_surrogates(u"\uD800x\uDFFF")) diff --git a/contrib/tools/cython/Cython/Compiler/Tests/TestTreeFragment.py b/contrib/tools/cython/Cython/Compiler/Tests/TestTreeFragment.py index 9ee8da5478..7f5a91bccf 100644 --- a/contrib/tools/cython/Cython/Compiler/Tests/TestTreeFragment.py +++ b/contrib/tools/cython/Cython/Compiler/Tests/TestTreeFragment.py @@ -23,7 +23,7 @@ class TestTreeFragments(CythonTest): T = self.fragment(u"y + y").substitute({"y": NameNode(pos=None, name="x")}) self.assertEqual("x", T.stats[0].expr.operand1.name) self.assertEqual("x", T.stats[0].expr.operand2.name) - self.assertTrue(T.stats[0].expr.operand1 is not T.stats[0].expr.operand2) + self.assertTrue(T.stats[0].expr.operand1 is not T.stats[0].expr.operand2) def test_substitution(self): F = self.fragment(u"x = 4") @@ -35,7 +35,7 @@ class TestTreeFragments(CythonTest): F = self.fragment(u"PASS") pass_stat = PassStatNode(pos=None) T = F.substitute({"PASS" : pass_stat}) - self.assertTrue(isinstance(T.stats[0], PassStatNode), T) + self.assertTrue(isinstance(T.stats[0], PassStatNode), T) def test_pos_is_transferred(self): F = self.fragment(u""" @@ -55,9 +55,9 @@ class TestTreeFragments(CythonTest): """) T = F.substitute(temps=[u"TMP"]) s = T.body.stats - self.assertTrue(isinstance(s[0].expr, TempRefNode)) - self.assertTrue(isinstance(s[1].rhs, TempRefNode)) - self.assertTrue(s[0].expr.handle is s[1].rhs.handle) + self.assertTrue(isinstance(s[0].expr, TempRefNode)) + self.assertTrue(isinstance(s[1].rhs, TempRefNode)) + self.assertTrue(s[0].expr.handle is s[1].rhs.handle) if __name__ == "__main__": import unittest diff --git 
a/contrib/tools/cython/Cython/Compiler/TreePath.py b/contrib/tools/cython/Cython/Compiler/TreePath.py index 8585905557..978f2f6c5c 100644 --- a/contrib/tools/cython/Cython/Compiler/TreePath.py +++ b/contrib/tools/cython/Cython/Compiler/TreePath.py @@ -10,13 +10,13 @@ from __future__ import absolute_import import re import operator -import sys - -if sys.version_info[0] >= 3: - _unicode = str -else: - _unicode = unicode +import sys +if sys.version_info[0] >= 3: + _unicode = str +else: + _unicode = unicode + path_tokenizer = re.compile( r"(" r"'[^']*'|\"[^\"]*\"|" @@ -173,11 +173,11 @@ def handle_attribute(next, token): continue if attr_value == value: yield attr_value - elif (isinstance(attr_value, bytes) and isinstance(value, _unicode) and - attr_value == value.encode()): - # allow a bytes-to-string comparison too - yield attr_value - + elif (isinstance(attr_value, bytes) and isinstance(value, _unicode) and + attr_value == value.encode()): + # allow a bytes-to-string comparison too + yield attr_value + return select diff --git a/contrib/tools/cython/Cython/Compiler/TypeSlots.py b/contrib/tools/cython/Cython/Compiler/TypeSlots.py index 0b4ff67042..2a0bccbe33 100644 --- a/contrib/tools/cython/Cython/Compiler/TypeSlots.py +++ b/contrib/tools/cython/Cython/Compiler/TypeSlots.py @@ -813,7 +813,7 @@ PyAsyncMethods = ( MethodSlot(unaryfunc, "am_await", "__await__"), MethodSlot(unaryfunc, "am_aiter", "__aiter__"), MethodSlot(unaryfunc, "am_anext", "__anext__"), - EmptySlot("am_send", ifdef="PY_VERSION_HEX >= 0x030A00A3"), + EmptySlot("am_send", ifdef="PY_VERSION_HEX >= 0x030A00A3"), ) #------------------------------------------------------------------------------------------ @@ -889,10 +889,10 @@ slot_table = ( EmptySlot("tp_del"), EmptySlot("tp_version_tag"), EmptySlot("tp_finalize", ifdef="PY_VERSION_HEX >= 0x030400a1"), - EmptySlot("tp_vectorcall", ifdef="PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)"), - EmptySlot("tp_print", ifdef="PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000"), - # PyPy specific extension - only here to avoid C compiler warnings. - EmptySlot("tp_pypy_flags", ifdef="CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000"), + EmptySlot("tp_vectorcall", ifdef="PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)"), + EmptySlot("tp_print", ifdef="PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000"), + # PyPy specific extension - only here to avoid C compiler warnings. + EmptySlot("tp_pypy_flags", ifdef="CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000"), ) #------------------------------------------------------------------------------------------ diff --git a/contrib/tools/cython/Cython/Compiler/Visitor.py b/contrib/tools/cython/Cython/Compiler/Visitor.py index a35d13e1d0..4cfa368b02 100644 --- a/contrib/tools/cython/Cython/Compiler/Visitor.py +++ b/contrib/tools/cython/Cython/Compiler/Visitor.py @@ -198,7 +198,7 @@ class TreeVisitor(object): return self._visitchildren(parent, attrs) @cython.final - @cython.locals(idx=cython.Py_ssize_t) + @cython.locals(idx=cython.Py_ssize_t) def _visitchildren(self, parent, attrs): """ Visits the children of the given parent. If parent is None, returns |