summary | refs | log | tree | commit | diff | stats
path: root/contrib/tools/cython/Cython/Compiler/Pipeline.py
diff options
context:
space:
mode:
authororivej <[email protected]>2022-02-10 16:44:49 +0300
committerDaniil Cherednik <[email protected]>2022-02-10 16:44:49 +0300
commit718c552901d703c502ccbefdfc3c9028d608b947 (patch)
tree46534a98bbefcd7b1f3faa5b52c138ab27db75b7 /contrib/tools/cython/Cython/Compiler/Pipeline.py
parente9656aae26e0358d5378e5b63dcac5c8dbe0e4d0 (diff)
Restoring authorship annotation for <[email protected]>. Commit 1 of 2.
Diffstat (limited to 'contrib/tools/cython/Cython/Compiler/Pipeline.py')
-rw-r--r--contrib/tools/cython/Cython/Compiler/Pipeline.py108
1 file changed, 54 insertions, 54 deletions
diff --git a/contrib/tools/cython/Cython/Compiler/Pipeline.py b/contrib/tools/cython/Cython/Compiler/Pipeline.py
index 5194c3e49b9..891937248b9 100644
--- a/contrib/tools/cython/Cython/Compiler/Pipeline.py
+++ b/contrib/tools/cython/Cython/Compiler/Pipeline.py
@@ -14,7 +14,7 @@ from . import Naming
#
def dumptree(t):
# For quick debugging in pipelines
- print(t.dump())
+ print(t.dump())
return t
def abort_on_errors(node):
@@ -29,7 +29,7 @@ def parse_stage_factory(context):
full_module_name = compsrc.full_module_name
initial_pos = (source_desc, 1, 0)
saved_cimport_from_pyx, Options.cimport_from_pyx = Options.cimport_from_pyx, False
- scope = context.find_module(full_module_name, pos = initial_pos, need_pxd = 0)
+ scope = context.find_module(full_module_name, pos = initial_pos, need_pxd = 0)
Options.cimport_from_pyx = saved_cimport_from_pyx
tree = context.parse(source_desc, scope, pxd = 0, full_module_name = full_module_name)
tree.compilation_source = compsrc
@@ -54,20 +54,20 @@ def generate_pyx_code_stage_factory(options, result):
return result
return generate_pyx_code_stage
-
+
def inject_pxd_code_stage_factory(context):
def inject_pxd_code_stage(module_node):
- for name, (statlistnode, scope) in context.pxds.items():
+ for name, (statlistnode, scope) in context.pxds.items():
module_node.merge_in(statlistnode, scope)
return module_node
return inject_pxd_code_stage
-
+
def use_utility_code_definitions(scope, target, seen=None):
if seen is None:
seen = set()
- for entry in scope.entries.values():
+ for entry in scope.entries.values():
if entry in seen:
continue
@@ -79,54 +79,54 @@ def use_utility_code_definitions(scope, target, seen=None):
elif entry.as_module:
use_utility_code_definitions(entry.as_module, target, seen)
-
-def sort_utility_codes(utilcodes):
- ranks = {}
- def get_rank(utilcode):
- if utilcode not in ranks:
- ranks[utilcode] = 0 # prevent infinite recursion on circular dependencies
- original_order = len(ranks)
- ranks[utilcode] = 1 + min([get_rank(dep) for dep in utilcode.requires or ()] or [-1]) + original_order * 1e-8
- return ranks[utilcode]
- for utilcode in utilcodes:
- get_rank(utilcode)
- return [utilcode for utilcode, _ in sorted(ranks.items(), key=lambda kv: kv[1])]
-
-
-def normalize_deps(utilcodes):
- deps = {}
- for utilcode in utilcodes:
- deps[utilcode] = utilcode
-
- def unify_dep(dep):
- if dep in deps:
- return deps[dep]
- else:
- deps[dep] = dep
- return dep
-
- for utilcode in utilcodes:
- utilcode.requires = [unify_dep(dep) for dep in utilcode.requires or ()]
-
-
+
+def sort_utility_codes(utilcodes):
+ ranks = {}
+ def get_rank(utilcode):
+ if utilcode not in ranks:
+ ranks[utilcode] = 0 # prevent infinite recursion on circular dependencies
+ original_order = len(ranks)
+ ranks[utilcode] = 1 + min([get_rank(dep) for dep in utilcode.requires or ()] or [-1]) + original_order * 1e-8
+ return ranks[utilcode]
+ for utilcode in utilcodes:
+ get_rank(utilcode)
+ return [utilcode for utilcode, _ in sorted(ranks.items(), key=lambda kv: kv[1])]
+
+
+def normalize_deps(utilcodes):
+ deps = {}
+ for utilcode in utilcodes:
+ deps[utilcode] = utilcode
+
+ def unify_dep(dep):
+ if dep in deps:
+ return deps[dep]
+ else:
+ deps[dep] = dep
+ return dep
+
+ for utilcode in utilcodes:
+ utilcode.requires = [unify_dep(dep) for dep in utilcode.requires or ()]
+
+
def inject_utility_code_stage_factory(context):
def inject_utility_code_stage(module_node):
- module_node.prepare_utility_code()
+ module_node.prepare_utility_code()
use_utility_code_definitions(context.cython_scope, module_node.scope)
- module_node.scope.utility_code_list = sort_utility_codes(module_node.scope.utility_code_list)
- normalize_deps(module_node.scope.utility_code_list)
+ module_node.scope.utility_code_list = sort_utility_codes(module_node.scope.utility_code_list)
+ normalize_deps(module_node.scope.utility_code_list)
added = []
# Note: the list might be extended inside the loop (if some utility code
# pulls in other utility code, explicitly or implicitly)
for utilcode in module_node.scope.utility_code_list:
- if utilcode in added:
- continue
+ if utilcode in added:
+ continue
added.append(utilcode)
if utilcode.requires:
for dep in utilcode.requires:
- if dep not in added and dep not in module_node.scope.utility_code_list:
+ if dep not in added and dep not in module_node.scope.utility_code_list:
module_node.scope.utility_code_list.append(dep)
- tree = utilcode.get_tree(cython_scope=context.cython_scope)
+ tree = utilcode.get_tree(cython_scope=context.cython_scope)
if tree:
module_node.merge_in(tree.body, tree.scope, merge_scope=True)
return module_node
@@ -141,7 +141,7 @@ def create_pipeline(context, mode, exclude_classes=()):
assert mode in ('pyx', 'py', 'pxd')
from .Visitor import PrintTree
from .ParseTreeTransforms import WithTransform, NormalizeTree, PostParse, PxdPostParse
- from .ParseTreeTransforms import ForwardDeclareTypes, InjectGilHandling, AnalyseDeclarationsTransform
+ from .ParseTreeTransforms import ForwardDeclareTypes, InjectGilHandling, AnalyseDeclarationsTransform
from .ParseTreeTransforms import AnalyseExpressionsTransform, FindInvalidUseOfFusedTypes
from .ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform
from .ParseTreeTransforms import TrackNumpyAttributes, InterpretCompilerDirectives, TransformBuiltinMethods
@@ -186,7 +186,7 @@ def create_pipeline(context, mode, exclude_classes=()):
InterpretCompilerDirectives(context, context.compiler_directives),
ParallelRangeTransform(context),
AdjustDefByDirectives(context),
- WithTransform(context),
+ WithTransform(context),
MarkClosureVisitor(context),
_align_function_definitions,
RemoveUnreachableCode(context),
@@ -194,12 +194,12 @@ def create_pipeline(context, mode, exclude_classes=()):
FlattenInListTransform(),
DecoratorTransform(context),
ForwardDeclareTypes(context),
- InjectGilHandling(),
+ InjectGilHandling(),
AnalyseDeclarationsTransform(context),
AutoTestDictTransform(context),
EmbedSignature(context),
EarlyReplaceBuiltinCalls(context), ## Necessary?
- TransformBuiltinMethods(context),
+ TransformBuiltinMethods(context),
MarkParallelAssignments(context),
ControlFlowAnalysis(context),
RemoveUnreachableCode(context),
@@ -211,8 +211,8 @@ def create_pipeline(context, mode, exclude_classes=()):
AnalyseExpressionsTransform(context),
FindInvalidUseOfFusedTypes(context),
ExpandInplaceOperators(context),
- IterationTransform(context),
- SwitchTransform(context),
+ IterationTransform(context),
+ SwitchTransform(context),
OptimizeBuiltinCalls(context), ## Necessary?
CreateClosureClasses(context), ## After all lookups and type inference
CalculateQualifiedNamesTransform(context),
@@ -344,7 +344,7 @@ def run_pipeline(pipeline, source, printtree=True):
continue
if DebugFlags.debug_verbose_pipeline:
t = time()
- print("Entering pipeline phase %r" % phase)
+ print("Entering pipeline phase %r" % phase)
# create a new wrapper for each step to show the name in profiles
phase_name = getattr(phase, '__name__', type(phase).__name__)
try:
@@ -354,16 +354,16 @@ def run_pipeline(pipeline, source, printtree=True):
run = _pipeline_entry_points[phase_name] = exec_ns[phase_name]
data = run(phase, data)
if DebugFlags.debug_verbose_pipeline:
- print(" %.3f seconds" % (time() - t))
- except CompileError as err:
+ print(" %.3f seconds" % (time() - t))
+ except CompileError as err:
# err is set
Errors.report_error(err, use_stack=False)
error = err
- except InternalError as err:
+ except InternalError as err:
# Only raise if there was not an earlier error
if Errors.num_errors == 0:
raise
error = err
- except AbortError as err:
+ except AbortError as err:
error = err
return (error, data)