summary refs log tree commit diff stats
path: root/contrib/tools/cython/Cython/Compiler/Pipeline.py
diff options
context:
space:
mode:
authorAnton Samokhvalov <[email protected]>2022-02-10 16:45:15 +0300
committerDaniil Cherednik <[email protected]>2022-02-10 16:45:15 +0300
commit72cb13b4aff9bc9cf22e49251bc8fd143f82538f (patch)
treeda2c34829458c7d4e74bdfbdf85dff449e9e7fb8 /contrib/tools/cython/Cython/Compiler/Pipeline.py
parent778e51ba091dc39e7b7fcab2b9cf4dbedfb6f2b5 (diff)
Restoring authorship annotation for Anton Samokhvalov <[email protected]>. Commit 1 of 2.
Diffstat (limited to 'contrib/tools/cython/Cython/Compiler/Pipeline.py')
-rw-r--r--contrib/tools/cython/Cython/Compiler/Pipeline.py594
1 file changed, 297 insertions, 297 deletions
diff --git a/contrib/tools/cython/Cython/Compiler/Pipeline.py b/contrib/tools/cython/Cython/Compiler/Pipeline.py
index 5194c3e49b9..91196417da8 100644
--- a/contrib/tools/cython/Cython/Compiler/Pipeline.py
+++ b/contrib/tools/cython/Cython/Compiler/Pipeline.py
@@ -1,84 +1,84 @@
-from __future__ import absolute_import
-
-import itertools
-from time import time
-
-from . import Errors
-from . import DebugFlags
-from . import Options
-from .Errors import CompileError, InternalError, AbortError
-from . import Naming
-
-#
-# Really small pipeline stages
-#
-def dumptree(t):
- # For quick debugging in pipelines
+from __future__ import absolute_import
+
+import itertools
+from time import time
+
+from . import Errors
+from . import DebugFlags
+from . import Options
+from .Errors import CompileError, InternalError, AbortError
+from . import Naming
+
+#
+# Really small pipeline stages
+#
+def dumptree(t):
+ # For quick debugging in pipelines
print(t.dump())
- return t
-
-def abort_on_errors(node):
- # Stop the pipeline if there are any errors.
- if Errors.num_errors != 0:
- raise AbortError("pipeline break")
- return node
-
-def parse_stage_factory(context):
- def parse(compsrc):
- source_desc = compsrc.source_desc
- full_module_name = compsrc.full_module_name
- initial_pos = (source_desc, 1, 0)
- saved_cimport_from_pyx, Options.cimport_from_pyx = Options.cimport_from_pyx, False
+ return t
+
+def abort_on_errors(node):
+ # Stop the pipeline if there are any errors.
+ if Errors.num_errors != 0:
+ raise AbortError("pipeline break")
+ return node
+
+def parse_stage_factory(context):
+ def parse(compsrc):
+ source_desc = compsrc.source_desc
+ full_module_name = compsrc.full_module_name
+ initial_pos = (source_desc, 1, 0)
+ saved_cimport_from_pyx, Options.cimport_from_pyx = Options.cimport_from_pyx, False
scope = context.find_module(full_module_name, pos = initial_pos, need_pxd = 0)
- Options.cimport_from_pyx = saved_cimport_from_pyx
- tree = context.parse(source_desc, scope, pxd = 0, full_module_name = full_module_name)
- tree.compilation_source = compsrc
- tree.scope = scope
- tree.is_pxd = False
- return tree
- return parse
-
-def parse_pxd_stage_factory(context, scope, module_name):
- def parse(source_desc):
- tree = context.parse(source_desc, scope, pxd=True,
- full_module_name=module_name)
- tree.scope = scope
- tree.is_pxd = True
- return tree
- return parse
-
-def generate_pyx_code_stage_factory(options, result):
- def generate_pyx_code_stage(module_node):
- module_node.process_implementation(options, result)
- result.compilation_source = module_node.compilation_source
- return result
- return generate_pyx_code_stage
-
-
-def inject_pxd_code_stage_factory(context):
- def inject_pxd_code_stage(module_node):
+ Options.cimport_from_pyx = saved_cimport_from_pyx
+ tree = context.parse(source_desc, scope, pxd = 0, full_module_name = full_module_name)
+ tree.compilation_source = compsrc
+ tree.scope = scope
+ tree.is_pxd = False
+ return tree
+ return parse
+
+def parse_pxd_stage_factory(context, scope, module_name):
+ def parse(source_desc):
+ tree = context.parse(source_desc, scope, pxd=True,
+ full_module_name=module_name)
+ tree.scope = scope
+ tree.is_pxd = True
+ return tree
+ return parse
+
+def generate_pyx_code_stage_factory(options, result):
+ def generate_pyx_code_stage(module_node):
+ module_node.process_implementation(options, result)
+ result.compilation_source = module_node.compilation_source
+ return result
+ return generate_pyx_code_stage
+
+
+def inject_pxd_code_stage_factory(context):
+ def inject_pxd_code_stage(module_node):
for name, (statlistnode, scope) in context.pxds.items():
- module_node.merge_in(statlistnode, scope)
- return module_node
- return inject_pxd_code_stage
-
-
-def use_utility_code_definitions(scope, target, seen=None):
- if seen is None:
- seen = set()
-
+ module_node.merge_in(statlistnode, scope)
+ return module_node
+ return inject_pxd_code_stage
+
+
+def use_utility_code_definitions(scope, target, seen=None):
+ if seen is None:
+ seen = set()
+
for entry in scope.entries.values():
- if entry in seen:
- continue
-
- seen.add(entry)
- if entry.used and entry.utility_code_definition:
- target.use_utility_code(entry.utility_code_definition)
- for required_utility in entry.utility_code_definition.requires:
- target.use_utility_code(required_utility)
- elif entry.as_module:
- use_utility_code_definitions(entry.as_module, target, seen)
-
+ if entry in seen:
+ continue
+
+ seen.add(entry)
+ if entry.used and entry.utility_code_definition:
+ target.use_utility_code(entry.utility_code_definition)
+ for required_utility in entry.utility_code_definition.requires:
+ target.use_utility_code(required_utility)
+ elif entry.as_module:
+ use_utility_code_definitions(entry.as_module, target, seen)
+
def sort_utility_codes(utilcodes):
ranks = {}
@@ -109,241 +109,241 @@ def normalize_deps(utilcodes):
utilcode.requires = [unify_dep(dep) for dep in utilcode.requires or ()]
-def inject_utility_code_stage_factory(context):
- def inject_utility_code_stage(module_node):
+def inject_utility_code_stage_factory(context):
+ def inject_utility_code_stage(module_node):
module_node.prepare_utility_code()
- use_utility_code_definitions(context.cython_scope, module_node.scope)
+ use_utility_code_definitions(context.cython_scope, module_node.scope)
module_node.scope.utility_code_list = sort_utility_codes(module_node.scope.utility_code_list)
normalize_deps(module_node.scope.utility_code_list)
- added = []
- # Note: the list might be extended inside the loop (if some utility code
- # pulls in other utility code, explicitly or implicitly)
- for utilcode in module_node.scope.utility_code_list:
+ added = []
+ # Note: the list might be extended inside the loop (if some utility code
+ # pulls in other utility code, explicitly or implicitly)
+ for utilcode in module_node.scope.utility_code_list:
if utilcode in added:
continue
- added.append(utilcode)
- if utilcode.requires:
- for dep in utilcode.requires:
+ added.append(utilcode)
+ if utilcode.requires:
+ for dep in utilcode.requires:
if dep not in added and dep not in module_node.scope.utility_code_list:
- module_node.scope.utility_code_list.append(dep)
+ module_node.scope.utility_code_list.append(dep)
tree = utilcode.get_tree(cython_scope=context.cython_scope)
- if tree:
- module_node.merge_in(tree.body, tree.scope, merge_scope=True)
- return module_node
- return inject_utility_code_stage
-
-
-#
-# Pipeline factories
-#
-
-def create_pipeline(context, mode, exclude_classes=()):
- assert mode in ('pyx', 'py', 'pxd')
- from .Visitor import PrintTree
- from .ParseTreeTransforms import WithTransform, NormalizeTree, PostParse, PxdPostParse
+ if tree:
+ module_node.merge_in(tree.body, tree.scope, merge_scope=True)
+ return module_node
+ return inject_utility_code_stage
+
+
+#
+# Pipeline factories
+#
+
+def create_pipeline(context, mode, exclude_classes=()):
+ assert mode in ('pyx', 'py', 'pxd')
+ from .Visitor import PrintTree
+ from .ParseTreeTransforms import WithTransform, NormalizeTree, PostParse, PxdPostParse
from .ParseTreeTransforms import ForwardDeclareTypes, InjectGilHandling, AnalyseDeclarationsTransform
- from .ParseTreeTransforms import AnalyseExpressionsTransform, FindInvalidUseOfFusedTypes
- from .ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform
+ from .ParseTreeTransforms import AnalyseExpressionsTransform, FindInvalidUseOfFusedTypes
+ from .ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform
from .ParseTreeTransforms import TrackNumpyAttributes, InterpretCompilerDirectives, TransformBuiltinMethods
- from .ParseTreeTransforms import ExpandInplaceOperators, ParallelRangeTransform
- from .ParseTreeTransforms import CalculateQualifiedNamesTransform
- from .TypeInference import MarkParallelAssignments, MarkOverflowingArithmetic
- from .ParseTreeTransforms import AdjustDefByDirectives, AlignFunctionDefinitions
- from .ParseTreeTransforms import RemoveUnreachableCode, GilCheck
- from .FlowControl import ControlFlowAnalysis
- from .AnalysedTreeTransforms import AutoTestDictTransform
- from .AutoDocTransforms import EmbedSignature
- from .Optimize import FlattenInListTransform, SwitchTransform, IterationTransform
- from .Optimize import EarlyReplaceBuiltinCalls, OptimizeBuiltinCalls
- from .Optimize import InlineDefNodeCalls
- from .Optimize import ConstantFolding, FinalOptimizePhase
- from .Optimize import DropRefcountingTransform
- from .Optimize import ConsolidateOverflowCheck
- from .Buffer import IntroduceBufferAuxiliaryVars
- from .ModuleNode import check_c_declarations, check_c_declarations_pxd
-
-
- if mode == 'pxd':
- _check_c_declarations = check_c_declarations_pxd
- _specific_post_parse = PxdPostParse(context)
- else:
- _check_c_declarations = check_c_declarations
- _specific_post_parse = None
-
- if mode == 'py':
- _align_function_definitions = AlignFunctionDefinitions(context)
- else:
- _align_function_definitions = None
-
- # NOTE: This is the "common" parts of the pipeline, which is also
- # code in pxd files. So it will be run multiple times in a
- # compilation stage.
- stages = [
- NormalizeTree(context),
- PostParse(context),
- _specific_post_parse,
+ from .ParseTreeTransforms import ExpandInplaceOperators, ParallelRangeTransform
+ from .ParseTreeTransforms import CalculateQualifiedNamesTransform
+ from .TypeInference import MarkParallelAssignments, MarkOverflowingArithmetic
+ from .ParseTreeTransforms import AdjustDefByDirectives, AlignFunctionDefinitions
+ from .ParseTreeTransforms import RemoveUnreachableCode, GilCheck
+ from .FlowControl import ControlFlowAnalysis
+ from .AnalysedTreeTransforms import AutoTestDictTransform
+ from .AutoDocTransforms import EmbedSignature
+ from .Optimize import FlattenInListTransform, SwitchTransform, IterationTransform
+ from .Optimize import EarlyReplaceBuiltinCalls, OptimizeBuiltinCalls
+ from .Optimize import InlineDefNodeCalls
+ from .Optimize import ConstantFolding, FinalOptimizePhase
+ from .Optimize import DropRefcountingTransform
+ from .Optimize import ConsolidateOverflowCheck
+ from .Buffer import IntroduceBufferAuxiliaryVars
+ from .ModuleNode import check_c_declarations, check_c_declarations_pxd
+
+
+ if mode == 'pxd':
+ _check_c_declarations = check_c_declarations_pxd
+ _specific_post_parse = PxdPostParse(context)
+ else:
+ _check_c_declarations = check_c_declarations
+ _specific_post_parse = None
+
+ if mode == 'py':
+ _align_function_definitions = AlignFunctionDefinitions(context)
+ else:
+ _align_function_definitions = None
+
+ # NOTE: This is the "common" parts of the pipeline, which is also
+ # code in pxd files. So it will be run multiple times in a
+ # compilation stage.
+ stages = [
+ NormalizeTree(context),
+ PostParse(context),
+ _specific_post_parse,
TrackNumpyAttributes(),
- InterpretCompilerDirectives(context, context.compiler_directives),
- ParallelRangeTransform(context),
- AdjustDefByDirectives(context),
+ InterpretCompilerDirectives(context, context.compiler_directives),
+ ParallelRangeTransform(context),
+ AdjustDefByDirectives(context),
WithTransform(context),
- MarkClosureVisitor(context),
- _align_function_definitions,
- RemoveUnreachableCode(context),
- ConstantFolding(),
- FlattenInListTransform(),
- DecoratorTransform(context),
- ForwardDeclareTypes(context),
+ MarkClosureVisitor(context),
+ _align_function_definitions,
+ RemoveUnreachableCode(context),
+ ConstantFolding(),
+ FlattenInListTransform(),
+ DecoratorTransform(context),
+ ForwardDeclareTypes(context),
InjectGilHandling(),
- AnalyseDeclarationsTransform(context),
- AutoTestDictTransform(context),
- EmbedSignature(context),
- EarlyReplaceBuiltinCalls(context), ## Necessary?
+ AnalyseDeclarationsTransform(context),
+ AutoTestDictTransform(context),
+ EmbedSignature(context),
+ EarlyReplaceBuiltinCalls(context), ## Necessary?
TransformBuiltinMethods(context),
- MarkParallelAssignments(context),
- ControlFlowAnalysis(context),
- RemoveUnreachableCode(context),
- # MarkParallelAssignments(context),
- MarkOverflowingArithmetic(context),
- IntroduceBufferAuxiliaryVars(context),
- _check_c_declarations,
- InlineDefNodeCalls(context),
- AnalyseExpressionsTransform(context),
- FindInvalidUseOfFusedTypes(context),
- ExpandInplaceOperators(context),
+ MarkParallelAssignments(context),
+ ControlFlowAnalysis(context),
+ RemoveUnreachableCode(context),
+ # MarkParallelAssignments(context),
+ MarkOverflowingArithmetic(context),
+ IntroduceBufferAuxiliaryVars(context),
+ _check_c_declarations,
+ InlineDefNodeCalls(context),
+ AnalyseExpressionsTransform(context),
+ FindInvalidUseOfFusedTypes(context),
+ ExpandInplaceOperators(context),
IterationTransform(context),
SwitchTransform(context),
- OptimizeBuiltinCalls(context), ## Necessary?
- CreateClosureClasses(context), ## After all lookups and type inference
- CalculateQualifiedNamesTransform(context),
- ConsolidateOverflowCheck(context),
- DropRefcountingTransform(),
- FinalOptimizePhase(context),
- GilCheck(),
- ]
- filtered_stages = []
- for s in stages:
- if s.__class__ not in exclude_classes:
- filtered_stages.append(s)
- return filtered_stages
-
-def create_pyx_pipeline(context, options, result, py=False, exclude_classes=()):
- if py:
- mode = 'py'
- else:
- mode = 'pyx'
- test_support = []
- if options.evaluate_tree_assertions:
- from ..TestUtils import TreeAssertVisitor
- test_support.append(TreeAssertVisitor())
-
- if options.gdb_debug:
- from ..Debugger import DebugWriter # requires Py2.5+
- from .ParseTreeTransforms import DebugTransform
- context.gdb_debug_outputwriter = DebugWriter.CythonDebugWriter(
- options.output_dir)
- debug_transform = [DebugTransform(context, options, result)]
- else:
- debug_transform = []
-
- return list(itertools.chain(
- [parse_stage_factory(context)],
- create_pipeline(context, mode, exclude_classes=exclude_classes),
- test_support,
- [inject_pxd_code_stage_factory(context),
- inject_utility_code_stage_factory(context),
- abort_on_errors],
- debug_transform,
- [generate_pyx_code_stage_factory(options, result)]))
-
-def create_pxd_pipeline(context, scope, module_name):
- from .CodeGeneration import ExtractPxdCode
-
- # The pxd pipeline ends up with a CCodeWriter containing the
- # code of the pxd, as well as a pxd scope.
- return [
- parse_pxd_stage_factory(context, scope, module_name)
- ] + create_pipeline(context, 'pxd') + [
- ExtractPxdCode()
- ]
-
-def create_py_pipeline(context, options, result):
- return create_pyx_pipeline(context, options, result, py=True)
-
-def create_pyx_as_pxd_pipeline(context, result):
- from .ParseTreeTransforms import AlignFunctionDefinitions, \
- MarkClosureVisitor, WithTransform, AnalyseDeclarationsTransform
- from .Optimize import ConstantFolding, FlattenInListTransform
- from .Nodes import StatListNode
- pipeline = []
- pyx_pipeline = create_pyx_pipeline(context, context.options, result,
- exclude_classes=[
- AlignFunctionDefinitions,
- MarkClosureVisitor,
- ConstantFolding,
- FlattenInListTransform,
- WithTransform
- ])
- for stage in pyx_pipeline:
- pipeline.append(stage)
- if isinstance(stage, AnalyseDeclarationsTransform):
- # This is the last stage we need.
- break
- def fake_pxd(root):
- for entry in root.scope.entries.values():
- if not entry.in_cinclude:
- entry.defined_in_pxd = 1
- if entry.name == entry.cname and entry.visibility != 'extern':
- # Always mangle non-extern cimported entries.
- entry.cname = entry.scope.mangle(Naming.func_prefix, entry.name)
- return StatListNode(root.pos, stats=[]), root.scope
- pipeline.append(fake_pxd)
- return pipeline
-
-def insert_into_pipeline(pipeline, transform, before=None, after=None):
- """
- Insert a new transform into the pipeline after or before an instance of
- the given class. e.g.
-
- pipeline = insert_into_pipeline(pipeline, transform,
- after=AnalyseDeclarationsTransform)
- """
- assert before or after
-
- cls = before or after
- for i, t in enumerate(pipeline):
- if isinstance(t, cls):
- break
-
- if after:
- i += 1
-
- return pipeline[:i] + [transform] + pipeline[i:]
-
-#
-# Running a pipeline
-#
-
+ OptimizeBuiltinCalls(context), ## Necessary?
+ CreateClosureClasses(context), ## After all lookups and type inference
+ CalculateQualifiedNamesTransform(context),
+ ConsolidateOverflowCheck(context),
+ DropRefcountingTransform(),
+ FinalOptimizePhase(context),
+ GilCheck(),
+ ]
+ filtered_stages = []
+ for s in stages:
+ if s.__class__ not in exclude_classes:
+ filtered_stages.append(s)
+ return filtered_stages
+
+def create_pyx_pipeline(context, options, result, py=False, exclude_classes=()):
+ if py:
+ mode = 'py'
+ else:
+ mode = 'pyx'
+ test_support = []
+ if options.evaluate_tree_assertions:
+ from ..TestUtils import TreeAssertVisitor
+ test_support.append(TreeAssertVisitor())
+
+ if options.gdb_debug:
+ from ..Debugger import DebugWriter # requires Py2.5+
+ from .ParseTreeTransforms import DebugTransform
+ context.gdb_debug_outputwriter = DebugWriter.CythonDebugWriter(
+ options.output_dir)
+ debug_transform = [DebugTransform(context, options, result)]
+ else:
+ debug_transform = []
+
+ return list(itertools.chain(
+ [parse_stage_factory(context)],
+ create_pipeline(context, mode, exclude_classes=exclude_classes),
+ test_support,
+ [inject_pxd_code_stage_factory(context),
+ inject_utility_code_stage_factory(context),
+ abort_on_errors],
+ debug_transform,
+ [generate_pyx_code_stage_factory(options, result)]))
+
+def create_pxd_pipeline(context, scope, module_name):
+ from .CodeGeneration import ExtractPxdCode
+
+ # The pxd pipeline ends up with a CCodeWriter containing the
+ # code of the pxd, as well as a pxd scope.
+ return [
+ parse_pxd_stage_factory(context, scope, module_name)
+ ] + create_pipeline(context, 'pxd') + [
+ ExtractPxdCode()
+ ]
+
+def create_py_pipeline(context, options, result):
+ return create_pyx_pipeline(context, options, result, py=True)
+
+def create_pyx_as_pxd_pipeline(context, result):
+ from .ParseTreeTransforms import AlignFunctionDefinitions, \
+ MarkClosureVisitor, WithTransform, AnalyseDeclarationsTransform
+ from .Optimize import ConstantFolding, FlattenInListTransform
+ from .Nodes import StatListNode
+ pipeline = []
+ pyx_pipeline = create_pyx_pipeline(context, context.options, result,
+ exclude_classes=[
+ AlignFunctionDefinitions,
+ MarkClosureVisitor,
+ ConstantFolding,
+ FlattenInListTransform,
+ WithTransform
+ ])
+ for stage in pyx_pipeline:
+ pipeline.append(stage)
+ if isinstance(stage, AnalyseDeclarationsTransform):
+ # This is the last stage we need.
+ break
+ def fake_pxd(root):
+ for entry in root.scope.entries.values():
+ if not entry.in_cinclude:
+ entry.defined_in_pxd = 1
+ if entry.name == entry.cname and entry.visibility != 'extern':
+ # Always mangle non-extern cimported entries.
+ entry.cname = entry.scope.mangle(Naming.func_prefix, entry.name)
+ return StatListNode(root.pos, stats=[]), root.scope
+ pipeline.append(fake_pxd)
+ return pipeline
+
+def insert_into_pipeline(pipeline, transform, before=None, after=None):
+ """
+ Insert a new transform into the pipeline after or before an instance of
+ the given class. e.g.
+
+ pipeline = insert_into_pipeline(pipeline, transform,
+ after=AnalyseDeclarationsTransform)
+ """
+ assert before or after
+
+ cls = before or after
+ for i, t in enumerate(pipeline):
+ if isinstance(t, cls):
+ break
+
+ if after:
+ i += 1
+
+ return pipeline[:i] + [transform] + pipeline[i:]
+
+#
+# Running a pipeline
+#
+
_pipeline_entry_points = {}
-def run_pipeline(pipeline, source, printtree=True):
- from .Visitor import PrintTree
+def run_pipeline(pipeline, source, printtree=True):
+ from .Visitor import PrintTree
exec_ns = globals().copy() if DebugFlags.debug_verbose_pipeline else None
-
+
def run(phase, data):
return phase(data)
- error = None
- data = source
- try:
- try:
- for phase in pipeline:
- if phase is not None:
+ error = None
+ data = source
+ try:
+ try:
+ for phase in pipeline:
+ if phase is not None:
if not printtree and isinstance(phase, PrintTree):
continue
- if DebugFlags.debug_verbose_pipeline:
- t = time()
+ if DebugFlags.debug_verbose_pipeline:
+ t = time()
print("Entering pipeline phase %r" % phase)
# create a new wrapper for each step to show the name in profiles
phase_name = getattr(phase, '__name__', type(phase).__name__)
@@ -353,17 +353,17 @@ def run_pipeline(pipeline, source, printtree=True):
exec("def %s(phase, data): return phase(data)" % phase_name, exec_ns)
run = _pipeline_entry_points[phase_name] = exec_ns[phase_name]
data = run(phase, data)
- if DebugFlags.debug_verbose_pipeline:
+ if DebugFlags.debug_verbose_pipeline:
print(" %.3f seconds" % (time() - t))
except CompileError as err:
- # err is set
+ # err is set
Errors.report_error(err, use_stack=False)
- error = err
+ error = err
except InternalError as err:
- # Only raise if there was not an earlier error
- if Errors.num_errors == 0:
- raise
- error = err
+ # Only raise if there was not an earlier error
+ if Errors.num_errors == 0:
+ raise
+ error = err
except AbortError as err:
- error = err
- return (error, data)
+ error = err
+ return (error, data)