# Cython Top Level
#
-import os, sys, re
+import os, sys, re, codecs
if sys.version_info[:2] < (2, 3):
sys.stderr.write("Sorry, Cython requires Python 2.3 or later\n")
sys.exit(1)
# Python 2.3
from sets import Set as set
+import itertools
from time import time
+
import Code
import Errors
-import Parsing
+# Do not import Parsing here, import it when needed, because Parsing imports
+# Nodes, which globally needs debug command line options initialized to set a
+# conditional metaclass. These options are processed by CmdLine called from
+# main() in this file.
+# import Parsing
import Version
from Scanning import PyrexScanner, FileSourceDescriptor
-from Errors import PyrexError, CompileError, InternalError, error
+from Errors import PyrexError, CompileError, InternalError, AbortError, error, warning
from Symtab import BuiltinScope, ModuleScope
from Cython import Utils
from Cython.Utils import open_new_file, replace_suffix
def abort_on_errors(node):
    # Pipeline phase: stop the pipeline if any errors have been
    # collected so far.  Raising AbortError (rather than InternalError)
    # lets run_pipeline() record the failure without re-raising.
    if Errors.num_errors != 0:
        # Parenthesized call form works on both Python 2 and 3,
        # unlike the old `raise AbortError, "..."` syntax.
        raise AbortError("pipeline break")
    return node
class CompilationData(object):
# include_directories [string]
# future_directives [object]
# language_level int currently 2 or 3 for Python 2/3
-
+
def __init__(self, include_directories, compiler_directives, cpp=False, language_level=2):
- #self.modules = {"__builtin__" : BuiltinScope()}
import Builtin, CythonScope
self.modules = {"__builtin__" : Builtin.builtin_scope}
self.modules["cython"] = CythonScope.create_cython_scope(self)
self.set_language_level(language_level)
+ self.gdb_debug_outputwriter = None
+
def set_language_level(self, level):
    # Record the source language level (2 or 3).  For level >= 3 the
    # corresponding future directives are forced on, and the Python 3
    # 'builtins' module name is aliased to the '__builtin__' scope so
    # lookups work under either name.
    self.language_level = level
    if level >= 3:
        from Future import print_function, unicode_literals
        self.future_directives.add(print_function)
        self.future_directives.add(unicode_literals)
        self.modules['builtins'] = self.modules['__builtin__']
def create_pipeline(self, pxd, py=False):
    """Build the ordered list of transform phases for one module.

    pxd -- true when compiling a .pxd declaration file
    py  -- true when compiling a plain .py file (enables the
           AlignFunctionDefinitions transform)

    Entries may be None; the pipeline runner skips None phases.
    """
    from Visitor import PrintTree
    from ParseTreeTransforms import AnalyseDeclarationsTransform, AnalyseExpressionsTransform
    from ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform
    from ParseTreeTransforms import InterpretCompilerDirectives, TransformBuiltinMethods
    from ParseTreeTransforms import ExpandInplaceOperators
    from TypeInference import MarkAssignments, MarkOverflowingArithmetic
    from ParseTreeTransforms import AlignFunctionDefinitions, GilCheck
    from AnalysedTreeTransforms import AutoTestDictTransform
    from Buffer import IntroduceBufferAuxiliaryVars
    from ModuleNode import check_c_declarations, check_c_declarations_pxd

    if pxd:
        # .pxd files get the pxd-specific declaration check and post-parse.
        _check_c_declarations = check_c_declarations_pxd
        _specific_post_parse = PxdPostParse(self)
    else:
        _check_c_declarations = check_c_declarations
        _specific_post_parse = None

    if py and not pxd:
        _align_function_definitions = AlignFunctionDefinitions(self)
    else:
        _align_function_definitions = None

    return [
        NormalizeTree(self),
        PostParse(self),
        _specific_post_parse,
        InterpretCompilerDirectives(self, self.compiler_directives),
        MarkClosureVisitor(self),
        _align_function_definitions,
        ConstantFolding(),
        FlattenInListTransform(),
        WithTransform(self),
        DecoratorTransform(self),
        AnalyseDeclarationsTransform(self),
        AutoTestDictTransform(self),
        EmbedSignature(self),
        EarlyReplaceBuiltinCalls(self),  ## Necessary?
        MarkAssignments(self),
        MarkOverflowingArithmetic(self),
        TransformBuiltinMethods(self),  ## Necessary?
        IntroduceBufferAuxiliaryVars(self),
        _check_c_declarations,
        AnalyseExpressionsTransform(self),
        CreateClosureClasses(self),  ## After all lookups and type inference
        ExpandInplaceOperators(self),
        OptimizeBuiltinCalls(self),  ## Necessary?
        IterationTransform(),
        SwitchTransform(),
        DropRefcountingTransform(),
        FinalOptimizePhase(self),
        GilCheck(),
    ]
def create_pyx_pipeline(self, options, result, py=False):
from Cython.TestUtils import TreeAssertVisitor
test_support.append(TreeAssertVisitor())
- return ([
- create_parse(self),
- ] + self.create_pipeline(pxd=False, py=py) + test_support + [
- inject_pxd_code,
- abort_on_errors,
- generate_pyx_code,
- ])
+ if options.gdb_debug:
+ from Cython.Debugger import DebugWriter
+ from ParseTreeTransforms import DebugTransform
+ self.gdb_debug_outputwriter = DebugWriter.CythonDebugWriter(
+ options.output_dir)
+ debug_transform = [DebugTransform(self, options, result)]
+ else:
+ debug_transform = []
+
+ return list(itertools.chain(
+ [create_parse(self)],
+ self.create_pipeline(pxd=False, py=py),
+ test_support,
+ [inject_pxd_code, abort_on_errors],
+ debug_transform,
+ [generate_pyx_code]))
def create_pxd_pipeline(self, scope, module_name):
def parse_pxd(source_desc):
return [parse_pxd] + self.create_pipeline(pxd=True) + [
ExtractPxdCode(self),
]
-
+
def create_py_pipeline(self, options, result):
    # A .py file runs through the same pipeline as a .pyx file; the
    # py flag enables the Python-semantics transforms selected in
    # create_pipeline() (e.g. AlignFunctionDefinitions).
    return self.create_pyx_pipeline(options, result, py=True)
pipeline = self.create_pxd_pipeline(scope, module_name)
result = self.run_pipeline(pipeline, source_desc)
return result
-
+
def nonfatal_error(self, exc):
    # Record *exc* as a non-fatal compile error and let the pipeline
    # continue; returns whatever Errors.report_error() returns.
    return Errors.report_error(exc)
error = None
data = source
try:
- for phase in pipeline:
- if phase is not None:
- if DebugFlags.debug_verbose_pipeline:
- t = time()
- print "Entering pipeline phase %r" % phase
- data = phase(data)
- if DebugFlags.debug_verbose_pipeline:
- print " %.3f seconds" % (time() - t)
- except CompileError, err:
- # err is set
- Errors.report_error(err)
- error = err
+ try:
+ for phase in pipeline:
+ if phase is not None:
+ if DebugFlags.debug_verbose_pipeline:
+ t = time()
+ print "Entering pipeline phase %r" % phase
+ data = phase(data)
+ if DebugFlags.debug_verbose_pipeline:
+ print " %.3f seconds" % (time() - t)
+ except CompileError, err:
+ # err is set
+ Errors.report_error(err)
+ error = err
except InternalError, err:
# Only raise if there was not an earlier error
if Errors.num_errors == 0:
raise
error = err
+ except AbortError, err:
+ error = err
return (error, data)
- def find_module(self, module_name,
+ def find_module(self, module_name,
relative_to = None, pos = None, need_pxd = 1):
# Finds and returns the module scope corresponding to
# the given relative or absolute module name. If this
try:
if debug_find_module:
print("Context.find_module: Parsing %s" % pxd_pathname)
- source_desc = FileSourceDescriptor(pxd_pathname)
+ rel_path = module_name.replace('.', os.sep) + os.path.splitext(pxd_pathname)[1]
+ if not pxd_pathname.endswith(rel_path):
+ rel_path = pxd_pathname # safety measure to prevent printing incorrect paths
+ source_desc = FileSourceDescriptor(pxd_pathname, rel_path)
err, result = self.process_pxd(source_desc, scope, module_name)
if err:
raise err
except CompileError:
pass
return scope
-
+
def find_pxd_file(self, qualified_name, pos):
    # Search include path for the .pxd file corresponding to the
    # given fully-qualified module name.
    # the directory containing the source file is searched first
    # for a dotted filename, and its containing package root
    # directory is searched first for a non-dotted filename.
    pxd = self.search_include_directories(qualified_name, ".pxd", pos)
    if pxd is None:  # XXX Keep this until Includes/Deprecated is removed
        # Fall back to the deprecated Includes/Deprecated directory for
        # the old pre-rename module names, warning on a hit.
        if (qualified_name.startswith('python') or
                qualified_name in ('stdlib', 'stdio', 'stl')):
            standard_include_path = os.path.abspath(os.path.normpath(
                os.path.join(os.path.dirname(__file__), os.path.pardir, 'Includes')))
            deprecated_include_path = os.path.join(standard_include_path, 'Deprecated')
            self.include_directories.append(deprecated_include_path)
            try:
                pxd = self.search_include_directories(qualified_name, ".pxd", pos)
            finally:
                # Always restore the include path, even if the search raises.
                self.include_directories.pop()
            if pxd:
                name = qualified_name
                if name.startswith('python'):
                    warning(pos, "'%s' is deprecated, use 'cpython'" % name, 1)
                elif name in ('stdlib', 'stdio'):
                    warning(pos, "'%s' is deprecated, use 'libc.%s'" % (name, name), 1)
                elif name == 'stl':
                    # was `name in ('stl')` -- a plain string, so `in` did
                    # substring matching; it only worked by accident.
                    warning(pos, "'%s' is deprecated, use 'libcpp.*.*'" % name, 1)
    return pxd
def find_pyx_file(self, qualified_name, pos):
    # Search include path for the .pyx file corresponding to the
    # given fully-qualified module name, as for find_pxd_file().
    return self.search_include_directories(qualified_name, ".pyx", pos)
-
+
def find_include_file(self, filename, pos):
# Search list of include directories for filename.
# Reports an error and returns None if not found.
if not path:
error(pos, "'%s' not found" % filename)
return path
-
+
def search_include_directories(self, qualified_name, suffix, pos,
include=False):
# Search the list of include directories for the given
if dep_path and Utils.file_newer_than(dep_path, c_time):
return 1
return 0
-
+
def find_cimported_module_names(self, source_path):
    # Return the module names cimported by the given source file, read
    # from its dependency file (entries are (kind, name) pairs).
    return [name for kind, name in self.read_dependency_file(source_path)
            if kind == "cimport"]
def is_package_dir(self, dir_path):
# Return true if the given directory is a package directory.
- for filename in ("__init__.py",
- "__init__.pyx",
+ for filename in ("__init__.py",
+ "__init__.pyx",
"__init__.pxd"):
path = os.path.join(dir_path, filename)
if Utils.path_exists(path):
# Find a top-level module, creating a new one if needed.
scope = self.lookup_submodule(name)
if not scope:
- scope = ModuleScope(name,
+ scope = ModuleScope(name,
parent_module = None, context = self)
self.modules[name] = scope
return scope
def parse(self, source_desc, scope, pxd, full_module_name):
if not isinstance(source_desc, FileSourceDescriptor):
raise RuntimeError("Only file sources for code supported")
- source_filename = Utils.encode_filename(source_desc.filename)
+ source_filename = source_desc.filename
scope.cpp = self.cpp
# Parse the given source file and return a parse tree.
try:
f = Utils.open_source_file(source_filename, "rU")
try:
+ import Parsing
s = PyrexScanner(f, source_desc, source_encoding = f.encoding,
scope = scope, context = self)
tree = Parsing.p_module(s, pxd, full_module_name)
return ".".join(names)
def setup_errors(self, options, result):
+ Errors.reset() # clear any remaining error state
if options.use_listing_file:
result.listing_file = Utils.replace_suffix(source, ".lis")
path = result.listing_file
def run_pipeline(source, options, full_module_name = None):
# Set up context
- context = Context(options.include_path, options.compiler_directives,
- options.cplus, options.language_level)
+ context = options.create_context()
# Set up source object
cwd = os.getcwd()
- source_desc = FileSourceDescriptor(os.path.join(cwd, source))
+ abs_path = os.path.abspath(source)
+ source_ext = os.path.splitext(source)[1]
full_module_name = full_module_name or context.extract_module_name(source, options)
+ if options.relative_path_in_code_position_comments:
+ rel_path = full_module_name.replace('.', os.sep) + source_ext
+ if not abs_path.endswith(rel_path):
+ rel_path = source # safety measure to prevent printing incorrect paths
+ else:
+ rel_path = abs_path
+ source_desc = FileSourceDescriptor(abs_path, rel_path)
source = CompilationSource(source_desc, full_module_name, cwd)
# Set up result object
result = create_default_resultobj(source, options)
-
+
+ if options.annotate is None:
+ # By default, decide based on whether an html file already exists.
+ html_filename = os.path.splitext(result.c_file)[0] + ".html"
+ if os.path.exists(html_filename):
+ line = codecs.open(html_filename, "r", encoding="UTF-8").readline()
+ if line.startswith(u'<!-- Generated by Cython'):
+ options.annotate = True
+
# Get pipeline
- if source_desc.filename.endswith(".py"):
+ if source_ext.lower() == '.py':
pipeline = context.create_py_pipeline(options, result)
else:
pipeline = context.create_pyx_pipeline(options, result)
context.teardown_errors(err, options, result)
return result
+
#------------------------------------------------------------------------
#
# Main Python entry points
class CompilationOptions(object):
"""
Options to the Cython compiler:
-
+
show_version boolean Display version number
use_listing_file boolean Generate a .lis file
errors_to_stderr boolean Echo errors to stderr when using .lis
compiler_directives dict Overrides for pragma options (see Options.py)
evaluate_tree_assertions boolean Test support: evaluate parse tree assertions
language_level integer The Python language level: 2 or 3
-
+
cplus boolean Compile as c++ code
"""
-
+
def __init__(self, defaults = None, **kw):
    # Start from an empty include path, then layer the given defaults
    # (a mapping of option name -> value, if provided) and finally any
    # explicit keyword overrides onto the instance __dict__.
    self.include_path = []
    if defaults:
        self.__dict__.update(defaults)
    self.__dict__.update(kw)
def create_context(self):
    # Build a fresh compilation Context from these options; shared by
    # run_pipeline() and compile_multiple() so both use one code path.
    return Context(self.include_path, self.compiler_directives,
                   self.cplus, self.language_level)
+
class CompilationResult(object):
"""
Results from the Cython compiler:
-
+
c_file string or None The generated C source file
h_file string or None The generated C header file
i_file string or None The generated .pxi file
num_errors integer Number of compilation errors
compilation_source CompilationSource
"""
-
+
def __init__(self):
self.c_file = None
self.h_file = None
Results from compiling multiple Pyrex source files. A mapping
from source file paths to CompilationResult instances. Also
has the following attributes:
-
+
num_errors integer Total number of compilation errors
"""
-
+
num_errors = 0
def add(self, source, result):
def compile_single(source, options, full_module_name = None):
"""
compile_single(source, options, full_module_name)
-
+
Compile the given Pyrex implementation file and return a CompilationResult.
Always compiles a single file; does not perform timestamp checking or
recursion.
def compile_multiple(sources, options):
"""
compile_multiple(sources, options)
-
+
Compiles the given sequence of Pyrex implementation files and returns
a CompilationResultSet. Performs timestamp checking and/or recursion
if these are specified in the options.
"""
+ context = options.create_context()
sources = [os.path.abspath(source) for source in sources]
processed = set()
results = CompilationResultSet()
def compile(source, options = None, full_module_name = None, **kwds):
"""
compile(source [, options], [, <option> = <value>]...)
-
+
Compile one or more Pyrex implementation files, with optional timestamp
checking and recursing on dependencies. The source argument may be a string
or a sequence of strings. If it is a string and no recursion or timestamp
errors_to_stderr = 1,
cplus = 0,
output_file = None,
- annotate = False,
+ annotate = None,
generate_pxi = 0,
working_path = "",
recursive = 0,
compiler_directives = {},
evaluate_tree_assertions = False,
emit_linenums = False,
+ relative_path_in_code_position_comments = True,
+ c_line_in_traceback = True,
language_level = 2,
+ gdb_debug = False,
)