8 from sets import Set as set
10 from distutils.extension import Extension
12 from Cython import Utils
13 from Cython.Compiler.Main import Context, CompilationOptions, default_options
15 # Unfortunately, Python 2.3 doesn't support decorators.
# NOTE(review): fragment of a per-instance memoization helper; the
# enclosing `def cached_method(f):` line and several body lines are
# elided from this view (original line numbers at left).
17 cache_name = '__%s_cache' % f.__name__
# The wrapper stores results on the instance under a mangled attribute
# name, keyed by the positional-argument tuple.
18 def wrapper(self, *args):
19 cache = getattr(self, cache_name, None)
# (elided above: presumably the dict is created on first use) — attach it.
22 setattr(self, cache_name, cache)
# Compute once, remember, and return.
25 res = cache[args] = f(self, *args)
# NOTE(review): fragment of a list-parsing helper (its `def` line and
# several body lines are elided; original line numbers at left). It
# appears to split a possibly-bracketed, possibly-quoted string into a
# list of items — TODO confirm against the full source.
31 if s[0] == '[' and s[-1] == ']':
# String literals are replaced by labels first, so quoting/escaping
# inside items cannot confuse the split below.
36 s, literals = strip_string_literals(s)
38 literal = literal.strip()
# A quoted item maps back to its original text via the literals table.
40 return literals[literal[1:-1]]
44 return [unquote(item) for item in s.split(delimiter)]
# Sentinels marking distutils keys whose values propagate transitively
# from cimported/included files into the modules that depend on them.
46 transitive_str = object()
47 transitive_list = object()
# Maps "# distutils: <key>=<value>" directive names to their value type.
# NOTE(review): some entries and the closing brace are elided from this
# view (original line numbers at left).
49 distutils_settings = {
52 'define_macros': list,
54 'libraries': transitive_list,
55 'library_dirs': transitive_list,
56 'runtime_library_dirs': transitive_list,
57 'include_dirs': transitive_list,
58 'extra_objects': list,
59 'extra_compile_args': list,
60 'extra_link_args': list,
61 'export_symbols': list,
62 'depends': transitive_list,
63 'language': transitive_str,
66 def line_iter(source):
# Generator yielding `source` one line at a time without materializing a
# full list of lines.
# NOTE(review): the loop setup, end-of-input handling and the advance of
# `start` are elided from this view; only the find/yield core is visible.
69 end = source.find('\n', start)
73 yield source[start:end]
# Per-module distutils settings, parsed from "# distutils: key=value"
# comment directives in a source file or copied from an Extension object.
# NOTE(review): many original lines are elided from this view (numbers at
# left); comments annotate only the visible code.
76 class DistutilsInfo(object):
# Parse directives out of `source` text, and/or harvest known settings
# from an existing extension object `exn`.
78 def __init__(self, source=None, exn=None):
80 if source is not None:
81 for line in line_iter(source):
# Only comment lines can carry directives.
83 if line != '' and line[0] != '#':
85 line = line[1:].strip()
86 if line[:10] == 'distutils:':
89 key = str(line[:ix].strip())
90 value = line[ix+1:].strip()
# KeyError here means an unknown directive name.
91 type = distutils_settings[key]
92 if type in (list, transitive_list):
93 value = parse_list(value)
# define_macros entries become (name, value) tuples.
94 if key == 'define_macros':
95 value = [tuple(macro.split('=')) for macro in value]
96 self.values[key] = value
# Copy recognized attributes off the extension template, skipping the
# ones that identify the module itself.
98 for key in distutils_settings:
99 if key in ('name', 'sources'):
101 value = getattr(exn, key, None)
103 self.values[key] = value
# Fold another DistutilsInfo into this one: transitive strings only fill
# gaps, transitive lists are combined (combination lines elided here).
105 def merge(self, other):
108 for key, value in other.values.items():
109 type = distutils_settings[key]
110 if type is transitive_str and key not in self.values:
111 self.values[key] = value
112 elif type is transitive_list:
113 if key in self.values:
114 all = self.values[key]
119 self.values[key] = value
# Return a copy with alias names substituted by their actual values.
122 def subs(self, aliases):
125 resolved = DistutilsInfo()
126 for key, value in self.values.items():
127 type = distutils_settings[key]
128 if type in [list, transitive_list]:
# An alias may expand to a list, which is spliced into the result
# (the surrounding loop lines are elided from this view).
133 if isinstance(v, list):
136 new_value_list.append(v)
137 value = new_value_list
140 value = aliases[value]
141 resolved.values[key] = value
145 def strip_string_literals(code, prefix='__Pyx_L'):
147 Normalizes every string literal to be of the form '__Pyx_Lxxx',
148 returning the normalized code and a mapping of labels to
# NOTE(review): this function's body is heavily elided in this view
# (original line numbers at left). The visible core scans `code` for the
# next comment hash or quote character and replaces literal text with
# generated labels, collecting the originals in `literals`.
158 hash_mark = code.find('#', q)
159 single_q = code.find("'", q)
160 double_q = code.find('"', q)
161 q = min(single_q, double_q)
# find() returns -1 on a miss, which min() would wrongly prefer, so fall
# back to max() when that happens.
162 if q == -1: q = max(single_q, double_q)
# A comment starts before any quote: stash the comment text as a literal.
165 if -1 < hash_mark and (hash_mark < q or q == -1):
166 end = code.find('\n', hash_mark)
169 new_code.append(code[start:hash_mark+1])
171 label = "%s%s" % (prefix, counter)
172 literals[label] = code[hash_mark+1:end]
173 new_code.append(label)
181 new_code.append(code[start:])
184 # Try to close the quote.
# A quote preceded by an odd number of backslashes is escaped, not a
# terminator (the parity check below counts consecutive backslashes).
186 if code[q-1] == '\\':
188 while q >= k and code[q-k] == '\\':
# Matching closing quote found: record the literal under a fresh label,
# keeping the quote characters themselves in the output.
193 if code[q:q+len(in_quote)] == in_quote:
195 label = "%s%s" % (prefix, counter)
196 literals[label] = code[start+len(in_quote):q]
197 new_code.append("%s%s%s" % (in_quote, label, in_quote))
# Triple-quoted string: three identical quote chars open the literal.
207 if len(code) >= q+3 and (code[q+1] == code[q] == code[q+2]):
# String prefix letters (r/R/b/B/u/U) stay outside the label.
212 while end>0 and code[end-1] in 'rRbBuU':
213 if code[end-1] in 'rR':
216 new_code.append(code[start:end])
220 return "".join(new_code), literals
223 def parse_dependencies(source_filename):
# Return (cimports, includes, externs, distutils_info) for one file.
224 # Actual parsing is way too slow, so we use regular expressions.
225 # The only catch is that we must strip comments and string
226 # literals ahead of time.
227 source = Utils.open_source_file(source_filename, "rU").read()
228 distutils_info = DistutilsInfo(source)
# Literals become labels; line continuations and tabs are flattened so
# the regex below can match across the normalized text.
229 source, literals = strip_string_literals(source)
230 source = source.replace('\\\n', ' ')
232 source = source.replace('\t', ' ')
# NOTE(review): "dependancy" is a misspelling of "dependency"; the local
# name is left untouched here to keep this a comment-only change.
234 dependancy = re.compile(r"(cimport +([0-9a-zA-Z_.]+)\b)|(from +([0-9a-zA-Z_.]+) +cimport)|(include +'([^']+)')|(cdef +extern +from +'([^']+)')")
238 for m in dependancy.finditer(source):
241 cimports.append(groups[1])
243 cimports.append(groups[3])
# include/extern filenames were string literals, so map the label back
# to the original text.
245 includes.append(literals[groups[5]])
247 externs.append(literals[groups[7]])
248 return cimports, includes, externs, distutils_info
# Computes and memoizes per-file dependency information (cimports,
# includes, externs, distutils settings, timestamps) for a compilation
# context, via the module's cached_method helper.
# NOTE(review): many original lines are elided from this view (numbers at
# left); comments annotate only the visible code.
251 class DependencyTree(object):
253 def __init__(self, context):
254 self.context = context
# Per-(extract, merge) memo tables used by transitive_merge.
255 self._transitive_cache = {}
258 def parse_dependencies(self, source_filename):
259 return parse_dependencies(source_filename)
# Python 2.3-compatible memoization (no decorator syntax available).
260 parse_dependencies = cached_method(parse_dependencies)
# cimports/externs of a file, folded together with those of its includes.
263 def cimports_and_externs(self, filename):
264 cimports, includes, externs = self.parse_dependencies(filename)[:3]
265 cimports = set(cimports)
266 externs = set(externs)
267 for include in includes:
268 a, b = self.cimports_and_externs(os.path.join(os.path.dirname(filename), include))
271 return tuple(cimports), tuple(externs)
272 cimports_and_externs = cached_method(cimports_and_externs)
274 def cimports(self, filename):
275 return self.cimports_and_externs(filename)[0]
# Package path of a file, derived from enclosing __init__.py markers.
278 def package(self, filename):
279 dir = os.path.dirname(filename)
280 if os.path.exists(os.path.join(dir, '__init__.py')):
281 return self.package(dir) + (os.path.basename(dir),)
284 package = cached_method(package)
# NOTE(review): "qualifeid" is a typo for "qualified", but the name is
# used by callers (e.g. create_extension_list), so it is kept as-is.
287 def fully_qualifeid_name(self, filename):
288 module = os.path.splitext(os.path.basename(filename))[0]
289 return '.'.join(self.package(filename) + (module,))
290 fully_qualifeid_name = cached_method(fully_qualifeid_name)
# Locate the .pxd file for a cimported module name, trying a
# package-relative resolution before the absolute one.
292 def find_pxd(self, module, filename=None):
294 raise NotImplementedError("New relative imports.")
295 if filename is not None:
296 relative = '.'.join(self.package(filename) + tuple(module.split('.')))
297 pxd = self.context.find_pxd_file(relative, None)
300 return self.context.find_pxd_file(module, None)
303 def cimported_files(self, filename):
# A .pyx with a matching .pxd also depends on its own .pxd.
304 if filename[-4:] == '.pyx' and os.path.exists(filename[:-4] + '.pxd'):
305 self_pxd = [filename[:-4] + '.pxd']
308 a = self.cimports(filename)
309 b = filter(None, [self.find_pxd(m, filename) for m in self.cimports(filename)])
# Diagnostic output (its guard is elided from this view — presumably
# printed only when some cimports could not be resolved; verify).
312 print("\n\t".join(a))
313 print("\n\t".join(b))
314 return tuple(self_pxd + filter(None, [self.find_pxd(m, filename) for m in self.cimports(filename)]))
315 cimported_files = cached_method(cimported_files)
317 def immediate_dependencies(self, filename):
318 all = list(self.cimported_files(filename))
319 for extern in sum(self.cimports_and_externs(filename), ()):
320 all.append(os.path.normpath(os.path.join(os.path.dirname(filename), extern)))
324 def timestamp(self, filename):
325 return os.path.getmtime(filename)
326 timestamp = cached_method(timestamp)
328 def extract_timestamp(self, filename):
329 # TODO: .h files from extern blocks
# (timestamp, filename) pairs compare by time first, so max() in
# newest_dependency yields the newest file together with its name.
330 return self.timestamp(filename), filename
332 def newest_dependency(self, filename):
333 return self.transitive_merge(filename, self.extract_timestamp, max)
335 def distutils_info0(self, filename):
336 return self.parse_dependencies(filename)[3]
338 def distutils_info(self, filename, aliases=None, base=None):
339 return (self.transitive_merge(filename, self.distutils_info0, DistutilsInfo.merge)
# Generic memoized DFS fold over the cimport graph with cycle handling.
343 def transitive_merge(self, node, extract, merge):
345 seen = self._transitive_cache[extract, merge]
347 seen = self._transitive_cache[extract, merge] = {}
348 return self.transitive_merge_helper(
349 node, extract, merge, seen, {}, self.cimported_files)[0]
351 def transitive_merge_helper(self, node, extract, merge, seen, stack, outgoing):
353 return seen[node], None
# `stack` maps in-progress nodes to their DFS depth, for loop detection.
358 stack[node] = len(stack)
360 for next in outgoing(node):
361 sub_deps, sub_loop = self.transitive_merge_helper(next, extract, merge, seen, stack, outgoing)
362 if sub_loop is not None:
# Keep the shallowest loop head encountered so far.
363 if loop is not None and stack[loop] < stack[sub_loop]:
367 deps = merge(deps, sub_deps)
# Lazily create and reuse a module-level DependencyTree so repeated
# calls share one cache (the `global _dep_tree` line is elided here).
377 def create_dependency_tree(ctx=None):
379 if _dep_tree is None:
# Default compilation context rooted at the current directory.
381 ctx = Context(["."], CompilationOptions(default_options))
382 _dep_tree = DependencyTree(ctx)
385 # This may be useful for advanced users?
386 def create_extension_list(patterns, ctx=None, aliases=None):
# Expand glob patterns and/or Extension templates into concrete
# Extension objects, one per matching Cython source file.
# NOTE(review): several original lines are elided from this view.
388 deps = create_dependency_tree(ctx)
389 if not isinstance(patterns, list):
390 patterns = [patterns]
392 for pattern in patterns:
393 if isinstance(pattern, str):
394 filepattern = pattern
# An Extension acts as a template; its first source is the file pattern.
399 elif isinstance(pattern, Extension):
400 filepattern = pattern.sources[0]
401 if os.path.splitext(filepattern)[1] not in ('.py', '.pyx'):
402 # ignore non-cython modules
403 module_list.append(pattern)
# Template settings become the base that per-file directives override.
407 base = DistutilsInfo(exn=template)
408 exn_type = template.__class__
410 raise TypeError(pattern)
411 for file in glob(filepattern):
412 pkg = deps.package(file)
414 module_name = deps.fully_qualifeid_name(file)
# Deduplicate: the first pattern matching a module name wins.
417 if module_name not in seen:
418 module_list.append(exn_type(
421 **deps.distutils_info(file, aliases, base).values))
425 # This is the user-exposed entry point.
426 def cythonize(module_list, nthreads=0, aliases=None, **options):
# Translate each out-of-date .pyx/.py source to C or C++ (optionally in
# parallel across `nthreads` processes) and return the Extension list
# with its sources rewritten to point at the generated files.
427 c_options = CompilationOptions(options)
428 cpp_options = CompilationOptions(options); cpp_options.cplus = True
# NOTE(review): `options` is the raw **kwargs dict here, and a dict has
# no create_context(); this looks like it should be
# c_options.create_context() — verify against the full source.
429 ctx = options.create_context()
430 module_list = create_extension_list(module_list, ctx=ctx, aliases=aliases)
431 deps = create_dependency_tree(ctx)
433 for m in module_list:
435 for source in m.sources:
436 base, ext = os.path.splitext(source)
437 if ext in ('.pyx', '.py'):
# C++ extensions get a .cpp target and the cplus option set.
438 if m.language == 'c++':
439 c_file = base + '.cpp'
440 options = cpp_options
444 if os.path.exists(c_file):
445 c_timestamp = os.path.getmtime(c_file)
448 # Priority goes first to modified files, second to direct
449 # dependents, and finally to indirect dependents.
450 if c_timestamp < deps.timestamp(source):
451 dep_timestamp, dep = deps.timestamp(source), source
454 dep_timestamp, dep = deps.newest_dependency(source)
455 priority = 2 - (dep in deps.immediate_dependencies(source))
# Recompile only when the generated file is older than its newest
# (transitive) dependency.
456 if c_timestamp < dep_timestamp:
457 print("Compiling %s because it depends on %s" % (source, dep))
458 to_compile.append((priority, source, c_file, options))
459 new_sources.append(c_file)
461 new_sources.append(source)
462 m.sources = new_sources
465 # Requires multiprocessing (or Python >= 2.6)
467 import multiprocessing
468 pool = multiprocessing.Pool(nthreads)
469 pool.map(cythonize_one_helper, to_compile)
# Fall back to serial compilation when multiprocessing is unavailable.
471 print("multiprocessing required for parallel cythonization")
474 for priority, pyx_file, c_file, options in to_compile:
475 cythonize_one(pyx_file, c_file, options)
478 # TODO: Share context? Issue: pyx processing leaks into pxd module
479 def cythonize_one(pyx_file, c_file, options=None):
# Compile a single .pyx file to the given C file, raising CompileError
# on failure. Compiler modules are imported lazily inside the function.
480 from Cython.Compiler.Main import compile, default_options
481 from Cython.Compiler.Errors import CompileError, PyrexError
484 options = CompilationOptions(default_options)
485 options.output_file = c_file
489 result = compile([pyx_file], options)
490 if result.num_errors > 0:
# Python 2-only except syntax; Python 3 would require "as e".
492 except (EnvironmentError, PyrexError), e:
493 sys.stderr.write(str(e) + '\n')
496 raise CompileError(None, pyx_file)
def cythonize_one_helper(m):
    """Adapter for multiprocessing.Pool.map: drop the leading priority
    element of a work tuple and forward the rest to cythonize_one."""
    remaining_args = m[1:]
    return cythonize_one(*remaining_args)