# Cython/Build/Dependencies.py
from glob import glob
import re, os, sys
from cython import set


from distutils.extension import Extension

from Cython import Utils
from Cython.Compiler.Main import Context, CompilationOptions, default_options

# Unfortunately, Python 2.3 doesn't support the decorator syntax, so
# cached_method is applied by explicit reassignment below.  It memoizes a
# method's result per instance, keyed by its positional arguments.
def cached_method(f):
    cache_name = '__%s_cache' % f.__name__
    def wrapper(self, *args):
        cache = getattr(self, cache_name, None)
        if cache is None:
            cache = {}
            setattr(self, cache_name, cache)
        if args in cache:
            return cache[args]
        res = cache[args] = f(self, *args)
        return res
    return wrapper
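# Illustrative sketch of how cached_method is meant to be used (the class and
# method names here are made up, not part of this module):
#
#     class Fib(object):
#         def fib(self, n):
#             return n if n < 2 else self.fib(n-1) + self.fib(n-2)
#         fib = cached_method(fib)
#
# The first call with a given argument computes and stores the result on the
# instance; repeated calls with the same argument return the cached value.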


def parse_list(s):
    """
    Parse a space-delimited list, or a comma-delimited list enclosed in
    brackets, resolving single-quoted items via strip_string_literals().
    """
    if s[0] == '[' and s[-1] == ']':
        s = s[1:-1]
        delimiter = ','
    else:
        delimiter = ' '
    s, literals = strip_string_literals(s)
    def unquote(literal):
        literal = literal.strip()
        if literal[0] == "'":
            return literals[literal[1:-1]]
        else:
            return literal

    return [unquote(item) for item in s.split(delimiter)]

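# Roughly (illustrative values, not an exhaustive specification):
#
#     parse_list("one two three")      ->  ['one', 'two', 'three']
#     parse_list("[one, two, three]")  ->  ['one', 'two', 'three']
#     parse_list("['a b', c]")         ->  ['a b', 'c']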
# Sentinel types: settings marked "transitive" are merged in from the
# distutils info of cimported/included dependencies as well.
transitive_str = object()
transitive_list = object()

distutils_settings = {
    'name':                 str,
    'sources':              list,
    'define_macros':        list,
    'undef_macros':         list,
    'libraries':            transitive_list,
    'library_dirs':         transitive_list,
    'runtime_library_dirs': transitive_list,
    'include_dirs':         transitive_list,
    'extra_objects':        list,
    'extra_compile_args':   transitive_list,
    'extra_link_args':      transitive_list,
    'export_symbols':       list,
    'depends':              transitive_list,
    'language':             transitive_str,
}
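# DistutilsInfo (below) reads these settings from special comments at the top
# of a source file.  Illustrative example of such a header (the library and
# path names are made up):
#
#     # distutils: libraries = spam eggs
#     # distutils: include_dirs = /opt/food/include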

def line_iter(source):
    start = 0
    while True:
        end = source.find('\n', start)
        if end == -1:
            yield source[start:]
            return
        yield source[start:end]
        start = end+1

class DistutilsInfo(object):
    """
    Distutils settings for a single module, parsed either from the
    "# distutils: ..." header comments of a source file or from an
    existing Extension instance.
    """

    def __init__(self, source=None, exn=None):
        self.values = {}
        if source is not None:
            for line in line_iter(source):
                line = line.strip()
                if line != '' and line[0] != '#':
                    break
                line = line[1:].strip()
                if line[:10] == 'distutils:':
                    line = line[10:]
                    ix = line.index('=')
                    key = str(line[:ix].strip())
                    value = line[ix+1:].strip()
                    type = distutils_settings[key]
                    if type in (list, transitive_list):
                        value = parse_list(value)
                        if key == 'define_macros':
                            value = [tuple(macro.split('=')) for macro in value]
                    self.values[key] = value
        elif exn is not None:
            for key in distutils_settings:
                if key in ('name', 'sources'):
                    continue
                value = getattr(exn, key, None)
                if value:
                    self.values[key] = value

    def merge(self, other):
        if other is None:
            return self
        for key, value in other.values.items():
            type = distutils_settings[key]
            if type is transitive_str and key not in self.values:
                self.values[key] = value
            elif type is transitive_list:
                if key in self.values:
                    all = self.values[key]
                    for v in value:
                        if v not in all:
                            all.append(v)
                else:
                    self.values[key] = value
        return self

    def subs(self, aliases):
        if aliases is None:
            return self
        resolved = DistutilsInfo()
        for key, value in self.values.items():
            type = distutils_settings[key]
            if type in [list, transitive_list]:
                new_value_list = []
                for v in value:
                    if v in aliases:
                        v = aliases[v]
                    if isinstance(v, list):
                        new_value_list += v
                    else:
                        new_value_list.append(v)
                value = new_value_list
            else:
                if value in aliases:
                    value = aliases[value]
            resolved.values[key] = value
        return resolved


def strip_string_literals(code, prefix='__Pyx_L'):
    """
    Normalizes every string literal and comment body to a label of the
    form '__Pyx_Lxxx_', returning the normalized code and a mapping of
    labels to the original string literals and comments.
    """
    new_code = []
    literals = {}
    counter = 0
    start = q = 0
    in_quote = False
    raw = False
    while True:
        hash_mark = code.find('#', q)
        single_q = code.find("'", q)
        double_q = code.find('"', q)
        q = min(single_q, double_q)
        if q == -1: q = max(single_q, double_q)

        # We're done.
        if q == -1 and hash_mark == -1:
            new_code.append(code[start:])
            break

        # Try to close the quote.
        elif in_quote:
            if code[q-1] == '\\' and not raw:
                k = 2
                while q >= k and code[q-k] == '\\':
                    k += 1
                if k % 2 == 0:
                    q += 1
                    continue
            if code[q:q+len(in_quote)] == in_quote:
                counter += 1
                label = "%s%s_" % (prefix, counter)
                literals[label] = code[start+len(in_quote):q]
                new_code.append("%s%s%s" % (in_quote, label, in_quote))
                q += len(in_quote)
                in_quote = False
                start = q
            else:
                q += 1

        # Process comment.
        elif -1 != hash_mark and (hash_mark < q or q == -1):
            end = code.find('\n', hash_mark)
            if end == -1:
                end = None
            new_code.append(code[start:hash_mark+1])
            counter += 1
            label = "%s%s_" % (prefix, counter)
            literals[label] = code[hash_mark+1:end]
            new_code.append(label)
            if end is None:
                break
            q = end
            start = q

        # Open the quote.
        else:
            raw = False
            if len(code) >= q+3 and (code[q+1] == code[q] == code[q+2]):
                in_quote = code[q]*3
            else:
                in_quote = code[q]
            end = marker = q
            while marker > 0 and code[marker-1] in 'rRbBuU':
                if code[marker-1] in 'rR':
                    raw = True
                marker -= 1
            new_code.append(code[start:end])
            start = q
            q += len(in_quote)

    return "".join(new_code), literals
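# Roughly, for illustration (the exact labels depend on prefix and counter):
#
#     strip_string_literals("print 'spam'  # ham")
#       ->  ("print '__Pyx_L1_'  #__Pyx_L2_",
#            {'__Pyx_L1_': 'spam', '__Pyx_L2_': ' ham'})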


def parse_dependencies(source_filename):
    # Actual parsing is way too slow, so we use regular expressions.
    # The only catch is that we must strip comments and string
    # literals ahead of time.
    fh = Utils.open_source_file(source_filename, "rU")
    try:
        source = fh.read()
    finally:
        fh.close()
    distutils_info = DistutilsInfo(source)
    source, literals = strip_string_literals(source)
    source = source.replace('\\\n', ' ')
    if '\t' in source:
        source = source.replace('\t', ' ')
    # TODO: pure mode
    dependency = re.compile(r"(cimport +([0-9a-zA-Z_.]+)\b)|(from +([0-9a-zA-Z_.]+) +cimport)|(include +'([^']+)')|(cdef +extern +from +'([^']+)')")
    cimports = []
    includes = []
    externs  = []
    for m in dependency.finditer(source):
        groups = m.groups()
        if groups[0]:
            cimports.append(groups[1])
        elif groups[2]:
            cimports.append(groups[3])
        elif groups[4]:
            includes.append(literals[groups[5]])
        else:
            externs.append(literals[groups[7]])
    return cimports, includes, externs, distutils_info
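# For illustration, the regular expression above recognises dependency
# declarations of these forms (module and file names are made up):
#
#     cimport spam
#     from spam.eggs cimport ham
#     include 'utils.pxi'
#     cdef extern from 'header.h'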


class DependencyTree(object):

    def __init__(self, context):
        self.context = context
        self._transitive_cache = {}

    #@cached_method
    def parse_dependencies(self, source_filename):
        return parse_dependencies(source_filename)
    parse_dependencies = cached_method(parse_dependencies)

    #@cached_method
    def cimports_and_externs(self, filename):
        cimports, includes, externs = self.parse_dependencies(filename)[:3]
        cimports = set(cimports)
        externs = set(externs)
        for include in includes:
            include_path = os.path.join(os.path.dirname(filename), include)
            if not os.path.exists(include_path):
                include_path = self.context.find_include_file(include, None)
            if include_path:
                a, b = self.cimports_and_externs(include_path)
                cimports.update(a)
                externs.update(b)
            else:
                print("Unable to locate '%s' referenced from '%s'" % (include, filename))
        return tuple(cimports), tuple(externs)
    cimports_and_externs = cached_method(cimports_and_externs)

    def cimports(self, filename):
        return self.cimports_and_externs(filename)[0]

    #@cached_method
    def package(self, filename):
        dir = os.path.dirname(filename)
        if os.path.exists(os.path.join(dir, '__init__.py')):
            return self.package(dir) + (os.path.basename(dir),)
        else:
            return ()
    package = cached_method(package)

    #@cached_method
    def fully_qualified_name(self, filename):
        module = os.path.splitext(os.path.basename(filename))[0]
        return '.'.join(self.package(filename) + (module,))
    fully_qualified_name = cached_method(fully_qualified_name)

    def find_pxd(self, module, filename=None):
        if module[0] == '.':
            raise NotImplementedError("New relative imports.")
        if filename is not None:
            relative = '.'.join(self.package(filename) + tuple(module.split('.')))
            pxd = self.context.find_pxd_file(relative, None)
            if pxd:
                return pxd
        return self.context.find_pxd_file(module, None)
    find_pxd = cached_method(find_pxd)

    #@cached_method
    def cimported_files(self, filename):
        if filename[-4:] == '.pyx' and os.path.exists(filename[:-4] + '.pxd'):
            self_pxd = [filename[:-4] + '.pxd']
        else:
            self_pxd = []
        a = self.cimports(filename)
        b = list(filter(None, [self.find_pxd(m, filename) for m in a]))
        if len(a) - int('cython' in a) != len(b):
            print("missing cimport in module '%s'" % filename)
            print("\n\t".join(a))
            print("\n\t".join(b))
        return tuple(self_pxd + b)
    cimported_files = cached_method(cimported_files)

    def immediate_dependencies(self, filename):
        all = list(self.cimported_files(filename))
        for extern in sum(self.cimports_and_externs(filename), ()):
            all.append(os.path.normpath(os.path.join(os.path.dirname(filename), extern)))
        return tuple(all)

    #@cached_method
    def timestamp(self, filename):
        return os.path.getmtime(filename)
    timestamp = cached_method(timestamp)

    def extract_timestamp(self, filename):
        # TODO: .h files from extern blocks
        return self.timestamp(filename), filename

    def newest_dependency(self, filename):
        return self.transitive_merge(filename, self.extract_timestamp, max)

    def distutils_info0(self, filename):
        return self.parse_dependencies(filename)[3]

    def distutils_info(self, filename, aliases=None, base=None):
        return (self.transitive_merge(filename, self.distutils_info0, DistutilsInfo.merge)
            .subs(aliases)
            .merge(base))

    def transitive_merge(self, node, extract, merge):
        try:
            seen = self._transitive_cache[extract, merge]
        except KeyError:
            seen = self._transitive_cache[extract, merge] = {}
        return self.transitive_merge_helper(
            node, extract, merge, seen, {}, self.cimported_files)[0]

    def transitive_merge_helper(self, node, extract, merge, seen, stack, outgoing):
        if node in seen:
            return seen[node], None
        deps = extract(node)
        if node in stack:
            return deps, node
        try:
            stack[node] = len(stack)
            loop = None
            for next in outgoing(node):
                sub_deps, sub_loop = self.transitive_merge_helper(next, extract, merge, seen, stack, outgoing)
                if sub_loop is not None:
                    # Keep the loop head closest to the bottom of the stack
                    # (the outermost cycle).
                    if loop is None or stack[loop] >= stack[sub_loop]:
                        loop = sub_loop
                deps = merge(deps, sub_deps)
            if loop == node:
                loop = None
            if loop is None:
                seen[node] = deps
            return deps, loop
        finally:
            del stack[node]

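# For illustration: with distutils_info0/DistutilsInfo.merge as the extract and
# merge functions, transitive_merge collects the "# distutils:" settings of a
# module together with those of everything it cimports, directly or
# indirectly.  So if a.pyx cimports b and b.pxd declares extra libraries,
# compiling a.pyx will link against them too (module names here are made up).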
_dep_tree = None
def create_dependency_tree(ctx=None):
    global _dep_tree
    if _dep_tree is None:
        if ctx is None:
            ctx = Context(["."], CompilationOptions(default_options))
        _dep_tree = DependencyTree(ctx)
    return _dep_tree

# This may be useful for advanced users?
def create_extension_list(patterns, exclude=[], ctx=None, aliases=None):
    seen = set()
    deps = create_dependency_tree(ctx)
    to_exclude = set()
    if not isinstance(exclude, list):
        exclude = [exclude]
    for pattern in exclude:
        to_exclude.update(glob(pattern))
    if not isinstance(patterns, list):
        patterns = [patterns]
    module_list = []
    for pattern in patterns:
        if isinstance(pattern, str):
            filepattern = pattern
            template = None
            name = '*'
            base = None
            exn_type = Extension
        elif isinstance(pattern, Extension):
            filepattern = pattern.sources[0]
            if os.path.splitext(filepattern)[1] not in ('.py', '.pyx'):
                # ignore non-cython modules
                module_list.append(pattern)
                continue
            template = pattern
            name = template.name
            base = DistutilsInfo(exn=template)
            exn_type = template.__class__
        else:
            raise TypeError(pattern)
        for file in glob(filepattern):
            if file in to_exclude:
                continue
            pkg = deps.package(file)
            if '*' in name:
                module_name = deps.fully_qualified_name(file)
            else:
                module_name = name
            if module_name not in seen:
                kwds = deps.distutils_info(file, aliases, base).values
                if base is not None:
                    for key, value in base.values.items():
                        if key not in kwds:
                            kwds[key] = value
                sources = [file]
                if template is not None:
                    sources += template.sources[1:]
                module_list.append(exn_type(
                        name=module_name,
                        sources=sources,
                        **kwds))
                seen.add(module_name)
    return module_list
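# Illustrative use of the aliases parameter (names and paths are made up):
# a source file may declare
#
#     # distutils: include_dirs = NUMPY
#
# and the caller can resolve the placeholder at build time by passing
# aliases={'NUMPY': '/path/to/numpy/includes'}, which subs() substitutes into
# the generated Extension's include_dirs.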

# This is the user-exposed entry point.
def cythonize(module_list, exclude=[], nthreads=0, aliases=None, quiet=False, **options):
    if 'include_path' not in options:
        options['include_path'] = ['.']
    c_options = CompilationOptions(**options)
    cpp_options = CompilationOptions(**options)
    cpp_options.cplus = True
    ctx = c_options.create_context()
    module_list = create_extension_list(
        module_list,
        exclude=exclude,
        ctx=ctx,
        aliases=aliases)
    deps = create_dependency_tree(ctx)
    to_compile = []
    for m in module_list:
        new_sources = []
        for source in m.sources:
            base, ext = os.path.splitext(source)
            if ext in ('.pyx', '.py'):
                if m.language == 'c++':
                    c_file = base + '.cpp'
                    options = cpp_options
                else:
                    c_file = base + '.c'
                    options = c_options
                if os.path.exists(c_file):
                    c_timestamp = os.path.getmtime(c_file)
                else:
                    c_timestamp = -1
                # Priority goes first to modified files, second to direct
                # dependents, and finally to indirect dependents.
                if c_timestamp < deps.timestamp(source):
                    dep_timestamp, dep = deps.timestamp(source), source
                    priority = 0
                else:
                    dep_timestamp, dep = deps.newest_dependency(source)
                    priority = 2 - (dep in deps.immediate_dependencies(source))
                if c_timestamp < dep_timestamp:
                    if not quiet:
                        if source == dep:
                            print("Compiling %s because it changed." % source)
                        else:
                            print("Compiling %s because it depends on %s." % (source, dep))
                    to_compile.append((priority, source, c_file, options))
                new_sources.append(c_file)
            else:
                new_sources.append(source)
        m.sources = new_sources
    to_compile.sort()
    if nthreads:
        # Requires multiprocessing (or Python >= 2.6)
        try:
            import multiprocessing
            pool = multiprocessing.Pool(nthreads)
            pool.map(cythonize_one_helper, to_compile)
        except ImportError:
            print("multiprocessing required for parallel cythonization")
            nthreads = 0
    if not nthreads:
        for priority, pyx_file, c_file, options in to_compile:
            cythonize_one(pyx_file, c_file, options)
    return module_list
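# Typical use from a setup.py (illustrative; the file pattern is made up):
#
#     from distutils.core import setup
#     from Cython.Build.Dependencies import cythonize
#
#     setup(ext_modules=cythonize("spam/*.pyx"))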

# TODO: Share context? Issue: pyx processing leaks into pxd module
def cythonize_one(pyx_file, c_file, options=None):
    from Cython.Compiler.Main import compile, default_options
    from Cython.Compiler.Errors import CompileError, PyrexError

    if options is None:
        options = CompilationOptions(default_options)
    options.output_file = c_file

    any_failures = 0
    try:
        result = compile([pyx_file], options)
        if result.num_errors > 0:
            any_failures = 1
    except (EnvironmentError, PyrexError), e:
        sys.stderr.write(str(e) + '\n')
        any_failures = 1
    if any_failures:
        raise CompileError(None, pyx_file)

def cythonize_one_helper(m):
    return cythonize_one(*m[1:])