5 These Nodes represent the canonical external objects that people think
6 of when they think of building software: files and directories.
8 This holds a "default_fs" variable that should be initialized with an FS
9 that can be used by scripts or modules looking for the canonical default.
16 # Permission is hereby granted, free of charge, to any person obtaining
17 # a copy of this software and associated documentation files (the
18 # "Software"), to deal in the Software without restriction, including
19 # without limitation the rights to use, copy, modify, merge, publish,
20 # distribute, sublicense, and/or sell copies of the Software, and to
21 # permit persons to whom the Software is furnished to do so, subject to
22 # the following conditions:
24 # The above copyright notice and this permission notice shall be included
25 # in all copies or substantial portions of the Software.
27 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
28 # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
29 # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
30 # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
31 # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
32 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
33 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
35 from __future__ import generators ### KEEP FOR COMPATIBILITY FIXERS
37 __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
39 from itertools import izip
54 # TODO(2.2): Remove when 2.3 becomes the minimal supported version.
57 except AttributeError:
58 codecs.BOM_UTF8 = '\xef\xbb\xbf'
62 except AttributeError:
63 codecs.BOM_UTF16_LE = '\xff\xfe'
64 codecs.BOM_UTF16_BE = '\xfe\xff'
66 # Provide a wrapper function to handle decoding differences in
67 # different versions of Python. Normally, we'd try to do this in the
68 # compat layer (and maybe it still makes sense to move there?) but
69 # that doesn't provide a way to supply the string class used in
70 # pre-2.3 Python versions with a .decode() method that all strings
71 # naturally have. Plus, the 2.[01] encodings behave differently
72 # enough that we have to settle for a lowest-common-denominator
75 # Note that the 2.[012] implementations below may be inefficient
76 # because they perform an explicit look up of the encoding for every
77 # decode, but they're old enough (and we want to stop supporting
78 # them soon enough) that it's not worth complicating the interface.
79 # Think of it as additional incentive for people to upgrade...
82 except AttributeError:
83 # 2.0 through 2.2: strings have no .decode() method
85 codecs.lookup('ascii').decode
86 except AttributeError:
87 # 2.0 and 2.1: encodings are a tuple of functions, and the
88 # decode() function returns a (result, length) tuple.
def my_decode(contents, encoding):
    """Decode contents using the Python 2.0/2.1 codecs API.

    On those versions an encoding entry is a tuple of functions, and
    the decode function returns a (result, length) tuple, so we index
    out the decoder and then the decoded result.
    """
    decoder = codecs.lookup(encoding)[1]
    return decoder(contents)[0]
92 # 2.2: encodings are an object with methods, and the
93 # .decode() method returns just the decoded bytes.
def my_decode(contents, encoding):
    """Decode contents using the Python 2.2 codecs API, where an
    encoding is looked up as an object and its .decode() method is
    called directly."""
    codec = codecs.lookup(encoding)
    return codec.decode(contents)
97 # 2.3 or later: use the .decode() string method
def my_decode(contents, encoding):
    """Decode contents via the string's own .decode() method
    (available on Python 2.3 and later)."""
    decoded = contents.decode(encoding)
    return decoded
102 from SCons.Debug import logInstanceCreation
106 import SCons.Node.Alias
109 import SCons.Warnings
111 from SCons.Debug import Trace
116 class EntryProxyAttributeError(AttributeError):
118 An AttributeError subclass for recording and displaying the name
119 of the underlying Entry involved in an AttributeError exception.
    def __init__(self, entry_proxy, attribute):
        """Record the EntryProxy and the missing attribute name so that
        __str__() can later report which underlying Entry was involved."""
        AttributeError.__init__(self)
        self.entry_proxy = entry_proxy
        self.attribute = attribute
126 entry = self.entry_proxy.get()
127 fmt = "%s instance %s has no attribute %s"
128 return fmt % (entry.__class__.__name__,
130 repr(self.attribute))
132 # The max_drift value: by default, use a cached signature value for
133 # any file that's been untouched for more than two days.
134 default_max_drift = 2*24*60*60
137 # We stringify these file system Nodes a lot. Turning a file system Node
138 # into a string is non-trivial, because the final string representation
139 # can depend on a lot of factors: whether it's a derived target or not,
140 # whether it's linked to a repository or source directory, and whether
141 # there's duplication going on. The normal technique for optimizing
142 # calculations like this is to memoize (cache) the string value, so you
143 # only have to do the calculation once.
145 # A number of the above factors, however, can be set after we've already
146 # been asked to return a string for a Node, because a Repository() or
147 # VariantDir() call or the like may not occur until later in SConscript
148 # files. So this variable controls whether we bother trying to save
149 # string values for Nodes. The wrapper interface can set this whenever
150 # they're done mucking with Repository and VariantDir and the other stuff,
151 # to let this module know it can start returning saved string values
156 def save_strings(val):
161 # Avoid unnecessary function calls by recording a Boolean value that
162 # tells us whether or not os.path.splitdrive() actually does anything
163 # on this system, and therefore whether we need to bother calling it
164 # when looking up path names in various methods below.
169 def initialize_do_splitdrive():
171 drive, path = os.path.splitdrive('X:/foo')
172 do_splitdrive = not not drive
174 initialize_do_splitdrive()
178 needs_normpath_check = None
180 def initialize_normpath_check():
182 Initialize the normpath_check regular expression.
184 This function is used by the unit tests to re-initialize the pattern
185 when testing for behavior with different values of os.sep.
187 global needs_normpath_check
189 pattern = r'.*/|\.$|\.\.$'
191 pattern = r'.*[/%s]|\.$|\.\.$' % re.escape(os.sep)
192 needs_normpath_check = re.compile(pattern)
194 initialize_normpath_check()
197 # SCons.Action objects for interacting with the outside world.
199 # The Node.FS methods in this module should use these actions to
200 # create and/or remove files and directories; they should *not* use
201 # os.{link,symlink,unlink,mkdir}(), etc., directly.
203 # Using these SCons.Action objects ensures that descriptions of these
204 # external activities are properly displayed, that the displays are
205 # suppressed when the -s (silent) option is used, and (most importantly)
# the actions are disabled when the -n option is used, in which case
207 # there should be *no* changes to the external file system(s)...
210 if hasattr(os, 'link'):
211 def _hardlink_func(fs, src, dst):
212 # If the source is a symlink, we can't just hard-link to it
213 # because a relative symlink may point somewhere completely
214 # different. We must disambiguate the symlink and then
215 # hard-link the final destination file.
216 while fs.islink(src):
217 link = fs.readlink(src)
218 if not os.path.isabs(link):
221 src = os.path.join(os.path.dirname(src), link)
224 _hardlink_func = None
226 if hasattr(os, 'symlink'):
227 def _softlink_func(fs, src, dst):
230 _softlink_func = None
232 def _copy_func(fs, src, dest):
233 shutil.copy2(src, dest)
235 fs.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
238 Valid_Duplicates = ['hard-soft-copy', 'soft-hard-copy',
239 'hard-copy', 'soft-copy', 'copy']
241 Link_Funcs = [] # contains the callables of the specified duplication style
243 def set_duplicate(duplicate):
244 # Fill in the Link_Funcs list according to the argument
245 # (discarding those not available on the platform).
247 # Set up the dictionary that maps the argument names to the
248 # underlying implementations. We do this inside this function,
249 # not in the top-level module code, so that we can remap os.link
250 # and os.symlink for testing purposes.
252 'hard' : _hardlink_func,
253 'soft' : _softlink_func,
257 if not duplicate in Valid_Duplicates:
258 raise SCons.Errors.InternalError, ("The argument of set_duplicate "
259 "should be in Valid_Duplicates")
262 for func in duplicate.split('-'):
264 Link_Funcs.append(link_dict[func])
266 def LinkFunc(target, source, env):
267 # Relative paths cause problems with symbolic links, so
268 # we use absolute paths, which may be a problem for people
269 # who want to move their soft-linked src-trees around. Those
270 # people should use the 'hard-copy' mode, softlinks cannot be
271 # used for that; at least I have no idea how ...
272 src = source[0].abspath
273 dest = target[0].abspath
274 dir, file = os.path.split(dest)
275 if dir and not target[0].fs.isdir(dir):
278 # Set a default order of link functions.
279 set_duplicate('hard-soft-copy')
281 # Now link the files with the previously specified order.
282 for func in Link_Funcs:
286 except (IOError, OSError):
287 # An OSError indicates something happened like a permissions
288 # problem or an attempt to symlink across file-system
289 # boundaries. An IOError indicates something like the file
290 # not existing. In either case, keeping trying additional
291 # functions in the list and only raise an error if the last
293 if func == Link_Funcs[-1]:
294 # exception of the last link method (copy) are fatal
298 Link = SCons.Action.Action(LinkFunc, None)
def LocalString(target, source, env):
    """Return the message displayed when a local copy of source[0]
    is made as target[0] (the LocalCopy action's string function)."""
    tgt, src = target[0], source[0]
    return 'Local copy of %s from %s' % (tgt, src)
302 LocalCopy = SCons.Action.Action(LinkFunc, LocalString)
304 def UnlinkFunc(target, source, env):
306 t.fs.unlink(t.abspath)
309 Unlink = SCons.Action.Action(UnlinkFunc, None)
311 def MkdirFunc(target, source, env):
314 t.fs.mkdir(t.abspath)
317 Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None)
321 def get_MkdirBuilder():
323 if MkdirBuilder is None:
325 import SCons.Defaults
326 # "env" will get filled in by Executor.get_build_env()
327 # calling SCons.Defaults.DefaultEnvironment() when necessary.
328 MkdirBuilder = SCons.Builder.Builder(action = Mkdir,
332 target_scanner = SCons.Defaults.DirEntryScanner,
333 name = "MkdirBuilder")
341 DefaultSCCSBuilder = None
342 DefaultRCSBuilder = None
344 def get_DefaultSCCSBuilder():
345 global DefaultSCCSBuilder
346 if DefaultSCCSBuilder is None:
348 # "env" will get filled in by Executor.get_build_env()
349 # calling SCons.Defaults.DefaultEnvironment() when necessary.
350 act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR')
351 DefaultSCCSBuilder = SCons.Builder.Builder(action = act,
353 name = "DefaultSCCSBuilder")
354 return DefaultSCCSBuilder
356 def get_DefaultRCSBuilder():
357 global DefaultRCSBuilder
358 if DefaultRCSBuilder is None:
360 # "env" will get filled in by Executor.get_build_env()
361 # calling SCons.Defaults.DefaultEnvironment() when necessary.
362 act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR')
363 DefaultRCSBuilder = SCons.Builder.Builder(action = act,
365 name = "DefaultRCSBuilder")
366 return DefaultRCSBuilder
368 # Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem.
369 _is_cygwin = sys.platform == "cygwin"
370 if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin:
380 def __init__(self, type, do, ignore):
386 self.__call__ = self.do
    def set_ignore(self):
        """Switch this checker over to its no-op 'ignore' implementation."""
        self.__call__ = self.ignore
390 if self.type in list:
395 def do_diskcheck_match(node, predicate, errorfmt):
398 # If calling the predicate() cached a None value from stat(),
399 # remove it so it doesn't interfere with later attempts to
400 # build this Node as we walk the DAG. (This isn't a great way
401 # to do this, we're reaching into an interface that doesn't
402 # really belong to us, but it's all about performance, so
403 # for now we'll just document the dependency...)
404 if node._memo['stat'] is None:
405 del node._memo['stat']
406 except (AttributeError, KeyError):
409 raise TypeError, errorfmt % node.abspath
411 def ignore_diskcheck_match(node, predicate, errorfmt):
414 def do_diskcheck_rcs(node, name):
416 rcs_dir = node.rcs_dir
417 except AttributeError:
418 if node.entry_exists_on_disk('RCS'):
419 rcs_dir = node.Dir('RCS')
422 node.rcs_dir = rcs_dir
424 return rcs_dir.entry_exists_on_disk(name+',v')
427 def ignore_diskcheck_rcs(node, name):
430 def do_diskcheck_sccs(node, name):
432 sccs_dir = node.sccs_dir
433 except AttributeError:
434 if node.entry_exists_on_disk('SCCS'):
435 sccs_dir = node.Dir('SCCS')
438 node.sccs_dir = sccs_dir
440 return sccs_dir.entry_exists_on_disk('s.'+name)
443 def ignore_diskcheck_sccs(node, name):
446 diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match)
447 diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs)
448 diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs)
456 def set_diskcheck(list):
457 for dc in diskcheckers:
def diskcheck_types():
    """Return the type names of all registered disk checkers."""
    return [checker.type for checker in diskcheckers]
465 class EntryProxy(SCons.Util.Proxy):
466 def __get_abspath(self):
468 return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(),
469 entry.name + "_abspath")
471 def __get_filebase(self):
472 name = self.get().name
473 return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0],
476 def __get_suffix(self):
477 name = self.get().name
478 return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1],
481 def __get_file(self):
482 name = self.get().name
483 return SCons.Subst.SpecialAttrWrapper(name, name + "_file")
485 def __get_base_path(self):
486 """Return the file's directory and file name, with the
489 return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0],
490 entry.name + "_base")
492 def __get_posix_path(self):
493 """Return the path with / as the path separator,
494 regardless of platform."""
499 r = entry.get_path().replace(os.sep, '/')
500 return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix")
502 def __get_windows_path(self):
503 """Return the path with \ as the path separator,
504 regardless of platform."""
509 r = entry.get_path().replace(os.sep, '\\')
510 return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_windows")
    def __get_srcnode(self):
        """Return an EntryProxy wrapping this entry's source node."""
        return EntryProxy(self.get().srcnode())
515 def __get_srcdir(self):
516 """Returns the directory containing the source node linked to this
517 node via VariantDir(), or the directory of this node if not linked."""
518 return EntryProxy(self.get().srcnode().dir)
    def __get_rsrcnode(self):
        """Return an EntryProxy wrapping the repository file of this
        entry's source node."""
        return EntryProxy(self.get().srcnode().rfile())
523 def __get_rsrcdir(self):
524 """Returns the directory containing the source node linked to this
525 node via VariantDir(), or the directory of this node if not linked."""
526 return EntryProxy(self.get().srcnode().rfile().dir)
529 return EntryProxy(self.get().dir)
531 dictSpecialAttrs = { "base" : __get_base_path,
532 "posix" : __get_posix_path,
533 "windows" : __get_windows_path,
534 "win32" : __get_windows_path,
535 "srcpath" : __get_srcnode,
536 "srcdir" : __get_srcdir,
538 "abspath" : __get_abspath,
539 "filebase" : __get_filebase,
540 "suffix" : __get_suffix,
542 "rsrcpath" : __get_rsrcnode,
543 "rsrcdir" : __get_rsrcdir,
546 def __getattr__(self, name):
547 # This is how we implement the "special" attributes
548 # such as base, posix, srcdir, etc.
550 attr_function = self.dictSpecialAttrs[name]
553 attr = SCons.Util.Proxy.__getattr__(self, name)
554 except AttributeError, e:
555 # Raise our own AttributeError subclass with an
556 # overridden __str__() method that identifies the
557 # name of the entry that caused the exception.
558 raise EntryProxyAttributeError(self, name)
561 return attr_function(self)
563 class Base(SCons.Node.Node):
564 """A generic class for file system entries. This class is for
565 when we don't know yet whether the entry being looked up is a file
566 or a directory. Instances of this class can morph into either
567 Dir or File objects by a later, more precise lookup.
569 Note: this class does not define __cmp__ and __hash__ for
570 efficiency reasons. SCons does a lot of comparing of
571 Node.FS.{Base,Entry,File,Dir} objects, so those operations must be
572 as fast as possible, which means we want to use Python's built-in
573 object identity comparisons.
576 memoizer_counters = []
578 def __init__(self, name, directory, fs):
579 """Initialize a generic Node.FS.Base object.
581 Call the superclass initialization, take care of setting up
582 our relative and absolute paths, identify our parent
583 directory, and indicate that this node should use
585 if __debug__: logInstanceCreation(self, 'Node.FS.Base')
586 SCons.Node.Node.__init__(self)
588 # Filenames and paths are probably reused and are intern'ed to
590 self.name = SCons.Util.silent_intern(name)
591 self.suffix = SCons.Util.silent_intern(SCons.Util.splitext(name)[1])
594 assert directory, "A directory must be provided"
596 self.abspath = SCons.Util.silent_intern(directory.entry_abspath(name))
597 self.labspath = SCons.Util.silent_intern(directory.entry_labspath(name))
598 if directory.path == '.':
599 self.path = SCons.Util.silent_intern(name)
601 self.path = SCons.Util.silent_intern(directory.entry_path(name))
602 if directory.tpath == '.':
603 self.tpath = SCons.Util.silent_intern(name)
605 self.tpath = SCons.Util.silent_intern(directory.entry_tpath(name))
606 self.path_elements = directory.path_elements + [self]
609 self.cwd = None # will hold the SConscript directory for target nodes
610 self.duplicate = directory.duplicate
612 def str_for_display(self):
613 return '"' + self.__str__() + '"'
615 def must_be_same(self, klass):
617 This node, which already existed, is being looked up as the
618 specified klass. Raise an exception if it isn't.
620 if isinstance(self, klass) or klass is Entry:
622 raise TypeError, "Tried to lookup %s '%s' as a %s." %\
623 (self.__class__.__name__, self.path, klass.__name__)
628 def get_suffix(self):
635 """A Node.FS.Base object's string representation is its path
639 return self._save_str()
640 return self._get_str()
642 memoizer_counters.append(SCons.Memoize.CountValue('_save_str'))
646 return self._memo['_save_str']
649 result = sys.intern(self._get_str())
650 self._memo['_save_str'] = result
655 if self.duplicate or self.is_derived():
656 return self.get_path()
657 srcnode = self.srcnode()
658 if srcnode.stat() is None and self.stat() is not None:
659 result = self.get_path()
661 result = srcnode.get_path()
# We're not at the point where we're saving the string
664 # representations of FS Nodes (because we haven't finished
665 # reading the SConscript files and need to have str() return
666 # things relative to them). That also means we can't yet
667 # cache values returned (or not returned) by stat(), since
668 # Python code in the SConscript files might still create
669 # or otherwise affect the on-disk file. So get rid of the
670 # values that the underlying stat() method saved.
671 try: del self._memo['stat']
672 except KeyError: pass
673 if self is not srcnode:
674 try: del srcnode._memo['stat']
675 except KeyError: pass
680 memoizer_counters.append(SCons.Memoize.CountValue('stat'))
683 try: return self._memo['stat']
684 except KeyError: pass
685 try: result = self.fs.stat(self.abspath)
686 except os.error: result = None
687 self._memo['stat'] = result
691 return self.stat() is not None
694 return self.rfile().exists()
698 if st: return st[stat.ST_MTIME]
703 if st: return st[stat.ST_SIZE]
708 return st is not None and stat.S_ISDIR(st[stat.ST_MODE])
712 return st is not None and stat.S_ISREG(st[stat.ST_MODE])
714 if hasattr(os, 'symlink'):
716 try: st = self.fs.lstat(self.abspath)
717 except os.error: return 0
718 return stat.S_ISLNK(st[stat.ST_MODE])
721 return 0 # no symlinks
723 def is_under(self, dir):
727 return self.dir.is_under(dir)
733 """If this node is in a build path, return the node
734 corresponding to its source file. Otherwise, return
737 srcdir_list = self.dir.srcdir_list()
739 srcnode = srcdir_list[0].Entry(self.name)
740 srcnode.must_be_same(self.__class__)
744 def get_path(self, dir=None):
745 """Return path relative to the current working directory of the
746 Node.FS.Base object that owns us."""
748 dir = self.fs.getcwd()
751 path_elems = self.path_elements
752 try: i = path_elems.index(dir)
753 except ValueError: pass
754 else: path_elems = path_elems[i+1:]
755 path_elems = [n.name for n in path_elems]
756 return os.sep.join(path_elems)
    def set_src_builder(self, builder):
        """Set the source code builder for this node."""
        self.sbuilder = builder
        if not self.has_builder():
            # If no regular builder has been set yet, the source
            # builder also serves as the node's builder.
            self.builder_set(builder)
764 def src_builder(self):
765 """Fetch the source code builder for this node.
767 If there isn't one, we cache the source code builder specified
768 for the directory (which in turn will cache the value from its
769 parent directory, and so on up to the file system root).
773 except AttributeError:
774 scb = self.dir.src_builder()
778 def get_abspath(self):
779 """Get the absolute path of the file."""
782 def for_signature(self):
783 # Return just our name. Even an absolute path would not work,
784 # because that can change thanks to symlinks or remapped network
788 def get_subst_proxy(self):
791 except AttributeError:
792 ret = EntryProxy(self)
796 def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext):
799 Generates a target entry that corresponds to this entry (usually
800 a source file) with the specified prefix and suffix.
802 Note that this method can be overridden dynamically for generated
803 files that need different behavior. See Tool/swig.py for
806 return self.dir.Entry(prefix + splitext(self.name)[0] + suffix)
808 def _Rfindalldirs_key(self, pathlist):
811 memoizer_counters.append(SCons.Memoize.CountDict('Rfindalldirs', _Rfindalldirs_key))
813 def Rfindalldirs(self, pathlist):
815 Return all of the directories for a given path list, including
816 corresponding "backing" directories in any repositories.
818 The Node lookups are relative to this Node (typically a
819 directory), so memoizing result saves cycles from looking
820 up the same path for each target in a given directory.
823 memo_dict = self._memo['Rfindalldirs']
826 self._memo['Rfindalldirs'] = memo_dict
829 return memo_dict[pathlist]
833 create_dir_relative_to_self = self.Dir
835 for path in pathlist:
836 if isinstance(path, SCons.Node.Node):
839 dir = create_dir_relative_to_self(path)
840 result.extend(dir.get_all_rdirs())
842 memo_dict[pathlist] = result
846 def RDirs(self, pathlist):
847 """Search for a list of directories in the Repository list."""
848 cwd = self.cwd or self.fs._cwd
849 return cwd.Rfindalldirs(pathlist)
851 memoizer_counters.append(SCons.Memoize.CountValue('rentry'))
855 return self._memo['rentry']
859 if not self.exists():
860 norm_name = _my_normcase(self.name)
861 for dir in self.dir.get_all_rdirs():
863 node = dir.entries[norm_name]
865 if dir.entry_exists_on_disk(self.name):
866 result = dir.Entry(self.name)
868 self._memo['rentry'] = result
871 def _glob1(self, pattern, ondisk=True, source=False, strings=False):
875 """This is the class for generic Node.FS entries--that is, things
876 that could be a File or a Dir, but we're just not sure yet.
877 Consequently, the methods in this class really exist just to
878 transform their associated object into the right class when the
879 time comes, and then call the same-named method in the transformed
882 def diskcheck_match(self):
885 def disambiguate(self, must_exist=None):
892 self.__class__ = File
896 # There was nothing on-disk at this location, so look in
899 # We can't just use self.srcnode() straight away because
900 # that would create an actual Node for this file in the src
901 # directory, and there might not be one. Instead, use the
902 # dir_on_disk() method to see if there's something on-disk
903 # with that name, in which case we can go ahead and call
904 # self.srcnode() to create the right type of entry.
905 srcdir = self.dir.srcnode()
906 if srcdir != self.dir and \
907 srcdir.entry_exists_on_disk(self.name) and \
908 self.srcnode().isdir():
912 msg = "No such file or directory: '%s'" % self.abspath
913 raise SCons.Errors.UserError, msg
915 self.__class__ = File
921 """We're a generic Entry, but the caller is actually looking for
922 a File at this point, so morph into one."""
923 self.__class__ = File
926 return File.rfile(self)
    def scanner_key(self):
        """Return the key used to select a scanner: this entry's suffix."""
        return self.get_suffix()
931 def get_contents(self):
932 """Fetch the contents of the entry. Returns the exact binary
933 contents of the file."""
935 self = self.disambiguate(must_exist=1)
936 except SCons.Errors.UserError:
937 # There was nothing on disk with which to disambiguate
938 # this entry. Leave it as an Entry, but return a null
939 # string so calls to get_contents() in emitters and the
940 # like (e.g. in qt.py) don't have to disambiguate by hand
941 # or catch the exception.
944 return self.get_contents()
946 def get_text_contents(self):
947 """Fetch the decoded text contents of a Unicode encoded Entry.
949 Since this should return the text contents from the file
950 system, we check to see into what sort of subclass we should
953 self = self.disambiguate(must_exist=1)
954 except SCons.Errors.UserError:
955 # There was nothing on disk with which to disambiguate
956 # this entry. Leave it as an Entry, but return a null
957 # string so calls to get_text_contents() in emitters and
958 # the like (e.g. in qt.py) don't have to disambiguate by
959 # hand or catch the exception.
962 return self.get_text_contents()
964 def must_be_same(self, klass):
965 """Called to make sure a Node is a Dir. Since we're an
966 Entry, we can morph into one."""
967 if self.__class__ is not klass:
968 self.__class__ = klass
972 # The following methods can get called before the Taskmaster has
973 # had a chance to call disambiguate() directly to see if this Entry
974 # should really be a Dir or a File. We therefore use these to call
975 # disambiguate() transparently (from our caller's point of view).
977 # Right now, this minimal set of methods has been derived by just
978 # looking at some of the methods that will obviously be called early
979 # in any of the various Taskmasters' calling sequences, and then
980 # empirically figuring out which additional methods are necessary
981 # to make various tests pass.
984 """Return if the Entry exists. Check the file system to see
985 what we should turn into first. Assume a file if there's no
987 return self.disambiguate().exists()
989 def rel_path(self, other):
990 d = self.disambiguate()
991 if d.__class__ is Entry:
992 raise "rel_path() could not disambiguate File/Dir"
993 return d.rel_path(other)
996 return self.disambiguate().new_ninfo()
    def changed_since_last_build(self, target, prev_ni):
        """Disambiguate into a File or Dir, then delegate the change check."""
        return self.disambiguate().changed_since_last_build(target, prev_ni)
    def _glob1(self, pattern, ondisk=True, source=False, strings=False):
        """Disambiguate into a File or Dir, then perform the glob there."""
        return self.disambiguate()._glob1(pattern, ondisk, source, strings)
    def get_subst_proxy(self):
        """Disambiguate into a File or Dir, then return its substitution proxy."""
        return self.disambiguate().get_subst_proxy()
1007 # This is for later so we can differentiate between Entry the class and Entry
1008 # the method of the FS class.
1014 if SCons.Memoize.use_memoizer:
1015 __metaclass__ = SCons.Memoize.Memoized_Metaclass
1017 # This class implements an abstraction layer for operations involving
1018 # a local file system. Essentially, this wraps any function in
1019 # the os, os.path or shutil modules that we use to actually go do
1020 # anything with or to the local file system.
1022 # Note that there's a very good chance we'll refactor this part of
1023 # the architecture in some way as we really implement the interface(s)
1024 # for remote file system Nodes. For example, the right architecture
1025 # might be to have this be a subclass instead of a base class.
1026 # Nevertheless, we're using this as a first step in that direction.
1028 # We're not using chdir() yet because the calling subclass method
1029 # needs to use os.chdir() directly to avoid recursion. Will we
1030 # really need this one?
1031 #def chdir(self, path):
1032 # return os.chdir(path)
    def chmod(self, path, mode):
        """Change the permission bits of path; delegates to os.chmod()."""
        return os.chmod(path, mode)
    def copy(self, src, dst):
        """Copy the file data (not metadata); delegates to shutil.copy()."""
        return shutil.copy(src, dst)
    def copy2(self, src, dst):
        """Copy the file data and metadata; delegates to shutil.copy2()."""
        return shutil.copy2(src, dst)
    def exists(self, path):
        """Return whether path exists; delegates to os.path.exists()."""
        return os.path.exists(path)
    def getmtime(self, path):
        """Return path's modification time; delegates to os.path.getmtime()."""
        return os.path.getmtime(path)
    def getsize(self, path):
        """Return path's size in bytes; delegates to os.path.getsize()."""
        return os.path.getsize(path)
    def isdir(self, path):
        """Return whether path is a directory; delegates to os.path.isdir()."""
        return os.path.isdir(path)
    def isfile(self, path):
        """Return whether path is a regular file; delegates to os.path.isfile()."""
        return os.path.isfile(path)
    def link(self, src, dst):
        """Create a hard link dst pointing at src; delegates to os.link()."""
        return os.link(src, dst)
    def lstat(self, path):
        """Stat path without following symlinks; delegates to os.lstat()."""
        return os.lstat(path)
    def listdir(self, path):
        """Return the names of the entries in path; delegates to os.listdir()."""
        return os.listdir(path)
    def makedirs(self, path):
        """Create path including intermediate directories; delegates to os.makedirs()."""
        return os.makedirs(path)
    def mkdir(self, path):
        """Create a single directory; delegates to os.mkdir()."""
        return os.mkdir(path)
    def rename(self, old, new):
        """Rename old to new; delegates to os.rename()."""
        return os.rename(old, new)
    def stat(self, path):
        """Stat path, following symlinks; delegates to os.stat()."""
        return os.stat(path)
    def symlink(self, src, dst):
        """Create a symbolic link dst pointing at src; delegates to os.symlink()."""
        return os.symlink(src, dst)
1065 def open(self, path):
    def unlink(self, path):
        """Remove the file at path; delegates to os.unlink()."""
        return os.unlink(path)
1070 if hasattr(os, 'symlink'):
    def islink(self, path):
        """Return whether path is a symlink; delegates to os.path.islink()."""
        return os.path.islink(path)
    def islink(self, path):
        # Fallback for platforms whose os module lacks symlink():
        # nothing can be a symbolic link there.
        return 0 # no symlinks
1077 if hasattr(os, 'readlink'):
    def readlink(self, file):
        """Return the target of symbolic link file; delegates to os.readlink()."""
        return os.readlink(file)
1081 def readlink(self, file):
1086 # # Skeleton for the obvious methods we might need from the
1087 # # abstraction layer for a remote filesystem.
1088 # def upload(self, local_src, remote_dst):
1090 # def download(self, remote_src, local_dst):
1096 memoizer_counters = []
1098 def __init__(self, path = None):
1099 """Initialize the Node.FS subsystem.
1101 The supplied path is the top of the source tree, where we
1102 expect to find the top-level build file. If no path is
1103 supplied, the current directory is the default.
1105 The path argument must be a valid absolute path.
1107 if __debug__: logInstanceCreation(self, 'Node.FS')
1112 self.SConstruct_dir = None
1113 self.max_drift = default_max_drift
1117 self.pathTop = os.getcwd()
1120 self.defaultDrive = _my_normcase(os.path.splitdrive(self.pathTop)[0])
1122 self.Top = self.Dir(self.pathTop)
1124 self.Top.tpath = '.'
1125 self._cwd = self.Top
1127 DirNodeInfo.fs = self
1128 FileNodeInfo.fs = self
    def set_SConstruct_dir(self, dir):
        """Record the directory containing the top-level SConstruct file."""
        self.SConstruct_dir = dir
    def get_max_drift(self):
        """Return the current max_drift value (seconds a file may be
        untouched before its cached signature is trusted)."""
        return self.max_drift
    def set_max_drift(self, max_drift):
        """Set the max_drift value used when deciding whether to trust
        cached file signatures."""
        self.max_drift = max_drift
1142 def chdir(self, dir, change_os_dir=0):
1143 """Change the current working directory for lookups.
1144 If change_os_dir is true, we will also change the "real" cwd
1152 os.chdir(dir.abspath)
1157 def get_root(self, drive):
1159 Returns the root directory for the specified drive, creating
1162 drive = _my_normcase(drive)
1164 return self.Root[drive]
1166 root = RootDir(drive, self)
1167 self.Root[drive] = root
1169 self.Root[self.defaultDrive] = root
1170 elif drive == self.defaultDrive:
1171 self.Root[''] = root
1174 def _lookup(self, p, directory, fsclass, create=1):
1176 The generic entry point for Node lookup with user-supplied data.
1178 This translates arbitrary input into a canonical Node.FS object
1179 of the specified fsclass. The general approach for strings is
1180 to turn it into a fully normalized absolute path and then call
1181 the root directory's lookup_abs() method for the heavy lifting.
1183 If the path name begins with '#', it is unconditionally
1184 interpreted relative to the top-level directory of this FS. '#'
1185 is treated as a synonym for the top-level SConstruct directory,
1186 much like '~' is treated as a synonym for the user's home
1187 directory in a UNIX shell. So both '#foo' and '#/foo' refer
1188 to the 'foo' subdirectory underneath the top-level SConstruct
1191 If the path name is relative, then the path is looked up relative
1192 to the specified directory, or the current directory (self._cwd,
1193 typically the SConscript directory) if the specified directory
1196 if isinstance(p, Base):
1197 # It's already a Node.FS object. Make sure it's the right
1199 p.must_be_same(fsclass)
1201 # str(p) in case it's something like a proxy object
1204 initial_hash = (p[0:1] == '#')
1206 # There was an initial '#', so we strip it and override
1207 # whatever directory they may have specified with the
1208 # top-level SConstruct directory.
1210 directory = self.Top
1212 if directory and not isinstance(directory, Dir):
1213 directory = self.Dir(directory)
1216 drive, p = os.path.splitdrive(p)
1220 # This causes a naked drive letter to be treated as a synonym
1221 # for the root directory on that drive.
1223 absolute = os.path.isabs(p)
1225 needs_normpath = needs_normpath_check.match(p)
1227 if initial_hash or not absolute:
1228 # This is a relative lookup, either to the top-level
1229 # SConstruct directory (because of the initial '#') or to
1230 # the current directory (the path name is not absolute).
1231 # Add the string to the appropriate directory lookup path,
1232 # after which the whole thing gets normalized.
1234 directory = self._cwd
1236 p = directory.labspath + '/' + p
1238 p = directory.labspath
1241 p = os.path.normpath(p)
1243 if drive or absolute:
1244 root = self.get_root(drive)
1247 directory = self._cwd
1248 root = directory.root
1251 p = p.replace(os.sep, '/')
1252 return root._lookup_abs(p, fsclass, create)
1254 def Entry(self, name, directory = None, create = 1):
1255 """Look up or create a generic Entry node with the specified name.
1256 If the name is a relative path (begins with ./, ../, or a file
1257 name), then it is looked up relative to the supplied directory
1258 node, or to the top level directory of the FS (supplied at
1259 construction time) if no directory is supplied.
1261 return self._lookup(name, directory, Entry, create)
1263 def File(self, name, directory = None, create = 1):
1264 """Look up or create a File node with the specified name. If
1265 the name is a relative path (begins with ./, ../, or a file name),
1266 then it is looked up relative to the supplied directory node,
1267 or to the top level directory of the FS (supplied at construction
1268 time) if no directory is supplied.
1270 This method will raise TypeError if a directory is found at the
1273 return self._lookup(name, directory, File, create)
1275 def Dir(self, name, directory = None, create = True):
1276 """Look up or create a Dir node with the specified name. If
1277 the name is a relative path (begins with ./, ../, or a file name),
1278 then it is looked up relative to the supplied directory node,
1279 or to the top level directory of the FS (supplied at construction
1280 time) if no directory is supplied.
1282 This method will raise TypeError if a normal file is found at the
1285 return self._lookup(name, directory, Dir, create)
1287 def VariantDir(self, variant_dir, src_dir, duplicate=1):
1288 """Link the supplied variant directory to the source directory
1289 for purposes of building files."""
# Accept either Node objects or strings; coerce strings to Dir nodes.
1291 if not isinstance(src_dir, SCons.Node.Node):
1292 src_dir = self.Dir(src_dir)
1293 if not isinstance(variant_dir, SCons.Node.Node):
1294 variant_dir = self.Dir(variant_dir)
# A source directory nested inside its own variant directory would
# make variant lookups reflect into themselves, so reject it outright.
1295 if src_dir.is_under(variant_dir):
1296 raise SCons.Errors.UserError, "Source directory cannot be under variant directory."
1297 if variant_dir.srcdir:
# Calling VariantDir() twice with the same pair is a harmless no-op...
1298 if variant_dir.srcdir == src_dir:
1299 return # We already did this.
# ...but re-linking to a *different* source directory is an error.
1300 raise SCons.Errors.UserError, "'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir)
# Dir.link() records srcdir/duplicate on the variant dir and registers
# the variant dir with its source directory.
1301 variant_dir.link(src_dir, duplicate)
1303 def Repository(self, *dirs):
1304 """Specify Repository directories to search."""
1306 if not isinstance(d, SCons.Node.Node):
1308 self.Top.addRepository(d)
1310 def variant_dir_target_climb(self, orig, dir, tail):
1311 """Create targets in corresponding variant directories
1313 Climb the directory tree, and look up path names
1314 relative to any linked variant directories we find.
1316 Even though this loops and walks up the tree, we don't memoize
1317 the return value because this is really only used to process
1318 the command-line targets.
1322 fmt = "building associated VariantDir targets: %s"
1325 for bd in dir.variant_dirs:
1326 if start_dir.is_under(bd):
1327 # If already in the build-dir location, don't reflect
1328 return [orig], fmt % str(orig)
1329 p = os.path.join(bd.path, *tail)
1330 targets.append(self.Entry(p))
1331 tail = [dir.name] + tail
1334 message = fmt % ' '.join(map(str, targets))
1335 return targets, message
1337 def Glob(self, pathname, ondisk=True, source=True, strings=False, cwd=None):
1341 This is mainly a shim layer
1345 return cwd.glob(pathname, ondisk, source, strings)
1347 class DirNodeInfo(SCons.Node.NodeInfoBase):
1348 # This should get reset by the FS initialization.
1349 current_version_id = 1
1353 def str_to_node(self, s):
1357 drive, s = os.path.splitdrive(s)
1359 root = self.fs.get_root(drive)
1360 if not os.path.isabs(s):
1361 s = top.labspath + '/' + s
1362 return root._lookup_abs(s, Entry)
1364 class DirBuildInfo(SCons.Node.BuildInfoBase):
1365 current_version_id = 1
glob_magic_check = re.compile('[*?[]')

def has_glob_magic(s):
    """Return True if string s contains any glob wildcard
    character (*, ? or an opening bracket)."""
    found = glob_magic_check.search(s)
    return found is not None
1373 """A class for directories in a file system.
1376 memoizer_counters = []
1378 NodeInfo = DirNodeInfo
1379 BuildInfo = DirBuildInfo
1381 def __init__(self, name, directory, fs):
1382 if __debug__: logInstanceCreation(self, 'Node.FS.Dir')
1383 Base.__init__(self, name, directory, fs)
1387 """Turn a file system Node (either a freshly initialized directory
1388 object or a separate Entry object) into a proper directory object.
1390 Set up this directory's entries and hook it into the file
1391 system tree. Specify that directories (this Node) don't use
1392 signatures for calculating whether they're current.
1395 self.repositories = []
1399 self.entries['.'] = self
1400 self.entries['..'] = self.dir
1403 self._sconsign = None
1404 self.variant_dirs = []
1405 self.root = self.dir.root
1407 # Don't just reset the executor, replace its action list,
1408 # because it might have some pre-or post-actions that need to
1410 self.builder = get_MkdirBuilder()
1411 self.get_executor().set_action_list(self.builder.action)
1413 def diskcheck_match(self):
    """Run the configured on-disk consistency check for this Dir:
    complain if a regular file occupies this Node's path."""
# The method name shadows the module-level diskcheck_match() function
# only as a class attribute; the call below still resolves to the
# global helper (methods are not on the name-lookup path).
1414 diskcheck_match(self, self.isfile,
1415 "File %s found where directory expected.")
1417 def __clearRepositoryCache(self, duplicate=None):
1418 """Called when we change the repository(ies) for a directory.
1419 This clears any cached information that is invalidated by changing
1422 for node in self.entries.values():
1423 if node != self.dir:
1424 if node != self and isinstance(node, Dir):
1425 node.__clearRepositoryCache(duplicate)
1430 except AttributeError:
1432 if duplicate is not None:
1433 node.duplicate=duplicate
1435 def __resetDuplicate(self, node):
1437 node.duplicate = node.get_dir().duplicate
1439 def Entry(self, name):
1441 Looks up or creates an entry node named 'name' relative to
1444 return self.fs.Entry(name, self)
1446 def Dir(self, name, create=True):
1448 Looks up or creates a directory node named 'name' relative to
1451 return self.fs.Dir(name, self, create)
1453 def File(self, name):
1455 Looks up or creates a file node named 'name' relative to
1458 return self.fs.File(name, self)
1460 def _lookup_rel(self, name, klass, create=1):
1462 Looks up a *normalized* relative path name, relative to this
1465 This method is intended for use by internal lookups with
1466 already-normalized path data. For general-purpose lookups,
1467 use the Entry(), Dir() and File() methods above.
1469 This method does *no* input checking and will die or give
1470 incorrect results if it's passed a non-normalized path name (e.g.,
1471 a path containing '..'), an absolute path name, a top-relative
1472 ('#foo') path name, or any kind of object.
1474 name = self.entry_labspath(name)
1475 return self.root._lookup_abs(name, klass, create)
1477 def link(self, srcdir, duplicate):
1478 """Set this directory as the variant directory for the
1479 supplied source directory."""
1480 self.srcdir = srcdir
1481 self.duplicate = duplicate
# srcdir/duplicate changed, so cached repository information for this
# directory tree is now stale; clear it (name-mangled private helper).
1482 self.__clearRepositoryCache(duplicate)
# Register this Dir with the source directory so source-side lookups
# can find the corresponding variant directory.
1483 srcdir.variant_dirs.append(self)
1485 def getRepositories(self):
1486 """Returns a list of repositories for this directory.
1488 if self.srcdir and not self.duplicate:
1489 return self.srcdir.get_all_rdirs() + self.repositories
1490 return self.repositories
1492 memoizer_counters.append(SCons.Memoize.CountValue('get_all_rdirs'))
1494 def get_all_rdirs(self):
1496 return list(self._memo['get_all_rdirs'])
1504 for rep in dir.getRepositories():
1505 result.append(rep.Dir(fname))
1509 fname = dir.name + os.sep + fname
1512 self._memo['get_all_rdirs'] = list(result)
1516 def addRepository(self, dir):
1517 if dir != self and not dir in self.repositories:
1518 self.repositories.append(dir)
1520 self.__clearRepositoryCache()
1523 return self.entries['..']
1525 def _rel_path_key(self, other):
1528 memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key))
1530 def rel_path(self, other):
1531 """Return a path to "other" relative to this directory.
1534 # This complicated and expensive method, which constructs relative
1535 # paths between arbitrary Node.FS objects, is no longer used
1536 # by SCons itself. It was introduced to store dependency paths
1537 # in .sconsign files relative to the target, but that ended up
1538 # being significantly inefficient.
1540 # We're continuing to support the method because some SConstruct
1541 # files out there started using it when it was available, and
1542 # we're all about backwards compatibility..
1545 memo_dict = self._memo['rel_path']
1548 self._memo['rel_path'] = memo_dict
1551 return memo_dict[other]
1558 elif not other in self.path_elements:
1560 other_dir = other.get_dir()
1561 except AttributeError:
1564 if other_dir is None:
1567 dir_rel_path = self.rel_path(other_dir)
1568 if dir_rel_path == '.':
1571 result = dir_rel_path + os.sep + other.name
1573 i = self.path_elements.index(other) + 1
1575 path_elems = ['..'] * (len(self.path_elements) - i) \
1576 + [n.name for n in other.path_elements[i:]]
1578 result = os.sep.join(path_elems)
1580 memo_dict[other] = result
def get_env_scanner(self, env, kw={}):
    """Return the scanner for this directory in the given construction
    environment: always the default directory-entry scanner.

    The env and kw arguments are accepted for interface compatibility
    but are not consulted.
    """
    import SCons.Defaults as _defaults
    return _defaults.DirEntryScanner
def get_target_scanner(self):
    """Return the scanner used when this directory is a target:
    the default directory-entry scanner."""
    import SCons.Defaults as _defaults
    return _defaults.DirEntryScanner
1592 def get_found_includes(self, env, scanner, path):
1593 """Return this directory's implicit dependencies.
1595 We don't bother caching the results because the scan typically
1596 shouldn't be requested more than once (as opposed to scanning
1597 .h file contents, which can be requested as many times as the
1598 files is #included by other files).
1602 # Clear cached info for this Dir. If we already visited this
1603 # directory on our walk down the tree (because we didn't know at
1604 # that point it was being used as the source for another Node)
1605 # then we may have calculated build signature before realizing
1606 # we had to scan the disk. Now that we have to, though, we need
1607 # to invalidate the old calculated signature so that any node
1608 # dependent on our directory structure gets one that includes
1609 # info about everything on disk.
1611 return scanner(self, env, path)
1614 # Taskmaster interface subsystem
1620 def build(self, **kw):
1621 """A null "builder" for directories."""
1623 if self.builder is not MkdirBuilder:
1624 SCons.Node.Node.build(self, **kw)
1631 """Create this directory, silently and without worrying about
1632 whether the builder is the default or not."""
1638 listDirs.append(parent)
1641 # Don't use while: - else: for this condition because
1642 # if so, then parent is None and has no .path attribute.
1643 raise SCons.Errors.StopError, parent.path
1646 for dirnode in listDirs:
1648 # Don't call dirnode.build(), call the base Node method
1649 # directly because we definitely *must* create this
1650 # directory. The dirnode.build() method will suppress
1651 # the build if it's the default builder.
1652 SCons.Node.Node.build(dirnode)
1653 dirnode.get_executor().nullify()
1654 # The build() action may or may not have actually
1655 # created the directory, depending on whether the -n
1656 # option was used or not. Delete the _exists and
1657 # _rexists attributes so they can be reevaluated.
1662 def multiple_side_effect_has_builder(self):
1664 return self.builder is not MkdirBuilder and self.has_builder()
1666 def alter_targets(self):
1667 """Return any corresponding targets in a variant directory.
1669 return self.fs.variant_dir_target_climb(self, self, [])
1671 def scanner_key(self):
1672 """A directory does not get scanned."""
1675 def get_text_contents(self):
1676 """We already emit things in text, so just return the binary
1678 return self.get_contents()
1680 def get_contents(self):
1681 """Return content signatures and names of all our children
1682 separated by new-lines. Ensure that the nodes are sorted."""
1684 for node in sorted(self.children(), key=lambda t: t.name):
1685 contents.append('%s %s\n' % (node.get_csig(), node.name))
1686 return ''.join(contents)
1689 """Compute the content signature for Directory nodes. In
1690 general, this is not needed and the content signature is not
1691 stored in the DirNodeInfo. However, if get_contents on a Dir
1692 node is called which has a child directory, the child
1693 directory should return the hash of its contents."""
1694 contents = self.get_contents()
1695 return SCons.Util.MD5signature(contents)
1697 def do_duplicate(self, src):
1700 changed_since_last_build = SCons.Node.Node.state_has_changed
1702 def is_up_to_date(self):
1703 """If any child is not up-to-date, then this directory isn't,
1705 if self.builder is not MkdirBuilder and not self.exists():
1707 up_to_date = SCons.Node.up_to_date
1708 for kid in self.children():
1709 if kid.get_state() > up_to_date:
1714 if not self.exists():
1715 norm_name = _my_normcase(self.name)
1716 for dir in self.dir.get_all_rdirs():
1717 try: node = dir.entries[norm_name]
1718 except KeyError: node = dir.dir_on_disk(self.name)
1719 if node and node.exists() and \
1720 (isinstance(dir, Dir) or isinstance(dir, Entry)):
1725 """Return the .sconsign file info for this directory,
1726 creating it first if necessary."""
1727 if not self._sconsign:
1728 import SCons.SConsign
1729 self._sconsign = SCons.SConsign.ForDirectory(self)
1730 return self._sconsign
1733 """Dir has a special need for srcnode()...if we
1734 have a srcdir attribute set, then that *is* our srcnode."""
1737 return Base.srcnode(self)
1739 def get_timestamp(self):
1740 """Return the latest timestamp from among our children"""
1742 for kid in self.children():
1743 if kid.get_timestamp() > stamp:
1744 stamp = kid.get_timestamp()
def entry_abspath(self, name):
    """Return the absolute path of entry 'name' within this directory."""
    return os.sep.join((self.abspath, name))
def entry_labspath(self, name):
    """Return the lookup-abspath ('/'-separated, platform-independent)
    of entry 'name' within this directory."""
    return '/'.join((self.labspath, name))
def entry_path(self, name):
    """Return the (relative) path of entry 'name' within this directory."""
    return os.sep.join((self.path, name))
def entry_tpath(self, name):
    """Return the target-side path (tpath) of entry 'name' within
    this directory."""
    return os.sep.join((self.tpath, name))
1759 def entry_exists_on_disk(self, name):
1761 d = self.on_disk_entries
1762 except AttributeError:
1765 entries = os.listdir(self.abspath)
1769 for entry in map(_my_normcase, entries):
1771 self.on_disk_entries = d
1772 if sys.platform == 'win32':
1773 name = _my_normcase(name)
1774 result = d.get(name)
1776 # Belt-and-suspenders for Windows: check directly for
1777 # 8.3 file names that don't show up in os.listdir().
1778 result = os.path.exists(self.abspath + os.sep + name)
1784 memoizer_counters.append(SCons.Memoize.CountValue('srcdir_list'))
1786 def srcdir_list(self):
1788 return self._memo['srcdir_list']
1798 result.append(dir.srcdir.Dir(dirname))
1799 dirname = dir.name + os.sep + dirname
1802 self._memo['srcdir_list'] = result
1806 def srcdir_duplicate(self, name):
1807 for dir in self.srcdir_list():
1808 if self.is_under(dir):
1809 # We shouldn't source from something in the build path;
1810 # variant_dir is probably under src_dir, in which case
1811 # we are reflecting.
1813 if dir.entry_exists_on_disk(name):
1814 srcnode = dir.Entry(name).disambiguate()
1816 node = self.Entry(name).disambiguate()
1817 node.do_duplicate(srcnode)
1823 def _srcdir_find_file_key(self, filename):
1826 memoizer_counters.append(SCons.Memoize.CountDict('srcdir_find_file', _srcdir_find_file_key))
1828 def srcdir_find_file(self, filename):
1830 memo_dict = self._memo['srcdir_find_file']
1833 self._memo['srcdir_find_file'] = memo_dict
1836 return memo_dict[filename]
1841 if (isinstance(node, File) or isinstance(node, Entry)) and \
1842 (node.is_derived() or node.exists()):
1846 norm_name = _my_normcase(filename)
1848 for rdir in self.get_all_rdirs():
1849 try: node = rdir.entries[norm_name]
1850 except KeyError: node = rdir.file_on_disk(filename)
1851 else: node = func(node)
1853 result = (node, self)
1854 memo_dict[filename] = result
1857 for srcdir in self.srcdir_list():
1858 for rdir in srcdir.get_all_rdirs():
1859 try: node = rdir.entries[norm_name]
1860 except KeyError: node = rdir.file_on_disk(filename)
1861 else: node = func(node)
1863 result = (File(filename, self, self.fs), srcdir)
1864 memo_dict[filename] = result
1867 result = (None, None)
1868 memo_dict[filename] = result
1871 def dir_on_disk(self, name):
1872 if self.entry_exists_on_disk(name):
1873 try: return self.Dir(name)
1874 except TypeError: pass
1875 node = self.srcdir_duplicate(name)
1876 if isinstance(node, File):
1880 def file_on_disk(self, name):
1881 if self.entry_exists_on_disk(name) or \
1882 diskcheck_rcs(self, name) or \
1883 diskcheck_sccs(self, name):
1884 try: return self.File(name)
1885 except TypeError: pass
1886 node = self.srcdir_duplicate(name)
1887 if isinstance(node, Dir):
1891 def walk(self, func, arg):
1893 Walk this directory tree by calling the specified function
1894 for each directory in the tree.
1896 This behaves like the os.path.walk() function, but for in-memory
1897 Node.FS.Dir objects. The function takes the same arguments as
1898 the functions passed to os.path.walk():
1900 func(arg, dirname, fnames)
1902 Except that "dirname" will actually be the directory *Node*,
1903 not the string. The '.' and '..' entries are excluded from
1904 fnames. The fnames list may be modified in-place to filter the
1905 subdirectories visited or otherwise impose a specific order.
1906 The "arg" argument is always passed to func() and may be used
1907 in any way (or ignored, passing None is common).
1909 entries = self.entries
1910 names = entries.keys()
1913 func(arg, self, names)
1914 for dirname in [n for n in names if isinstance(entries[n], Dir)]:
1915 entries[dirname].walk(func, arg)
1917 def glob(self, pathname, ondisk=True, source=False, strings=False):
1919 Returns a list of Nodes (or strings) matching a specified
1922 Pathname patterns follow UNIX shell semantics: * matches
1923 any-length strings of any characters, ? matches any character,
1924 and [] can enclose lists or ranges of characters. Matches do
1925 not span directory separators.
1927 The matches take into account Repositories, returning local
1928 Nodes if a corresponding entry exists in a Repository (either
1929 an in-memory Node or something on disk).
1931 By default, the glob() function matches entries that exist
1932 on-disk, in addition to in-memory Nodes. Setting the "ondisk"
1933 argument to False (or some other non-true value) causes the glob()
1934 function to only match in-memory Nodes. The default behavior is
1935 to return both the on-disk and in-memory Nodes.
1937 The "source" argument, when true, specifies that corresponding
1938 source Nodes must be returned if you're globbing in a build
1939 directory (initialized with VariantDir()). The default behavior
1940 is to return Nodes local to the VariantDir().
1942 The "strings" argument, when true, returns the matches as strings,
1943 not Nodes. The strings are path names relative to this directory.
1945 The underlying algorithm is adapted from the glob.glob() function
1946 in the Python library (but heavily modified), and uses fnmatch()
1949 dirname, basename = os.path.split(pathname)
1951 return sorted(self._glob1(basename, ondisk, source, strings),
1952 key=lambda t: str(t))
1953 if has_glob_magic(dirname):
1954 list = self.glob(dirname, ondisk, source, strings=False)
1956 list = [self.Dir(dirname, create=True)]
1959 r = dir._glob1(basename, ondisk, source, strings)
1961 r = [os.path.join(str(dir), x) for x in r]
1963 result.sort(lambda a, b: cmp(str(a), str(b)))
1966 def _glob1(self, pattern, ondisk=True, source=False, strings=False):
1968 Globs for and returns a list of entry names matching a single
1969 pattern in this directory.
1971 This searches any repositories and source directories for
1972 corresponding entries and returns a Node (or string) relative
1973 to the current directory if an entry is found anywhere.
1975 TODO: handle pattern with no wildcard
1977 search_dir_list = self.get_all_rdirs()
1978 for srcdir in self.srcdir_list():
1979 search_dir_list.extend(srcdir.get_all_rdirs())
1981 selfEntry = self.Entry
1983 for dir in search_dir_list:
1984 # We use the .name attribute from the Node because the keys of
1985 # the dir.entries dictionary are normalized (that is, all upper
1986 # case) on case-insensitive systems like Windows.
1987 node_names = [ v.name for k, v in dir.entries.items()
1988 if k not in ('.', '..') ]
1989 names.extend(node_names)
1991 # Make sure the working directory (self) actually has
1992 # entries for all Nodes in repositories or variant dirs.
1993 for name in node_names: selfEntry(name)
1996 disk_names = os.listdir(dir.abspath)
1999 names.extend(disk_names)
2001 # We're going to return corresponding Nodes in
2002 # the local directory, so we need to make sure
2003 # those Nodes exist. We only want to create
2004 # Nodes for the entries that will match the
2005 # specified pattern, though, which means we
2006 # need to filter the list here, even though
2007 # the overall list will also be filtered later,
2008 # after we exit this loop.
2009 if pattern[0] != '.':
2010 #disk_names = [ d for d in disk_names if d[0] != '.' ]
2011 disk_names = [x for x in disk_names if x[0] != '.']
2012 disk_names = fnmatch.filter(disk_names, pattern)
2013 dirEntry = dir.Entry
2014 for name in disk_names:
2015 # Add './' before disk filename so that '#' at
2016 # beginning of filename isn't interpreted.
2018 node = dirEntry(name).disambiguate()
2020 if n.__class__ != node.__class__:
2021 n.__class__ = node.__class__
2025 if pattern[0] != '.':
2026 #names = [ n for n in names if n[0] != '.' ]
2027 names = [x for x in names if x[0] != '.']
2028 names = fnmatch.filter(names, pattern)
2033 #return [ self.entries[_my_normcase(n)] for n in names ]
2034 return [self.entries[_my_normcase(n)] for n in names]
2037 """A class for the root directory of a file system.
2039 This is the same as a Dir class, except that the path separator
2040 ('/' or '\\') is actually part of the name, so we don't need to
2041 add a separator when creating the path names of entries within
2044 def __init__(self, name, fs):
2045 if __debug__: logInstanceCreation(self, 'Node.FS.RootDir')
2046 # We're going to be our own parent directory (".." entry and .dir
2047 # attribute) so we have to set up some values so Base.__init__()
2048 # won't gag when it calls some of our methods.
2053 self.path_elements = []
2056 Base.__init__(self, name, self, fs)
2058 # Now set our paths to what we really want them to be: the
2059 # initial drive letter (the name) plus the directory separator,
2060 # except for the "lookup abspath," which does not have the
2062 self.abspath = name + os.sep
2064 self.path = name + os.sep
2065 self.tpath = name + os.sep
2068 self._lookupDict = {}
2070 # The // and os.sep + os.sep entries are necessary because
2071 # os.path.normpath() seems to preserve double slashes at the
2072 # beginning of a path (presumably for UNC path names), but
2073 # collapses triple slashes to a single slash.
2074 self._lookupDict[''] = self
2075 self._lookupDict['/'] = self
2076 self._lookupDict['//'] = self
2077 self._lookupDict[os.sep] = self
2078 self._lookupDict[os.sep + os.sep] = self
2080 def must_be_same(self, klass):
2083 Base.must_be_same(self, klass)
2085 def _lookup_abs(self, p, klass, create=1):
2087 Fast (?) lookup of a *normalized* absolute path.
2089 This method is intended for use by internal lookups with
2090 already-normalized path data. For general-purpose lookups,
2091 use the FS.Entry(), FS.Dir() or FS.File() methods.
2093 The caller is responsible for making sure we're passed a
2094 normalized absolute path; we merely let Python's dictionary look
2095 up and return the One True Node.FS object for the path.
2097 If a Node for the specified "p" doesn't already exist, and
2098 "create" is specified, the Node may be created after recursive
2099 invocation to find or create the parent directory or directories.
2103 result = self._lookupDict[k]
2106 msg = "No such file or directory: '%s' in '%s' (and create is False)" % (p, str(self))
2107 raise SCons.Errors.UserError, msg
2108 # There is no Node for this path name, and we're allowed
2110 dir_name, file_name = os.path.split(p)
2111 dir_node = self._lookup_abs(dir_name, Dir)
2112 result = klass(file_name, dir_node, self.fs)
2114 # Double-check on disk (as configured) that the Node we
2115 # created matches whatever is out there in the real world.
2116 result.diskcheck_match()
2118 self._lookupDict[k] = result
2119 dir_node.entries[_my_normcase(file_name)] = result
2120 dir_node.implicit = None
2122 # There is already a Node for this path name. Allow it to
2123 # complain if we were looking for an inappropriate type.
2124 result.must_be_same(klass)
def entry_abspath(self, name):
    """Return the absolute path of entry 'name'.  The root's abspath
    already ends with the separator, so plain concatenation suffices."""
    return '%s%s' % (self.abspath, name)
2133 def entry_labspath(self, name):
def entry_path(self, name):
    """Return the path of entry 'name'.  The root's path already ends
    with the separator, so plain concatenation suffices."""
    return '%s%s' % (self.path, name)
def entry_tpath(self, name):
    """Return the target-side path (tpath) of entry 'name'.  The
    root's tpath already ends with the separator."""
    return '%s%s' % (self.tpath, name)
2142 def is_under(self, dir):
2154 def src_builder(self):
2157 class FileNodeInfo(SCons.Node.NodeInfoBase):
2158 current_version_id = 1
2160 field_list = ['csig', 'timestamp', 'size']
2162 # This should get reset by the FS initialization.
2165 def str_to_node(self, s):
2169 drive, s = os.path.splitdrive(s)
2171 root = self.fs.get_root(drive)
2172 if not os.path.isabs(s):
2173 s = top.labspath + '/' + s
2174 return root._lookup_abs(s, Entry)
2176 class FileBuildInfo(SCons.Node.BuildInfoBase):
2177 current_version_id = 1
2179 def convert_to_sconsign(self):
2181 Converts this FileBuildInfo object for writing to a .sconsign file
2183 This replaces each Node in our various dependency lists with its
2184 usual string representation: relative to the top-level SConstruct
2185 directory, or an absolute path if it's outside.
2193 except AttributeError:
2196 s = s.replace(os.sep, '/')
2198 for attr in ['bsources', 'bdepends', 'bimplicit']:
2200 val = getattr(self, attr)
2201 except AttributeError:
2204 setattr(self, attr, list(map(node_to_str, val)))
2205 def convert_from_sconsign(self, dir, name):
2207 Converts a newly-read FileBuildInfo object for in-SCons use
2209 For normal up-to-date checking, we don't have any conversion to
2210 perform--but we're leaving this method here to make that clear.
2213 def prepare_dependencies(self):
2215 Prepares a FileBuildInfo object for explaining what changed
2217 The bsources, bdepends and bimplicit lists have all been
2218 stored on disk as paths relative to the top-level SConstruct
2219 directory. Convert the strings to actual Nodes (for use by the
2220 --debug=explain code and --implicit-cache).
2223 ('bsources', 'bsourcesigs'),
2224 ('bdepends', 'bdependsigs'),
2225 ('bimplicit', 'bimplicitsigs'),
2227 for (nattr, sattr) in attrs:
2229 strings = getattr(self, nattr)
2230 nodeinfos = getattr(self, sattr)
2231 except AttributeError:
2234 for s, ni in izip(strings, nodeinfos):
2235 if not isinstance(s, SCons.Node.Node):
2236 s = ni.str_to_node(s)
2238 setattr(self, nattr, nodes)
2239 def format(self, names=0):
2241 bkids = self.bsources + self.bdepends + self.bimplicit
2242 bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs
2243 for bkid, bkidsig in izip(bkids, bkidsigs):
2244 result.append(str(bkid) + ': ' +
2245 ' '.join(bkidsig.format(names=names)))
2246 result.append('%s [%s]' % (self.bactsig, self.bact))
2247 return '\n'.join(result)
2250 """A class for files in a file system.
2253 memoizer_counters = []
2255 NodeInfo = FileNodeInfo
2256 BuildInfo = FileBuildInfo
2260 def diskcheck_match(self):
2261 diskcheck_match(self, self.isdir,
2262 "Directory %s found where file expected.")
2264 def __init__(self, name, directory, fs):
2265 if __debug__: logInstanceCreation(self, 'Node.FS.File')
2266 Base.__init__(self, name, directory, fs)
def Entry(self, name):
    """Look up or create an entry node named 'name' relative to the
    directory containing this file."""
    parent = self.dir
    return parent.Entry(name)
def Dir(self, name, create=True):
    """Look up or create a directory node named 'name' relative to
    the directory containing this file."""
    parent = self.dir
    return parent.Dir(name, create=create)
def Dirs(self, pathlist):
    """Return a list of directory nodes, one per path in 'pathlist',
    each looked up relative to the directory of this file."""
    return list(map(self.Dir, pathlist))
def File(self, name):
    """Look up or create a file node named 'name' relative to the
    directory containing this file."""
    parent = self.dir
    return parent.File(name)
2291 #def generate_build_dict(self):
2292 # """Return an appropriate dictionary of values for building
2294 # return {'Dir' : self.Dir,
2295 # 'File' : self.File,
2296 # 'RDirs' : self.RDirs}
2299 """Turn a file system node into a File object."""
2300 self.scanner_paths = {}
2301 if not hasattr(self, '_local'):
2304 # If there was already a Builder set on this entry, then
2305 # we need to make sure we call the target-decider function,
2306 # not the source-decider. Reaching in and doing this by hand
2307 # is a little bogus. We'd prefer to handle this by adding
2308 # an Entry.builder_set() method that disambiguates like the
2309 # other methods, but that starts running into problems with the
2310 # fragile way we initialize Dir Nodes with their Mkdir builders,
2311 # yet still allow them to be overridden by the user. Since it's
2312 # not clear right now how to fix that, stick with what works
2313 # until it becomes clear...
2314 if self.has_builder():
2315 self.changed_since_last_build = self.decide_target
2317 def scanner_key(self):
2318 return self.get_suffix()
2320 def get_contents(self):
2321 if not self.rexists():
2323 fname = self.rfile().abspath
2325 contents = open(fname, "rb").read()
2326 except EnvironmentError, e:
2335 get_text_contents = get_contents
    # This attempts to figure out what the encoding of the text is
    # based upon the BOM bytes, and then decodes the contents so that
    # it's a valid python string.
    def get_text_contents(self):
        """Return the file's contents decoded to text, using a leading
        BOM (UTF-8, UTF-16 LE, or UTF-16 BE) to choose the encoding.

        NOTE(review): the final 'return contents' is not visible in
        this view — confirm it exists in the full file.
        """
        contents = self.get_contents()
        # The behavior of various decode() methods and functions
        # w.r.t. the initial BOM bytes is different for different
        # encodings and/or Python versions. ('utf-8' does not strip
        # them, but has a 'utf-8-sig' which does; 'utf-16' seems to
        # strip them; etc.) Just side step all the complication by
        # explicitly stripping the BOM before we decode().
        if contents.startswith(codecs.BOM_UTF8):
            contents = contents[len(codecs.BOM_UTF8):]
            # TODO(2.2): Remove when 2.3 becomes floor.
            #contents = contents.decode('utf-8')
            contents = my_decode(contents, 'utf-8')
        elif contents.startswith(codecs.BOM_UTF16_LE):
            contents = contents[len(codecs.BOM_UTF16_LE):]
            # TODO(2.2): Remove when 2.3 becomes floor.
            #contents = contents.decode('utf-16-le')
            contents = my_decode(contents, 'utf-16-le')
        elif contents.startswith(codecs.BOM_UTF16_BE):
            contents = contents[len(codecs.BOM_UTF16_BE):]
            # TODO(2.2): Remove when 2.3 becomes floor.
            #contents = contents.decode('utf-16-be')
            contents = my_decode(contents, 'utf-16-be')
    def get_content_hash(self):
        """
        Compute and return the MD5 hash for this file.
        """
        # NOTE(review): the try: before the MD5filesignature() call and
        # the EnvironmentError handler body are missing from this view.
        if not self.rexists():
            return SCons.Util.MD5signature('')
        fname = self.rfile().abspath
            cs = SCons.Util.MD5filesignature(fname,
                chunksize=SCons.Node.FS.File.md5_chunksize*1024)
        except EnvironmentError, e:
    memoizer_counters.append(SCons.Memoize.CountValue('get_size'))

    # NOTE(review): the 'def get_size(self):' line and the memoization
    # try/except scaffolding around the lines below are missing from
    # this view.  The visible lines are the memoized file-size lookup,
    # cached in self._memo and read through rfile().
        return self._memo['get_size']
        size = self.rfile().getsize()
        self._memo['get_size'] = size
    memoizer_counters.append(SCons.Memoize.CountValue('get_timestamp'))

    def get_timestamp(self):
        # Memoized modification time of this file, read through
        # rfile() so a repository copy's mtime is used when the local
        # file doesn't exist.
        # NOTE(review): the memoization try/except scaffolding and the
        # final return are missing from this view.
        return self._memo['get_timestamp']
        timestamp = self.rfile().getmtime()
        self._memo['get_timestamp'] = timestamp
    def store_info(self):
        # Merge our build information into the already-stored entry.
        # This accomodates "chained builds" where a file that's a target
        # in one build (SConstruct file) is a source in a different build.
        # See test/chained-build.py for the use case.
        # The merge (rather than overwrite) is done by the sconsign
        # database object for this file's directory, keyed by filename.
        self.dir.sconsign().store_info(self.name, self)
    # Attribute-name lists driving convert_old_entry() below:
    # convert_copy_attrs are copied verbatim onto the new BuildInfo,
    # while convert_sig_attrs are old "signature" lists that get
    # rebuilt as NodeInfo objects.
    # NOTE(review): the list contents and closing brackets are missing
    # from this view.
    convert_copy_attrs = [
    convert_sig_attrs = [
    def convert_old_entry(self, old_entry):
        # Convert a .sconsign entry from before the Big Signature
        # Refactoring, doing what we can to convert its information
        # to the new .sconsign entry format.
        #
        # NOTE(review): several lines are missing from this view — the
        # try: lines and except-clause bodies around the getattr()
        # calls, the code that builds 'value' from each old signature
        # (csig vs. timestamp NodeInfo) before the second setattr(),
        # and the final 'return new_entry'.
        #
        # The old format looked essentially like this:
        #   .bsourcesigs ("signature" list)
        #   .bdependsigs ("signature" list)
        #   .bimplicitsigs ("signature" list)
        #
        # The new format looks like this:
        #   .binfo (BuildInfo)
        #       .bsourcesigs (NodeInfo list)
        #       .bdependsigs (NodeInfo list)
        #       .bimplicitsigs (NodeInfo list)
        #
        # The basic idea of the new structure is that a NodeInfo always
        # holds all available information about the state of a given Node
        # at a certain point in time. The various .b*sigs lists can just
        # be a list of pointers to the .ninfo attributes of the different
        # dependent nodes, without any copying of information until it's
        # time to pickle it for writing out to a .sconsign file.
        #
        # The complicating issue is that the *old* format only stored one
        # "signature" per dependency, based on however the *last* build
        # was configured. We don't know from just looking at it whether
        # it was a build signature, a content signature, or a timestamp
        # "signature". Since we no longer use build signatures, the
        # best we can do is look at the length and if it's thirty two,
        # assume that it was (or might have been) a content signature.
        # If it was actually a build signature, then it will cause a
        # rebuild anyway when it doesn't match the new content signature,
        # but that's probably the best we can do.
        import SCons.SConsign
        new_entry = SCons.SConsign.SConsignEntry()
        new_entry.binfo = self.new_binfo()
        binfo = new_entry.binfo
        for attr in self.convert_copy_attrs:
                value = getattr(old_entry, attr)
            except AttributeError:
            setattr(binfo, attr, value)
            delattr(old_entry, attr)
        for attr in self.convert_sig_attrs:
                sig_list = getattr(old_entry, attr)
            except AttributeError:
            for sig in sig_list:
                ninfo = self.new_ninfo()
                    ninfo.timestamp = sig
            setattr(binfo, attr, value)
            delattr(old_entry, attr)
    memoizer_counters.append(SCons.Memoize.CountValue('get_stored_info'))

    def get_stored_info(self):
        # Return (memoized) the stored .sconsign entry for this file,
        # synthesizing an empty entry when nothing is stored, and
        # converting pre-Big-Signature-Refactoring entries on the fly.
        # NOTE(review): the memoization try/except scaffolding, the
        # try: before get_entry(), and the try:/'pass' around the
        # bsig deletion are missing from this view.
        return self._memo['get_stored_info']
            sconsign_entry = self.dir.sconsign().get_entry(self.name)
        except (KeyError, EnvironmentError):
            import SCons.SConsign
            sconsign_entry = SCons.SConsign.SConsignEntry()
            sconsign_entry.binfo = self.new_binfo()
            sconsign_entry.ninfo = self.new_ninfo()
        if isinstance(sconsign_entry, FileBuildInfo):
            # This is a .sconsign file from before the Big Signature
            # Refactoring; convert it as best we can.
            sconsign_entry = self.convert_old_entry(sconsign_entry)
            delattr(sconsign_entry.ninfo, 'bsig')
        except AttributeError:
        self._memo['get_stored_info'] = sconsign_entry
        return sconsign_entry
2565 def get_stored_implicit(self):
2566 binfo = self.get_stored_info().binfo
2567 binfo.prepare_dependencies()
2568 try: return binfo.bimplicit
2569 except AttributeError: return None
2571 def rel_path(self, other):
2572 return self.dir.rel_path(other)
2574 def _get_found_includes_key(self, env, scanner, path):
2575 return (id(env), id(scanner), path)
    memoizer_counters.append(SCons.Memoize.CountDict('get_found_includes', _get_found_includes_key))

    def get_found_includes(self, env, scanner, path):
        """Return the included implicit dependencies in this file.
        Cache results so we only scan the file once per path
        regardless of how many times this information is requested.
        """
        # NOTE(review): the memoization try/except scaffolding, the
        # guard for a missing scanner, and the final 'return result'
        # are missing from this view.
        memo_key = (id(env), id(scanner), path)
        memo_dict = self._memo['get_found_includes']
        self._memo['get_found_includes'] = memo_dict
        return memo_dict[memo_key]
            # result = [n.disambiguate() for n in scanner(self, env, path)]
            result = scanner(self, env, path)
            result = [N.disambiguate() for N in result]
        memo_dict[memo_key] = result
    def _createDir(self):
        # ensure that the directories for this node are
        # NOTE(review): the remainder of this method (the comment's
        # continuation and the directory-creation call itself) is
        # missing from this view.
    def push_to_cache(self):
        """Try to push the node into a cache
        """
        # This should get called before the Nodes' .built() method is
        # called, which would clear the build signature if the file has
        # a source scanner.
        #
        # We have to clear the local memoized values *before* we push
        # the node to cache so that the memoization of the self.exists()
        # return value doesn't interfere.
        # NOTE(review): a guard line between the two statements below
        # (matching the extra indent of the push() call) is missing
        # from this view.
        self.clear_memoized_values()
            self.get_build_env().get_CacheDir().push(self)
    def retrieve_from_cache(self):
        """Try to retrieve the node's content from a cache

        This method is called from multiple threads in a parallel build,
        so only do thread safe stuff here. Do thread unsafe stuff in
        built().

        Returns true iff the node was successfully retrieved.
        """
        # NOTE(review): the body of the 'if not self.is_derived():'
        # branch (an early return) is missing from this view.
        if not self.is_derived():
        return self.get_build_env().get_CacheDir().retrieve(self)
    # NOTE(review): the enclosing method's 'def' line is missing from
    # this view.  Judging by the statements below (CacheDir
    # push_if_forced, NodeInfo timestamp/size refresh, merging stored
    # binfo for non-built source files), this is post-build/visit
    # bookkeeping.  Guard lines (e.g. around the push_if_forced call
    # and the csig assignment) are also missing — the indentation
    # below is reconstructed, not authoritative.
            self.get_build_env().get_CacheDir().push_if_forced(self)
        ninfo = self.get_ninfo()
        csig = self.get_max_drift_csig()
        ninfo.timestamp = self.get_timestamp()
        ninfo.size = self.get_size()
        if not self.has_builder():
            # This is a source file, but it might have been a target file
            # in another build that included more of the DAG. Copy
            # any build information that's stored in the .sconsign file
            # into our binfo object so it doesn't get lost.
            old = self.get_stored_info()
            self.get_binfo().__dict__.update(old.binfo.__dict__)
    def find_src_builder(self):
        # Find a transparent source-code builder for this file: first
        # ask the directory, then fall back to the SCCS/RCS default
        # builders based on on-disk checks.
        # NOTE(review): many lines are missing from this view — the
        # early-return guards, the conditional/try scaffolding that the
        # indented lines below belong to, and the final return.
        scb = self.dir.src_builder()
            if diskcheck_sccs(self.dir, self.name):
                scb = get_DefaultSCCSBuilder()
            elif diskcheck_rcs(self.dir, self.name):
                scb = get_DefaultRCSBuilder()
            except AttributeError:
                self.builder_set(scb)
    def has_src_builder(self):
        """Return whether this Node has a source builder or not.

        If this Node doesn't have an explicit source code builder, this
        is where we figure out, on the fly, if there's a transparent
        source code builder for it.

        Note that if we found a source builder, we also set the
        self.builder attribute, so that all of the methods that actually
        *build* this file don't have to do anything different.
        """
        # NOTE(review): the try: line and its body (reading the cached
        # self.sbuilder) are missing from this view.
        except AttributeError:
            scb = self.sbuilder = self.find_src_builder()
        return scb is not None
    def alter_targets(self):
        """Return any corresponding targets in a variant directory.
        """
        # NOTE(review): the body of the 'if self.is_derived():' branch
        # (an early return for already-derived files) is missing from
        # this view.
        if self.is_derived():
        return self.fs.variant_dir_target_climb(self, self.dir, [self.name])
    def _rmv_existing(self):
        # Remove the existing file via the Unlink action, after
        # clearing memoized state so exists()-style caches don't go
        # stale.
        # NOTE(review): the body of the BuildError check (presumably
        # re-raising the error) is missing from this view.
        self.clear_memoized_values()
        e = Unlink(self, [], None)
        if isinstance(e, SCons.Errors.BuildError):
    # Taskmaster interface subsystem

    def make_ready(self):
        # Resolve a transparent source builder (if any) before the
        # Taskmaster decides how to build this node.
        self.has_src_builder()

    # NOTE(review): the 'def prepare(self):' line is missing from this
    # view; the docstring and body below belong to it, along with the
    # conditional/try scaffolding that the indented lines imply.
        """Prepare for this file to be created."""
        SCons.Node.Node.prepare(self)

        if self.get_state() != SCons.Node.up_to_date:
                if self.is_derived() and not self.precious:
                    self._rmv_existing()
                except SCons.Errors.StopError, drive:
                    desc = "No drive `%s' for target `%s'." % (drive, self)
                    raise SCons.Errors.StopError, desc
    # NOTE(review): the 'def remove(self):' line is missing from this
    # view; the docstring and body below belong to it.  The branch
    # bodies / return values after the unlink are also not visible.
        """Remove this file."""
        if self.exists() or self.islink():
            self.fs.unlink(self.path)
    def do_duplicate(self, src):
        # Duplicate the source file *src* into this (variant-dir) node
        # via the Link action, after unlinking any stale copy.
        # NOTE(review): lines are missing from this view both right
        # after the 'def' and after the trailing comment block (the
        # deletion of the _exists/_rexists attributes it describes).
        Unlink(self, None, None)
        e = Link(self, src, None)
        if isinstance(e, SCons.Errors.BuildError):
            desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr)
            raise SCons.Errors.StopError, desc
        # The Link() action may or may not have actually
        # created the file, depending on whether the -n
        # option was used or not. Delete the _exists and
        # _rexists attributes so they can be reevaluated.
    memoizer_counters.append(SCons.Memoize.CountValue('exists'))

    # NOTE(review): the 'def exists(self):' line, the memoization
    # try/except, the rfile()/src-exists conditionals that the deeper
    # indentation below implies, the explicit 'return None', and the
    # final 'return result' are all missing from this view.  The
    # indentation below is reconstructed, not authoritative.
        return self._memo['exists']
        # Duplicate from source path if we are set up to do this.
        if self.duplicate and not self.is_derived() and not self.linked:
            src = self.srcnode()
            # At this point, src is meant to be copied in a variant directory.
            if src.abspath != self.abspath:
                    self.do_duplicate(src)
                    # Can't return 1 here because the duplication might
                    # not actually occur if the -n option is being used.
                    # The source file does not exist. Make sure no old
                    # copy remains in the variant directory.
                    if Base.exists(self) or self.islink():
                        self.fs.unlink(self.path)
                    # Return None explicitly because the Base.exists() call
                    # above will have cached its value if the file existed.
                    self._memo['exists'] = None
        result = Base.exists(self)
        self._memo['exists'] = result
    # SIGNATURE SUBSYSTEM

    def get_max_drift_csig(self):
        """
        Returns the content signature currently stored for this node
        if it's been unmodified longer than the max_drift value, or the
        max_drift value is 0. Returns None otherwise.
        """
        # NOTE(review): the try: lines, the 'n = old.ninfo' binding,
        # the returns inside both branches, and the final fallthrough
        # are missing from this view.
        old = self.get_stored_info()
        mtime = self.get_timestamp()

        max_drift = self.fs.max_drift
        if (time.time() - mtime) > max_drift:
                if n.timestamp and n.csig and n.timestamp == mtime:
            except AttributeError:
        elif max_drift == 0:
                return old.ninfo.csig
            except AttributeError:
    # NOTE(review): the 'def get_csig(self):' line and the docstring
    # delimiters are missing from this view, along with the memoized
    # early return, the try/branch scaffolding the indentation below
    # implies, and the final caching/return of the csig.
        Generate a node's content signature, the digested signature

        cache - alternate node to use for the signature cache
        returns - the content signature
        ninfo = self.get_ninfo()
        except AttributeError:
        csig = self.get_max_drift_csig()
            if self.get_size() < SCons.Node.FS.File.md5_chunksize:
                contents = self.get_contents()
                csig = self.get_content_hash()
            # This can happen if there's actually a directory on-disk,
            # which can be the case if they've disabled disk checks,
            # or if an action with a File target actually happens to
            # create a same-named directory by mistake.
                csig = SCons.Util.MD5signature(contents)
    # DECISION SUBSYSTEM

    def builder_set(self, builder):
        # Attaching a builder makes this file a *target*, so switch the
        # change-decision function from the class-level default
        # (decide_source) to the target-side decider.
        SCons.Node.Node.builder_set(self, builder)
        self.changed_since_last_build = self.decide_target
    def changed_content(self, target, prev_ni):
        # Content-based change decision: compare the current content
        # signature against the previously recorded one.
        # NOTE(review): the try: line and the AttributeError handler
        # body (the "no previous csig" case) are missing from this
        # view.
        cur_csig = self.get_csig()
            return cur_csig != prev_ni.csig
        except AttributeError:
2877 def changed_state(self, target, prev_ni):
2878 return self.state != SCons.Node.up_to_date
    def changed_timestamp_then_content(self, target, prev_ni):
        # Cheap timestamp check first; only when the timestamp differs
        # do we fall back to the (more expensive) content comparison.
        # When the timestamp matches, the previous csig is copied
        # forward so it isn't recomputed.
        # NOTE(review): try/except scaffolding lines are missing from
        # this view in all three methods below.
        if not self.changed_timestamp_match(target, prev_ni):
            self.get_ninfo().csig = prev_ni.csig
        except AttributeError:
        return self.changed_content(target, prev_ni)

    def changed_timestamp_newer(self, target, prev_ni):
        # "Source newer than target" decision, make(1)-style.
            return self.get_timestamp() > target.get_timestamp()
        except AttributeError:

    def changed_timestamp_match(self, target, prev_ni):
        # Exact timestamp-mismatch decision against the stored info.
            return self.get_timestamp() != prev_ni.timestamp
        except AttributeError:
2901 def decide_source(self, target, prev_ni):
2902 return target.get_build_env().decide_source(self, target, prev_ni)
2904 def decide_target(self, target, prev_ni):
2905 return target.get_build_env().decide_target(self, target, prev_ni)
    # Initialize this Node's decider function to decide_source() because
    # every file is a source file until it has a Builder attached...
    # (builder_set() rebinds this to decide_target when a builder is
    # attached.)
    changed_since_last_build = decide_source
    def is_up_to_date(self):
        # Up-to-date check with Repository support: if the file doesn't
        # exist locally, look for an up-to-date copy in a Repository
        # and optionally make a local copy of it.
        # NOTE(review): several lines are missing from this view — the
        # Trace-flag setup, the repository loop header that binds 'r',
        # the conditional scaffolding around the LocalCopy() call, and
        # the final fallback return based on self.changed().
        if T: Trace('is_up_to_date(%s):' % self)
        if not self.exists():
            if T: Trace(' not self.exists():')
            # The file doesn't exist locally...
                # ...but there is one in a Repository...
                if not self.changed(r):
                    if T: Trace(' changed(%s):' % r)
                    # ...and it's even up-to-date...
                        # ...and they'd like a local copy.
                        e = LocalCopy(self, r, None)
                        if isinstance(e, SCons.Errors.BuildError):
            if T: Trace(' None\n')
        if T: Trace(' self.exists(): %s\n' % r)
    memoizer_counters.append(SCons.Memoize.CountValue('rfile'))

    # NOTE(review): the 'def rfile(self):' line, the memoization
    # try/except, the initialization and assignment of 'result', and
    # the final return are missing from this view.  rfile() resolves
    # this node to its Repository counterpart when the local file does
    # not exist.
        return self._memo['rfile']
        if not self.exists():
            norm_name = _my_normcase(self.name)
            for dir in self.dir.get_all_rdirs():
                try: node = dir.entries[norm_name]
                except KeyError: node = dir.file_on_disk(self.name)
                if node and node.exists() and \
                   (isinstance(node, File) or isinstance(node, Entry) \
                    or not node.is_derived()):
                    # Copy over our local attributes to the repository
                    # Node so we identify shared object files in the
                    # repository and don't assume they're static.
                    #
                    # This isn't perfect; the attribute would ideally
                    # be attached to the object in the repository in
                    # case it was built statically in the repository
                    # and we changed it to shared locally, but that's
                    # rarely the case and would only occur if you
                    # intentionally used the same suffix for both
                    # shared and static objects anyway. So this
                    # should work well in practice.
                    result.attributes = self.attributes
        self._memo['rfile'] = result

    # NOTE(review): the 'def' line of the method this return belongs
    # to (stringification through rfile()) is missing from this view.
        return str(self.rfile())
    def get_cachedir_csig(self):
        """
        Fetch a Node's content signature for purposes of computing
        another Node's cachesig.

        This is a wrapper around the normal get_csig() method that handles
        the somewhat obscure case of using CacheDir with the -n option.
        Any files that don't exist would normally be "built" by fetching
        them from the cache, but the normal get_csig() method will try
        to open up the local file, which doesn't exist because the -n
        option meant we didn't actually pull the file from cachedir.
        But since the file *does* actually exist in the cachedir, we
        can use its contents for the csig.
        """
        # NOTE(review): the try: before the memoized return, the
        # except-clause body, and the else: before the get_csig()
        # fallback are missing from this view.
            return self.cachedir_csig
        except AttributeError:
        cachedir, cachefile = self.get_build_env().get_CacheDir().cachepath(self)
        if not self.exists() and cachefile and os.path.exists(cachefile):
            self.cachedir_csig = SCons.Util.MD5filesignature(cachefile, \
                SCons.Node.FS.File.md5_chunksize * 1024)
            self.cachedir_csig = self.get_csig()
        return self.cachedir_csig
    def get_cachedir_bsig(self):
        # Build signature used as this node's CacheDir key, memoized
        # in self.cachesig: the children's cachedir csigs, the
        # executor's action contents, and this node's path collected
        # into a single MD5.
        # NOTE(review): the try: line, the except-clause body, and the
        # final 'return result' are missing from this view.
            return self.cachesig
        except AttributeError:
        # Add the path to the cache signature, because multiple
        # targets built by the same action will all have the same
        # build signature, and we have to differentiate them somehow.
        children = self.children()
        executor = self.get_executor()
        sigs = [n.get_cachedir_csig() for n in children]
        sigs.append(SCons.Util.MD5signature(executor.get_contents()))
        sigs.append(self.path)
        result = self.cachesig = SCons.Util.MD5collect(sigs)
# NOTE(review): the body of this accessor is missing from this view;
# per the module header it presumably returns (lazily creating) the
# canonical default FS object — confirm against the full file.
def get_default_fs():
    # Class-body prologue of the enclosing class (its 'class' statement
    # is missing from this view): opt in to SCons memoization when
    # enabled, and declare the per-class counter list used by the
    # Memoizer.
    if SCons.Memoize.use_memoizer:
        __metaclass__ = SCons.Memoize.Memoized_Metaclass

    memoizer_counters = []
    def filedir_lookup(self, p, fd=None):
        """
        A helper method for find_file() that looks up a directory for
        a file we're trying to find. This only creates the Dir Node if
        it exists on-disk, since if the directory doesn't exist we know
        we won't find any files in it... :-)

        It would be more compact to just use this as a nested function
        with a default keyword argument (see the commented-out version
        below), but that doesn't work unless you have nested scopes,
        so we define it here just so this works under Python 1.5.2.
        """
        # NOTE(review): guard lines ('if fd is None:', 'if dir:',
        # an 'if not p:' check), the try/except around the entries
        # lookup, and the 'return node'/'return None' lines are
        # missing from this view.
        fd = self.default_filedir
        dir, name = os.path.split(fd)
        drive, d = os.path.splitdrive(dir)
        if not name and d[:1] in ('/', os.sep):
            #return p.fs.get_root(drive).dir_on_disk(name)
            return p.fs.get_root(drive)
            p = self.filedir_lookup(p, dir)
        norm_name = _my_normcase(name)
            node = p.entries[norm_name]
            return p.dir_on_disk(name)
        if isinstance(node, Dir):
        if isinstance(node, Entry):
            node.must_be_same(Dir)
3076 def _find_file_key(self, filename, paths, verbose=None):
3077 return (filename, paths)
    memoizer_counters.append(SCons.Memoize.CountDict('find_file', _find_file_key))

    def find_file(self, filename, paths, verbose=None):
        """
        find_file(str, [Dir()]) -> [nodes]

        filename - a filename to find

        paths - a list of directory path *nodes* to search in. Can be
                represented as a list, a tuple, or a callable that is
                called with no arguments and returns the list or tuple.

        returns - the node created from the found file.

        Find a node corresponding to either a derived file or a file
        that exists already.

        Only the first file found is returned, and none is returned
        if no file is found.
        """
        # NOTE(review): the memoization try/except, the 'result'
        # initialization, the search-loop header ('for dir in ...')
        # that the indented lines near the bottom belong to, and the
        # final return are missing from this view.
        memo_key = self._find_file_key(filename, paths)
        memo_dict = self._memo['find_file']
        self._memo['find_file'] = memo_dict
        return memo_dict[memo_key]

        if verbose and not callable(verbose):
            if not SCons.Util.is_String(verbose):
                verbose = "find_file"
            _verbose = u' %s: ' % verbose
            verbose = lambda s: sys.stdout.write(_verbose + s)

        filedir, filename = os.path.split(filename)

        # A more compact nested-function version of filedir_lookup()
        # (with a default keyword argument) becomes possible once
        # Python 1.5.2 support is dropped; see filedir_lookup() above
        # for the method form actually used.
        self.default_filedir = filedir
        paths = [_f for _f in map(self.filedir_lookup, paths) if _f]

            verbose("looking for '%s' in '%s' ...\n" % (filename, dir))
            node, d = dir.srcdir_find_file(filename)
                verbose("... FOUND '%s' in '%s'\n" % (filename, d))
        memo_dict[memo_key] = result
# Module-level convenience: the canonical find_file() bound to a shared
# FileFinder instance (its self._memo cache is therefore shared by all
# callers of this module-level function).
find_file = FileFinder().find_file
def invalidate_node_memos(targets):
    """
    Invalidate the memoized values of all Nodes (files or directories)
    that are associated with the given entries. Has been added to
    clear the cache of nodes affected by a direct execution of an
    action (e.g. Delete/Copy/Chmod). Existing Node caches become
    inconsistent if the action is run through Execute(). The argument
    `targets` can be a single Node object or filename, or a sequence
    of Nodes/filenames.
    """
    from traceback import extract_stack

    # First check if the cache really needs to be flushed. Only
    # actions run in the SConscript with Execute() seem to be
    # affected. XXX The way to check if Execute() is in the stacktrace
    # is a very dirty hack and should be replaced by a more sensible
    # solution.
    # NOTE(review): the loop's break/else-return lines, the
    # normalization of a single target into a list, the try: lines,
    # and the guard before the final clear are missing from this view.
    for f in extract_stack():
        if f[2] == 'Execute' and f[0][-14:] == 'Environment.py':
    # Don't have to invalidate, so return

    if not SCons.Util.is_List(targets):

    for entry in targets:
        # If the target is a Node object, clear the cache. If it is a
        # filename, look up potentially existing Node object first.
            entry.clear_memoized_values()
        except AttributeError:
            # Not a Node object, try to look up Node by filename. XXX
            # This creates Node objects even for those filenames which
            # do not correspond to an existing Node object.
            node = get_default_fs().Entry(entry)
            node.clear_memoized_values()
3211 # indent-tabs-mode:nil
3213 # vim: set expandtab tabstop=4 shiftwidth=4: