5 These Nodes represent the canonical external objects that people think
6 of when they think of building software: files and directories.
8 This holds a "default_fs" variable that should be initialized with an FS
9 that can be used by scripts or modules looking for the canonical default.
16 # Permission is hereby granted, free of charge, to any person obtaining
17 # a copy of this software and associated documentation files (the
18 # "Software"), to deal in the Software without restriction, including
19 # without limitation the rights to use, copy, modify, merge, publish,
20 # distribute, sublicense, and/or sell copies of the Software, and to
21 # permit persons to whom the Software is furnished to do so, subject to
22 # the following conditions:
24 # The above copyright notice and this permission notice shall be included
25 # in all copies or substantial portions of the Software.
27 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
28 # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
29 # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
30 # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
31 # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
32 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
33 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
35 from __future__ import generators ### KEEP FOR COMPATIBILITY FIXERS
37 __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
39 from itertools import izip
55 # TODO(2.2): Remove when 2.3 becomes the minimal supported version.
58 except AttributeError:
59 codecs.BOM_UTF8 = '\xef\xbb\xbf'
63 except AttributeError:
64 codecs.BOM_UTF16_LE = '\xff\xfe'
65 codecs.BOM_UTF16_BE = '\xfe\xff'
67 # Provide a wrapper function to handle decoding differences in
68 # different versions of Python. Normally, we'd try to do this in the
69 # compat layer (and maybe it still makes sense to move there?) but
70 # that doesn't provide a way to supply the string class used in
71 # pre-2.3 Python versions with a .decode() method that all strings
72 # naturally have. Plus, the 2.[01] encodings behave differently
73 # enough that we have to settle for a lowest-common-denominator
76 # Note that the 2.[012] implementations below may be inefficient
77 # because they perform an explicit look up of the encoding for every
78 # decode, but they're old enough (and we want to stop supporting
79 # them soon enough) that it's not worth complicating the interface.
80 # Think of it as additional incentive for people to upgrade...
83 except AttributeError:
84 # 2.0 through 2.2: strings have no .decode() method
86 codecs.lookup('ascii').decode
87 except AttributeError:
88 # 2.0 and 2.1: encodings are a tuple of functions, and the
89 # decode() function returns a (result, length) tuple.
90 def my_decode(contents, encoding):
91 return codecs.lookup(encoding)[1](contents)[0]
93 # 2.2: encodings are an object with methods, and the
94 # .decode() method returns just the decoded bytes.
95 def my_decode(contents, encoding):
96 return codecs.lookup(encoding).decode(contents)
98 # 2.3 or later: use the .decode() string method
99 def my_decode(contents, encoding):
100 return contents.decode(encoding)
103 from SCons.Debug import logInstanceCreation
107 import SCons.Node.Alias
110 import SCons.Warnings
112 from SCons.Debug import Trace
117 class EntryProxyAttributeError(AttributeError):
119 An AttributeError subclass for recording and displaying the name
120 of the underlying Entry involved in an AttributeError exception.
122 def __init__(self, entry_proxy, attribute):
123 AttributeError.__init__(self)
124 self.entry_proxy = entry_proxy
125 self.attribute = attribute
127 entry = self.entry_proxy.get()
128 fmt = "%s instance %s has no attribute %s"
129 return fmt % (entry.__class__.__name__,
131 repr(self.attribute))
# The max_drift value: by default, use a cached signature value for
# any file that's been untouched for more than two days.
default_max_drift = 2*24*60*60  # seconds: 2 days * 24 hours * 60 minutes * 60 seconds
138 # We stringify these file system Nodes a lot. Turning a file system Node
139 # into a string is non-trivial, because the final string representation
140 # can depend on a lot of factors: whether it's a derived target or not,
141 # whether it's linked to a repository or source directory, and whether
142 # there's duplication going on. The normal technique for optimizing
143 # calculations like this is to memoize (cache) the string value, so you
144 # only have to do the calculation once.
146 # A number of the above factors, however, can be set after we've already
147 # been asked to return a string for a Node, because a Repository() or
148 # VariantDir() call or the like may not occur until later in SConscript
149 # files. So this variable controls whether we bother trying to save
150 # string values for Nodes. The wrapper interface can set this whenever
151 # they're done mucking with Repository and VariantDir and the other stuff,
152 # to let this module know it can start returning saved string values
157 def save_strings(val):
162 # Avoid unnecessary function calls by recording a Boolean value that
163 # tells us whether or not os.path.splitdrive() actually does anything
164 # on this system, and therefore whether we need to bother calling it
165 # when looking up path names in various methods below.
170 def initialize_do_splitdrive():
172 drive, path = os.path.splitdrive('X:/foo')
173 do_splitdrive = not not drive
175 initialize_do_splitdrive()
179 needs_normpath_check = None
181 def initialize_normpath_check():
183 Initialize the normpath_check regular expression.
185 This function is used by the unit tests to re-initialize the pattern
186 when testing for behavior with different values of os.sep.
188 global needs_normpath_check
190 pattern = r'.*/|\.$|\.\.$'
192 pattern = r'.*[/%s]|\.$|\.\.$' % re.escape(os.sep)
193 needs_normpath_check = re.compile(pattern)
195 initialize_normpath_check()
198 # SCons.Action objects for interacting with the outside world.
200 # The Node.FS methods in this module should use these actions to
201 # create and/or remove files and directories; they should *not* use
202 # os.{link,symlink,unlink,mkdir}(), etc., directly.
204 # Using these SCons.Action objects ensures that descriptions of these
205 # external activities are properly displayed, that the displays are
206 # suppressed when the -s (silent) option is used, and (most importantly)
207 # the actions are disabled when the the -n option is used, in which case
208 # there should be *no* changes to the external file system(s)...
211 if hasattr(os, 'link'):
212 def _hardlink_func(fs, src, dst):
213 # If the source is a symlink, we can't just hard-link to it
214 # because a relative symlink may point somewhere completely
215 # different. We must disambiguate the symlink and then
216 # hard-link the final destination file.
217 while fs.islink(src):
218 link = fs.readlink(src)
219 if not os.path.isabs(link):
222 src = os.path.join(os.path.dirname(src), link)
225 _hardlink_func = None
227 if hasattr(os, 'symlink'):
228 def _softlink_func(fs, src, dst):
231 _softlink_func = None
233 def _copy_func(fs, src, dest):
234 shutil.copy2(src, dest)
236 fs.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
# The duplication styles accepted by set_duplicate(); each is a
# '-'-separated, ordered list of link mechanisms to attempt.
Valid_Duplicates = ['hard-soft-copy', 'soft-hard-copy',
                    'hard-copy', 'soft-copy', 'copy']

Link_Funcs = [] # contains the callables of the specified duplication style
244 def set_duplicate(duplicate):
245 # Fill in the Link_Funcs list according to the argument
246 # (discarding those not available on the platform).
248 # Set up the dictionary that maps the argument names to the
249 # underlying implementations. We do this inside this function,
250 # not in the top-level module code, so that we can remap os.link
251 # and os.symlink for testing purposes.
253 'hard' : _hardlink_func,
254 'soft' : _softlink_func,
258 if not duplicate in Valid_Duplicates:
259 raise SCons.Errors.InternalError, ("The argument of set_duplicate "
260 "should be in Valid_Duplicates")
263 for func in duplicate.split('-'):
265 Link_Funcs.append(link_dict[func])
267 def LinkFunc(target, source, env):
268 # Relative paths cause problems with symbolic links, so
269 # we use absolute paths, which may be a problem for people
270 # who want to move their soft-linked src-trees around. Those
271 # people should use the 'hard-copy' mode, softlinks cannot be
272 # used for that; at least I have no idea how ...
273 src = source[0].abspath
274 dest = target[0].abspath
275 dir, file = os.path.split(dest)
276 if dir and not target[0].fs.isdir(dir):
279 # Set a default order of link functions.
280 set_duplicate('hard-soft-copy')
282 # Now link the files with the previously specified order.
283 for func in Link_Funcs:
287 except (IOError, OSError):
288 # An OSError indicates something happened like a permissions
289 # problem or an attempt to symlink across file-system
290 # boundaries. An IOError indicates something like the file
291 # not existing. In either case, keeping trying additional
292 # functions in the list and only raise an error if the last
294 if func == Link_Funcs[-1]:
295 # exception of the last link method (copy) are fatal
299 Link = SCons.Action.Action(LinkFunc, None)
def LocalString(target, source, env):
    """Return the display message for making a local copy of a file.

    Used as the string function of the LocalCopy Action, so the copy
    is described in build output as target-from-source.
    """
    dst, src = target[0], source[0]
    return 'Local copy of %s from %s' % (dst, src)
303 LocalCopy = SCons.Action.Action(LinkFunc, LocalString)
305 def UnlinkFunc(target, source, env):
307 t.fs.unlink(t.abspath)
310 Unlink = SCons.Action.Action(UnlinkFunc, None)
312 def MkdirFunc(target, source, env):
315 t.fs.mkdir(t.abspath)
318 Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None)
322 def get_MkdirBuilder():
324 if MkdirBuilder is None:
326 import SCons.Defaults
327 # "env" will get filled in by Executor.get_build_env()
328 # calling SCons.Defaults.DefaultEnvironment() when necessary.
329 MkdirBuilder = SCons.Builder.Builder(action = Mkdir,
333 target_scanner = SCons.Defaults.DirEntryScanner,
334 name = "MkdirBuilder")
342 DefaultSCCSBuilder = None
343 DefaultRCSBuilder = None
345 def get_DefaultSCCSBuilder():
346 global DefaultSCCSBuilder
347 if DefaultSCCSBuilder is None:
349 # "env" will get filled in by Executor.get_build_env()
350 # calling SCons.Defaults.DefaultEnvironment() when necessary.
351 act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR')
352 DefaultSCCSBuilder = SCons.Builder.Builder(action = act,
354 name = "DefaultSCCSBuilder")
355 return DefaultSCCSBuilder
357 def get_DefaultRCSBuilder():
358 global DefaultRCSBuilder
359 if DefaultRCSBuilder is None:
361 # "env" will get filled in by Executor.get_build_env()
362 # calling SCons.Defaults.DefaultEnvironment() when necessary.
363 act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR')
364 DefaultRCSBuilder = SCons.Builder.Builder(action = act,
366 name = "DefaultRCSBuilder")
367 return DefaultRCSBuilder
369 # Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem.
370 _is_cygwin = sys.platform == "cygwin"
371 if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin:
381 def __init__(self, type, do, ignore):
387 self.__call__ = self.do
388 def set_ignore(self):
389 self.__call__ = self.ignore
391 if self.type in list:
396 def do_diskcheck_match(node, predicate, errorfmt):
399 # If calling the predicate() cached a None value from stat(),
400 # remove it so it doesn't interfere with later attempts to
401 # build this Node as we walk the DAG. (This isn't a great way
402 # to do this, we're reaching into an interface that doesn't
403 # really belong to us, but it's all about performance, so
404 # for now we'll just document the dependency...)
405 if node._memo['stat'] is None:
406 del node._memo['stat']
407 except (AttributeError, KeyError):
410 raise TypeError, errorfmt % node.abspath
412 def ignore_diskcheck_match(node, predicate, errorfmt):
415 def do_diskcheck_rcs(node, name):
417 rcs_dir = node.rcs_dir
418 except AttributeError:
419 if node.entry_exists_on_disk('RCS'):
420 rcs_dir = node.Dir('RCS')
423 node.rcs_dir = rcs_dir
425 return rcs_dir.entry_exists_on_disk(name+',v')
428 def ignore_diskcheck_rcs(node, name):
431 def do_diskcheck_sccs(node, name):
433 sccs_dir = node.sccs_dir
434 except AttributeError:
435 if node.entry_exists_on_disk('SCCS'):
436 sccs_dir = node.Dir('SCCS')
439 node.sccs_dir = sccs_dir
441 return sccs_dir.entry_exists_on_disk('s.'+name)
444 def ignore_diskcheck_sccs(node, name):
# Module-level DiskChecker instances.  Each pairs an active ("do") and
# an inactive ("ignore") implementation; set_diskcheck() switches
# between them, and diskcheck_types() reports their type names.
diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match)
diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs)
diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs)
457 def set_diskcheck(list):
458 for dc in diskcheckers:
def diskcheck_types():
    """Return the type name of every registered disk checker."""
    names = []
    for checker in diskcheckers:
        names.append(checker.type)
    return names
466 class EntryProxy(SCons.Util.Proxy):
467 def __get_abspath(self):
469 return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(),
470 entry.name + "_abspath")
472 def __get_filebase(self):
473 name = self.get().name
474 return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0],
477 def __get_suffix(self):
478 name = self.get().name
479 return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1],
482 def __get_file(self):
483 name = self.get().name
484 return SCons.Subst.SpecialAttrWrapper(name, name + "_file")
486 def __get_base_path(self):
487 """Return the file's directory and file name, with the
490 return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0],
491 entry.name + "_base")
493 def __get_posix_path(self):
494 """Return the path with / as the path separator,
495 regardless of platform."""
500 r = entry.get_path().replace(os.sep, '/')
501 return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix")
503 def __get_windows_path(self):
504 """Return the path with \ as the path separator,
505 regardless of platform."""
510 r = entry.get_path().replace(os.sep, '\\')
511 return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_windows")
513 def __get_srcnode(self):
514 return EntryProxy(self.get().srcnode())
516 def __get_srcdir(self):
517 """Returns the directory containing the source node linked to this
518 node via VariantDir(), or the directory of this node if not linked."""
519 return EntryProxy(self.get().srcnode().dir)
521 def __get_rsrcnode(self):
522 return EntryProxy(self.get().srcnode().rfile())
524 def __get_rsrcdir(self):
525 """Returns the directory containing the source node linked to this
526 node via VariantDir(), or the directory of this node if not linked."""
527 return EntryProxy(self.get().srcnode().rfile().dir)
530 return EntryProxy(self.get().dir)
532 dictSpecialAttrs = { "base" : __get_base_path,
533 "posix" : __get_posix_path,
534 "windows" : __get_windows_path,
535 "win32" : __get_windows_path,
536 "srcpath" : __get_srcnode,
537 "srcdir" : __get_srcdir,
539 "abspath" : __get_abspath,
540 "filebase" : __get_filebase,
541 "suffix" : __get_suffix,
543 "rsrcpath" : __get_rsrcnode,
544 "rsrcdir" : __get_rsrcdir,
547 def __getattr__(self, name):
548 # This is how we implement the "special" attributes
549 # such as base, posix, srcdir, etc.
551 attr_function = self.dictSpecialAttrs[name]
554 attr = SCons.Util.Proxy.__getattr__(self, name)
555 except AttributeError, e:
556 # Raise our own AttributeError subclass with an
557 # overridden __str__() method that identifies the
558 # name of the entry that caused the exception.
559 raise EntryProxyAttributeError(self, name)
562 return attr_function(self)
564 class Base(SCons.Node.Node):
565 """A generic class for file system entries. This class is for
566 when we don't know yet whether the entry being looked up is a file
567 or a directory. Instances of this class can morph into either
568 Dir or File objects by a later, more precise lookup.
570 Note: this class does not define __cmp__ and __hash__ for
571 efficiency reasons. SCons does a lot of comparing of
572 Node.FS.{Base,Entry,File,Dir} objects, so those operations must be
573 as fast as possible, which means we want to use Python's built-in
574 object identity comparisons.
577 memoizer_counters = []
579 def __init__(self, name, directory, fs):
580 """Initialize a generic Node.FS.Base object.
582 Call the superclass initialization, take care of setting up
583 our relative and absolute paths, identify our parent
584 directory, and indicate that this node should use
586 if __debug__: logInstanceCreation(self, 'Node.FS.Base')
587 SCons.Node.Node.__init__(self)
589 # Filenames and paths are probably reused and are intern'ed to
591 self.name = SCons.Util.silent_intern(name)
592 self.suffix = SCons.Util.silent_intern(SCons.Util.splitext(name)[1])
595 assert directory, "A directory must be provided"
597 self.abspath = SCons.Util.silent_intern(directory.entry_abspath(name))
598 self.labspath = SCons.Util.silent_intern(directory.entry_labspath(name))
599 if directory.path == '.':
600 self.path = SCons.Util.silent_intern(name)
602 self.path = SCons.Util.silent_intern(directory.entry_path(name))
603 if directory.tpath == '.':
604 self.tpath = SCons.Util.silent_intern(name)
606 self.tpath = SCons.Util.silent_intern(directory.entry_tpath(name))
607 self.path_elements = directory.path_elements + [self]
610 self.cwd = None # will hold the SConscript directory for target nodes
611 self.duplicate = directory.duplicate
613 def str_for_display(self):
614 return '"' + self.__str__() + '"'
616 def must_be_same(self, klass):
618 This node, which already existed, is being looked up as the
619 specified klass. Raise an exception if it isn't.
621 if isinstance(self, klass) or klass is Entry:
623 raise TypeError, "Tried to lookup %s '%s' as a %s." %\
624 (self.__class__.__name__, self.path, klass.__name__)
629 def get_suffix(self):
636 """A Node.FS.Base object's string representation is its path
640 return self._save_str()
641 return self._get_str()
643 memoizer_counters.append(SCons.Memoize.CountValue('_save_str'))
647 return self._memo['_save_str']
650 result = intern(self._get_str())
651 self._memo['_save_str'] = result
656 if self.duplicate or self.is_derived():
657 return self.get_path()
658 srcnode = self.srcnode()
659 if srcnode.stat() is None and self.stat() is not None:
660 result = self.get_path()
662 result = srcnode.get_path()
664 # We're not at the point where we're saving the string string
665 # representations of FS Nodes (because we haven't finished
666 # reading the SConscript files and need to have str() return
667 # things relative to them). That also means we can't yet
668 # cache values returned (or not returned) by stat(), since
669 # Python code in the SConscript files might still create
670 # or otherwise affect the on-disk file. So get rid of the
671 # values that the underlying stat() method saved.
672 try: del self._memo['stat']
673 except KeyError: pass
674 if self is not srcnode:
675 try: del srcnode._memo['stat']
676 except KeyError: pass
681 memoizer_counters.append(SCons.Memoize.CountValue('stat'))
684 try: return self._memo['stat']
685 except KeyError: pass
686 try: result = self.fs.stat(self.abspath)
687 except os.error: result = None
688 self._memo['stat'] = result
692 return self.stat() is not None
695 return self.rfile().exists()
699 if st: return st[stat.ST_MTIME]
704 if st: return st[stat.ST_SIZE]
709 return st is not None and stat.S_ISDIR(st[stat.ST_MODE])
713 return st is not None and stat.S_ISREG(st[stat.ST_MODE])
715 if hasattr(os, 'symlink'):
717 try: st = self.fs.lstat(self.abspath)
718 except os.error: return 0
719 return stat.S_ISLNK(st[stat.ST_MODE])
722 return 0 # no symlinks
724 def is_under(self, dir):
728 return self.dir.is_under(dir)
734 """If this node is in a build path, return the node
735 corresponding to its source file. Otherwise, return
738 srcdir_list = self.dir.srcdir_list()
740 srcnode = srcdir_list[0].Entry(self.name)
741 srcnode.must_be_same(self.__class__)
745 def get_path(self, dir=None):
746 """Return path relative to the current working directory of the
747 Node.FS.Base object that owns us."""
749 dir = self.fs.getcwd()
752 path_elems = self.path_elements
753 try: i = path_elems.index(dir)
754 except ValueError: pass
755 else: path_elems = path_elems[i+1:]
756 path_elems = [n.name for n in path_elems]
757 return os.sep.join(path_elems)
759 def set_src_builder(self, builder):
760 """Set the source code builder for this node."""
761 self.sbuilder = builder
762 if not self.has_builder():
763 self.builder_set(builder)
765 def src_builder(self):
766 """Fetch the source code builder for this node.
768 If there isn't one, we cache the source code builder specified
769 for the directory (which in turn will cache the value from its
770 parent directory, and so on up to the file system root).
774 except AttributeError:
775 scb = self.dir.src_builder()
779 def get_abspath(self):
780 """Get the absolute path of the file."""
783 def for_signature(self):
784 # Return just our name. Even an absolute path would not work,
785 # because that can change thanks to symlinks or remapped network
789 def get_subst_proxy(self):
792 except AttributeError:
793 ret = EntryProxy(self)
797 def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext):
800 Generates a target entry that corresponds to this entry (usually
801 a source file) with the specified prefix and suffix.
803 Note that this method can be overridden dynamically for generated
804 files that need different behavior. See Tool/swig.py for
807 return self.dir.Entry(prefix + splitext(self.name)[0] + suffix)
809 def _Rfindalldirs_key(self, pathlist):
812 memoizer_counters.append(SCons.Memoize.CountDict('Rfindalldirs', _Rfindalldirs_key))
814 def Rfindalldirs(self, pathlist):
816 Return all of the directories for a given path list, including
817 corresponding "backing" directories in any repositories.
819 The Node lookups are relative to this Node (typically a
820 directory), so memoizing result saves cycles from looking
821 up the same path for each target in a given directory.
824 memo_dict = self._memo['Rfindalldirs']
827 self._memo['Rfindalldirs'] = memo_dict
830 return memo_dict[pathlist]
834 create_dir_relative_to_self = self.Dir
836 for path in pathlist:
837 if isinstance(path, SCons.Node.Node):
840 dir = create_dir_relative_to_self(path)
841 result.extend(dir.get_all_rdirs())
843 memo_dict[pathlist] = result
847 def RDirs(self, pathlist):
848 """Search for a list of directories in the Repository list."""
849 cwd = self.cwd or self.fs._cwd
850 return cwd.Rfindalldirs(pathlist)
852 memoizer_counters.append(SCons.Memoize.CountValue('rentry'))
856 return self._memo['rentry']
860 if not self.exists():
861 norm_name = _my_normcase(self.name)
862 for dir in self.dir.get_all_rdirs():
864 node = dir.entries[norm_name]
866 if dir.entry_exists_on_disk(self.name):
867 result = dir.Entry(self.name)
869 self._memo['rentry'] = result
872 def _glob1(self, pattern, ondisk=True, source=False, strings=False):
876 """This is the class for generic Node.FS entries--that is, things
877 that could be a File or a Dir, but we're just not sure yet.
878 Consequently, the methods in this class really exist just to
879 transform their associated object into the right class when the
880 time comes, and then call the same-named method in the transformed
883 def diskcheck_match(self):
886 def disambiguate(self, must_exist=None):
893 self.__class__ = File
897 # There was nothing on-disk at this location, so look in
900 # We can't just use self.srcnode() straight away because
901 # that would create an actual Node for this file in the src
902 # directory, and there might not be one. Instead, use the
903 # dir_on_disk() method to see if there's something on-disk
904 # with that name, in which case we can go ahead and call
905 # self.srcnode() to create the right type of entry.
906 srcdir = self.dir.srcnode()
907 if srcdir != self.dir and \
908 srcdir.entry_exists_on_disk(self.name) and \
909 self.srcnode().isdir():
913 msg = "No such file or directory: '%s'" % self.abspath
914 raise SCons.Errors.UserError, msg
916 self.__class__ = File
922 """We're a generic Entry, but the caller is actually looking for
923 a File at this point, so morph into one."""
924 self.__class__ = File
927 return File.rfile(self)
929 def scanner_key(self):
930 return self.get_suffix()
932 def get_contents(self):
933 """Fetch the contents of the entry. Returns the exact binary
934 contents of the file."""
936 self = self.disambiguate(must_exist=1)
937 except SCons.Errors.UserError:
938 # There was nothing on disk with which to disambiguate
939 # this entry. Leave it as an Entry, but return a null
940 # string so calls to get_contents() in emitters and the
941 # like (e.g. in qt.py) don't have to disambiguate by hand
942 # or catch the exception.
945 return self.get_contents()
947 def get_text_contents(self):
948 """Fetch the decoded text contents of a Unicode encoded Entry.
950 Since this should return the text contents from the file
951 system, we check to see into what sort of subclass we should
954 self = self.disambiguate(must_exist=1)
955 except SCons.Errors.UserError:
956 # There was nothing on disk with which to disambiguate
957 # this entry. Leave it as an Entry, but return a null
958 # string so calls to get_text_contents() in emitters and
959 # the like (e.g. in qt.py) don't have to disambiguate by
960 # hand or catch the exception.
963 return self.get_text_contents()
965 def must_be_same(self, klass):
966 """Called to make sure a Node is a Dir. Since we're an
967 Entry, we can morph into one."""
968 if self.__class__ is not klass:
969 self.__class__ = klass
973 # The following methods can get called before the Taskmaster has
974 # had a chance to call disambiguate() directly to see if this Entry
975 # should really be a Dir or a File. We therefore use these to call
976 # disambiguate() transparently (from our caller's point of view).
978 # Right now, this minimal set of methods has been derived by just
979 # looking at some of the methods that will obviously be called early
980 # in any of the various Taskmasters' calling sequences, and then
981 # empirically figuring out which additional methods are necessary
982 # to make various tests pass.
985 """Return if the Entry exists. Check the file system to see
986 what we should turn into first. Assume a file if there's no
988 return self.disambiguate().exists()
990 def rel_path(self, other):
991 d = self.disambiguate()
992 if d.__class__ is Entry:
993 raise "rel_path() could not disambiguate File/Dir"
994 return d.rel_path(other)
997 return self.disambiguate().new_ninfo()
999 def changed_since_last_build(self, target, prev_ni):
1000 return self.disambiguate().changed_since_last_build(target, prev_ni)
1002 def _glob1(self, pattern, ondisk=True, source=False, strings=False):
1003 return self.disambiguate()._glob1(pattern, ondisk, source, strings)
1005 def get_subst_proxy(self):
1006 return self.disambiguate().get_subst_proxy()
1008 # This is for later so we can differentiate between Entry the class and Entry
1009 # the method of the FS class.
1015 if SCons.Memoize.use_memoizer:
1016 __metaclass__ = SCons.Memoize.Memoized_Metaclass
1018 # This class implements an abstraction layer for operations involving
1019 # a local file system. Essentially, this wraps any function in
1020 # the os, os.path or shutil modules that we use to actually go do
1021 # anything with or to the local file system.
1023 # Note that there's a very good chance we'll refactor this part of
1024 # the architecture in some way as we really implement the interface(s)
1025 # for remote file system Nodes. For example, the right architecture
1026 # might be to have this be a subclass instead of a base class.
1027 # Nevertheless, we're using this as a first step in that direction.
1029 # We're not using chdir() yet because the calling subclass method
1030 # needs to use os.chdir() directly to avoid recursion. Will we
1031 # really need this one?
1032 #def chdir(self, path):
1033 # return os.chdir(path)
    # Thin pass-through wrappers around the os / os.path / shutil
    # modules.  Routing every local-file-system operation through this
    # abstraction layer lets alternate implementations (tests, and
    # eventually remote file systems) be substituted in one place.
    def chmod(self, path, mode):
        return os.chmod(path, mode)
    def copy(self, src, dst):
        return shutil.copy(src, dst)
    def copy2(self, src, dst):
        # copy2() also preserves mtime/atime and permission bits.
        return shutil.copy2(src, dst)
    def exists(self, path):
        return os.path.exists(path)
    def getmtime(self, path):
        return os.path.getmtime(path)
    def getsize(self, path):
        return os.path.getsize(path)
    def isdir(self, path):
        return os.path.isdir(path)
    def isfile(self, path):
        return os.path.isfile(path)
    def link(self, src, dst):
        return os.link(src, dst)
    def lstat(self, path):
        return os.lstat(path)
    def listdir(self, path):
        return os.listdir(path)
    def makedirs(self, path):
        return os.makedirs(path)
    def mkdir(self, path):
        return os.mkdir(path)
    def rename(self, old, new):
        return os.rename(old, new)
    def stat(self, path):
        return os.stat(path)
    def symlink(self, src, dst):
        return os.symlink(src, dst)
1066 def open(self, path):
1068 def unlink(self, path):
1069 return os.unlink(path)
1071 if hasattr(os, 'symlink'):
1072 def islink(self, path):
1073 return os.path.islink(path)
1075 def islink(self, path):
1076 return 0 # no symlinks
1078 if hasattr(os, 'readlink'):
1079 def readlink(self, file):
1080 return os.readlink(file)
1082 def readlink(self, file):
1087 # # Skeleton for the obvious methods we might need from the
1088 # # abstraction layer for a remote filesystem.
1089 # def upload(self, local_src, remote_dst):
1091 # def download(self, remote_src, local_dst):
1097 memoizer_counters = []
1099 def __init__(self, path = None):
1100 """Initialize the Node.FS subsystem.
1102 The supplied path is the top of the source tree, where we
1103 expect to find the top-level build file. If no path is
1104 supplied, the current directory is the default.
1106 The path argument must be a valid absolute path.
1108 if __debug__: logInstanceCreation(self, 'Node.FS')
1113 self.SConstruct_dir = None
1114 self.max_drift = default_max_drift
1118 self.pathTop = os.getcwd()
1121 self.defaultDrive = _my_normcase(os.path.splitdrive(self.pathTop)[0])
1123 self.Top = self.Dir(self.pathTop)
1125 self.Top.tpath = '.'
1126 self._cwd = self.Top
1128 DirNodeInfo.fs = self
1129 FileNodeInfo.fs = self
1131 def set_SConstruct_dir(self, dir):
1132 self.SConstruct_dir = dir
1134 def get_max_drift(self):
1135 return self.max_drift
1137 def set_max_drift(self, max_drift):
1138 self.max_drift = max_drift
1143 def chdir(self, dir, change_os_dir=0):
1144 """Change the current working directory for lookups.
1145 If change_os_dir is true, we will also change the "real" cwd
1153 os.chdir(dir.abspath)
1158 def get_root(self, drive):
1160 Returns the root directory for the specified drive, creating
1163 drive = _my_normcase(drive)
1165 return self.Root[drive]
1167 root = RootDir(drive, self)
1168 self.Root[drive] = root
1170 self.Root[self.defaultDrive] = root
1171 elif drive == self.defaultDrive:
1172 self.Root[''] = root
1175 def _lookup(self, p, directory, fsclass, create=1):
1177 The generic entry point for Node lookup with user-supplied data.
1179 This translates arbitrary input into a canonical Node.FS object
1180 of the specified fsclass. The general approach for strings is
1181 to turn it into a fully normalized absolute path and then call
1182 the root directory's lookup_abs() method for the heavy lifting.
1184 If the path name begins with '#', it is unconditionally
1185 interpreted relative to the top-level directory of this FS. '#'
1186 is treated as a synonym for the top-level SConstruct directory,
1187 much like '~' is treated as a synonym for the user's home
1188 directory in a UNIX shell. So both '#foo' and '#/foo' refer
1189 to the 'foo' subdirectory underneath the top-level SConstruct
1192 If the path name is relative, then the path is looked up relative
1193 to the specified directory, or the current directory (self._cwd,
1194 typically the SConscript directory) if the specified directory
# Fast path: already a Node.FS object, just verify the class.
1197 if isinstance(p, Base):
1198 # It's already a Node.FS object. Make sure it's the right
1200 p.must_be_same(fsclass)
1202 # str(p) in case it's something like a proxy object
# p[0:1] (instead of p[0]) is safe for the empty string.
1205 initial_hash = (p[0:1] == '#')
1207 # There was an initial '#', so we strip it and override
1208 # whatever directory they may have specified with the
1209 # top-level SConstruct directory.
1211 directory = self.Top
1213 if directory and not isinstance(directory, Dir):
1214 directory = self.Dir(directory)
1217 drive, p = os.path.splitdrive(p)
1221 # This causes a naked drive letter to be treated as a synonym
1222 # for the root directory on that drive.
1224 absolute = os.path.isabs(p)
# needs_normpath_check is a module-level regex; a match means the
# string may contain '..', '.' or redundant separators.
1226 needs_normpath = needs_normpath_check.match(p)
1228 if initial_hash or not absolute:
1229 # This is a relative lookup, either to the top-level
1230 # SConstruct directory (because of the initial '#') or to
1231 # the current directory (the path name is not absolute).
1232 # Add the string to the appropriate directory lookup path,
1233 # after which the whole thing gets normalized.
1235 directory = self._cwd
1237 p = directory.labspath + '/' + p
1239 p = directory.labspath
1242 p = os.path.normpath(p)
1244 if drive or absolute:
1245 root = self.get_root(drive)
1248 directory = self._cwd
1249 root = directory.root
# Canonicalize to '/' separators before handing off to _lookup_abs.
1252 p = p.replace(os.sep, '/')
1253 return root._lookup_abs(p, fsclass, create)
# The three public lookup entry points; each delegates to _lookup() with
# the appropriate Node class.
1255 def Entry(self, name, directory = None, create = 1):
1256 """Look up or create a generic Entry node with the specified name.
1257 If the name is a relative path (begins with ./, ../, or a file
1258 name), then it is looked up relative to the supplied directory
1259 node, or to the top level directory of the FS (supplied at
1260 construction time) if no directory is supplied.
1262 return self._lookup(name, directory, Entry, create)
1264 def File(self, name, directory = None, create = 1):
1265 """Look up or create a File node with the specified name. If
1266 the name is a relative path (begins with ./, ../, or a file name),
1267 then it is looked up relative to the supplied directory node,
1268 or to the top level directory of the FS (supplied at construction
1269 time) if no directory is supplied.
1271 This method will raise TypeError if a directory is found at the
1274 return self._lookup(name, directory, File, create)
1276 def Dir(self, name, directory = None, create = True):
1277 """Look up or create a Dir node with the specified name. If
1278 the name is a relative path (begins with ./, ../, or a file name),
1279 then it is looked up relative to the supplied directory node,
1280 or to the top level directory of the FS (supplied at construction
1281 time) if no directory is supplied.
1283 This method will raise TypeError if a normal file is found at the
1286 return self._lookup(name, directory, Dir, create)
1288 def VariantDir(self, variant_dir, src_dir, duplicate=1):
1289 """Link the supplied variant directory to the source directory
1290 for purposes of building files."""
1292 if not isinstance(src_dir, SCons.Node.Node):
1293 src_dir = self.Dir(src_dir)
1294 if not isinstance(variant_dir, SCons.Node.Node):
1295 variant_dir = self.Dir(variant_dir)
1296 if src_dir.is_under(variant_dir):
1297 raise SCons.Errors.UserError, "Source directory cannot be under variant directory."
1298 if variant_dir.srcdir:
1299 if variant_dir.srcdir == src_dir:
1300 return # We already did this.
1301 raise SCons.Errors.UserError, "'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir)
1302 variant_dir.link(src_dir, duplicate)
1304 def Repository(self, *dirs):
1305 """Specify Repository directories to search."""
# Each argument is coerced to a Dir node and attached to the Top dir.
1307 if not isinstance(d, SCons.Node.Node):
1309 self.Top.addRepository(d)
1311 def variant_dir_target_climb(self, orig, dir, tail):
1312 """Create targets in corresponding variant directories
1314 Climb the directory tree, and look up path names
1315 relative to any linked variant directories we find.
1317 Even though this loops and walks up the tree, we don't memoize
1318 the return value because this is really only used to process
1319 the command-line targets.
1323 fmt = "building associated VariantDir targets: %s"
1326 for bd in dir.variant_dirs:
1327 if start_dir.is_under(bd):
1328 # If already in the build-dir location, don't reflect
1329 return [orig], fmt % str(orig)
1330 p = os.path.join(bd.path, *tail)
1331 targets.append(self.Entry(p))
# Walking up: prepend the current dir name for the next iteration.
1332 tail = [dir.name] + tail
1335 message = fmt % ' '.join(map(str, targets))
1336 return targets, message
1338 def Glob(self, pathname, ondisk=True, source=True, strings=False, cwd=None):
# Thin forwarding layer; the real work happens in Dir.glob().
1342 This is mainly a shim layer
1346 return cwd.glob(pathname, ondisk, source, strings)
1348 class DirNodeInfo(SCons.Node.NodeInfoBase):
1349 # This should get reset by the FS initialization.
1350 current_version_id = 1
# Convert a stored string path back into a Node, resolving relative
# strings against the top-level directory.
1354 def str_to_node(self, s):
1358 drive, s = os.path.splitdrive(s)
1360 root = self.fs.get_root(drive)
1361 if not os.path.isabs(s):
1362 s = top.labspath + '/' + s
1363 return root._lookup_abs(s, Entry)
1365 class DirBuildInfo(SCons.Node.BuildInfoBase):
1366 current_version_id = 1
# Pre-compiled pattern matching any glob wildcard character.
glob_magic_check = re.compile('[*?[]')

def has_glob_magic(s):
    """Return True if the string s contains a glob wildcard (*, ? or [)."""
    return bool(glob_magic_check.search(s))
1374 """A class for directories in a file system.
1377 memoizer_counters = []
# Per-class NodeInfo/BuildInfo types used by the signature machinery.
1379 NodeInfo = DirNodeInfo
1380 BuildInfo = DirBuildInfo
1382 def __init__(self, name, directory, fs):
1383 if __debug__: logInstanceCreation(self, 'Node.FS.Dir')
1384 Base.__init__(self, name, directory, fs)
1388 """Turn a file system Node (either a freshly initialized directory
1389 object or a separate Entry object) into a proper directory object.
1391 Set up this directory's entries and hook it into the file
1392 system tree. Specify that directories (this Node) don't use
1393 signatures for calculating whether they're current.
1396 self.repositories = []
# '.' and '..' are pre-seeded so entry lookups behave like a real dir.
1400 self.entries['.'] = self
1401 self.entries['..'] = self.dir
1404 self._sconsign = None
1405 self.variant_dirs = []
1406 self.root = self.dir.root
1408 # Don't just reset the executor, replace its action list,
1409 # because it might have some pre-or post-actions that need to
# Every Dir gets the shared Mkdir builder by default.
1411 self.builder = get_MkdirBuilder()
1412 self.get_executor().set_action_list(self.builder.action)
def diskcheck_match(self):
    """Complain, per the configured diskcheck settings, if a plain
    file occupies this directory's path on disk."""
    # The call resolves to the module-level diskcheck_match helper.
    diskcheck_match(self, self.isfile, "File %s found where directory expected.")
1418 def __clearRepositoryCache(self, duplicate=None):
1419 """Called when we change the repository(ies) for a directory.
1420 This clears any cached information that is invalidated by changing
# Recurse into child Dir nodes, skipping ourselves and our parent.
1423 for node in self.entries.values():
1424 if node != self.dir:
1425 if node != self and isinstance(node, Dir):
1426 node.__clearRepositoryCache(duplicate)
1431 except AttributeError:
1433 if duplicate is not None:
1434 node.duplicate=duplicate
1436 def __resetDuplicate(self, node):
# Re-derive the node's duplicate setting from its parent directory.
1438 node.duplicate = node.get_dir().duplicate
# Convenience lookups that delegate to the owning FS, anchored at self.
1440 def Entry(self, name):
1442 Looks up or creates an entry node named 'name' relative to
1445 return self.fs.Entry(name, self)
1447 def Dir(self, name, create=True):
1449 Looks up or creates a directory node named 'name' relative to
1452 return self.fs.Dir(name, self, create)
1454 def File(self, name):
1456 Looks up or creates a file node named 'name' relative to
1459 return self.fs.File(name, self)
1461 def _lookup_rel(self, name, klass, create=1):
1463 Looks up a *normalized* relative path name, relative to this
1466 This method is intended for use by internal lookups with
1467 already-normalized path data. For general-purpose lookups,
1468 use the Entry(), Dir() and File() methods above.
1470 This method does *no* input checking and will die or give
1471 incorrect results if it's passed a non-normalized path name (e.g.,
1472 a path containing '..'), an absolute path name, a top-relative
1473 ('#foo') path name, or any kind of object.
1475 name = self.entry_labspath(name)
1476 return self.root._lookup_abs(name, klass, create)
def link(self, srcdir, duplicate):
    """Set this directory as the variant directory for the
    supplied source directory."""
    self.duplicate = duplicate
    self.srcdir = srcdir
    # Any cached repository-derived data is now stale.
    self.__clearRepositoryCache(duplicate)
    srcdir.variant_dirs.append(self)
1486 def getRepositories(self):
1487 """Returns a list of repositories for this directory.
# A non-duplicating variant dir also searches its source dir chain.
1489 if self.srcdir and not self.duplicate:
1490 return self.srcdir.get_all_rdirs() + self.repositories
1491 return self.repositories
1493 memoizer_counters.append(SCons.Memoize.CountValue('get_all_rdirs'))
# Memoized: returns self plus every repository dir, walking up the tree.
1495 def get_all_rdirs(self):
1497 return list(self._memo['get_all_rdirs'])
1505 for rep in dir.getRepositories():
1506 result.append(rep.Dir(fname))
1510 fname = dir.name + os.sep + fname
1513 self._memo['get_all_rdirs'] = list(result)
1517 def addRepository(self, dir):
# Ignore self-repositories and duplicates.
1518 if dir != self and not dir in self.repositories:
1519 self.repositories.append(dir)
1521 self.__clearRepositoryCache()
1524 return self.entries['..']
1526 def _rel_path_key(self, other):
1529 memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key))
1531 def rel_path(self, other):
1532 """Return a path to "other" relative to this directory.
1535 # This complicated and expensive method, which constructs relative
1536 # paths between arbitrary Node.FS objects, is no longer used
1537 # by SCons itself. It was introduced to store dependency paths
1538 # in .sconsign files relative to the target, but that ended up
1539 # being significantly inefficient.
1541 # We're continuing to support the method because some SConstruct
1542 # files out there started using it when it was available, and
1543 # we're all about backwards compatibility..
# Results are memoized per (self, other) pair.
1546 memo_dict = self._memo['rel_path']
1549 self._memo['rel_path'] = memo_dict
1552 return memo_dict[other]
1559 elif not other in self.path_elements:
# "other" is not on our path to the root: recurse on its parent dir
# and append its own name.
1561 other_dir = other.get_dir()
1562 except AttributeError:
1565 if other_dir is None:
1568 dir_rel_path = self.rel_path(other_dir)
1569 if dir_rel_path == '.':
1572 result = dir_rel_path + os.sep + other.name
# "other" is an ancestor (or self): climb with '..' components, then
# descend along the remaining path elements.
1574 i = self.path_elements.index(other) + 1
1576 path_elems = ['..'] * (len(self.path_elements) - i) \
1577 + [n.name for n in other.path_elements[i:]]
1579 result = os.sep.join(path_elems)
1581 memo_dict[other] = result
def get_env_scanner(self, env, kw={}):
    """Directories are scanned with the default directory-entry scanner
    regardless of the construction environment."""
    from SCons.Defaults import DirEntryScanner
    return DirEntryScanner
def get_target_scanner(self):
    """Directory targets are scanned with the default directory-entry
    scanner."""
    from SCons.Defaults import DirEntryScanner
    return DirEntryScanner
1593 def get_found_includes(self, env, scanner, path):
1594 """Return this directory's implicit dependencies.
1596 We don't bother caching the results because the scan typically
1597 shouldn't be requested more than once (as opposed to scanning
1598 .h file contents, which can be requested as many times as the
1599 files is #included by other files).
1603 # Clear cached info for this Dir. If we already visited this
1604 # directory on our walk down the tree (because we didn't know at
1605 # that point it was being used as the source for another Node)
1606 # then we may have calculated build signature before realizing
1607 # we had to scan the disk. Now that we have to, though, we need
1608 # to invalidate the old calculated signature so that any node
1609 # dependent on our directory structure gets one that includes
1610 # info about everything on disk.
1612 return scanner(self, env, path)
1615 # Taskmaster interface subsystem
1621 def build(self, **kw):
1622 """A null "builder" for directories."""
# Only forward to the real build machinery when a user has replaced
# the default Mkdir builder.
1624 if self.builder is not MkdirBuilder:
1625 SCons.Node.Node.build(self, **kw)
1632 """Create this directory, silently and without worrying about
1633 whether the builder is the default or not."""
# Collect the chain of missing parent directories, then create each.
1639 listDirs.append(parent)
1642 # Don't use while: - else: for this condition because
1643 # if so, then parent is None and has no .path attribute.
1644 raise SCons.Errors.StopError, parent.path
1647 for dirnode in listDirs:
1649 # Don't call dirnode.build(), call the base Node method
1650 # directly because we definitely *must* create this
1651 # directory. The dirnode.build() method will suppress
1652 # the build if it's the default builder.
1653 SCons.Node.Node.build(dirnode)
1654 dirnode.get_executor().nullify()
1655 # The build() action may or may not have actually
1656 # created the directory, depending on whether the -n
1657 # option was used or not. Delete the _exists and
1658 # _rexists attributes so they can be reevaluated.
1663 def multiple_side_effect_has_builder(self):
# A Dir only "has a builder" if the user replaced the default Mkdir one.
1665 return self.builder is not MkdirBuilder and self.has_builder()
1667 def alter_targets(self):
1668 """Return any corresponding targets in a variant directory.
1670 return self.fs.variant_dir_target_climb(self, self, [])
1672 def scanner_key(self):
1673 """A directory does not get scanned."""
1676 def get_text_contents(self):
1677 """We already emit things in text, so just return the binary
1679 return self.get_contents()
1681 def get_contents(self):
1682 """Return content signatures and names of all our children
1683 separated by new-lines. Ensure that the nodes are sorted."""
# Sort by child name so the aggregate signature is deterministic.
1685 name_cmp = lambda a, b: cmp(a.name, b.name)
1686 sorted_children = self.children()[:]
1687 sorted_children.sort(name_cmp)
1688 for node in sorted_children:
1689 contents.append('%s %s\n' % (node.get_csig(), node.name))
1690 return ''.join(contents)
1693 """Compute the content signature for Directory nodes. In
1694 general, this is not needed and the content signature is not
1695 stored in the DirNodeInfo. However, if get_contents on a Dir
1696 node is called which has a child directory, the child
1697 directory should return the hash of its contents."""
1698 contents = self.get_contents()
1699 return SCons.Util.MD5signature(contents)
1701 def do_duplicate(self, src):
# Directories report changes via generic node state, not signatures.
1704 changed_since_last_build = SCons.Node.Node.state_has_changed
1706 def is_up_to_date(self):
1707 """If any child is not up-to-date, then this directory isn't,
# A user-built (non-Mkdir) directory that doesn't exist is out of date.
1709 if self.builder is not MkdirBuilder and not self.exists():
1711 up_to_date = SCons.Node.up_to_date
1712 for kid in self.children():
1713 if kid.get_state() > up_to_date:
# Fall back to searching repositories when we don't exist locally.
1718 if not self.exists():
1719 norm_name = _my_normcase(self.name)
1720 for dir in self.dir.get_all_rdirs():
1721 try: node = dir.entries[norm_name]
1722 except KeyError: node = dir.dir_on_disk(self.name)
1723 if node and node.exists() and \
1724 (isinstance(dir, Dir) or isinstance(dir, Entry)):
1729 """Return the .sconsign file info for this directory,
1730 creating it first if necessary."""
# Lazily created and cached on first use.
1731 if not self._sconsign:
1732 import SCons.SConsign
1733 self._sconsign = SCons.SConsign.ForDirectory(self)
1734 return self._sconsign
1737 """Dir has a special need for srcnode()...if we
1738 have a srcdir attribute set, then that *is* our srcnode."""
1741 return Base.srcnode(self)
1743 def get_timestamp(self):
1744 """Return the latest timestamp from among our children"""
1746 for kid in self.children():
1747 if kid.get_timestamp() > stamp:
1748 stamp = kid.get_timestamp()
def entry_abspath(self, name):
    """Return the absolute path of the entry 'name' within this Dir."""
    return os.sep.join((self.abspath, name))
def entry_labspath(self, name):
    """Return the lookup ('/'-separated) absolute path of entry 'name'."""
    return '%s/%s' % (self.labspath, name)
def entry_path(self, name):
    """Return the (top-relative) path of the entry 'name' within this Dir."""
    return os.sep.join((self.path, name))
def entry_tpath(self, name):
    """Return the target-side path of the entry 'name' within this Dir."""
    return os.sep.join((self.tpath, name))
1763 def entry_exists_on_disk(self, name):
# The on-disk directory listing is read once and cached as a dict.
1765 d = self.on_disk_entries
1766 except AttributeError:
1769 entries = os.listdir(self.abspath)
1773 for entry in map(_my_normcase, entries):
1775 self.on_disk_entries = d
1776 if sys.platform == 'win32':
1777 name = _my_normcase(name)
1778 result = d.get(name)
1780 # Belt-and-suspenders for Windows: check directly for
1781 # 8.3 file names that don't show up in os.listdir().
1782 result = os.path.exists(self.abspath + os.sep + name)
1788 memoizer_counters.append(SCons.Memoize.CountValue('srcdir_list'))
# Memoized: the chain of source dirs reachable through variant-dir links.
1790 def srcdir_list(self):
1792 return self._memo['srcdir_list']
1802 result.append(dir.srcdir.Dir(dirname))
1803 dirname = dir.name + os.sep + dirname
1806 self._memo['srcdir_list'] = result
1810 def srcdir_duplicate(self, name):
1811 for dir in self.srcdir_list():
1812 if self.is_under(dir):
1813 # We shouldn't source from something in the build path;
1814 # variant_dir is probably under src_dir, in which case
1815 # we are reflecting.
1817 if dir.entry_exists_on_disk(name):
# Found in a source dir: duplicate it into this (variant) dir.
1818 srcnode = dir.Entry(name).disambiguate()
1820 node = self.Entry(name).disambiguate()
1821 node.do_duplicate(srcnode)
1827 def _srcdir_find_file_key(self, filename):
1830 memoizer_counters.append(SCons.Memoize.CountDict('srcdir_find_file', _srcdir_find_file_key))
# Memoized search for 'filename' in this dir, its repositories and its
# source-dir chains; returns a (node, directory) pair.
1832 def srcdir_find_file(self, filename):
1834 memo_dict = self._memo['srcdir_find_file']
1837 self._memo['srcdir_find_file'] = memo_dict
1840 return memo_dict[filename]
# Accept only file-like nodes that are derived or actually exist.
1845 if (isinstance(node, File) or isinstance(node, Entry)) and \
1846 (node.is_derived() or node.exists()):
1850 norm_name = _my_normcase(filename)
1852 for rdir in self.get_all_rdirs():
1853 try: node = rdir.entries[norm_name]
1854 except KeyError: node = rdir.file_on_disk(filename)
1855 else: node = func(node)
1857 result = (node, self)
1858 memo_dict[filename] = result
# Not local: fall back to the source-dir chains.
1861 for srcdir in self.srcdir_list():
1862 for rdir in srcdir.get_all_rdirs():
1863 try: node = rdir.entries[norm_name]
1864 except KeyError: node = rdir.file_on_disk(filename)
1865 else: node = func(node)
1867 result = (File(filename, self, self.fs), srcdir)
1868 memo_dict[filename] = result
1871 result = (None, None)
1872 memo_dict[filename] = result
1875 def dir_on_disk(self, name):
# Prefer a real on-disk directory; TypeError means a non-dir node of
# that name already exists.
1876 if self.entry_exists_on_disk(name):
1877 try: return self.Dir(name)
1878 except TypeError: pass
1879 node = self.srcdir_duplicate(name)
1880 if isinstance(node, File):
1884 def file_on_disk(self, name):
# A file can come from disk or from RCS/SCCS version-control stores.
1885 if self.entry_exists_on_disk(name) or \
1886 diskcheck_rcs(self, name) or \
1887 diskcheck_sccs(self, name):
1888 try: return self.File(name)
1889 except TypeError: pass
1890 node = self.srcdir_duplicate(name)
1891 if isinstance(node, Dir):
1895 def walk(self, func, arg):
1897 Walk this directory tree by calling the specified function
1898 for each directory in the tree.
1900 This behaves like the os.path.walk() function, but for in-memory
1901 Node.FS.Dir objects. The function takes the same arguments as
1902 the functions passed to os.path.walk():
1904 func(arg, dirname, fnames)
1906 Except that "dirname" will actually be the directory *Node*,
1907 not the string. The '.' and '..' entries are excluded from
1908 fnames. The fnames list may be modified in-place to filter the
1909 subdirectories visited or otherwise impose a specific order.
1910 The "arg" argument is always passed to func() and may be used
1911 in any way (or ignored, passing None is common).
1913 entries = self.entries
1914 names = entries.keys()
1917 func(arg, self, names)
# Recurse into subdirectory nodes only.
1918 for dirname in [n for n in names if isinstance(entries[n], Dir)]:
1919 entries[dirname].walk(func, arg)
1921 def glob(self, pathname, ondisk=True, source=False, strings=False):
1923 Returns a list of Nodes (or strings) matching a specified
1926 Pathname patterns follow UNIX shell semantics: * matches
1927 any-length strings of any characters, ? matches any character,
1928 and [] can enclose lists or ranges of characters. Matches do
1929 not span directory separators.
1931 The matches take into account Repositories, returning local
1932 Nodes if a corresponding entry exists in a Repository (either
1933 an in-memory Node or something on disk).
1935 By default, the glob() function matches entries that exist
1936 on-disk, in addition to in-memory Nodes. Setting the "ondisk"
1937 argument to False (or some other non-true value) causes the glob()
1938 function to only match in-memory Nodes. The default behavior is
1939 to return both the on-disk and in-memory Nodes.
1941 The "source" argument, when true, specifies that corresponding
1942 source Nodes must be returned if you're globbing in a build
1943 directory (initialized with VariantDir()). The default behavior
1944 is to return Nodes local to the VariantDir().
1946 The "strings" argument, when true, returns the matches as strings,
1947 not Nodes. The strings are path names relative to this directory.
1949 The underlying algorithm is adapted from the glob.glob() function
1950 in the Python library (but heavily modified), and uses fnmatch()
1953 dirname, basename = os.path.split(pathname)
# No directory part: match directly in this directory.
1955 result = self._glob1(basename, ondisk, source, strings)
1956 result.sort(lambda a, b: cmp(str(a), str(b)))
# Otherwise, recursively expand the directory part first, then match
# the basename within each resulting directory.
1958 if has_glob_magic(dirname):
1959 list = self.glob(dirname, ondisk, source, strings=False)
1961 list = [self.Dir(dirname, create=True)]
1964 r = dir._glob1(basename, ondisk, source, strings)
1966 r = [os.path.join(str(dir), x) for x in r]
1968 result.sort(lambda a, b: cmp(str(a), str(b)))
1971 def _glob1(self, pattern, ondisk=True, source=False, strings=False):
1973 Globs for and returns a list of entry names matching a single
1974 pattern in this directory.
1976 This searches any repositories and source directories for
1977 corresponding entries and returns a Node (or string) relative
1978 to the current directory if an entry is found anywhere.
1980 TODO: handle pattern with no wildcard
# Search this dir's repository chain plus all source-dir chains.
1982 search_dir_list = self.get_all_rdirs()
1983 for srcdir in self.srcdir_list():
1984 search_dir_list.extend(srcdir.get_all_rdirs())
1986 selfEntry = self.Entry
1988 for dir in search_dir_list:
1989 # We use the .name attribute from the Node because the keys of
1990 # the dir.entries dictionary are normalized (that is, all upper
1991 # case) on case-insensitive systems like Windows.
1992 node_names = [ v.name for k, v in dir.entries.items()
1993 if k not in ('.', '..') ]
1994 names.extend(node_names)
1996 # Make sure the working directory (self) actually has
1997 # entries for all Nodes in repositories or variant dirs.
1998 for name in node_names: selfEntry(name)
2001 disk_names = os.listdir(dir.abspath)
2004 names.extend(disk_names)
2006 # We're going to return corresponding Nodes in
2007 # the local directory, so we need to make sure
2008 # those Nodes exist. We only want to create
2009 # Nodes for the entries that will match the
2010 # specified pattern, though, which means we
2011 # need to filter the list here, even though
2012 # the overall list will also be filtered later,
2013 # after we exit this loop.
2014 if pattern[0] != '.':
2015 #disk_names = [ d for d in disk_names if d[0] != '.' ]
2016 disk_names = [x for x in disk_names if x[0] != '.']
2017 disk_names = fnmatch.filter(disk_names, pattern)
2018 dirEntry = dir.Entry
2019 for name in disk_names:
2020 # Add './' before disk filename so that '#' at
2021 # beginning of filename isn't interpreted.
2023 node = dirEntry(name).disambiguate()
2025 if n.__class__ != node.__class__:
2026 n.__class__ = node.__class__
# Hidden names (leading '.') are excluded unless the pattern asks
# for them, matching shell glob semantics.
2030 if pattern[0] != '.':
2031 #names = [ n for n in names if n[0] != '.' ]
2032 names = [x for x in names if x[0] != '.']
2033 names = fnmatch.filter(names, pattern)
2038 #return [ self.entries[_my_normcase(n)] for n in names ]
2039 return [self.entries[_my_normcase(n)] for n in names]
2042 """A class for the root directory of a file system.
2044 This is the same as a Dir class, except that the path separator
2045 ('/' or '\\') is actually part of the name, so we don't need to
2046 add a separator when creating the path names of entries within
2049 def __init__(self, name, fs):
2050 if __debug__: logInstanceCreation(self, 'Node.FS.RootDir')
2051 # We're going to be our own parent directory (".." entry and .dir
2052 # attribute) so we have to set up some values so Base.__init__()
2053 # won't gag when it calls some of our methods.
2058 self.path_elements = []
2061 Base.__init__(self, name, self, fs)
2063 # Now set our paths to what we really want them to be: the
2064 # initial drive letter (the name) plus the directory separator,
2065 # except for the "lookup abspath," which does not have the
2067 self.abspath = name + os.sep
2069 self.path = name + os.sep
2070 self.tpath = name + os.sep
# Central cache mapping normalized absolute path -> Node.
2073 self._lookupDict = {}
2075 # The // and os.sep + os.sep entries are necessary because
2076 # os.path.normpath() seems to preserve double slashes at the
2077 # beginning of a path (presumably for UNC path names), but
2078 # collapses triple slashes to a single slash.
2079 self._lookupDict[''] = self
2080 self._lookupDict['/'] = self
2081 self._lookupDict['//'] = self
2082 self._lookupDict[os.sep] = self
2083 self._lookupDict[os.sep + os.sep] = self
2085 def must_be_same(self, klass):
2088 Base.must_be_same(self, klass)
2090 def _lookup_abs(self, p, klass, create=1):
2092 Fast (?) lookup of a *normalized* absolute path.
2094 This method is intended for use by internal lookups with
2095 already-normalized path data. For general-purpose lookups,
2096 use the FS.Entry(), FS.Dir() or FS.File() methods.
2098 The caller is responsible for making sure we're passed a
2099 normalized absolute path; we merely let Python's dictionary look
2100 up and return the One True Node.FS object for the path.
2102 If a Node for the specified "p" doesn't already exist, and
2103 "create" is specified, the Node may be created after recursive
2104 invocation to find or create the parent directory or directories.
2108 result = self._lookupDict[k]
2111 msg = "No such file or directory: '%s' in '%s' (and create is False)" % (p, str(self))
2112 raise SCons.Errors.UserError, msg
2113 # There is no Node for this path name, and we're allowed
# Recurse to find or create the parent, then instantiate the node.
2115 dir_name, file_name = os.path.split(p)
2116 dir_node = self._lookup_abs(dir_name, Dir)
2117 result = klass(file_name, dir_node, self.fs)
2119 # Double-check on disk (as configured) that the Node we
2120 # created matches whatever is out there in the real world.
2121 result.diskcheck_match()
# Register the new node both in the flat lookup cache and in its
# parent directory's entries table.
2123 self._lookupDict[k] = result
2124 dir_node.entries[_my_normcase(file_name)] = result
2125 dir_node.implicit = None
2127 # There is already a Node for this path name. Allow it to
2128 # complain if we were looking for an inappropriate type.
2129 result.must_be_same(klass)
# Root directories already end in the separator, so entry paths are
# simple concatenations (no additional os.sep).
2135 def entry_abspath(self, name):
2136 return self.abspath + name
2138 def entry_labspath(self, name):
2141 def entry_path(self, name):
2142 return self.path + name
2144 def entry_tpath(self, name):
2145 return self.tpath + name
2147 def is_under(self, dir):
2159 def src_builder(self):
2162 class FileNodeInfo(SCons.Node.NodeInfoBase):
2163 current_version_id = 1
# Fields stored in (and restored from) the .sconsign entry for a File.
2165 field_list = ['csig', 'timestamp', 'size']
2167 # This should get reset by the FS initialization.
# Convert a stored string path back into a Node, resolving relative
# strings against the top-level directory.
2170 def str_to_node(self, s):
2174 drive, s = os.path.splitdrive(s)
2176 root = self.fs.get_root(drive)
2177 if not os.path.isabs(s):
2178 s = top.labspath + '/' + s
2179 return root._lookup_abs(s, Entry)
2181 class FileBuildInfo(SCons.Node.BuildInfoBase):
2182 current_version_id = 1
2184 def convert_to_sconsign(self):
2186 Converts this FileBuildInfo object for writing to a .sconsign file
2188 This replaces each Node in our various dependency lists with its
2189 usual string representation: relative to the top-level SConstruct
2190 directory, or an absolute path if it's outside.
2198 except AttributeError:
# Store with '/' separators so .sconsign files are portable.
2201 s = s.replace(os.sep, '/')
2203 for attr in ['bsources', 'bdepends', 'bimplicit']:
2205 val = getattr(self, attr)
2206 except AttributeError:
2209 setattr(self, attr, list(map(node_to_str, val)))
2210 def convert_from_sconsign(self, dir, name):
2212 Converts a newly-read FileBuildInfo object for in-SCons use
2214 For normal up-to-date checking, we don't have any conversion to
2215 perform--but we're leaving this method here to make that clear.
2218 def prepare_dependencies(self):
2220 Prepares a FileBuildInfo object for explaining what changed
2222 The bsources, bdepends and bimplicit lists have all been
2223 stored on disk as paths relative to the top-level SConstruct
2224 directory. Convert the strings to actual Nodes (for use by the
2225 --debug=explain code and --implicit-cache).
2228 ('bsources', 'bsourcesigs'),
2229 ('bdepends', 'bdependsigs'),
2230 ('bimplicit', 'bimplicitsigs'),
2232 for (nattr, sattr) in attrs:
2234 strings = getattr(self, nattr)
2235 nodeinfos = getattr(self, sattr)
2236 except AttributeError:
# Each stored string is rehydrated by its paired NodeInfo object.
2239 for s, ni in izip(strings, nodeinfos):
2240 if not isinstance(s, SCons.Node.Node):
2241 s = ni.str_to_node(s)
2243 setattr(self, nattr, nodes)
2244 def format(self, names=0):
# Human-readable dump of all dependencies plus the action signature.
2246 bkids = self.bsources + self.bdepends + self.bimplicit
2247 bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs
2248 for bkid, bkidsig in izip(bkids, bkidsigs):
2249 result.append(str(bkid) + ': ' +
2250 ' '.join(bkidsig.format(names=names)))
2251 result.append('%s [%s]' % (self.bactsig, self.bact))
2252 return '\n'.join(result)
2255 """A class for files in a file system.
2258 memoizer_counters = []
# Per-class NodeInfo/BuildInfo types used by the signature machinery.
2260 NodeInfo = FileNodeInfo
2261 BuildInfo = FileBuildInfo
def diskcheck_match(self):
    """Complain, per the configured diskcheck settings, if a directory
    occupies this file's path on disk."""
    # The call resolves to the module-level diskcheck_match helper.
    diskcheck_match(self, self.isdir, "Directory %s found where file expected.")
2269 def __init__(self, name, directory, fs):
2270 if __debug__: logInstanceCreation(self, 'Node.FS.File')
2271 Base.__init__(self, name, directory, fs)
def Entry(self, name):
    """Look up or create an entry node named 'name' in this
    file's directory."""
    return self.dir.Entry(name)
def Dir(self, name, create=True):
    """Look up or create a directory node named 'name' in this
    file's directory."""
    return self.dir.Dir(name, create=create)
def Dirs(self, pathlist):
    """Create a list of directory nodes, one per element of pathlist,
    relative to the SConscript directory of this file."""
    return [self.Dir(path) for path in pathlist]
def File(self, name):
    """Look up or create a file node named 'name' in this
    file's directory."""
    return self.dir.File(name)
2296 #def generate_build_dict(self):
2297 # """Return an appropriate dictionary of values for building
2299 # return {'Dir' : self.Dir,
2300 # 'File' : self.File,
2301 # 'RDirs' : self.RDirs}
2304 """Turn a file system node into a File object."""
2305 self.scanner_paths = {}
2306 if not hasattr(self, '_local'):
2309 # If there was already a Builder set on this entry, then
2310 # we need to make sure we call the target-decider function,
2311 # not the source-decider. Reaching in and doing this by hand
2312 # is a little bogus. We'd prefer to handle this by adding
2313 # an Entry.builder_set() method that disambiguates like the
2314 # other methods, but that starts running into problems with the
2315 # fragile way we initialize Dir Nodes with their Mkdir builders,
2316 # yet still allow them to be overridden by the user. Since it's
2317 # not clear right now how to fix that, stick with what works
2318 # until it becomes clear...
2319 if self.has_builder():
2320 self.changed_since_last_build = self.decide_target
def scanner_key(self):
    """Files are mapped to scanners by their suffix."""
    return self.get_suffix()
2325 def get_contents(self):
# Nonexistent files read as empty; real reads go through rfile() so
# Repository copies are consulted.
2326 if not self.rexists():
2328 fname = self.rfile().abspath
2330 contents = open(fname, "rb").read()
2331 except EnvironmentError, e:
2340 get_text_contents = get_contents
2342 # This attempts to figure out what the encoding of the text is
2343 # based upon the BOM bytes, and then decodes the contents so that
2344 # it's a valid python string.
2345 def get_text_contents(self):
2346 contents = self.get_contents()
2347 # The behavior of various decode() methods and functions
2348 # w.r.t. the initial BOM bytes is different for different
2349 # encodings and/or Python versions. ('utf-8' does not strip
2350 # them, but has a 'utf-8-sig' which does; 'utf-16' seems to
2351 # strip them; etc.) Just side step all the complication by
2352 # explicitly stripping the BOM before we decode().
2353 if contents.startswith(codecs.BOM_UTF8):
2354 contents = contents[len(codecs.BOM_UTF8):]
2355 # TODO(2.2): Remove when 2.3 becomes floor.
2356 #contents = contents.decode('utf-8')
2357 contents = my_decode(contents, 'utf-8')
2358 elif contents.startswith(codecs.BOM_UTF16_LE):
2359 contents = contents[len(codecs.BOM_UTF16_LE):]
2360 # TODO(2.2): Remove when 2.3 becomes floor.
2361 #contents = contents.decode('utf-16-le')
2362 contents = my_decode(contents, 'utf-16-le')
2363 elif contents.startswith(codecs.BOM_UTF16_BE):
2364 contents = contents[len(codecs.BOM_UTF16_BE):]
2365 # TODO(2.2): Remove when 2.3 becomes floor.
2366 #contents = contents.decode('utf-16-be')
2367 contents = my_decode(contents, 'utf-16-be')
def get_content_hash(self):
    """
    Compute and return the MD5 hash for this file.
    """
    if not self.rexists():
        # A nonexistent file hashes as the empty string.
        return SCons.Util.MD5signature('')
    fname = self.rfile().abspath
    cs = SCons.Util.MD5filesignature(fname,
        chunksize=SCons.Node.FS.File.md5_chunksize*1024)
    except EnvironmentError, e:
        # NOTE(review): the matching `try:`, the handler body, and the
        # final `return cs` are not visible in this chunk.
# Memoized size/timestamp accessors; values are cached in self._memo.
memoizer_counters.append(SCons.Memoize.CountValue('get_size'))

# NOTE(review): the `def get_size(self):` header and the try/except
# memo-lookup scaffolding are not visible in this chunk; the lines
# below are fragments of that method's body.
    return self._memo['get_size']
    size = self.rfile().getsize()
    self._memo['get_size'] = size

memoizer_counters.append(SCons.Memoize.CountValue('get_timestamp'))

def get_timestamp(self):
    # Return the (memoized) modification time, following Repository
    # redirection via rfile().
    # NOTE(review): the try/except memo scaffolding and the final
    # return are not visible in this chunk.
    return self._memo['get_timestamp']
    timestamp = self.rfile().getmtime()
    self._memo['get_timestamp'] = timestamp
def store_info(self):
    """Record this node's build information in the .sconsign entry.

    The information is merged into the already-stored entry rather
    than replacing it.  This accomodates "chained builds" where a
    file that's a target in one build (SConstruct file) is a source
    in a different build; see test/chained-build.py for the use case.
    """
    sconsign = self.dir.sconsign()
    sconsign.store_info(self.name, self)
# Attribute-name lists used by convert_old_entry() when migrating
# pre-"Big Signature Refactoring" .sconsign entries.
# NOTE(review): the list elements and the closing brackets are not
# visible in this chunk.
convert_copy_attrs = [
convert_sig_attrs = [
def convert_old_entry(self, old_entry):
    # Convert a .sconsign entry from before the Big Signature
    # Refactoring, doing what we can to convert its information
    # to the new .sconsign entry format.
    #
    # The old format looked essentially like this:
    # .bsourcesigs ("signature" list)
    # .bdependsigs ("signature" list)
    # .bimplicitsigs ("signature" list)
    #
    # The new format looks like this:
    # .binfo (BuildInfo)
    # .bsourcesigs (NodeInfo list)
    # .bdependsigs (NodeInfo list)
    # .bimplicitsigs (NodeInfo list)
    #
    # The basic idea of the new structure is that a NodeInfo always
    # holds all available information about the state of a given Node
    # at a certain point in time. The various .b*sigs lists can just
    # be a list of pointers to the .ninfo attributes of the different
    # dependent nodes, without any copying of information until it's
    # time to pickle it for writing out to a .sconsign file.
    #
    # The complicating issue is that the *old* format only stored one
    # "signature" per dependency, based on however the *last* build
    # was configured. We don't know from just looking at it whether
    # it was a build signature, a content signature, or a timestamp
    # "signature". Since we no longer use build signatures, the
    # best we can do is look at the length and if it's thirty two,
    # assume that it was (or might have been) a content signature.
    # If it was actually a build signature, then it will cause a
    # rebuild anyway when it doesn't match the new content signature,
    # but that's probably the best we can do.
    import SCons.SConsign
    new_entry = SCons.SConsign.SConsignEntry()
    new_entry.binfo = self.new_binfo()
    binfo = new_entry.binfo
    for attr in self.convert_copy_attrs:
        value = getattr(old_entry, attr)
        except AttributeError:
        setattr(binfo, attr, value)
        delattr(old_entry, attr)
    for attr in self.convert_sig_attrs:
        sig_list = getattr(old_entry, attr)
        except AttributeError:
        for sig in sig_list:
            ninfo = self.new_ninfo()
            ninfo.timestamp = sig
        setattr(binfo, attr, value)
        delattr(old_entry, attr)
    # NOTE(review): the `try:` statements matching the dangling
    # `except` clauses above, parts of the signature-conversion loop,
    # and the final `return new_entry` are not visible in this chunk.
memoizer_counters.append(SCons.Memoize.CountValue('get_stored_info'))

def get_stored_info(self):
    # Return the stored SConsignEntry for this file: fetched from the
    # directory's .sconsign data, converted if it is an old-format
    # entry, and memoized; a fresh empty entry is synthesized when
    # nothing is stored.
    # NOTE(review): the try/except memo scaffolding around the next
    # line is not visible in this chunk.
    return self._memo['get_stored_info']
    sconsign_entry = self.dir.sconsign().get_entry(self.name)
    except (KeyError, EnvironmentError):
        # No stored entry (or the .sconsign is unreadable): start from
        # a fresh, empty entry.
        import SCons.SConsign
        sconsign_entry = SCons.SConsign.SConsignEntry()
        sconsign_entry.binfo = self.new_binfo()
        sconsign_entry.ninfo = self.new_ninfo()
    if isinstance(sconsign_entry, FileBuildInfo):
        # This is a .sconsign file from before the Big Signature
        # Refactoring; convert it as best we can.
        sconsign_entry = self.convert_old_entry(sconsign_entry)
    delattr(sconsign_entry.ninfo, 'bsig')
    except AttributeError:
    self._memo['get_stored_info'] = sconsign_entry
    return sconsign_entry
def get_stored_implicit(self):
    """Return the implicit-dependency list recorded in this node's
    stored build info, or None if none was recorded."""
    stored_binfo = self.get_stored_info().binfo
    stored_binfo.prepare_dependencies()
    return getattr(stored_binfo, 'bimplicit', None)
def rel_path(self, other):
    """Delegate relative-path computation to this node's directory."""
    containing_dir = self.dir
    return containing_dir.rel_path(other)
2579 def _get_found_includes_key(self, env, scanner, path):
2580 return (id(env), id(scanner), path)
memoizer_counters.append(SCons.Memoize.CountDict('get_found_includes', _get_found_includes_key))

def get_found_includes(self, env, scanner, path):
    """Return the included implicit dependencies in this file.
    Cache results so we only scan the file once per path
    regardless of how many times this information is requested.
    """
    memo_key = (id(env), id(scanner), path)
    memo_dict = self._memo['get_found_includes']
    self._memo['get_found_includes'] = memo_dict
    return memo_dict[memo_key]
    # result = [n.disambiguate() for n in scanner(self, env, path)]
    result = scanner(self, env, path)
    result = [N.disambiguate() for N in result]
    memo_dict[memo_key] = result
    # NOTE(review): the try/except memoization scaffolding and the
    # final `return result` are not visible in this chunk.
def _createDir(self):
    # ensure that the directories for this node are
    # NOTE(review): the rest of this method's body is not visible in
    # this chunk.

def push_to_cache(self):
    """Try to push the node into a cache
    """
    # This should get called before the Nodes' .built() method is
    # called, which would clear the build signature if the file has
    # NOTE(review): this comment (and, apparently, some guard code)
    # is truncated in this chunk.
    # We have to clear the local memoized values *before* we push
    # the node to cache so that the memoization of the self.exists()
    # return value doesn't interfere.
    self.clear_memoized_values()
    self.get_build_env().get_CacheDir().push(self)
def retrieve_from_cache(self):
    """Try to retrieve the node's content from a cache

    This method is called from multiple threads in a parallel build,
    so only do thread safe stuff here. Do thread unsafe stuff in
    Returns true iff the node was successfully retrieved.
    """
    if not self.is_derived():
        # NOTE(review): the body of this guard (upstream bails out
        # for non-derived nodes) is not visible in this chunk.
    return self.get_build_env().get_CacheDir().retrieve(self)
# NOTE(review): the `def built(self):` header for this method is not
# visible in this chunk; the lines below are fragments of its body.
self.get_build_env().get_CacheDir().push_if_forced(self)

ninfo = self.get_ninfo()

csig = self.get_max_drift_csig()

# Record the freshly-built file's stat info in its NodeInfo.
ninfo.timestamp = self.get_timestamp()
ninfo.size = self.get_size()

if not self.has_builder():
    # This is a source file, but it might have been a target file
    # in another build that included more of the DAG. Copy
    # any build information that's stored in the .sconsign file
    # into our binfo object so it doesn't get lost.
    old = self.get_stored_info()
    self.get_binfo().__dict__.update(old.binfo.__dict__)
def find_src_builder(self):
    # Locate a source-code builder for this node (the directory's
    # configured src_builder, or a default SCCS/RCS builder when the
    # corresponding control files are on disk), remember it via
    # builder_set(), and return it.
    scb = self.dir.src_builder()
    if diskcheck_sccs(self.dir, self.name):
        scb = get_DefaultSCCSBuilder()
    elif diskcheck_rcs(self.dir, self.name):
        scb = get_DefaultRCSBuilder()
    except AttributeError:
    self.builder_set(scb)
    # NOTE(review): several lines (guards, the `try:` matching the
    # dangling `except` above, and the final return) are not visible
    # in this chunk.

def has_src_builder(self):
    """Return whether this Node has a source builder or not.

    If this Node doesn't have an explicit source code builder, this
    is where we figure out, on the fly, if there's a transparent
    source code builder for it.

    Note that if we found a source builder, we also set the
    self.builder attribute, so that all of the methods that actually
    *build* this file don't have to do anything different.
    """
    # NOTE(review): the `try:` and the cached-attribute lookup that
    # precede this `except` are not visible in this chunk.
    except AttributeError:
        scb = self.sbuilder = self.find_src_builder()
    return scb is not None
def alter_targets(self):
    """Return any corresponding targets in a variant directory.
    """
    if self.is_derived():
        # NOTE(review): the body of this guard is not visible in this
        # chunk.
    return self.fs.variant_dir_target_climb(self, self.dir, [self.name])

def _rmv_existing(self):
    # Forget memoized state and unlink any existing copy of this file
    # (via the Unlink action) before it is rebuilt.
    self.clear_memoized_values()
    e = Unlink(self, [], None)
    if isinstance(e, SCons.Errors.BuildError):
        # NOTE(review): the error-raising body of this branch is not
        # visible in this chunk.
# Taskmaster interface subsystem

def make_ready(self):
    # Called before building; triggers transparent source-builder
    # discovery (which may set self.builder as a side effect).
    self.has_src_builder()

# NOTE(review): the `def prepare(self):` header is not visible in this
# chunk; the lines below are fragments of its body.
"""Prepare for this file to be created."""
SCons.Node.Node.prepare(self)

if self.get_state() != SCons.Node.up_to_date:
    if self.is_derived() and not self.precious:
        self._rmv_existing()
    except SCons.Errors.StopError, drive:
        # NOTE(review): the `try:` matching this `except` is not
        # visible in this chunk.
        desc = "No drive `%s' for target `%s'." % (drive, self)
        raise SCons.Errors.StopError, desc

# NOTE(review): the `def remove(self):` header is not visible in this
# chunk; the lines below are fragments of its body.
"""Remove this file."""
if self.exists() or self.islink():
    self.fs.unlink(self.path)
def do_duplicate(self, src):
    # Duplicate the given source node into this (variant-dir) location
    # via the Unlink and Link actions.
    Unlink(self, None, None)
    e = Link(self, src, None)
    if isinstance(e, SCons.Errors.BuildError):
        desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr)
        raise SCons.Errors.StopError, desc
    # The Link() action may or may not have actually
    # created the file, depending on whether the -n
    # option was used or not. Delete the _exists and
    # _rexists attributes so they can be reevaluated.
    # NOTE(review): the attribute-clearing code described by the
    # comment above is not visible in this chunk.
memoizer_counters.append(SCons.Memoize.CountValue('exists'))

# NOTE(review): the `def exists(self):` header and the try/except memo
# scaffolding are not visible in this chunk; the lines below are
# fragments of that method's body.
    return self._memo['exists']
# Duplicate from source path if we are set up to do this.
if self.duplicate and not self.is_derived() and not self.linked:
    src = self.srcnode()
    # At this point, src is meant to be copied in a variant directory.
    if src.abspath != self.abspath:
        self.do_duplicate(src)
        # Can't return 1 here because the duplication might
        # not actually occur if the -n option is being used.
    # The source file does not exist. Make sure no old
    # copy remains in the variant directory.
    if Base.exists(self) or self.islink():
        self.fs.unlink(self.path)
    # Return None explicitly because the Base.exists() call
    # above will have cached its value if the file existed.
    self._memo['exists'] = None
result = Base.exists(self)
self._memo['exists'] = result
# SIGNATURE SUBSYSTEM

def get_max_drift_csig(self):
    """
    Returns the content signature currently stored for this node
    if it's been unmodified longer than the max_drift value, or the
    max_drift value is 0. Returns None otherwise.
    """
    old = self.get_stored_info()
    mtime = self.get_timestamp()

    max_drift = self.fs.max_drift
    if (time.time() - mtime) > max_drift:
        # NOTE(review): the binding of `n` (presumably old.ninfo) and
        # the try/except around the attribute access below are not
        # visible in this chunk.
        if n.timestamp and n.csig and n.timestamp == mtime:
        except AttributeError:
    elif max_drift == 0:
        return old.ninfo.csig
        except AttributeError:
    # NOTE(review): the fall-through `return None` is not visible in
    # this chunk.
# NOTE(review): the `def get_csig(self):` header and the docstring
# delimiters are not visible in this chunk; the lines below are
# fragments of that method.
Generate a node's content signature, the digested signature
cache - alternate node to use for the signature cache
returns - the content signature

ninfo = self.get_ninfo()
except AttributeError:
csig = self.get_max_drift_csig()
if self.get_size() < SCons.Node.FS.File.md5_chunksize:
    # Small files are hashed from their in-memory contents; larger
    # files are hashed incrementally from disk.
    contents = self.get_contents()
    csig = self.get_content_hash()
    # This can happen if there's actually a directory on-disk,
    # which can be the case if they've disabled disk checks,
    # or if an action with a File target actually happens to
    # create a same-named directory by mistake.
    csig = SCons.Util.MD5signature(contents)
    # NOTE(review): the if/else branch structure and exception
    # handling around these lines are not visible in this chunk.
2868 # DECISION SUBSYSTEM
def builder_set(self, builder):
    """Attach a Builder to this File and switch its change decider.

    A File with a Builder is a target, so comparisons against the
    last build must use the target decider rather than the default
    source decider.
    """
    SCons.Node.Node.builder_set(self, builder)
    self.changed_since_last_build = self.decide_target
# Decider methods: each returns whether this node should be considered
# changed relative to the previous build's NodeInfo (prev_ni).
# NOTE(review): several `try:` statements and `except` handler bodies
# in the methods below are not visible in this chunk.

def changed_content(self, target, prev_ni):
    # Content-signature comparison.
    cur_csig = self.get_csig()
    return cur_csig != prev_ni.csig
    except AttributeError:

def changed_state(self, target, prev_ni):
    # State-based comparison: anything not up-to-date counts as changed.
    return self.state != SCons.Node.up_to_date

def changed_timestamp_then_content(self, target, prev_ni):
    # Cheap timestamp check first; only fall back to the content
    # comparison when the timestamp differs.
    if not self.changed_timestamp_match(target, prev_ni):
        self.get_ninfo().csig = prev_ni.csig
        except AttributeError:
    return self.changed_content(target, prev_ni)

def changed_timestamp_newer(self, target, prev_ni):
    # Make-style comparison: changed if newer than the target.
    return self.get_timestamp() > target.get_timestamp()
    except AttributeError:

def changed_timestamp_match(self, target, prev_ni):
    # Changed unless the timestamp exactly matches the stored one.
    return self.get_timestamp() != prev_ni.timestamp
    except AttributeError:
def decide_source(self, target, prev_ni):
    """Ask the target's construction environment to decide whether
    this node, used as a source, has changed."""
    build_env = target.get_build_env()
    return build_env.decide_source(self, target, prev_ni)
def decide_target(self, target, prev_ni):
    """Ask the target's construction environment to decide whether
    this node, used as a target, has changed."""
    build_env = target.get_build_env()
    return build_env.decide_target(self, target, prev_ni)
# Initialize this Node's decider function to decide_source() because
# every file is a source file until it has a Builder attached...
# (builder_set() switches this attribute to decide_target.)
changed_since_last_build = decide_source
def is_up_to_date(self):
    # Decide whether this file needs rebuilding, consulting a
    # Repository copy (and optionally making a local copy of it)
    # when the file does not exist locally.
    # NOTE(review): the binding of `r` (presumably the repository
    # file) and several returns are not visible in this chunk.
    if T: Trace('is_up_to_date(%s):' % self)
    if not self.exists():
        if T: Trace(' not self.exists():')
        # The file doesn't exist locally...
        # ...but there is one in a Repository...
        if not self.changed(r):
            if T: Trace(' changed(%s):' % r)
            # ...and it's even up-to-date...
            # ...and they'd like a local copy.
            e = LocalCopy(self, r, None)
            if isinstance(e, SCons.Errors.BuildError):
        if T: Trace(' None\n')
    if T: Trace(' self.exists(): %s\n' % r)
memoizer_counters.append(SCons.Memoize.CountValue('rfile'))

# NOTE(review): the `def rfile(self):` header and the try/except memo
# scaffolding are not visible in this chunk; the lines below are
# fragments of that method, which resolves this node to a Repository
# copy when it does not exist locally.
    return self._memo['rfile']
if not self.exists():
    norm_name = _my_normcase(self.name)
    for dir in self.dir.get_all_rdirs():
        try: node = dir.entries[norm_name]
        except KeyError: node = dir.file_on_disk(self.name)
        if node and node.exists() and \
           (isinstance(node, File) or isinstance(node, Entry) \
            or not node.is_derived()):
            # Copy over our local attributes to the repository
            # Node so we identify shared object files in the
            # repository and don't assume they're static.
            #
            # This isn't perfect; the attribute would ideally
            # be attached to the object in the repository in
            # case it was built statically in the repository
            # and we changed it to shared locally, but that's
            # rarely the case and would only occur if you
            # intentionally used the same suffix for both
            # shared and static objects anyway. So this
            # should work well in practice.
            result.attributes = self.attributes
self._memo['rfile'] = result
# NOTE(review): the `def rstr(self):` header is not visible in this
# chunk; the next line is that method's body.
return str(self.rfile())
def get_cachedir_csig(self):
    """
    Fetch a Node's content signature for purposes of computing
    another Node's cachesig.

    This is a wrapper around the normal get_csig() method that handles
    the somewhat obscure case of using CacheDir with the -n option.
    Any files that don't exist would normally be "built" by fetching
    them from the cache, but the normal get_csig() method will try
    to open up the local file, which doesn't exist because the -n
    option meant we didn't actually pull the file from cachedir.
    But since the file *does* actually exist in the cachedir, we
    can use its contents for the csig.
    """
    # NOTE(review): the `try:` matching the dangling `except` below is
    # not visible in this chunk.
    return self.cachedir_csig
    except AttributeError:
    cachedir, cachefile = self.get_build_env().get_CacheDir().cachepath(self)
    if not self.exists() and cachefile and os.path.exists(cachefile):
        self.cachedir_csig = SCons.Util.MD5filesignature(cachefile, \
            SCons.Node.FS.File.md5_chunksize * 1024)
        # NOTE(review): the `else:` branch structure around the next
        # line is not visible in this chunk.
        self.cachedir_csig = self.get_csig()
    return self.cachedir_csig
def get_cachedir_bsig(self):
    # Compute (and memoize as self.cachesig) the signature used as
    # this node's CacheDir key: its children's cache csigs, the
    # executor's action contents, and the node's own path.
    # NOTE(review): the `try:` matching the dangling `except` below is
    # not visible in this chunk.
    return self.cachesig
    except AttributeError:

    # Add the path to the cache signature, because multiple
    # targets built by the same action will all have the same
    # build signature, and we have to differentiate them somehow.
    children = self.children()
    executor = self.get_executor()
    # sigs = [n.get_cachedir_csig() for n in children]
    sigs = [n.get_cachedir_csig() for n in children]
    sigs.append(SCons.Util.MD5signature(executor.get_contents()))
    sigs.append(self.path)
    result = self.cachesig = SCons.Util.MD5collect(sigs)
    # NOTE(review): the final `return result` is not visible in this
    # chunk.
def get_default_fs():
    # NOTE(review): the body of this accessor (per the module header,
    # it presumably returns the canonical "default_fs" instance) is
    # not visible in this chunk.

# NOTE(review): the class header for the following class-level lines
# (FileFinder in upstream SCons) is not visible in this chunk.
if SCons.Memoize.use_memoizer:
    __metaclass__ = SCons.Memoize.Memoized_Metaclass

# Per-class registry of memoization counters.
memoizer_counters = []
def filedir_lookup(self, p, fd=None):
    """
    A helper method for find_file() that looks up a directory for
    a file we're trying to find. This only creates the Dir Node if
    it exists on-disk, since if the directory doesn't exist we know
    we won't find any files in it... :-)

    It would be more compact to just use this as a nested function
    with a default keyword argument (see the commented-out version
    below), but that doesn't work unless you have nested scopes,
    so we define it here just so this work under Python 1.5.2.
    """
    # NOTE(review): the `if fd is None:` guard around the next line,
    # and several try/except and return statements below, are not
    # visible in this chunk.
    fd = self.default_filedir
    dir, name = os.path.split(fd)
    drive, d = os.path.splitdrive(dir)
    if not name and d[:1] in ('/', os.sep):
        #return p.fs.get_root(drive).dir_on_disk(name)
        return p.fs.get_root(drive)
    p = self.filedir_lookup(p, dir)
    norm_name = _my_normcase(name)
    node = p.entries[norm_name]
    return p.dir_on_disk(name)
    if isinstance(node, Dir):
    if isinstance(node, Entry):
        node.must_be_same(Dir)
3081 def _find_file_key(self, filename, paths, verbose=None):
3082 return (filename, paths)
memoizer_counters.append(SCons.Memoize.CountDict('find_file', _find_file_key))

def find_file(self, filename, paths, verbose=None):
    """
    find_file(str, [Dir()]) -> [nodes]

    filename - a filename to find
    paths - a list of directory path *nodes* to search in. Can be
            represented as a list, a tuple, or a callable that is
            called with no arguments and returns the list or tuple.

    returns - the node created from the found file.

    Find a node corresponding to either a derived file or a file
    that exists already.

    Only the first file found is returned, and none is returned
    if no file is found.
    """
    memo_key = self._find_file_key(filename, paths)
    # NOTE(review): the try/except memoization scaffolding, the search
    # loop header (`for dir in paths:`), and the final return are not
    # visible in this chunk.
    memo_dict = self._memo['find_file']
    self._memo['find_file'] = memo_dict
    return memo_dict[memo_key]

    if verbose and not callable(verbose):
        if not SCons.Util.is_String(verbose):
            verbose = "find_file"
        _verbose = ' %s: ' % verbose
        verbose = lambda s: sys.stdout.write(_verbose + s)

    filedir, filename = os.path.split(filename)

    # More compact code that we can't use until we drop
    # support for Python 1.5.2:
    #
    #def filedir_lookup(p, fd=filedir):
    #    """
    #    A helper function that looks up a directory for a file
    #    we're trying to find. This only creates the Dir Node
    #    if it exists on-disk, since if the directory doesn't
    #    exist we know we won't find any files in it... :-)
    #    """
    #    dir, name = os.path.split(fd)
    #    p = filedir_lookup(p, dir)
    #    norm_name = _my_normcase(name)
    #    node = p.entries[norm_name]
    #    return p.dir_on_disk(name)
    #    if isinstance(node, Dir):
    #    if isinstance(node, Entry):
    #        node.must_be_same(Dir)
    #    if isinstance(node, Dir) or isinstance(node, Entry):
    #
    #paths = filter(None, map(filedir_lookup, paths))

    self.default_filedir = filedir
    paths = [_f for _f in map(self.filedir_lookup, paths) if _f]

    verbose("looking for '%s' in '%s' ...\n" % (filename, dir))
    node, d = dir.srcdir_find_file(filename)
    verbose("... FOUND '%s' in '%s'\n" % (filename, d))
    memo_dict[memo_key] = result
# Module-level convenience: a shared FileFinder's bound find_file method.
find_file = FileFinder().find_file
def invalidate_node_memos(targets):
    """
    Invalidate the memoized values of all Nodes (files or directories)
    that are associated with the given entries. Has been added to
    clear the cache of nodes affected by a direct execution of an
    action (e.g. Delete/Copy/Chmod). Existing Node caches become
    inconsistent if the action is run through Execute(). The argument
    `targets` can be a single Node object or filename, or a sequence
    """
    from traceback import extract_stack

    # First check if the cache really needs to be flushed. Only
    # actions run in the SConscript with Execute() seem to be
    # affected. XXX The way to check if Execute() is in the stacktrace
    # is a very dirty hack and should be replaced by a more sensible
    for f in extract_stack():
        if f[2] == 'Execute' and f[0][-14:] == 'Environment.py':
            # Dont have to invalidate, so return
            # NOTE(review): the loop-control/return statements implied
            # by these comments are not visible in this chunk.

    if not SCons.Util.is_List(targets):
        # NOTE(review): the body of this guard (presumably wrapping
        # the single target in a list) is not visible in this chunk.

    for entry in targets:
        # If the target is a Node object, clear the cache. If it is a
        # filename, look up potentially existing Node object first.
        # NOTE(review): the `try:` matching the `except` below is not
        # visible in this chunk.
        entry.clear_memoized_values()
        except AttributeError:
            # Not a Node object, try to look up Node by filename. XXX
            # This creates Node objects even for those filenames which
            # do not correspond to an existing Node object.
            node = get_default_fs().Entry(entry)
            node.clear_memoized_values()
3216 # indent-tabs-mode:nil
3218 # vim: set expandtab tabstop=4 shiftwidth=4: