5 These Nodes represent the canonical external objects that people think
6 of when they think of building software: files and directories.
8 This holds a "default_fs" variable that should be initialized with an FS
9 that can be used by scripts or modules looking for the canonical default.
16 # Permission is hereby granted, free of charge, to any person obtaining
17 # a copy of this software and associated documentation files (the
18 # "Software"), to deal in the Software without restriction, including
19 # without limitation the rights to use, copy, modify, merge, publish,
20 # distribute, sublicense, and/or sell copies of the Software, and to
21 # permit persons to whom the Software is furnished to do so, subject to
22 # the following conditions:
24 # The above copyright notice and this permission notice shall be included
25 # in all copies or substantial portions of the Software.
27 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
28 # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
29 # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
30 # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
31 # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
32 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
33 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
35 from __future__ import generators ### KEEP FOR COMPATIBILITY FIXERS
37 __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
39 from itertools import izip
55 # TODO(2.2): Remove when 2.3 becomes the minimal supported version.
58 except AttributeError:
59 codecs.BOM_UTF8 = '\xef\xbb\xbf'
63 except AttributeError:
64 codecs.BOM_UTF16_LE = '\xff\xfe'
65 codecs.BOM_UTF16_BE = '\xfe\xff'
67 # Provide a wrapper function to handle decoding differences in
68 # different versions of Python. Normally, we'd try to do this in the
69 # compat layer (and maybe it still makes sense to move there?) but
70 # that doesn't provide a way to supply the string class used in
71 # pre-2.3 Python versions with a .decode() method that all strings
72 # naturally have. Plus, the 2.[01] encodings behave differently
73 # enough that we have to settle for a lowest-common-denominator
76 # Note that the 2.[012] implementations below may be inefficient
77 # because they perform an explicit look up of the encoding for every
78 # decode, but they're old enough (and we want to stop supporting
79 # them soon enough) that it's not worth complicating the interface.
80 # Think of it as additional incentive for people to upgrade...
83 except AttributeError:
84 # 2.0 through 2.2: strings have no .decode() method
86 codecs.lookup('ascii').decode
87 except AttributeError:
88 # 2.0 and 2.1: encodings are a tuple of functions, and the
89 # decode() function returns a (result, length) tuple.
def my_decode(contents, encoding):
    """Decode contents via the 2.0/2.1 codecs API.

    On those versions codecs.lookup() returns a tuple of functions;
    element [1] is the decoder, which returns a (decoded, length) pair.
    """
    decoder = codecs.lookup(encoding)[1]
    decoded, _length = decoder(contents)
    return decoded
93 # 2.2: encodings are an object with methods, and the
94 # .decode() method returns just the decoded bytes.
def my_decode(contents, encoding):
    """Decode contents via the 2.2 codecs API (codec objects with methods)."""
    codec = codecs.lookup(encoding)
    return codec.decode(contents)
98 # 2.3 or later: use the .decode() string method
def my_decode(contents, encoding):
    """Decode contents with the native .decode() string method (2.3+)."""
    result = contents.decode(encoding)
    return result
103 from SCons.Debug import logInstanceCreation
107 import SCons.Node.Alias
110 import SCons.Warnings
112 from SCons.Debug import Trace
117 class EntryProxyAttributeError(AttributeError):
119 An AttributeError subclass for recording and displaying the name
120 of the underlying Entry involved in an AttributeError exception.
122 def __init__(self, entry_proxy, attribute):
123 AttributeError.__init__(self)
124 self.entry_proxy = entry_proxy
125 self.attribute = attribute
127 entry = self.entry_proxy.get()
128 fmt = "%s instance %s has no attribute %s"
129 return fmt % (entry.__class__.__name__,
131 repr(self.attribute))
133 # The max_drift value: by default, use a cached signature value for
134 # any file that's been untouched for more than two days.
135 default_max_drift = 2*24*60*60
138 # We stringify these file system Nodes a lot. Turning a file system Node
139 # into a string is non-trivial, because the final string representation
140 # can depend on a lot of factors: whether it's a derived target or not,
141 # whether it's linked to a repository or source directory, and whether
142 # there's duplication going on. The normal technique for optimizing
143 # calculations like this is to memoize (cache) the string value, so you
144 # only have to do the calculation once.
146 # A number of the above factors, however, can be set after we've already
147 # been asked to return a string for a Node, because a Repository() or
148 # VariantDir() call or the like may not occur until later in SConscript
149 # files. So this variable controls whether we bother trying to save
150 # string values for Nodes. The wrapper interface can set this whenever
151 # they're done mucking with Repository and VariantDir and the other stuff,
152 # to let this module know it can start returning saved string values
157 def save_strings(val):
162 # Avoid unnecessary function calls by recording a Boolean value that
163 # tells us whether or not os.path.splitdrive() actually does anything
164 # on this system, and therefore whether we need to bother calling it
165 # when looking up path names in various methods below.
170 def initialize_do_splitdrive():
172 drive, path = os.path.splitdrive('X:/foo')
173 do_splitdrive = not not drive
175 initialize_do_splitdrive()
179 needs_normpath_check = None
181 def initialize_normpath_check():
183 Initialize the normpath_check regular expression.
185 This function is used by the unit tests to re-initialize the pattern
186 when testing for behavior with different values of os.sep.
188 global needs_normpath_check
190 pattern = r'.*/|\.$|\.\.$'
192 pattern = r'.*[/%s]|\.$|\.\.$' % re.escape(os.sep)
193 needs_normpath_check = re.compile(pattern)
195 initialize_normpath_check()
198 # SCons.Action objects for interacting with the outside world.
200 # The Node.FS methods in this module should use these actions to
201 # create and/or remove files and directories; they should *not* use
202 # os.{link,symlink,unlink,mkdir}(), etc., directly.
204 # Using these SCons.Action objects ensures that descriptions of these
205 # external activities are properly displayed, that the displays are
206 # suppressed when the -s (silent) option is used, and (most importantly)
207 # the actions are disabled when the -n option is used, in which case
208 # there should be *no* changes to the external file system(s)...
211 if hasattr(os, 'link'):
212 def _hardlink_func(fs, src, dst):
213 # If the source is a symlink, we can't just hard-link to it
214 # because a relative symlink may point somewhere completely
215 # different. We must disambiguate the symlink and then
216 # hard-link the final destination file.
217 while fs.islink(src):
218 link = fs.readlink(src)
219 if not os.path.isabs(link):
222 src = os.path.join(os.path.dirname(src), link)
225 _hardlink_func = None
227 if hasattr(os, 'symlink'):
228 def _softlink_func(fs, src, dst):
231 _softlink_func = None
233 def _copy_func(fs, src, dest):
234 shutil.copy2(src, dest)
236 fs.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
239 Valid_Duplicates = ['hard-soft-copy', 'soft-hard-copy',
240 'hard-copy', 'soft-copy', 'copy']
242 Link_Funcs = [] # contains the callables of the specified duplication style
244 def set_duplicate(duplicate):
245 # Fill in the Link_Funcs list according to the argument
246 # (discarding those not available on the platform).
248 # Set up the dictionary that maps the argument names to the
249 # underlying implementations. We do this inside this function,
250 # not in the top-level module code, so that we can remap os.link
251 # and os.symlink for testing purposes.
253 'hard' : _hardlink_func,
254 'soft' : _softlink_func,
258 if not duplicate in Valid_Duplicates:
259 raise SCons.Errors.InternalError, ("The argument of set_duplicate "
260 "should be in Valid_Duplicates")
263 for func in duplicate.split('-'):
265 Link_Funcs.append(link_dict[func])
267 def LinkFunc(target, source, env):
268 # Relative paths cause problems with symbolic links, so
269 # we use absolute paths, which may be a problem for people
270 # who want to move their soft-linked src-trees around. Those
271 # people should use the 'hard-copy' mode, softlinks cannot be
272 # used for that; at least I have no idea how ...
273 src = source[0].abspath
274 dest = target[0].abspath
275 dir, file = os.path.split(dest)
276 if dir and not target[0].fs.isdir(dir):
279 # Set a default order of link functions.
280 set_duplicate('hard-soft-copy')
282 # Now link the files with the previously specified order.
283 for func in Link_Funcs:
287 except (IOError, OSError):
288 # An OSError indicates something happened like a permissions
289 # problem or an attempt to symlink across file-system
290 # boundaries. An IOError indicates something like the file
291 # not existing.  In either case, keep trying additional
292 # functions in the list and only raise an error if the last
294 if func == Link_Funcs[-1]:
295 # exception of the last link method (copy) are fatal
299 Link = SCons.Action.Action(LinkFunc, None)
def LocalString(target, source, env):
    """Return the progress message shown for a local copy action.

    env is unused but required by the Action string-function signature.
    """
    fmt = 'Local copy of %s from %s'
    return fmt % (target[0], source[0])
303 LocalCopy = SCons.Action.Action(LinkFunc, LocalString)
305 def UnlinkFunc(target, source, env):
307 t.fs.unlink(t.abspath)
310 Unlink = SCons.Action.Action(UnlinkFunc, None)
312 def MkdirFunc(target, source, env):
315 t.fs.mkdir(t.abspath)
318 Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None)
322 def get_MkdirBuilder():
324 if MkdirBuilder is None:
326 import SCons.Defaults
327 # "env" will get filled in by Executor.get_build_env()
328 # calling SCons.Defaults.DefaultEnvironment() when necessary.
329 MkdirBuilder = SCons.Builder.Builder(action = Mkdir,
333 target_scanner = SCons.Defaults.DirEntryScanner,
334 name = "MkdirBuilder")
342 DefaultSCCSBuilder = None
343 DefaultRCSBuilder = None
345 def get_DefaultSCCSBuilder():
346 global DefaultSCCSBuilder
347 if DefaultSCCSBuilder is None:
349 # "env" will get filled in by Executor.get_build_env()
350 # calling SCons.Defaults.DefaultEnvironment() when necessary.
351 act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR')
352 DefaultSCCSBuilder = SCons.Builder.Builder(action = act,
354 name = "DefaultSCCSBuilder")
355 return DefaultSCCSBuilder
357 def get_DefaultRCSBuilder():
358 global DefaultRCSBuilder
359 if DefaultRCSBuilder is None:
361 # "env" will get filled in by Executor.get_build_env()
362 # calling SCons.Defaults.DefaultEnvironment() when necessary.
363 act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR')
364 DefaultRCSBuilder = SCons.Builder.Builder(action = act,
366 name = "DefaultRCSBuilder")
367 return DefaultRCSBuilder
369 # Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem.
370 _is_cygwin = sys.platform == "cygwin"
371 if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin:
381 def __init__(self, type, do, ignore):
387 self.__call__ = self.do
    def set_ignore(self):
        # Disable this check: point the bound __call__ at the no-op
        # ignore handler instead of the active do handler.
        self.__call__ = self.ignore
391 if self.type in list:
396 def do_diskcheck_match(node, predicate, errorfmt):
399 # If calling the predicate() cached a None value from stat(),
400 # remove it so it doesn't interfere with later attempts to
401 # build this Node as we walk the DAG. (This isn't a great way
402 # to do this, we're reaching into an interface that doesn't
403 # really belong to us, but it's all about performance, so
404 # for now we'll just document the dependency...)
405 if node._memo['stat'] is None:
406 del node._memo['stat']
407 except (AttributeError, KeyError):
410 raise TypeError, errorfmt % node.abspath
412 def ignore_diskcheck_match(node, predicate, errorfmt):
415 def do_diskcheck_rcs(node, name):
417 rcs_dir = node.rcs_dir
418 except AttributeError:
419 if node.entry_exists_on_disk('RCS'):
420 rcs_dir = node.Dir('RCS')
423 node.rcs_dir = rcs_dir
425 return rcs_dir.entry_exists_on_disk(name+',v')
428 def ignore_diskcheck_rcs(node, name):
431 def do_diskcheck_sccs(node, name):
433 sccs_dir = node.sccs_dir
434 except AttributeError:
435 if node.entry_exists_on_disk('SCCS'):
436 sccs_dir = node.Dir('SCCS')
439 node.sccs_dir = sccs_dir
441 return sccs_dir.entry_exists_on_disk('s.'+name)
444 def ignore_diskcheck_sccs(node, name):
447 diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match)
448 diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs)
449 diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs)
457 def set_diskcheck(list):
458 for dc in diskcheckers:
def diskcheck_types():
    """Return the type names of every registered disk checker."""
    return [checker.type for checker in diskcheckers]
466 class EntryProxy(SCons.Util.Proxy):
467 def __get_abspath(self):
469 return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(),
470 entry.name + "_abspath")
472 def __get_filebase(self):
473 name = self.get().name
474 return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0],
477 def __get_suffix(self):
478 name = self.get().name
479 return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1],
482 def __get_file(self):
483 name = self.get().name
484 return SCons.Subst.SpecialAttrWrapper(name, name + "_file")
486 def __get_base_path(self):
487 """Return the file's directory and file name, with the
490 return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0],
491 entry.name + "_base")
493 def __get_posix_path(self):
494 """Return the path with / as the path separator,
495 regardless of platform."""
500 r = entry.get_path().replace(os.sep, '/')
501 return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix")
503 def __get_windows_path(self):
504 """Return the path with \ as the path separator,
505 regardless of platform."""
510 r = entry.get_path().replace(os.sep, '\\')
511 return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_windows")
513 def __get_srcnode(self):
514 return EntryProxy(self.get().srcnode())
516 def __get_srcdir(self):
517 """Returns the directory containing the source node linked to this
518 node via VariantDir(), or the directory of this node if not linked."""
519 return EntryProxy(self.get().srcnode().dir)
521 def __get_rsrcnode(self):
522 return EntryProxy(self.get().srcnode().rfile())
524 def __get_rsrcdir(self):
525 """Returns the directory containing the source node linked to this
526 node via VariantDir(), or the directory of this node if not linked."""
527 return EntryProxy(self.get().srcnode().rfile().dir)
530 return EntryProxy(self.get().dir)
532 dictSpecialAttrs = { "base" : __get_base_path,
533 "posix" : __get_posix_path,
534 "windows" : __get_windows_path,
535 "win32" : __get_windows_path,
536 "srcpath" : __get_srcnode,
537 "srcdir" : __get_srcdir,
539 "abspath" : __get_abspath,
540 "filebase" : __get_filebase,
541 "suffix" : __get_suffix,
543 "rsrcpath" : __get_rsrcnode,
544 "rsrcdir" : __get_rsrcdir,
547 def __getattr__(self, name):
548 # This is how we implement the "special" attributes
549 # such as base, posix, srcdir, etc.
551 attr_function = self.dictSpecialAttrs[name]
554 attr = SCons.Util.Proxy.__getattr__(self, name)
555 except AttributeError, e:
556 # Raise our own AttributeError subclass with an
557 # overridden __str__() method that identifies the
558 # name of the entry that caused the exception.
559 raise EntryProxyAttributeError(self, name)
562 return attr_function(self)
564 class Base(SCons.Node.Node):
565 """A generic class for file system entries. This class is for
566 when we don't know yet whether the entry being looked up is a file
567 or a directory. Instances of this class can morph into either
568 Dir or File objects by a later, more precise lookup.
570 Note: this class does not define __cmp__ and __hash__ for
571 efficiency reasons. SCons does a lot of comparing of
572 Node.FS.{Base,Entry,File,Dir} objects, so those operations must be
573 as fast as possible, which means we want to use Python's built-in
574 object identity comparisons.
577 memoizer_counters = []
579 def __init__(self, name, directory, fs):
580 """Initialize a generic Node.FS.Base object.
582 Call the superclass initialization, take care of setting up
583 our relative and absolute paths, identify our parent
584 directory, and indicate that this node should use
586 if __debug__: logInstanceCreation(self, 'Node.FS.Base')
587 SCons.Node.Node.__init__(self)
589 # Filenames and paths are probably reused and are intern'ed to
591 self.name = SCons.Util.silent_intern(name)
592 self.suffix = SCons.Util.silent_intern(SCons.Util.splitext(name)[1])
595 assert directory, "A directory must be provided"
597 self.abspath = SCons.Util.silent_intern(directory.entry_abspath(name))
598 self.labspath = SCons.Util.silent_intern(directory.entry_labspath(name))
599 if directory.path == '.':
600 self.path = SCons.Util.silent_intern(name)
602 self.path = SCons.Util.silent_intern(directory.entry_path(name))
603 if directory.tpath == '.':
604 self.tpath = SCons.Util.silent_intern(name)
606 self.tpath = SCons.Util.silent_intern(directory.entry_tpath(name))
607 self.path_elements = directory.path_elements + [self]
610 self.cwd = None # will hold the SConscript directory for target nodes
611 self.duplicate = directory.duplicate
613 def str_for_display(self):
614 return '"' + self.__str__() + '"'
616 def must_be_same(self, klass):
618 This node, which already existed, is being looked up as the
619 specified klass. Raise an exception if it isn't.
621 if isinstance(self, klass) or klass is Entry:
623 raise TypeError, "Tried to lookup %s '%s' as a %s." %\
624 (self.__class__.__name__, self.path, klass.__name__)
629 def get_suffix(self):
636 """A Node.FS.Base object's string representation is its path
640 return self._save_str()
641 return self._get_str()
643 memoizer_counters.append(SCons.Memoize.CountValue('_save_str'))
647 return self._memo['_save_str']
650 result = intern(self._get_str())
651 self._memo['_save_str'] = result
656 if self.duplicate or self.is_derived():
657 return self.get_path()
658 srcnode = self.srcnode()
659 if srcnode.stat() is None and self.stat() is not None:
660 result = self.get_path()
662 result = srcnode.get_path()
664         # We're not at the point where we're saving the string
665 # representations of FS Nodes (because we haven't finished
666 # reading the SConscript files and need to have str() return
667 # things relative to them). That also means we can't yet
668 # cache values returned (or not returned) by stat(), since
669 # Python code in the SConscript files might still create
670 # or otherwise affect the on-disk file. So get rid of the
671 # values that the underlying stat() method saved.
672 try: del self._memo['stat']
673 except KeyError: pass
674 if self is not srcnode:
675 try: del srcnode._memo['stat']
676 except KeyError: pass
681 memoizer_counters.append(SCons.Memoize.CountValue('stat'))
684 try: return self._memo['stat']
685 except KeyError: pass
686 try: result = self.fs.stat(self.abspath)
687 except os.error: result = None
688 self._memo['stat'] = result
692 return self.stat() is not None
695 return self.rfile().exists()
699 if st: return st[stat.ST_MTIME]
704 if st: return st[stat.ST_SIZE]
709 return st is not None and stat.S_ISDIR(st[stat.ST_MODE])
713 return st is not None and stat.S_ISREG(st[stat.ST_MODE])
715 if hasattr(os, 'symlink'):
717 try: st = self.fs.lstat(self.abspath)
718 except os.error: return 0
719 return stat.S_ISLNK(st[stat.ST_MODE])
722 return 0 # no symlinks
724 def is_under(self, dir):
728 return self.dir.is_under(dir)
734 """If this node is in a build path, return the node
735 corresponding to its source file. Otherwise, return
738 srcdir_list = self.dir.srcdir_list()
740 srcnode = srcdir_list[0].Entry(self.name)
741 srcnode.must_be_same(self.__class__)
745 def get_path(self, dir=None):
746 """Return path relative to the current working directory of the
747 Node.FS.Base object that owns us."""
749 dir = self.fs.getcwd()
752 path_elems = self.path_elements
753 try: i = path_elems.index(dir)
754 except ValueError: pass
755 else: path_elems = path_elems[i+1:]
756 path_elems = [n.name for n in path_elems]
757 return os.sep.join(path_elems)
759 def set_src_builder(self, builder):
760 """Set the source code builder for this node."""
761 self.sbuilder = builder
762 if not self.has_builder():
763 self.builder_set(builder)
765 def src_builder(self):
766 """Fetch the source code builder for this node.
768 If there isn't one, we cache the source code builder specified
769 for the directory (which in turn will cache the value from its
770 parent directory, and so on up to the file system root).
774 except AttributeError:
775 scb = self.dir.src_builder()
779 def get_abspath(self):
780 """Get the absolute path of the file."""
783 def for_signature(self):
784 # Return just our name. Even an absolute path would not work,
785 # because that can change thanks to symlinks or remapped network
789 def get_subst_proxy(self):
792 except AttributeError:
793 ret = EntryProxy(self)
797 def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext):
800 Generates a target entry that corresponds to this entry (usually
801 a source file) with the specified prefix and suffix.
803 Note that this method can be overridden dynamically for generated
804 files that need different behavior. See Tool/swig.py for
807 return self.dir.Entry(prefix + splitext(self.name)[0] + suffix)
809 def _Rfindalldirs_key(self, pathlist):
812 memoizer_counters.append(SCons.Memoize.CountDict('Rfindalldirs', _Rfindalldirs_key))
814 def Rfindalldirs(self, pathlist):
816 Return all of the directories for a given path list, including
817 corresponding "backing" directories in any repositories.
819 The Node lookups are relative to this Node (typically a
820 directory), so memoizing result saves cycles from looking
821 up the same path for each target in a given directory.
824 memo_dict = self._memo['Rfindalldirs']
827 self._memo['Rfindalldirs'] = memo_dict
830 return memo_dict[pathlist]
834 create_dir_relative_to_self = self.Dir
836 for path in pathlist:
837 if isinstance(path, SCons.Node.Node):
840 dir = create_dir_relative_to_self(path)
841 result.extend(dir.get_all_rdirs())
843 memo_dict[pathlist] = result
847 def RDirs(self, pathlist):
848 """Search for a list of directories in the Repository list."""
849 cwd = self.cwd or self.fs._cwd
850 return cwd.Rfindalldirs(pathlist)
852 memoizer_counters.append(SCons.Memoize.CountValue('rentry'))
856 return self._memo['rentry']
860 if not self.exists():
861 norm_name = _my_normcase(self.name)
862 for dir in self.dir.get_all_rdirs():
864 node = dir.entries[norm_name]
866 if dir.entry_exists_on_disk(self.name):
867 result = dir.Entry(self.name)
869 self._memo['rentry'] = result
872 def _glob1(self, pattern, ondisk=True, source=False, strings=False):
876 """This is the class for generic Node.FS entries--that is, things
877 that could be a File or a Dir, but we're just not sure yet.
878 Consequently, the methods in this class really exist just to
879 transform their associated object into the right class when the
880 time comes, and then call the same-named method in the transformed
883 def diskcheck_match(self):
886 def disambiguate(self, must_exist=None):
893 self.__class__ = File
897 # There was nothing on-disk at this location, so look in
900 # We can't just use self.srcnode() straight away because
901 # that would create an actual Node for this file in the src
902 # directory, and there might not be one. Instead, use the
903 # dir_on_disk() method to see if there's something on-disk
904 # with that name, in which case we can go ahead and call
905 # self.srcnode() to create the right type of entry.
906 srcdir = self.dir.srcnode()
907 if srcdir != self.dir and \
908 srcdir.entry_exists_on_disk(self.name) and \
909 self.srcnode().isdir():
913 msg = "No such file or directory: '%s'" % self.abspath
914 raise SCons.Errors.UserError, msg
916 self.__class__ = File
922 """We're a generic Entry, but the caller is actually looking for
923 a File at this point, so morph into one."""
924 self.__class__ = File
927 return File.rfile(self)
    def scanner_key(self):
        # Scanners for FS entries are selected by file suffix.
        return self.get_suffix()
932 def get_contents(self):
933 """Fetch the contents of the entry. Returns the exact binary
934 contents of the file."""
936 self = self.disambiguate(must_exist=1)
937 except SCons.Errors.UserError:
938 # There was nothing on disk with which to disambiguate
939 # this entry. Leave it as an Entry, but return a null
940 # string so calls to get_contents() in emitters and the
941 # like (e.g. in qt.py) don't have to disambiguate by hand
942 # or catch the exception.
945 return self.get_contents()
947 def get_text_contents(self):
948 """Fetch the decoded text contents of a Unicode encoded Entry.
950 Since this should return the text contents from the file
951 system, we check to see into what sort of subclass we should
954 self = self.disambiguate(must_exist=1)
955 except SCons.Errors.UserError:
956 # There was nothing on disk with which to disambiguate
957 # this entry. Leave it as an Entry, but return a null
958 # string so calls to get_text_contents() in emitters and
959 # the like (e.g. in qt.py) don't have to disambiguate by
960 # hand or catch the exception.
963 return self.get_text_contents()
965 def must_be_same(self, klass):
966 """Called to make sure a Node is a Dir. Since we're an
967 Entry, we can morph into one."""
968 if self.__class__ is not klass:
969 self.__class__ = klass
973 # The following methods can get called before the Taskmaster has
974 # had a chance to call disambiguate() directly to see if this Entry
975 # should really be a Dir or a File. We therefore use these to call
976 # disambiguate() transparently (from our caller's point of view).
978 # Right now, this minimal set of methods has been derived by just
979 # looking at some of the methods that will obviously be called early
980 # in any of the various Taskmasters' calling sequences, and then
981 # empirically figuring out which additional methods are necessary
982 # to make various tests pass.
985 """Return if the Entry exists. Check the file system to see
986 what we should turn into first. Assume a file if there's no
988 return self.disambiguate().exists()
990 def rel_path(self, other):
991 d = self.disambiguate()
992 if d.__class__ is Entry:
993 raise "rel_path() could not disambiguate File/Dir"
994 return d.rel_path(other)
997 return self.disambiguate().new_ninfo()
    def changed_since_last_build(self, target, prev_ni):
        # Change detection needs real File/Dir semantics; morph first,
        # then delegate to the resulting class's implementation.
        return self.disambiguate().changed_since_last_build(target, prev_ni)
    def _glob1(self, pattern, ondisk=True, source=False, strings=False):
        # Globbing needs real File/Dir semantics; morph first, then delegate.
        return self.disambiguate()._glob1(pattern, ondisk, source, strings)
    def get_subst_proxy(self):
        # Morph into File/Dir first so the proxy reflects the real class.
        return self.disambiguate().get_subst_proxy()
1008 # This is for later so we can differentiate between Entry the class and Entry
1009 # the method of the FS class.
1015 if SCons.Memoize.use_memoizer:
1016 __metaclass__ = SCons.Memoize.Memoized_Metaclass
1018 # This class implements an abstraction layer for operations involving
1019 # a local file system. Essentially, this wraps any function in
1020 # the os, os.path or shutil modules that we use to actually go do
1021 # anything with or to the local file system.
1023 # Note that there's a very good chance we'll refactor this part of
1024 # the architecture in some way as we really implement the interface(s)
1025 # for remote file system Nodes. For example, the right architecture
1026 # might be to have this be a subclass instead of a base class.
1027 # Nevertheless, we're using this as a first step in that direction.
1029 # We're not using chdir() yet because the calling subclass method
1030 # needs to use os.chdir() directly to avoid recursion. Will we
1031 # really need this one?
1032 #def chdir(self, path):
1033 # return os.chdir(path)
    def chmod(self, path, mode):
        """Change the permission bits of path; wraps os.chmod()."""
        return os.chmod(path, mode)
    def copy(self, src, dst):
        """Copy src to dst (data and mode bits); wraps shutil.copy()."""
        return shutil.copy(src, dst)
    def copy2(self, src, dst):
        """Copy src to dst, preserving metadata too; wraps shutil.copy2()."""
        return shutil.copy2(src, dst)
    def exists(self, path):
        """Return whether path exists on disk; wraps os.path.exists()."""
        return os.path.exists(path)
    def getmtime(self, path):
        """Return path's modification time; wraps os.path.getmtime()."""
        return os.path.getmtime(path)
    def getsize(self, path):
        """Return path's size in bytes; wraps os.path.getsize()."""
        return os.path.getsize(path)
    def isdir(self, path):
        """Return whether path is a directory; wraps os.path.isdir()."""
        return os.path.isdir(path)
    def isfile(self, path):
        """Return whether path is a regular file; wraps os.path.isfile()."""
        return os.path.isfile(path)
    def link(self, src, dst):
        """Create a hard link dst pointing at src; wraps os.link()."""
        return os.link(src, dst)
    def lstat(self, path):
        """stat() without following symlinks; wraps os.lstat()."""
        return os.lstat(path)
    def listdir(self, path):
        """Return the entries in directory path; wraps os.listdir()."""
        return os.listdir(path)
    def makedirs(self, path):
        """Create path including intermediate dirs; wraps os.makedirs()."""
        return os.makedirs(path)
    def mkdir(self, path):
        """Create the single directory path; wraps os.mkdir()."""
        return os.mkdir(path)
    def rename(self, old, new):
        """Rename old to new; wraps os.rename()."""
        return os.rename(old, new)
    def stat(self, path):
        """Return the stat result for path; wraps os.stat()."""
        return os.stat(path)
    def symlink(self, src, dst):
        """Create a symbolic link dst pointing at src; wraps os.symlink()."""
        return os.symlink(src, dst)
1066 def open(self, path):
    def unlink(self, path):
        """Remove the file at path; wraps os.unlink()."""
        return os.unlink(path)
1071 if hasattr(os, 'symlink'):
        def islink(self, path):
            """Return whether path is a symlink; wraps os.path.islink().
            Defined only when the platform has os.symlink."""
            return os.path.islink(path)
        def islink(self, path):
            """Fallback for platforms without symlinks: nothing is a link."""
            return 0  # no symlinks
1078 if hasattr(os, 'readlink'):
        def readlink(self, file):
            """Return the target of the symbolic link; wraps os.readlink().
            Defined only when the platform has os.readlink."""
            return os.readlink(file)
1082 def readlink(self, file):
1087 # # Skeleton for the obvious methods we might need from the
1088 # # abstraction layer for a remote filesystem.
1089 # def upload(self, local_src, remote_dst):
1091 # def download(self, remote_src, local_dst):
1097 memoizer_counters = []
1099 def __init__(self, path = None):
1100 """Initialize the Node.FS subsystem.
1102 The supplied path is the top of the source tree, where we
1103 expect to find the top-level build file. If no path is
1104 supplied, the current directory is the default.
1106 The path argument must be a valid absolute path.
1108 if __debug__: logInstanceCreation(self, 'Node.FS')
1113 self.SConstruct_dir = None
1114 self.max_drift = default_max_drift
1118 self.pathTop = os.getcwd()
1121 self.defaultDrive = _my_normcase(os.path.splitdrive(self.pathTop)[0])
1123 self.Top = self.Dir(self.pathTop)
1125 self.Top.tpath = '.'
1126 self._cwd = self.Top
1128 DirNodeInfo.fs = self
1129 FileNodeInfo.fs = self
    def set_SConstruct_dir(self, dir):
        """Record the directory containing the top-level SConstruct file."""
        self.SConstruct_dir = dir
    def get_max_drift(self):
        """Return the current max_drift setting (see set_max_drift)."""
        return self.max_drift
    def set_max_drift(self, max_drift):
        """Set the maximum "drift", in seconds, beyond which a file's
        cached signature value is used (see default_max_drift)."""
        self.max_drift = max_drift
1143 def chdir(self, dir, change_os_dir=0):
1144 """Change the current working directory for lookups.
1145 If change_os_dir is true, we will also change the "real" cwd
1153 os.chdir(dir.abspath)
1158 def get_root(self, drive):
1160 Returns the root directory for the specified drive, creating
1163 drive = _my_normcase(drive)
1165 return self.Root[drive]
1167 root = RootDir(drive, self)
1168 self.Root[drive] = root
1170 self.Root[self.defaultDrive] = root
1171 elif drive == self.defaultDrive:
1172 self.Root[''] = root
# NOTE(review): listing has elided lines; annotations only.
# _lookup: normalize arbitrary user input (Node, string, '#'-relative path)
# into the One True Node via the root's _lookup_abs().
1175 def _lookup(self, p, directory, fsclass, create=1):
1177 The generic entry point for Node lookup with user-supplied data.
1179 This translates arbitrary input into a canonical Node.FS object
1180 of the specified fsclass. The general approach for strings is
1181 to turn it into a fully normalized absolute path and then call
1182 the root directory's lookup_abs() method for the heavy lifting.
1184 If the path name begins with '#', it is unconditionally
1185 interpreted relative to the top-level directory of this FS. '#'
1186 is treated as a synonym for the top-level SConstruct directory,
1187 much like '~' is treated as a synonym for the user's home
1188 directory in a UNIX shell. So both '#foo' and '#/foo' refer
1189 to the 'foo' subdirectory underneath the top-level SConstruct
1192 If the path name is relative, then the path is looked up relative
1193 to the specified directory, or the current directory (self._cwd,
1194 typically the SConscript directory) if the specified directory
1197 if isinstance(p, Base):
1198 # It's already a Node.FS object. Make sure it's the right
1200 p.must_be_same(fsclass)
1202 # str(p) in case it's something like a proxy object
# p[0:1] (not p[0]) so an empty string safely yields '' rather than IndexError.
1205 initial_hash = (p[0:1] == '#')
1207 # There was an initial '#', so we strip it and override
1208 # whatever directory they may have specified with the
1209 # top-level SConstruct directory.
1211 directory = self.Top
1213 if directory and not isinstance(directory, Dir):
1214 directory = self.Dir(directory)
1217 drive, p = os.path.splitdrive(p)
1221 # This causes a naked drive letter to be treated as a synonym
1222 # for the root directory on that drive.
1224 absolute = os.path.isabs(p)
# needs_normpath_check is a module-level regex (defined outside this view)
# deciding whether the more expensive os.path.normpath is required.
1226 needs_normpath = needs_normpath_check.match(p)
1228 if initial_hash or not absolute:
1229 # This is a relative lookup, either to the top-level
1230 # SConstruct directory (because of the initial '#') or to
1231 # the current directory (the path name is not absolute).
1232 # Add the string to the appropriate directory lookup path,
1233 # after which the whole thing gets normalized.
1235 directory = self._cwd
1237 p = directory.labspath + '/' + p
1239 p = directory.labspath
1242 p = os.path.normpath(p)
1244 if drive or absolute:
1245 root = self.get_root(drive)
1248 directory = self._cwd
1249 root = directory.root
# Lookup keys always use '/' regardless of platform separator.
1252 p = p.replace(os.sep, '/')
1253 return root._lookup_abs(p, fsclass, create)
# NOTE(review): listing has elided lines; annotations only.
# The three public lookup entry points; each defers to _lookup with the
# appropriate Node class.
1255 def Entry(self, name, directory = None, create = 1):
1256 """Look up or create a generic Entry node with the specified name.
1257 If the name is a relative path (begins with ./, ../, or a file
1258 name), then it is looked up relative to the supplied directory
1259 node, or to the top level directory of the FS (supplied at
1260 construction time) if no directory is supplied.
1262 return self._lookup(name, directory, Entry, create)
1264 def File(self, name, directory = None, create = 1):
1265 """Look up or create a File node with the specified name. If
1266 the name is a relative path (begins with ./, ../, or a file name),
1267 then it is looked up relative to the supplied directory node,
1268 or to the top level directory of the FS (supplied at construction
1269 time) if no directory is supplied.
1271 This method will raise TypeError if a directory is found at the
1274 return self._lookup(name, directory, File, create)
1276 def Dir(self, name, directory = None, create = True):
1277 """Look up or create a Dir node with the specified name. If
1278 the name is a relative path (begins with ./, ../, or a file name),
1279 then it is looked up relative to the supplied directory node,
1280 or to the top level directory of the FS (supplied at construction
1281 time) if no directory is supplied.
1283 This method will raise TypeError if a normal file is found at the
1286 return self._lookup(name, directory, Dir, create)
1288 def VariantDir(self, variant_dir, src_dir, duplicate=1):
1289 """Link the supplied variant directory to the source directory
1290 for purposes of building files."""
1292 if not isinstance(src_dir, SCons.Node.Node):
1293 src_dir = self.Dir(src_dir)
1294 if not isinstance(variant_dir, SCons.Node.Node):
1295 variant_dir = self.Dir(variant_dir)
1296 if src_dir.is_under(variant_dir):
1297 raise SCons.Errors.UserError, "Source directory cannot be under variant directory."
1298 if variant_dir.srcdir:
1299 if variant_dir.srcdir == src_dir:
1300 return # We already did this.
1301 raise SCons.Errors.UserError, "'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir)
1302 variant_dir.link(src_dir, duplicate)
# NOTE(review): listing has elided lines; annotations only.
# Repository: each argument is coerced to a Node (the loop header and the
# coercion line are elided here) and registered on the top-level dir.
1304 def Repository(self, *dirs):
1305 """Specify Repository directories to search."""
1307 if not isinstance(d, SCons.Node.Node):
1309 self.Top.addRepository(d)
# variant_dir_target_climb: walk up from 'dir' mapping command-line targets
# into any linked variant directories.
1311 def variant_dir_target_climb(self, orig, dir, tail):
1312 """Create targets in corresponding variant directories
1314 Climb the directory tree, and look up path names
1315 relative to any linked variant directories we find.
1317 Even though this loops and walks up the tree, we don't memoize
1318 the return value because this is really only used to process
1319 the command-line targets.
1323 fmt = "building associated VariantDir targets: %s"
1326 for bd in dir.variant_dirs:
1327 if start_dir.is_under(bd):
1328 # If already in the build-dir location, don't reflect
1329 return [orig], fmt % str(orig)
1330 p = os.path.join(bd.path, *tail)
1331 targets.append(self.Entry(p))
# As we climb, prepend the current dir name so 'tail' stays relative
# to the next directory up.
1332 tail = [dir.name] + tail
1335 message = fmt % ' '.join(map(str, targets))
1336 return targets, message
# Glob: thin shim that resolves cwd and delegates to Dir.glob().
1338 def Glob(self, pathname, ondisk=True, source=True, strings=False, cwd=None):
1342 This is mainly a shim layer
1346 return cwd.glob(pathname, ondisk, source, strings)
# NOTE(review): listing has elided lines; annotations only.
1348 class DirNodeInfo(SCons.Node.NodeInfoBase):
1349 # This should get reset by the FS initialization.
1350 current_version_id = 1
# str_to_node: map a stored path string back to a Node. Relative paths are
# anchored at the top-level dir ('top' is bound on elided lines); the drive
# component selects the proper RootDir.
1354 def str_to_node(self, s):
1358 drive, s = os.path.splitdrive(s)
1360 root = self.fs.get_root(drive)
1361 if not os.path.isabs(s):
1362 s = top.labspath + '/' + s
1363 return root._lookup_abs(s, Entry)
class DirBuildInfo(SCons.Node.BuildInfoBase):
    """Build-information record for Dir nodes (versioned for .sconsign)."""
    current_version_id = 1
glob_magic_check = re.compile('[*?[]')

def has_glob_magic(s):
    """Return True if *s* contains any glob metacharacter (*, ? or [)."""
    found = glob_magic_check.search(s)
    return found is not None
1374 """A class for directories in a file system.
1377 memoizer_counters = []
1379 NodeInfo = DirNodeInfo
1380 BuildInfo = DirBuildInfo
1382 def __init__(self, name, directory, fs):
1383 if __debug__: logInstanceCreation(self, 'Node.FS.Dir')
1384 Base.__init__(self, name, directory, fs)
1388 """Turn a file system Node (either a freshly initialized directory
1389 object or a separate Entry object) into a proper directory object.
1391 Set up this directory's entries and hook it into the file
1392 system tree. Specify that directories (this Node) don't use
1393 signatures for calculating whether they're current.
1396 self.repositories = []
1400 self.entries['.'] = self
1401 self.entries['..'] = self.dir
1404 self._sconsign = None
1405 self.variant_dirs = []
1406 self.root = self.dir.root
1408 # Don't just reset the executor, replace its action list,
1409 # because it might have some pre-or post-actions that need to
1411 self.builder = get_MkdirBuilder()
1412 self.get_executor().set_action_list(self.builder.action)
1414 def diskcheck_match(self):
1415 diskcheck_match(self, self.isfile,
1416 "File %s found where directory expected.")
# NOTE(review): listing has elided lines; annotations only.
# __clearRepositoryCache: recursively drop memoized repository-dependent
# state in this subtree; optionally push a new 'duplicate' setting down.
1418 def __clearRepositoryCache(self, duplicate=None):
1419 """Called when we change the repository(ies) for a directory.
1420 This clears any cached information that is invalidated by changing
1423 for node in self.entries.values():
1424 if node != self.dir:
1425 if node != self and isinstance(node, Dir):
1426 node.__clearRepositoryCache(duplicate)
1431 except AttributeError:
1433 if duplicate is not None:
1434 node.duplicate=duplicate
# __resetDuplicate: child re-inherits 'duplicate' from its parent dir.
1436 def __resetDuplicate(self, node):
1438 node.duplicate = node.get_dir().duplicate
# Relative-lookup conveniences: all three defer to the FS-level methods
# with self as the anchoring directory.
1440 def Entry(self, name):
1442 Looks up or creates an entry node named 'name' relative to
1445 return self.fs.Entry(name, self)
1447 def Dir(self, name, create=True):
1449 Looks up or creates a directory node named 'name' relative to
1452 return self.fs.Dir(name, self, create)
1454 def File(self, name):
1456 Looks up or creates a file node named 'name' relative to
1459 return self.fs.File(name, self)
# _lookup_rel: fast path for already-normalized relative names.
1461 def _lookup_rel(self, name, klass, create=1):
1463 Looks up a *normalized* relative path name, relative to this
1466 This method is intended for use by internal lookups with
1467 already-normalized path data. For general-purpose lookups,
1468 use the Entry(), Dir() and File() methods above.
1470 This method does *no* input checking and will die or give
1471 incorrect results if it's passed a non-normalized path name (e.g.,
1472 a path containing '..'), an absolute path name, a top-relative
1473 ('#foo') path name, or any kind of object.
1475 name = self.entry_labspath(name)
1476 return self.root._lookup_abs(name, klass, create)
1478 def link(self, srcdir, duplicate):
1479 """Set this directory as the variant directory for the
1480 supplied source directory."""
1481 self.srcdir = srcdir
1482 self.duplicate = duplicate
1483 self.__clearRepositoryCache(duplicate)
1484 srcdir.variant_dirs.append(self)
# NOTE(review): docstring closing line elided in this listing.
1486 def getRepositories(self):
1487 """Returns a list of repositories for this directory.
# A non-duplicating variant dir also searches its source dir's chain.
1489 if self.srcdir and not self.duplicate:
1490 return self.srcdir.get_all_rdirs() + self.repositories
1491 return self.repositories
# NOTE(review): listing has elided lines; annotations only.
1493 memoizer_counters.append(SCons.Memoize.CountValue('get_all_rdirs'))
# get_all_rdirs: memoized transitive list of this dir plus its repository
# equivalents, walking up the tree (loop scaffolding elided).
1495 def get_all_rdirs(self):
1497 return list(self._memo['get_all_rdirs'])
1505 for rep in dir.getRepositories():
1506 result.append(rep.Dir(fname))
1510 fname = dir.name + os.sep + fname
# Memo stores a private copy; callers above also get a fresh list.
1513 self._memo['get_all_rdirs'] = list(result)
# addRepository: register a repository dir once; invalidates caches.
1517 def addRepository(self, dir):
1518 if dir != self and not dir in self.repositories:
1519 self.repositories.append(dir)
1521 self.__clearRepositoryCache()
# up(): parent directory via the '..' entry (def line elided).
1524 return self.entries['..']
# Memoization key helper for rel_path (body elided).
1526 def _rel_path_key(self, other):
1529 memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key))
# NOTE(review): listing has elided lines; annotations only.
# rel_path: compute a relative path string from self to 'other', memoized
# per (self, other) pair. Kept only for backward compatibility.
1531 def rel_path(self, other):
1532 """Return a path to "other" relative to this directory.
1535 # This complicated and expensive method, which constructs relative
1536 # paths between arbitrary Node.FS objects, is no longer used
1537 # by SCons itself. It was introduced to store dependency paths
1538 # in .sconsign files relative to the target, but that ended up
1539 # being significantly inefficient.
1541 # We're continuing to support the method because some SConstruct
1542 # files out there started using it when it was available, and
1543 # we're all about backwards compatibility..
1546 memo_dict = self._memo['rel_path']
1549 self._memo['rel_path'] = memo_dict
1552 return memo_dict[other]
# If 'other' is not on our path, recurse on its parent and append its name.
1559 elif not other in self.path_elements:
1561 other_dir = other.get_dir()
1562 except AttributeError:
1565 if other_dir is None:
1568 dir_rel_path = self.rel_path(other_dir)
1569 if dir_rel_path == '.':
1572 result = dir_rel_path + os.sep + other.name
# Otherwise 'other' is an ancestor: climb with '..' then descend.
1574 i = self.path_elements.index(other) + 1
1576 path_elems = ['..'] * (len(self.path_elements) - i) \
1577 + [n.name for n in other.path_elements[i:]]
1579 result = os.sep.join(path_elems)
1581 memo_dict[other] = result
1585 def get_env_scanner(self, env, kw={}):
1586 import SCons.Defaults
1587 return SCons.Defaults.DirEntryScanner
1589 def get_target_scanner(self):
1590 import SCons.Defaults
1591 return SCons.Defaults.DirEntryScanner
# NOTE(review): listing has elided lines; annotations only.
1593 def get_found_includes(self, env, scanner, path):
1594 """Return this directory's implicit dependencies.
1596 We don't bother caching the results because the scan typically
1597 shouldn't be requested more than once (as opposed to scanning
1598 .h file contents, which can be requested as many times as the
1599 file is #included by other files).
1603 # Clear cached info for this Dir. If we already visited this
1604 # directory on our walk down the tree (because we didn't know at
1605 # that point it was being used as the source for another Node)
1606 # then we may have calculated build signature before realizing
1607 # we had to scan the disk. Now that we have to, though, we need
1608 # to invalidate the old calculated signature so that any node
1609 # dependent on our directory structure gets one that includes
1610 # info about everything on disk.
1612 return scanner(self, env, path)
1615 # Taskmaster interface subsystem
# build: no-op for the implicit Mkdir builder; real builders still run.
1621 def build(self, **kw):
1622 """A null "builder" for directories."""
1624 if self.builder is not MkdirBuilder:
1625 SCons.Node.Node.build(self, **kw)
1632 """Create this directory, silently and without worrying about
1633 whether the builder is the default or not."""
1639 listDirs.append(parent)
1642 # Don't use while: - else: for this condition because
1643 # if so, then parent is None and has no .path attribute.
1644 raise SCons.Errors.StopError, parent.path
1647 for dirnode in listDirs:
1649 # Don't call dirnode.build(), call the base Node method
1650 # directly because we definitely *must* create this
1651 # directory. The dirnode.build() method will suppress
1652 # the build if it's the default builder.
1653 SCons.Node.Node.build(dirnode)
1654 dirnode.get_executor().nullify()
1655 # The build() action may or may not have actually
1656 # created the directory, depending on whether the -n
1657 # option was used or not. Delete the _exists and
1658 # _rexists attributes so they can be reevaluated.
1663 def multiple_side_effect_has_builder(self):
1665 return self.builder is not MkdirBuilder and self.has_builder()
1667 def alter_targets(self):
1668 """Return any corresponding targets in a variant directory.
1670 return self.fs.variant_dir_target_climb(self, self, [])
1672 def scanner_key(self):
1673 """A directory does not get scanned."""
1676 def get_text_contents(self):
1677 """We already emit things in text, so just return the binary
1679 return self.get_contents()
1681 def get_contents(self):
1682 """Return content signatures and names of all our children
1683 separated by new-lines. Ensure that the nodes are sorted."""
1685 for node in sorted(self.children(), key=lambda t: t.name):
1686 contents.append('%s %s\n' % (node.get_csig(), node.name))
1687 return ''.join(contents)
1690 """Compute the content signature for Directory nodes. In
1691 general, this is not needed and the content signature is not
1692 stored in the DirNodeInfo. However, if get_contents on a Dir
1693 node is called which has a child directory, the child
1694 directory should return the hash of its contents."""
1695 contents = self.get_contents()
1696 return SCons.Util.MD5signature(contents)
1698 def do_duplicate(self, src):
1701 changed_since_last_build = SCons.Node.Node.state_has_changed
# NOTE(review): listing has elided lines; annotations only.
1703 def is_up_to_date(self):
1704 """If any child is not up-to-date, then this directory isn't,
1706 if self.builder is not MkdirBuilder and not self.exists():
1708 up_to_date = SCons.Node.up_to_date
1709 for kid in self.children():
1710 if kid.get_state() > up_to_date:
# Fragment of rdir() (def line elided): find the repository equivalent of
# a directory that doesn't exist locally.
1715 if not self.exists():
1716 norm_name = _my_normcase(self.name)
1717 for dir in self.dir.get_all_rdirs():
1718 try: node = dir.entries[norm_name]
1719 except KeyError: node = dir.dir_on_disk(self.name)
1720 if node and node.exists() and \
1721 (isinstance(dir, Dir) or isinstance(dir, Entry)):
# Fragment of sconsign() (def line elided): lazily create the per-dir
# .sconsign database handle.
1726 """Return the .sconsign file info for this directory,
1727 creating it first if necessary."""
1728 if not self._sconsign:
1729 import SCons.SConsign
1730 self._sconsign = SCons.SConsign.ForDirectory(self)
1731 return self._sconsign
# Fragment of srcnode() (def line elided).
1734 """Dir has a special need for srcnode()...if we
1735 have a srcdir attribute set, then that *is* our srcnode."""
1738 return Base.srcnode(self)
# get_timestamp: max of children's timestamps (init/return lines elided).
1740 def get_timestamp(self):
1741 """Return the latest timestamp from among our children"""
1743 for kid in self.children():
1744 if kid.get_timestamp() > stamp:
1745 stamp = kid.get_timestamp()
1748 def entry_abspath(self, name):
1749 return self.abspath + os.sep + name
1751 def entry_labspath(self, name):
1752 return self.labspath + '/' + name
1754 def entry_path(self, name):
1755 return self.path + os.sep + name
1757 def entry_tpath(self, name):
1758 return self.tpath + os.sep + name
# NOTE(review): listing has elided lines; annotations only.
# entry_exists_on_disk: memoized on-disk membership test using a dict of
# normalized names built from one os.listdir() call.
1760 def entry_exists_on_disk(self, name):
1762 d = self.on_disk_entries
1763 except AttributeError:
1766 entries = os.listdir(self.abspath)
1770 for entry in map(_my_normcase, entries):
1772 self.on_disk_entries = d
1773 if sys.platform == 'win32':
1774 name = _my_normcase(name)
1775 result = d.get(name)
1777 # Belt-and-suspenders for Windows: check directly for
1778 # 8.3 file names that don't show up in os.listdir().
1779 result = os.path.exists(self.abspath + os.sep + name)
1785 memoizer_counters.append(SCons.Memoize.CountValue('srcdir_list'))
# srcdir_list: memoized chain of source dirs for this (possibly variant)
# directory, climbing the tree (loop scaffolding elided).
1787 def srcdir_list(self):
1789 return self._memo['srcdir_list']
1799 result.append(dir.srcdir.Dir(dirname))
1800 dirname = dir.name + os.sep + dirname
1803 self._memo['srcdir_list'] = result
# srcdir_duplicate: copy (or link) 'name' from the first source dir that
# has it on disk into this variant dir.
1807 def srcdir_duplicate(self, name):
1808 for dir in self.srcdir_list():
1809 if self.is_under(dir):
1810 # We shouldn't source from something in the build path;
1811 # variant_dir is probably under src_dir, in which case
1812 # we are reflecting.
1814 if dir.entry_exists_on_disk(name):
1815 srcnode = dir.Entry(name).disambiguate()
1817 node = self.Entry(name).disambiguate()
1818 node.do_duplicate(srcnode)
# Memoization key helper for srcdir_find_file (body elided).
1824 def _srcdir_find_file_key(self, filename):
1827 memoizer_counters.append(SCons.Memoize.CountDict('srcdir_find_file', _srcdir_find_file_key))
# srcdir_find_file: locate 'filename' in this dir's repository chain or
# its source dirs' chains; memoized per filename.
1829 def srcdir_find_file(self, filename):
1831 memo_dict = self._memo['srcdir_find_file']
1834 self._memo['srcdir_find_file'] = memo_dict
1837 return memo_dict[filename]
# 'func' filter (assembled on elided lines) accepts derived/existing
# File or Entry nodes.
1842 if (isinstance(node, File) or isinstance(node, Entry)) and \
1843 (node.is_derived() or node.exists()):
1847 norm_name = _my_normcase(filename)
1849 for rdir in self.get_all_rdirs():
1850 try: node = rdir.entries[norm_name]
1851 except KeyError: node = rdir.file_on_disk(filename)
1852 else: node = func(node)
1854 result = (node, self)
1855 memo_dict[filename] = result
1858 for srcdir in self.srcdir_list():
1859 for rdir in srcdir.get_all_rdirs():
1860 try: node = rdir.entries[norm_name]
1861 except KeyError: node = rdir.file_on_disk(filename)
1862 else: node = func(node)
1864 result = (File(filename, self, self.fs), srcdir)
1865 memo_dict[filename] = result
1868 result = (None, None)
1869 memo_dict[filename] = result
# dir_on_disk / file_on_disk: resolve 'name' to the right node type,
# falling back to srcdir duplication (TypeError means wrong node kind).
1872 def dir_on_disk(self, name):
1873 if self.entry_exists_on_disk(name):
1874 try: return self.Dir(name)
1875 except TypeError: pass
1876 node = self.srcdir_duplicate(name)
1877 if isinstance(node, File):
1881 def file_on_disk(self, name):
1882 if self.entry_exists_on_disk(name) or \
1883 diskcheck_rcs(self, name) or \
1884 diskcheck_sccs(self, name):
1885 try: return self.File(name)
1886 except TypeError: pass
1887 node = self.srcdir_duplicate(name)
1888 if isinstance(node, Dir):
# walk: os.path.walk()-style traversal over in-memory Dir nodes.
1892 def walk(self, func, arg):
1894 Walk this directory tree by calling the specified function
1895 for each directory in the tree.
1897 This behaves like the os.path.walk() function, but for in-memory
1898 Node.FS.Dir objects. The function takes the same arguments as
1899 the functions passed to os.path.walk():
1901 func(arg, dirname, fnames)
1903 Except that "dirname" will actually be the directory *Node*,
1904 not the string. The '.' and '..' entries are excluded from
1905 fnames. The fnames list may be modified in-place to filter the
1906 subdirectories visited or otherwise impose a specific order.
1907 The "arg" argument is always passed to func() and may be used
1908 in any way (or ignored, passing None is common).
1910 entries = self.entries
1911 names = entries.keys()
1914 func(arg, self, names)
1915 for dirname in [n for n in names if isinstance(entries[n], Dir)]:
1916 entries[dirname].walk(func, arg)
# NOTE(review): listing has elided lines; annotations only.
1918 def glob(self, pathname, ondisk=True, source=False, strings=False):
1920 Returns a list of Nodes (or strings) matching a specified
1923 Pathname patterns follow UNIX shell semantics: * matches
1924 any-length strings of any characters, ? matches any character,
1925 and [] can enclose lists or ranges of characters. Matches do
1926 not span directory separators.
1928 The matches take into account Repositories, returning local
1929 Nodes if a corresponding entry exists in a Repository (either
1930 an in-memory Node or something on disk).
1932 By default, the glob() function matches entries that exist
1933 on-disk, in addition to in-memory Nodes. Setting the "ondisk"
1934 argument to False (or some other non-true value) causes the glob()
1935 function to only match in-memory Nodes. The default behavior is
1936 to return both the on-disk and in-memory Nodes.
1938 The "source" argument, when true, specifies that corresponding
1939 source Nodes must be returned if you're globbing in a build
1940 directory (initialized with VariantDir()). The default behavior
1941 is to return Nodes local to the VariantDir().
1943 The "strings" argument, when true, returns the matches as strings,
1944 not Nodes. The strings are path names relative to this directory.
1946 The underlying algorithm is adapted from the glob.glob() function
1947 in the Python library (but heavily modified), and uses fnmatch()
# Split off the last path component; a pattern-free call hits the
# single-directory fast path via _glob1.
1950 dirname, basename = os.path.split(pathname)
1952 return sorted(self._glob1(basename, ondisk, source, strings),
1953 key=lambda t: str(t))
# If the directory part itself has wildcards, recurse on it first.
1954 if has_glob_magic(dirname):
1955 list = self.glob(dirname, ondisk, source, strings=False)
1957 list = [self.Dir(dirname, create=True)]
1960 r = dir._glob1(basename, ondisk, source, strings)
1962 r = [os.path.join(str(dir), x) for x in r]
1964 result.sort(lambda a, b: cmp(str(a), str(b)))
# NOTE(review): listing has elided lines; annotations only.
1967 def _glob1(self, pattern, ondisk=True, source=False, strings=False):
1969 Globs for and returns a list of entry names matching a single
1970 pattern in this directory.
1972 This searches any repositories and source directories for
1973 corresponding entries and returns a Node (or string) relative
1974 to the current directory if an entry is found anywhere.
1976 TODO: handle pattern with no wildcard
# Search this dir's repository chain plus all source dirs' chains.
1978 search_dir_list = self.get_all_rdirs()
1979 for srcdir in self.srcdir_list():
1980 search_dir_list.extend(srcdir.get_all_rdirs())
1982 selfEntry = self.Entry
1984 for dir in search_dir_list:
1985 # We use the .name attribute from the Node because the keys of
1986 # the dir.entries dictionary are normalized (that is, all upper
1987 # case) on case-insensitive systems like Windows.
1988 node_names = [ v.name for k, v in dir.entries.items()
1989 if k not in ('.', '..') ]
1990 names.extend(node_names)
1992 # Make sure the working directory (self) actually has
1993 # entries for all Nodes in repositories or variant dirs.
1994 for name in node_names: selfEntry(name)
# On-disk scan (guarded by 'ondisk' on an elided line).
1997 disk_names = os.listdir(dir.abspath)
2000 names.extend(disk_names)
2002 # We're going to return corresponding Nodes in
2003 # the local directory, so we need to make sure
2004 # those Nodes exist. We only want to create
2005 # Nodes for the entries that will match the
2006 # specified pattern, though, which means we
2007 # need to filter the list here, even though
2008 # the overall list will also be filtered later,
2009 # after we exit this loop.
2010 if pattern[0] != '.':
2011 #disk_names = [ d for d in disk_names if d[0] != '.' ]
2012 disk_names = [x for x in disk_names if x[0] != '.']
2013 disk_names = fnmatch.filter(disk_names, pattern)
2014 dirEntry = dir.Entry
2015 for name in disk_names:
2016 # Add './' before disk filename so that '#' at
2017 # beginning of filename isn't interpreted.
2019 node = dirEntry(name).disambiguate()
2021 if n.__class__ != node.__class__:
2022 n.__class__ = node.__class__
# Final filter: shell-style hidden-file exclusion then fnmatch.
2026 if pattern[0] != '.':
2027 #names = [ n for n in names if n[0] != '.' ]
2028 names = [x for x in names if x[0] != '.']
2029 names = fnmatch.filter(names, pattern)
2034 #return [ self.entries[_my_normcase(n)] for n in names ]
2035 return [self.entries[_my_normcase(n)] for n in names]
2038 """A class for the root directory of a file system.
2040 This is the same as a Dir class, except that the path separator
2041 ('/' or '\\') is actually part of the name, so we don't need to
2042 add a separator when creating the path names of entries within
2045 def __init__(self, name, fs):
2046 if __debug__: logInstanceCreation(self, 'Node.FS.RootDir')
2047 # We're going to be our own parent directory (".." entry and .dir
2048 # attribute) so we have to set up some values so Base.__init__()
2049 # won't gag won't it calls some of our methods.
2054 self.path_elements = []
2057 Base.__init__(self, name, self, fs)
2059 # Now set our paths to what we really want them to be: the
2060 # initial drive letter (the name) plus the directory separator,
2061 # except for the "lookup abspath," which does not have the
2063 self.abspath = name + os.sep
2065 self.path = name + os.sep
2066 self.tpath = name + os.sep
2069 self._lookupDict = {}
2071 # The // and os.sep + os.sep entries are necessary because
2072 # os.path.normpath() seems to preserve double slashes at the
2073 # beginning of a path (presumably for UNC path names), but
2074 # collapses triple slashes to a single slash.
2075 self._lookupDict[''] = self
2076 self._lookupDict['/'] = self
2077 self._lookupDict['//'] = self
2078 self._lookupDict[os.sep] = self
2079 self._lookupDict[os.sep + os.sep] = self
# NOTE(review): listing has elided lines; annotations only.
2081 def must_be_same(self, klass):
2084 Base.must_be_same(self, klass)
# _lookup_abs: dictionary-backed canonical lookup; creates missing Nodes
# (and their parent Dirs, recursively) when 'create' is true.
2086 def _lookup_abs(self, p, klass, create=1):
2088 Fast (?) lookup of a *normalized* absolute path.
2090 This method is intended for use by internal lookups with
2091 already-normalized path data. For general-purpose lookups,
2092 use the FS.Entry(), FS.Dir() or FS.File() methods.
2094 The caller is responsible for making sure we're passed a
2095 normalized absolute path; we merely let Python's dictionary look
2096 up and return the One True Node.FS object for the path.
2098 If a Node for the specified "p" doesn't already exist, and
2099 "create" is specified, the Node may be created after recursive
2100 invocation to find or create the parent directory or directories.
2104 result = self._lookupDict[k]
2107 msg = "No such file or directory: '%s' in '%s' (and create is False)" % (p, str(self))
2108 raise SCons.Errors.UserError, msg
2109 # There is no Node for this path name, and we're allowed
2111 dir_name, file_name = os.path.split(p)
2112 dir_node = self._lookup_abs(dir_name, Dir)
2113 result = klass(file_name, dir_node, self.fs)
2115 # Double-check on disk (as configured) that the Node we
2116 # created matches whatever is out there in the real world.
2117 result.diskcheck_match()
2119 self._lookupDict[k] = result
2120 dir_node.entries[_my_normcase(file_name)] = result
2121 dir_node.implicit = None
2123 # There is already a Node for this path name. Allow it to
2124 # complain if we were looking for an inappropriate type.
2125 result.must_be_same(klass)
2131 def entry_abspath(self, name):
2132 return self.abspath + name
# entry_labspath: body line elided in this listing ('/' + name upstream --
# TODO confirm against upstream SCons).
2134 def entry_labspath(self, name):
2137 def entry_path(self, name):
2138 return self.path + name
2140 def entry_tpath(self, name):
2141 return self.tpath + name
# NOTE(review): listing has elided lines; annotations only.
# is_under / src_builder: bodies elided in this listing.
2143 def is_under(self, dir):
2155 def src_builder(self):
2158 class FileNodeInfo(SCons.Node.NodeInfoBase):
2159 current_version_id = 1
# Fields serialized into .sconsign records for File nodes.
2161 field_list = ['csig', 'timestamp', 'size']
2163 # This should get reset by the FS initialization.
# str_to_node: same scheme as DirNodeInfo.str_to_node ('top' is bound on
# elided lines): anchor relative strings at the top dir, then do a
# root-level absolute lookup.
2166 def str_to_node(self, s):
2170 drive, s = os.path.splitdrive(s)
2172 root = self.fs.get_root(drive)
2173 if not os.path.isabs(s):
2174 s = top.labspath + '/' + s
2175 return root._lookup_abs(s, Entry)
2177 class FileBuildInfo(SCons.Node.BuildInfoBase):
2178 current_version_id = 1
2180 def convert_to_sconsign(self):
2182 Converts this FileBuildInfo object for writing to a .sconsign file
2184 This replaces each Node in our various dependency lists with its
2185 usual string representation: relative to the top-level SConstruct
2186 directory, or an absolute path if it's outside.
2194 except AttributeError:
# Stored strings always use '/' for portability of .sconsign files.
2197 s = s.replace(os.sep, '/')
2199 for attr in ['bsources', 'bdepends', 'bimplicit']:
2201 val = getattr(self, attr)
2202 except AttributeError:
2205 setattr(self, attr, list(map(node_to_str, val)))
2206 def convert_from_sconsign(self, dir, name):
2208 Converts a newly-read FileBuildInfo object for in-SCons use
2210 For normal up-to-date checking, we don't have any conversion to
2211 perform--but we're leaving this method here to make that clear.
2214 def prepare_dependencies(self):
2216 Prepares a FileBuildInfo object for explaining what changed
2218 The bsources, bdepends and bimplicit lists have all been
2219 stored on disk as paths relative to the top-level SConstruct
2220 directory. Convert the strings to actual Nodes (for use by the
2221 --debug=explain code and --implicit-cache).
2224 ('bsources', 'bsourcesigs'),
2225 ('bdepends', 'bdependsigs'),
2226 ('bimplicit', 'bimplicitsigs'),
2228 for (nattr, sattr) in attrs:
2230 strings = getattr(self, nattr)
2231 nodeinfos = getattr(self, sattr)
2232 except AttributeError:
2235 for s, ni in izip(strings, nodeinfos):
2236 if not isinstance(s, SCons.Node.Node):
2237 s = ni.str_to_node(s)
2239 setattr(self, nattr, nodes)
# format: human-readable dump of stored dependency info (result list
# initialization elided).
2240 def format(self, names=0):
2242 bkids = self.bsources + self.bdepends + self.bimplicit
2243 bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs
2244 for bkid, bkidsig in izip(bkids, bkidsigs):
2245 result.append(str(bkid) + ': ' +
2246 ' '.join(bkidsig.format(names=names)))
2247 result.append('%s [%s]' % (self.bactsig, self.bact))
2248 return '\n'.join(result)
# File class header fragment (class statement elided).
2251 """A class for files in a file system.
2254 memoizer_counters = []
2256 NodeInfo = FileNodeInfo
2257 BuildInfo = FileBuildInfo
2261 def diskcheck_match(self):
2262 diskcheck_match(self, self.isdir,
2263 "Directory %s found where file expected.")
# NOTE(review): the trailing statement of __init__ (presumably the
# _morph() call) is elided in this listing.
2265 def __init__(self, name, directory, fs):
2266 if __debug__: logInstanceCreation(self, 'Node.FS.File')
2267 Base.__init__(self, name, directory, fs)
2270 def Entry(self, name):
2271 """Create an entry node named 'name' relative to
2272 the directory of this file."""
2273 return self.dir.Entry(name)
2275 def Dir(self, name, create=True):
2276 """Create a directory node named 'name' relative to
2277 the directory of this file."""
2278 return self.dir.Dir(name, create=create)
2280 def Dirs(self, pathlist):
2281 """Create a list of directories relative to the SConscript
2282 directory of this file."""
2284 # return [self.Dir(p) for p in pathlist]
2285 return [self.Dir(p) for p in pathlist]
2287 def File(self, name):
2288 """Create a file node named 'name' relative to
2289 the directory of this file."""
2290 return self.dir.File(name)
2292 #def generate_build_dict(self):
2293 # """Return an appropriate dictionary of values for building
2295 # return {'Dir' : self.Dir,
2296 # 'File' : self.File,
2297 # 'RDirs' : self.RDirs}
# NOTE(review): fragment of File._morph() (def line elided in listing).
2300 """Turn a file system node into a File object."""
2301 self.scanner_paths = {}
2302 if not hasattr(self, '_local'):
2305 # If there was already a Builder set on this entry, then
2306 # we need to make sure we call the target-decider function,
2307 # not the source-decider. Reaching in and doing this by hand
2308 # is a little bogus. We'd prefer to handle this by adding
2309 # an Entry.builder_set() method that disambiguates like the
2310 # other methods, but that starts running into problems with the
2311 # fragile way we initialize Dir Nodes with their Mkdir builders,
2312 # yet still allow them to be overridden by the user. Since it's
2313 # not clear right now how to fix that, stick with what works
2314 # until it becomes clear...
2315 if self.has_builder():
2316 self.changed_since_last_build = self.decide_target
2318 def scanner_key(self):
2319 return self.get_suffix()
# NOTE(review): listing has elided lines; annotations only.
# get_contents: raw bytes of the (repository-resolved) file; error-path
# lines are elided.
2321 def get_contents(self):
2322 if not self.rexists():
2324 fname = self.rfile().abspath
2326 contents = open(fname, "rb").read()
2327 except EnvironmentError, e:
2336 get_text_contents = get_contents
2338 # This attempts to figure out what the encoding of the text is
2339 # based upon the BOM bytes, and then decodes the contents so that
2340 # it's a valid python string.
2341 def get_text_contents(self):
2342 contents = self.get_contents()
2343 # The behavior of various decode() methods and functions
2344 # w.r.t. the initial BOM bytes is different for different
2345 # encodings and/or Python versions. ('utf-8' does not strip
2346 # them, but has a 'utf-8-sig' which does; 'utf-16' seems to
2347 # strip them; etc.) Just side step all the complication by
2348 # explicitly stripping the BOM before we decode().
2349 if contents.startswith(codecs.BOM_UTF8):
2350 contents = contents[len(codecs.BOM_UTF8):]
2351 # TODO(2.2): Remove when 2.3 becomes floor.
2352 #contents = contents.decode('utf-8')
2353 contents = my_decode(contents, 'utf-8')
2354 elif contents.startswith(codecs.BOM_UTF16_LE):
2355 contents = contents[len(codecs.BOM_UTF16_LE):]
2356 # TODO(2.2): Remove when 2.3 becomes floor.
2357 #contents = contents.decode('utf-16-le')
2358 contents = my_decode(contents, 'utf-16-le')
2359 elif contents.startswith(codecs.BOM_UTF16_BE):
2360 contents = contents[len(codecs.BOM_UTF16_BE):]
2361 # TODO(2.2): Remove when 2.3 becomes floor.
2362 #contents = contents.decode('utf-16-be')
2363 contents = my_decode(contents, 'utf-16-be')
    def get_content_hash(self):
        """
        Compute and return the MD5 hash for this file.
        """
        # A file that exists nowhere (not even via a Repository) hashes
        # as empty content.
        if not self.rexists():
            return SCons.Util.MD5signature('')
        fname = self.rfile().abspath
        # Hash in md5_chunksize*1024-byte chunks to bound memory use.
        # NOTE(review): the 'try:' line, the handler body, and the final
        # 'return cs' are elided from this listing.
            cs = SCons.Util.MD5filesignature(fname,
                chunksize=SCons.Node.FS.File.md5_chunksize*1024)
        except EnvironmentError, e:
    memoizer_counters.append(SCons.Memoize.CountValue('get_size'))

    # NOTE(review): the 'def get_size(self):' line and its memoization
    # try/except scaffolding are elided from this listing.  The size
    # comes from the repository-resolved file and is cached in
    # self._memo under 'get_size'.
        return self._memo['get_size']
        size = self.rfile().getsize()
        self._memo['get_size'] = size
    memoizer_counters.append(SCons.Memoize.CountValue('get_timestamp'))

    def get_timestamp(self):
        # Memoized: serve the cached timestamp when present.
        # NOTE(review): the try/except around the cache lookup and the
        # final return are elided from this listing.
        return self._memo['get_timestamp']
        timestamp = self.rfile().getmtime()
        self._memo['get_timestamp'] = timestamp
    def store_info(self):
        # Merge our build information into the already-stored entry.
        # This accommodates "chained builds" where a file that's a target
        # in one build (SConstruct file) is a source in a different build.
        # See test/chained-build.py for the use case.
        # NOTE(review): one intervening line is elided from this listing.
        self.dir.sconsign().store_info(self.name, self)
    # NOTE(review): the element lists (and closing brackets) of these
    # two class attributes are elided from this listing.  Judging from
    # the loops in convert_old_entry() below, they name the old-format
    # attributes to copy verbatim and to convert signature-by-signature,
    # respectively.
    convert_copy_attrs = [
    convert_sig_attrs = [

    def convert_old_entry(self, old_entry):
        # Convert a .sconsign entry from before the Big Signature
        # Refactoring, doing what we can to convert its information
        # to the new .sconsign entry format.
        # The old format looked essentially like this:
        # .bsourcesigs ("signature" list)
        # .bdependsigs ("signature" list)
        # .bimplicitsigs ("signature" list)
        # The new format looks like this:
        # .binfo (BuildInfo)
        # .bsourcesigs (NodeInfo list)
        # .bdependsigs (NodeInfo list)
        # .bimplicitsigs (NodeInfo list)
        # The basic idea of the new structure is that a NodeInfo always
        # holds all available information about the state of a given Node
        # at a certain point in time. The various .b*sigs lists can just
        # be a list of pointers to the .ninfo attributes of the different
        # dependent nodes, without any copying of information until it's
        # time to pickle it for writing out to a .sconsign file.
        # The complicating issue is that the *old* format only stored one
        # "signature" per dependency, based on however the *last* build
        # was configured. We don't know from just looking at it whether
        # it was a build signature, a content signature, or a timestamp
        # "signature". Since we no longer use build signatures, the
        # best we can do is look at the length and if it's thirty two,
        # assume that it was (or might have been) a content signature.
        # If it was actually a build signature, then it will cause a
        # rebuild anyway when it doesn't match the new content signature,
        # but that's probably the best we can do.
        import SCons.SConsign
        new_entry = SCons.SConsign.SConsignEntry()
        new_entry.binfo = self.new_binfo()
        binfo = new_entry.binfo
        for attr in self.convert_copy_attrs:
            # NOTE(review): the 'try:'/'continue' scaffolding around
            # this getattr is elided from this listing.
            value = getattr(old_entry, attr)
            except AttributeError:
            setattr(binfo, attr, value)
            delattr(old_entry, attr)
        for attr in self.convert_sig_attrs:
            sig_list = getattr(old_entry, attr)
            except AttributeError:
            # NOTE(review): the accumulation of each ninfo into 'value'
            # and the csig-vs-timestamp length test are elided here, as
            # is the final 'return new_entry'.
            for sig in sig_list:
                ninfo = self.new_ninfo()
                ninfo.timestamp = sig
            setattr(binfo, attr, value)
            delattr(old_entry, attr)
    memoizer_counters.append(SCons.Memoize.CountValue('get_stored_info'))

    def get_stored_info(self):
        # Return this file's .sconsign entry, memoized in self._memo.
        # NOTE(review): the try/except memoization scaffolding and the
        # 'try:' before the sconsign lookup are elided from this listing.
        return self._memo['get_stored_info']
        sconsign_entry = self.dir.sconsign().get_entry(self.name)
        except (KeyError, EnvironmentError):
            # No stored entry: synthesize an empty one so callers always
            # get a usable object.
            import SCons.SConsign
            sconsign_entry = SCons.SConsign.SConsignEntry()
            sconsign_entry.binfo = self.new_binfo()
            sconsign_entry.ninfo = self.new_ninfo()
        if isinstance(sconsign_entry, FileBuildInfo):
            # This is a .sconsign file from before the Big Signature
            # Refactoring; convert it as best we can.
            sconsign_entry = self.convert_old_entry(sconsign_entry)
        # Drop the obsolete build-signature attribute if present.
            delattr(sconsign_entry.ninfo, 'bsig')
        except AttributeError:
        self._memo['get_stored_info'] = sconsign_entry
        return sconsign_entry
2566 def get_stored_implicit(self):
2567 binfo = self.get_stored_info().binfo
2568 binfo.prepare_dependencies()
2569 try: return binfo.bimplicit
2570 except AttributeError: return None
2572 def rel_path(self, other):
2573 return self.dir.rel_path(other)
2575 def _get_found_includes_key(self, env, scanner, path):
2576 return (id(env), id(scanner), path)
    memoizer_counters.append(SCons.Memoize.CountDict('get_found_includes', _get_found_includes_key))

    def get_found_includes(self, env, scanner, path):
        """Return the included implicit dependencies in this file.
        Cache results so we only scan the file once per path
        regardless of how many times this information is requested.
        """
        # NOTE(review): the try/except KeyError scaffolding around both
        # memo-dict accesses and the final 'return result' are elided
        # from this listing.
        memo_key = (id(env), id(scanner), path)
        memo_dict = self._memo['get_found_includes']
        self._memo['get_found_includes'] = memo_dict
        return memo_dict[memo_key]
        # result = [n.disambiguate() for n in scanner(self, env, path)]
        result = scanner(self, env, path)
        result = [N.disambiguate() for N in result]
        memo_dict[memo_key] = result
    def _createDir(self):
        # ensure that the directories for this node are
        # NOTE(review): the remainder of this method is elided from
        # this listing.
    def push_to_cache(self):
        """Try to push the node into a cache
        """
        # This should get called before the Nodes' .built() method is
        # called, which would clear the build signature if the file has
        # We have to clear the local memoized values *before* we push
        # the node to cache so that the memoization of the self.exists()
        # return value doesn't interfere.
        # NOTE(review): several lines (including any guard around the
        # actual push) are elided from this listing.
        self.clear_memoized_values()
        self.get_build_env().get_CacheDir().push(self)
    def retrieve_from_cache(self):
        """Try to retrieve the node's content from a cache
        This method is called from multiple threads in a parallel build,
        so only do thread safe stuff here. Do thread unsafe stuff in
        Returns true iff the node was successfully retrieved.
        """
        # Source (non-derived) files are never fetched from the cache.
        # NOTE(review): the early-return lines and part of the docstring
        # are elided from this listing.
        if not self.is_derived():
        return self.get_build_env().get_CacheDir().retrieve(self)
    # NOTE(review): the enclosing method's 'def' line is elided from
    # this listing; from the CacheDir push and ninfo refresh below this
    # appears to be the post-build bookkeeping method (built()) --
    # confirm against the full source.
        self.get_build_env().get_CacheDir().push_if_forced(self)
        ninfo = self.get_ninfo()
        csig = self.get_max_drift_csig()
        # Refresh the stored node info from the file now on disk.
        ninfo.timestamp = self.get_timestamp()
        ninfo.size = self.get_size()
        if not self.has_builder():
            # This is a source file, but it might have been a target file
            # in another build that included more of the DAG. Copy
            # any build information that's stored in the .sconsign file
            # into our binfo object so it doesn't get lost.
            old = self.get_stored_info()
            self.get_binfo().__dict__.update(old.binfo.__dict__)
    def find_src_builder(self):
        # Look for a transparent source-code builder: first ask the
        # directory, then fall back to SCCS/RCS if the corresponding
        # control files pass the disk checks.
        # NOTE(review): surrounding control lines (docstring, guards,
        # the 'try:' before builder_set and the final 'return scb')
        # are elided from this listing.
        scb = self.dir.src_builder()
        if diskcheck_sccs(self.dir, self.name):
            scb = get_DefaultSCCSBuilder()
        elif diskcheck_rcs(self.dir, self.name):
            scb = get_DefaultRCSBuilder()
        except AttributeError:
        self.builder_set(scb)
    def has_src_builder(self):
        """Return whether this Node has a source builder or not.

        If this Node doesn't have an explicit source code builder, this
        is where we figure out, on the fly, if there's a transparent
        source code builder for it.

        Note that if we found a source builder, we also set the
        self.builder attribute, so that all of the methods that actually
        *build* this file don't have to do anything different.
        """
        # NOTE(review): the 'try:' reading the cached self.sbuilder is
        # elided from this listing; on a cache miss we compute and cache
        # the source builder here.
        except AttributeError:
            scb = self.sbuilder = self.find_src_builder()
        return scb is not None
    def alter_targets(self):
        """Return any corresponding targets in a variant directory.
        """
        # NOTE(review): the body of this 'if' (the derived-file early
        # return) is elided from this listing.
        if self.is_derived():
        return self.fs.variant_dir_target_climb(self, self.dir, [self.name])
    def _rmv_existing(self):
        # Remove the existing on-disk file (via the Unlink action)
        # before it gets rebuilt.
        self.clear_memoized_values()
        e = Unlink(self, [], None)
        # NOTE(review): the body of this 'if' is elided from this
        # listing.
        if isinstance(e, SCons.Errors.BuildError):
    # Taskmaster interface subsystem

    def make_ready(self):
        # Resolve any transparent source builder before the Taskmaster
        # considers this node ready.
        self.has_src_builder()
        # NOTE(review): a following line is elided from this listing.
    # NOTE(review): the 'def prepare(self):' line is elided from this
    # listing; this is its body.
        """Prepare for this file to be created."""
        SCons.Node.Node.prepare(self)
        if self.get_state() != SCons.Node.up_to_date:
            # A derived, non-precious file is removed before rebuilding.
            if self.is_derived() and not self.precious:
                self._rmv_existing()
            # NOTE(review): the 'try:' around directory creation is
            # elided from this listing.
            except SCons.Errors.StopError, drive:
                desc = "No drive `%s' for target `%s'." % (drive, self)
                raise SCons.Errors.StopError, desc
    # NOTE(review): the 'def remove(self):' line and any return values
    # are elided from this listing; this is the method's body.
        """Remove this file."""
        if self.exists() or self.islink():
            self.fs.unlink(self.path)
    def do_duplicate(self, src):
        # Duplicate `src` into this (variant-dir) location: remove any
        # existing copy, then link/copy the source into place.
        Unlink(self, None, None)
        e = Link(self, src, None)
        if isinstance(e, SCons.Errors.BuildError):
            desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr)
            raise SCons.Errors.StopError, desc
        # The Link() action may or may not have actually
        # created the file, depending on whether the -n
        # option was used or not. Delete the _exists and
        # _rexists attributes so they can be reevaluated.
        # NOTE(review): the lines performing that invalidation are
        # elided from this listing.
    memoizer_counters.append(SCons.Memoize.CountValue('exists'))

    # NOTE(review): the 'def exists(self):' line, its memoization
    # try/except, several branch lines (e.g. the src-exists test), and
    # the return statements are elided from this listing, so the exact
    # nesting below is a best-effort reconstruction -- confirm against
    # the full source.
        return self._memo['exists']
        # Duplicate from source path if we are set up to do this.
        if self.duplicate and not self.is_derived() and not self.linked:
            src = self.srcnode()
            # At this point, src is meant to be copied in a variant directory.
            if src.abspath != self.abspath:
                self.do_duplicate(src)
                # Can't return 1 here because the duplication might
                # not actually occur if the -n option is being used.
                # The source file does not exist. Make sure no old
                # copy remains in the variant directory.
                if Base.exists(self) or self.islink():
                    self.fs.unlink(self.path)
                # Return None explicitly because the Base.exists() call
                # above will have cached its value if the file existed.
                self._memo['exists'] = None
        result = Base.exists(self)
        self._memo['exists'] = result
    # SIGNATURE SUBSYSTEM

    def get_max_drift_csig(self):
        """
        Returns the content signature currently stored for this node
        if it's been unmodified longer than the max_drift value, or the
        max_drift value is 0. Returns None otherwise.
        """
        old = self.get_stored_info()
        mtime = self.get_timestamp()
        max_drift = self.fs.max_drift
        # NOTE(review): the try/except scaffolding, the binding of 'n',
        # and the return statements inside these branches are elided
        # from this listing.
        if (time.time() - mtime) > max_drift:
            if n.timestamp and n.csig and n.timestamp == mtime:
        elif max_drift == 0:
            return old.ninfo.csig
        except AttributeError:
    # NOTE(review): the 'def get_csig(self):' line and the opening of
    # its docstring are elided from this listing; the next three lines
    # are surviving docstring text.
        Generate a node's content signature, the digested signature
        cache - alternate node to use for the signature cache
        returns - the content signature
        ninfo = self.get_ninfo()
        except AttributeError:
        csig = self.get_max_drift_csig()
        # NOTE(review): branch/else lines are elided here; it looks like
        # files smaller than md5_chunksize are read and hashed in memory
        # while larger ones go through get_content_hash() -- confirm
        # against the full source.
        if self.get_size() < SCons.Node.FS.File.md5_chunksize:
            contents = self.get_contents()
            csig = self.get_content_hash()
            # This can happen if there's actually a directory on-disk,
            # which can be the case if they've disabled disk checks,
            # or if an action with a File target actually happens to
            # create a same-named directory by mistake.
            csig = SCons.Util.MD5signature(contents)
2864 # DECISION SUBSYSTEM
    def builder_set(self, builder):
        """Attach a builder to this File and switch its change-decision
        function to the target-style decider (a File with a builder is
        a target)."""
        SCons.Node.Node.builder_set(self, builder)
        self.changed_since_last_build = self.decide_target
    def changed_content(self, target, prev_ni):
        # Content decider: changed iff the current content signature
        # differs from the previously recorded one.
        # NOTE(review): the 'try:' line and the AttributeError fallback
        # (presumably reporting "changed" when prev_ni has no csig)
        # are elided from this listing.
        cur_csig = self.get_csig()
        return cur_csig != prev_ni.csig
        except AttributeError:
2878 def changed_state(self, target, prev_ni):
2879 return self.state != SCons.Node.up_to_date
    def changed_timestamp_then_content(self, target, prev_ni):
        # Cheap timestamp comparison first; only fall back to the
        # content comparison when the timestamps differ.
        if not self.changed_timestamp_match(target, prev_ni):
            # Timestamps match: carry the stored csig forward so it is
            # not recomputed.
            # NOTE(review): the try:/pass scaffolding and the early
            # return are elided from this listing.
            self.get_ninfo().csig = prev_ni.csig
        except AttributeError:
        return self.changed_content(target, prev_ni)
    def changed_timestamp_newer(self, target, prev_ni):
        # Classic Make-style decider: changed iff this source is newer
        # than the target.
        # NOTE(review): the 'try:' line and the AttributeError fallback
        # return are elided from this listing.
        return self.get_timestamp() > target.get_timestamp()
        except AttributeError:
    def changed_timestamp_match(self, target, prev_ni):
        # Changed iff our timestamp differs from the stored one.
        # NOTE(review): the 'try:' line and the AttributeError fallback
        # return are elided from this listing.
        return self.get_timestamp() != prev_ni.timestamp
        except AttributeError:
2902 def decide_source(self, target, prev_ni):
2903 return target.get_build_env().decide_source(self, target, prev_ni)
2905 def decide_target(self, target, prev_ni):
2906 return target.get_build_env().decide_target(self, target, prev_ni)
    # Initialize this Node's decider function to decide_source() because
    # every file is a source file until it has a Builder attached...
    # (builder_set() above switches it to decide_target().)
    changed_since_last_build = decide_source
    def is_up_to_date(self):
        # NOTE(review): the trace-flag setup, the binding of 'r' (the
        # repository file, judging by the Trace calls), and the return
        # statements are elided from this listing.
        if T: Trace('is_up_to_date(%s):' % self)
        if not self.exists():
            if T: Trace(' not self.exists():')
            # The file doesn't exist locally...
            # ...but there is one in a Repository...
            if not self.changed(r):
                if T: Trace(' changed(%s):' % r)
                # ...and it's even up-to-date...
                # ...and they'd like a local copy.
                e = LocalCopy(self, r, None)
                if isinstance(e, SCons.Errors.BuildError):
            if T: Trace(' None\n')
        if T: Trace(' self.exists(): %s\n' % r)
    memoizer_counters.append(SCons.Memoize.CountValue('rfile'))

    # NOTE(review): the 'def rfile(self):' line, its memoization
    # try/except, the default 'result = self' binding, and the
    # 'result = node'/break lines are elided from this listing.
        return self._memo['rfile']
        if not self.exists():
            norm_name = _my_normcase(self.name)
            # Search every Repository directory stacked on our dir.
            for dir in self.dir.get_all_rdirs():
                try: node = dir.entries[norm_name]
                except KeyError: node = dir.file_on_disk(self.name)
                if node and node.exists() and \
                   (isinstance(node, File) or isinstance(node, Entry) \
                    or not node.is_derived()):
                    # Copy over our local attributes to the repository
                    # Node so we identify shared object files in the
                    # repository and don't assume they're static.
                    # This isn't perfect; the attribute would ideally
                    # be attached to the object in the repository in
                    # case it was built statically in the repository
                    # and we changed it to shared locally, but that's
                    # rarely the case and would only occur if you
                    # intentionally used the same suffix for both
                    # shared and static objects anyway. So this
                    # should work well in practice.
                    result.attributes = self.attributes
        self._memo['rfile'] = result
        # NOTE(review): the enclosing 'def' line is elided from this
        # listing; this single statement returns the string form of the
        # repository-resolved file.
        return str(self.rfile())
    def get_cachedir_csig(self):
        """
        Fetch a Node's content signature for purposes of computing
        another Node's cachesig.

        This is a wrapper around the normal get_csig() method that handles
        the somewhat obscure case of using CacheDir with the -n option.
        Any files that don't exist would normally be "built" by fetching
        them from the cache, but the normal get_csig() method will try
        to open up the local file, which doesn't exist because the -n
        option meant we didn't actually pull the file from cachedir.
        But since the file *does* actually exist in the cachedir, we
        can use its contents for the csig.
        """
        # NOTE(review): the try/except caching scaffolding and the
        # 'else:' line before the get_csig() fallback are elided from
        # this listing.
        return self.cachedir_csig
        except AttributeError:
        cachedir, cachefile = self.get_build_env().get_CacheDir().cachepath(self)
        if not self.exists() and cachefile and os.path.exists(cachefile):
            self.cachedir_csig = SCons.Util.MD5filesignature(cachefile, \
                SCons.Node.FS.File.md5_chunksize * 1024)
            self.cachedir_csig = self.get_csig()
        return self.cachedir_csig
    def get_cachedir_bsig(self):
        # Build signature used as this node's CacheDir key, cached on
        # self.cachesig.
        # NOTE(review): the try/except caching scaffolding and the final
        # 'return result' are elided from this listing.
        return self.cachesig
        except AttributeError:
        # Add the path to the cache signature, because multiple
        # targets built by the same action will all have the same
        # build signature, and we have to differentiate them somehow.
        children = self.children()
        executor = self.get_executor()
        # sigs = [n.get_cachedir_csig() for n in children]
        sigs = [n.get_cachedir_csig() for n in children]
        sigs.append(SCons.Util.MD5signature(executor.get_contents()))
        sigs.append(self.path)
        result = self.cachesig = SCons.Util.MD5collect(sigs)
# Return the module's canonical default FS object (see the module
# docstring's note about "default_fs").  NOTE(review): the function
# body is elided from this listing.
def get_default_fs():
    # NOTE(review): the enclosing 'class FileFinder' statement is elided
    # from this listing; these are its class-level attributes.
    # Opt in to the memoization metaclass only when memoization is
    # globally enabled.
    if SCons.Memoize.use_memoizer:
        __metaclass__ = SCons.Memoize.Memoized_Metaclass

    # Counters registered by the memoized methods below.
    memoizer_counters = []
    def filedir_lookup(self, p, fd=None):
        """
        A helper method for find_file() that looks up a directory for
        a file we're trying to find. This only creates the Dir Node if
        it exists on-disk, since if the directory doesn't exist we know
        we won't find any files in it... :-)

        It would be more compact to just use this as a nested function
        with a default keyword argument (see the commented-out version
        below), but that doesn't work unless you have nested scopes,
        so we define it here just so this work under Python 1.5.2.
        """
        # NOTE(review): several control lines (the fd-defaulting 'if',
        # the recursion guard, try/except around the entries lookup,
        # and the tail returns) are elided from this listing.
            fd = self.default_filedir
        dir, name = os.path.split(fd)
        drive, d = os.path.splitdrive(dir)
        if not name and d[:1] in ('/', os.sep):
            #return p.fs.get_root(drive).dir_on_disk(name)
            return p.fs.get_root(drive)
            p = self.filedir_lookup(p, dir)
        norm_name = _my_normcase(name)
            node = p.entries[norm_name]
            return p.dir_on_disk(name)
        if isinstance(node, Dir):
        if isinstance(node, Entry):
            node.must_be_same(Dir)
3077 def _find_file_key(self, filename, paths, verbose=None):
3078 return (filename, paths)
    memoizer_counters.append(SCons.Memoize.CountDict('find_file', _find_file_key))

    def find_file(self, filename, paths, verbose=None):
        """
        find_file(str, [Dir()]) -> [nodes]

        filename - a filename to find
        paths - a list of directory path *nodes* to search in. Can be
        represented as a list, a tuple, or a callable that is
        called with no arguments and returns the list or tuple.

        returns - the node created from the found file.

        Find a node corresponding to either a derived file or a file
        that exists already.

        Only the first file found is returned, and none is returned
        if no file is found.
        """
        # NOTE(review): the try/except memoization scaffolding, the
        # callable-paths handling, the search loop header and the final
        # 'return result' are elided from this listing.
        memo_key = self._find_file_key(filename, paths)
        memo_dict = self._memo['find_file']
        self._memo['find_file'] = memo_dict
        return memo_dict[memo_key]
        # Normalize 'verbose' into a callable that writes a tagged
        # message to stdout.
        if verbose and not callable(verbose):
            if not SCons.Util.is_String(verbose):
                verbose = "find_file"
            _verbose = ' %s: ' % verbose
            verbose = lambda s: sys.stdout.write(_verbose + s)
        filedir, filename = os.path.split(filename)
        # More compact code that we can't use until we drop
        # support for Python 1.5.2:
        #def filedir_lookup(p, fd=filedir):
        # A helper function that looks up a directory for a file
        # we're trying to find. This only creates the Dir Node
        # if it exists on-disk, since if the directory doesn't
        # exist we know we won't find any files in it... :-)
        # dir, name = os.path.split(fd)
        # p = filedir_lookup(p, dir)
        # norm_name = _my_normcase(name)
        # node = p.entries[norm_name]
        # return p.dir_on_disk(name)
        # if isinstance(node, Dir):
        # if isinstance(node, Entry):
        # node.must_be_same(Dir)
        # if isinstance(node, Dir) or isinstance(node, Entry):
        #paths = filter(None, map(filedir_lookup, paths))
        self.default_filedir = filedir
        paths = [_f for _f in map(self.filedir_lookup, paths) if _f]
            verbose("looking for '%s' in '%s' ...\n" % (filename, dir))
            node, d = dir.srcdir_find_file(filename)
                verbose("... FOUND '%s' in '%s'\n" % (filename, d))
        memo_dict[memo_key] = result
# Module-level convenience: the find_file bound method of a single
# shared FileFinder instance.
find_file = FileFinder().find_file
def invalidate_node_memos(targets):
    """
    Invalidate the memoized values of all Nodes (files or directories)
    that are associated with the given entries. Has been added to
    clear the cache of nodes affected by a direct execution of an
    action (e.g. Delete/Copy/Chmod). Existing Node caches become
    inconsistent if the action is run through Execute(). The argument
    `targets` can be a single Node object or filename, or a sequence
    """
    from traceback import extract_stack

    # First check if the cache really needs to be flushed. Only
    # actions run in the SConscript with Execute() seem to be
    # affected. XXX The way to check if Execute() is in the stacktrace
    # is a very dirty hack and should be replaced by a more sensible
    # NOTE(review): the loop-exit/early-return lines and the
    # normalization of 'targets' into a list are elided from this
    # listing.
    for f in extract_stack():
        if f[2] == 'Execute' and f[0][-14:] == 'Environment.py':
            # Don't have to invalidate, so return
    if not SCons.Util.is_List(targets):
    for entry in targets:
        # If the target is a Node object, clear the cache. If it is a
        # filename, look up potentially existing Node object first.
            entry.clear_memoized_values()
        except AttributeError:
            # Not a Node object, try to look up Node by filename. XXX
            # This creates Node objects even for those filenames which
            # do not correspond to an existing Node object.
            node = get_default_fs().Entry(entry)
            node.clear_memoized_values()
3212 # indent-tabs-mode:nil
3214 # vim: set expandtab tabstop=4 shiftwidth=4: