Merged revisions 1907-1940,1942-1967 via svnmerge from
diff --git a/src/engine/SCons/Node/FS.py b/src/engine/SCons/Node/FS.py
index 2830a20057a897cceda435ee230eb72ba08b47eb..c9a1443f026586e1e3a1c08c287a648141652bd3 100644
--- a/src/engine/SCons/Node/FS.py
+++ b/src/engine/SCons/Node/FS.py
@@ -5,9 +5,8 @@ File system nodes.
 These Nodes represent the canonical external objects that people think
 of when they think of building software: files and directories.
 
-This initializes a "default_fs" Node with an FS at the current directory
-for its own purposes, and for use by scripts or modules looking for the
-canonical default.
+This holds a "default_fs" variable that should be initialized with an FS
+that can be used by scripts or modules looking for the canonical default.
 
 """
 
@@ -48,11 +47,16 @@ import cStringIO
 import SCons.Action
 from SCons.Debug import logInstanceCreation
 import SCons.Errors
+import SCons.Memoize
 import SCons.Node
-import SCons.Sig.MD5
+import SCons.Subst
 import SCons.Util
 import SCons.Warnings
 
+# The max_drift value:  by default, use a cached signature value for
+# any file that's been untouched for more than two days.
+default_max_drift = 2*24*60*60
+
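For reference, 2*24*60*60 is 172,800 seconds, and the window can be tuned per-FS via the set_max_drift()/get_max_drift() methods added later in this file.  The helper below is only a hedged sketch of the policy the comment describes, not the actual signature code; its name is invented.

    import time

    def cached_signature_is_trustworthy(mtime, max_drift=2*24*60*60):
        # Trust a previously cached content signature only if the file
        # has been left untouched for longer than max_drift seconds
        # (two days by default).
        return (time.time() - mtime) > max_drift
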
 #
 # We stringify these file system Nodes a lot.  Turning a file system Node
 # into a string is non-trivial, because the final string representation
@@ -91,10 +95,33 @@ def save_strings(val):
 # there should be *no* changes to the external file system(s)...
 #
 
-def _copy_func(src, dest):
+if hasattr(os, 'link'):
+    def _hardlink_func(fs, src, dst):
+        # If the source is a symlink, we can't just hard-link to it
+        # because a relative symlink may point somewhere completely
+        # different.  We must disambiguate the symlink and then
+        # hard-link the final destination file.
+        while fs.islink(src):
+            link = fs.readlink(src)
+            if os.path.isabs(link):
+                src = link
+            else:
+                src = os.path.join(os.path.dirname(src), link)
+        fs.link(src, dst)
+else:
+    _hardlink_func = None
+
+if hasattr(os, 'symlink'):
+    def _softlink_func(fs, src, dst):
+        fs.symlink(src, dst)
+else:
+    _softlink_func = None
+
+def _copy_func(fs, src, dest):
     shutil.copy2(src, dest)
-    st=os.stat(src)
-    os.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
+    st = fs.stat(src)
+    fs.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
+
 
 Valid_Duplicates = ['hard-soft-copy', 'soft-hard-copy',
                     'hard-copy', 'soft-copy', 'copy']
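Each entry in Valid_Duplicates is a dash-separated priority list that set_duplicate() (below) turns into an ordered list of the link functions above.  A hedged sketch of that mapping, with the helper name invented; unsupported methods (the None placeholders) are simply skipped:

    def resolve_link_funcs(duplicate):
        # 'hard-soft-copy' -> [_hardlink_func, _softlink_func, _copy_func],
        # minus any entry that is None on this platform.
        link_dict = {
            'hard' : _hardlink_func,
            'soft' : _softlink_func,
            'copy' : _copy_func,
        }
        funcs = map(lambda t, d=link_dict: d[t], string.split(duplicate, '-'))
        return filter(None, funcs)
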
@@ -109,16 +136,6 @@ def set_duplicate(duplicate):
     # underlying implementations.  We do this inside this function,
     # not in the top-level module code, so that we can remap os.link
     # and os.symlink for testing purposes.
-    try:
-        _hardlink_func = os.link
-    except AttributeError:
-        _hardlink_func = None
-
-    try:
-        _softlink_func = os.symlink
-    except AttributeError:
-        _softlink_func = None
-
     link_dict = {
         'hard' : _hardlink_func,
         'soft' : _softlink_func,
@@ -148,12 +165,19 @@ def LinkFunc(target, source, env):
     if not Link_Funcs:
         # Set a default order of link functions.
         set_duplicate('hard-soft-copy')
+    fs = source[0].fs
     # Now link the files with the previously specified order.
     for func in Link_Funcs:
         try:
-            func(src,dest)
+            func(fs, src, dest)
             break
-        except OSError:
+        except (IOError, OSError):
+            # An OSError indicates a problem such as a permissions
+            # error or an attempt to symlink across file-system
+            # boundaries.  An IOError indicates something like the
+            # file not existing.  In either case, keep trying the
+            # remaining functions in the list and raise an error only
+            # if the last one fails.
             if func == Link_Funcs[-1]:
                 # exceptions from the last link method (copy) are fatal
                 raise
@@ -176,9 +200,8 @@ Unlink = SCons.Action.Action(UnlinkFunc, None)
 
 def MkdirFunc(target, source, env):
     t = target[0]
-    p = t.abspath
-    if not t.fs.exists(p):
-        t.fs.mkdir(p)
+    if not t.exists():
+        t.fs.mkdir(t.abspath)
     return 0
 
 Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None)
@@ -189,12 +212,14 @@ def get_MkdirBuilder():
     global MkdirBuilder
     if MkdirBuilder is None:
         import SCons.Builder
+        import SCons.Defaults
         # "env" will get filled in by Executor.get_build_env()
         # calling SCons.Defaults.DefaultEnvironment() when necessary.
         MkdirBuilder = SCons.Builder.Builder(action = Mkdir,
                                              env = None,
                                              explain = None,
                                              is_explicit = None,
+                                             target_scanner = SCons.Defaults.DirEntryScanner,
                                              name = "MkdirBuilder")
     return MkdirBuilder
 
@@ -202,13 +227,18 @@ def CacheRetrieveFunc(target, source, env):
     t = target[0]
     fs = t.fs
     cachedir, cachefile = t.cachepath()
-    if fs.exists(cachefile):
-        if SCons.Action.execute_actions:
+    if not fs.exists(cachefile):
+        fs.CacheDebug('CacheRetrieve(%s):  %s not in cache\n', t, cachefile)
+        return 1
+    fs.CacheDebug('CacheRetrieve(%s):  retrieving from %s\n', t, cachefile)
+    if SCons.Action.execute_actions:
+        if fs.islink(cachefile):
+            fs.symlink(fs.readlink(cachefile), t.path)
+        else:
             fs.copy2(cachefile, t.path)
-            st = fs.stat(cachefile)
-            fs.chmod(t.path, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
-        return 0
-    return 1
+        st = fs.stat(cachefile)
+        fs.chmod(t.path, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
+    return 0
 
 def CacheRetrieveString(target, source, env):
     t = target[0]
@@ -223,29 +253,44 @@ CacheRetrieveSilent = SCons.Action.Action(CacheRetrieveFunc, None)
 
 def CachePushFunc(target, source, env):
     t = target[0]
+    if t.nocache:
+        return
     fs = t.fs
     cachedir, cachefile = t.cachepath()
     if fs.exists(cachefile):
-        # Don't bother copying it if it's already there.
+        # Don't bother copying it if it's already there.  Note that
+        # usually this "shouldn't happen" because if the file already
+        # existed in cache, we'd have retrieved the file from there,
+        # not built it.  This can happen, though, in a race, if some
+        # other person running the same build pushes their copy to
+        # the cache after we decide we need to build it but before our
+        # build completes.
+        fs.CacheDebug('CachePush(%s):  %s already exists in cache\n', t, cachefile)
         return
 
+    fs.CacheDebug('CachePush(%s):  pushing to %s\n', t, cachefile)
+
     if not fs.isdir(cachedir):
         fs.makedirs(cachedir)
 
     tempfile = cachefile+'.tmp'
     try:
-        fs.copy2(t.path, tempfile)
+        if fs.islink(t.path):
+            fs.symlink(fs.readlink(t.path), tempfile)
+        else:
+            fs.copy2(t.path, tempfile)
         fs.rename(tempfile, cachefile)
         st = fs.stat(t.path)
         fs.chmod(cachefile, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
-    except OSError:
-        # It's possible someone else tried writing the file at the same
-        # time we did.  Print a warning but don't stop the build, since
-        # it doesn't affect the correctness of the build.
+    except (IOError, OSError):
+        # It's possible someone else tried writing the file at the
+        # same time we did, or else that there was some problem like
+        # the CacheDir being on a separate file system that's full.
+        # In any case, inability to push a file to cache doesn't affect
+        # the correctness of the build, so just print a warning.
         SCons.Warnings.warn(SCons.Warnings.CacheWriteErrorWarning,
                             "Unable to copy %s to cache. Cache file is %s"
                                 % (str(target), cachefile))
-        return
 
 CachePush = SCons.Action.Action(CachePushFunc, None)
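The copy-to-a-temporary-name-then-rename step in CachePushFunc() above is the usual trick for publishing a cache entry atomically: readers see either the old file or the complete new one, never a partial copy.  A minimal standalone sketch (function name invented):

    import os
    import shutil

    def atomic_publish(src, cachefile):
        tmp = cachefile + '.tmp'
        shutil.copy2(src, tmp)          # write under a temporary name
        os.rename(tmp, cachefile)       # atomic within one file system
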
 
@@ -281,44 +326,6 @@ def get_DefaultRCSBuilder():
                                                   name = "DefaultRCSBuilder")
     return DefaultRCSBuilder
 
-#
-class ParentOfRoot:
-    """
-    An instance of this class is used as the parent of the root of a
-    filesystem (POSIX) or drive (Win32). This isn't actually a node,
-    but it looks enough like one so that we don't have to have
-    special purpose code everywhere to deal with dir being None. 
-    This class is an instance of the Null object pattern.
-    """
-    def __init__(self):
-        self.abspath = ''
-        self.path = ''
-        self.name=''
-        self.duplicate=0
-        self.srcdir=None
-        self.build_dirs=[]
-        
-    def is_under(self, dir):
-        return 0
-
-    def up(self):
-        return None
-
-    def getRepositories(self):
-        return []
-
-    def get_dir(self):
-        return None
-
-    def src_builder(self):
-        return _null
-
-    def entry_abspath(self, name):
-        return name
-
-    def entry_path(self, name):
-        return name
-
 # Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem.
 _is_cygwin = sys.platform == "cygwin"
 if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin:
@@ -328,31 +335,119 @@ else:
     def _my_normcase(x):
         return string.upper(x)
 
+
+
+class DiskChecker:
+    def __init__(self, type, do, ignore):
+        self.type = type
+        self.do = do
+        self.ignore = ignore
+        self.set_do()
+    def set_do(self):
+        self.__call__ = self.do
+    def set_ignore(self):
+        self.__call__ = self.ignore
+    def set(self, list):
+        if self.type in list:
+            self.set_do()
+        else:
+            self.set_ignore()
+
+def do_diskcheck_match(node, predicate, errorfmt):
+    result = predicate()
+    try:
+        # If calling the predicate() cached a None value from stat(),
+        # remove it so it doesn't interfere with later attempts to
+        # build this Node as we walk the DAG.  (This isn't a great way
+        # to do this, we're reaching into an interface that doesn't
+        # really belong to us, but it's all about performance, so
+        # for now we'll just document the dependency...)
+        if node._memo['stat'] is None:
+            del node._memo['stat']
+    except (AttributeError, KeyError):
+        pass
+    if result:
+        raise TypeError, errorfmt % node.abspath
+
+def ignore_diskcheck_match(node, predicate, errorfmt):
+    pass
+
+def do_diskcheck_rcs(node, name):
+    try:
+        rcs_dir = node.rcs_dir
+    except AttributeError:
+        if node.entry_exists_on_disk('RCS'):
+            rcs_dir = node.Dir('RCS')
+        else:
+            rcs_dir = None
+        node.rcs_dir = rcs_dir
+    if rcs_dir:
+        return rcs_dir.entry_exists_on_disk(name+',v')
+    return None
+
+def ignore_diskcheck_rcs(node, name):
+    return None
+
+def do_diskcheck_sccs(node, name):
+    try:
+        sccs_dir = node.sccs_dir
+    except AttributeError:
+        if node.entry_exists_on_disk('SCCS'):
+            sccs_dir = node.Dir('SCCS')
+        else:
+            sccs_dir = None
+        node.sccs_dir = sccs_dir
+    if sccs_dir:
+        return sccs_dir.entry_exists_on_disk('s.'+name)
+    return None
+
+def ignore_diskcheck_sccs(node, name):
+    return None
+
+diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match)
+diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs)
+diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs)
+
+diskcheckers = [
+    diskcheck_match,
+    diskcheck_rcs,
+    diskcheck_sccs,
+]
+
+def set_diskcheck(list):
+    for dc in diskcheckers:
+        dc.set(list)
+
+def diskcheck_types():
+    return map(lambda dc: dc.type, diskcheckers)
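A short usage sketch of the switches above; this is the sort of call a --diskcheck style command-line option would make (the option handling itself lives outside this file):

    set_diskcheck(diskcheck_types())   # enable all: ['match', 'rcs', 'sccs']
    set_diskcheck(['match'])           # only the File-vs-Dir mismatch check;
                                       # RCS/SCCS lookups become no-ops
    set_diskcheck([])                  # disable all on-disk checking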
+
+
+
 class EntryProxy(SCons.Util.Proxy):
     def __get_abspath(self):
         entry = self.get()
-        return SCons.Util.SpecialAttrWrapper(entry.get_abspath(),
+        return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(),
                                              entry.name + "_abspath")
 
     def __get_filebase(self):
         name = self.get().name
-        return SCons.Util.SpecialAttrWrapper(SCons.Util.splitext(name)[0],
+        return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0],
                                              name + "_filebase")
 
     def __get_suffix(self):
         name = self.get().name
-        return SCons.Util.SpecialAttrWrapper(SCons.Util.splitext(name)[1],
+        return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1],
                                              name + "_suffix")
 
     def __get_file(self):
         name = self.get().name
-        return SCons.Util.SpecialAttrWrapper(name, name + "_file")
+        return SCons.Subst.SpecialAttrWrapper(name, name + "_file")
 
     def __get_base_path(self):
         """Return the file's directory and file name, with the
         suffix stripped."""
         entry = self.get()
-        return SCons.Util.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0],
+        return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0],
                                              entry.name + "_base")
 
     def __get_posix_path(self):
@@ -363,9 +458,9 @@ class EntryProxy(SCons.Util.Proxy):
         else:
             entry = self.get()
             r = string.replace(entry.get_path(), os.sep, '/')
-            return SCons.Util.SpecialAttrWrapper(r, entry.name + "_posix")
+            return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix")
 
-    def __get_win32_path(self):
+    def __get_windows_path(self):
         """Return the path with \ as the path separator,
         regardless of platform."""
         if os.sep == '\\':
@@ -373,7 +468,7 @@ class EntryProxy(SCons.Util.Proxy):
         else:
             entry = self.get()
             r = string.replace(entry.get_path(), os.sep, '\\')
-            return SCons.Util.SpecialAttrWrapper(r, entry.name + "_win32")
+            return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_windows")
 
     def __get_srcnode(self):
         return EntryProxy(self.get().srcnode())
@@ -396,7 +491,8 @@ class EntryProxy(SCons.Util.Proxy):
     
     dictSpecialAttrs = { "base"     : __get_base_path,
                          "posix"    : __get_posix_path,
-                         "win32"    : __get_win32_path,
+                         "windows"  : __get_windows_path,
+                         "win32"    : __get_windows_path,
                          "srcpath"  : __get_srcnode,
                          "srcdir"   : __get_srcdir,
                          "dir"      : __get_dir,
@@ -412,7 +508,7 @@ class EntryProxy(SCons.Util.Proxy):
         # This is how we implement the "special" attributes
         # such as base, posix, srcdir, etc.
         try:
-            return self.dictSpecialAttrs[name](self)
+            attr_function = self.dictSpecialAttrs[name]
         except KeyError:
             try:
                 attr = SCons.Util.Proxy.__getattr__(self, name)
@@ -427,6 +523,8 @@ class EntryProxy(SCons.Util.Proxy):
                     classname = classname[:-2]
                 raise AttributeError, "%s instance '%s' has no attribute '%s'" % (classname, entry.name, name)
             return attr
+        else:
+            return attr_function(self)
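These special attributes are what make expansions like the following work in command strings.  A hedged SConstruct-style sketch (target, source, and command names invented); note the new 'windows' spelling introduced here, with 'win32' kept as a synonym:

    env = Environment()
    env.Command('out/report.txt', 'src/data.txt',
                'gen --input $SOURCE.posix --output $TARGET.windows')
    # Other attributes resolved through dictSpecialAttrs:
    #   $TARGET.base      path with the suffix stripped
    #   $TARGET.filebase  file name without directory or suffix
    #   $TARGET.suffix    the file suffix alone
    #   $TARGET.abspath   absolute path to the node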
 
 class Base(SCons.Node.Node):
     """A generic class for file system entries.  This class is for
@@ -441,6 +539,8 @@ class Base(SCons.Node.Node):
     object identity comparisons.
     """
 
+    memoizer_counters = []
+
     def __init__(self, name, directory, fs):
         """Initialize a generic Node.FS.Base object.
         
@@ -452,6 +552,7 @@ class Base(SCons.Node.Node):
         SCons.Node.Node.__init__(self)
 
         self.name = name
+        self.suffix = SCons.Util.splitext(name)[1]
         self.fs = fs
 
         assert directory, "A directory must be provided"
@@ -461,25 +562,21 @@ class Base(SCons.Node.Node):
             self.path = name
         else:
             self.path = directory.entry_path(name)
+        if directory.tpath == '.':
+            self.tpath = name
+        else:
+            self.tpath = directory.entry_tpath(name)
+        self.path_elements = directory.path_elements + [self]
 
         self.dir = directory
         self.cwd = None # will hold the SConscript directory for target nodes
         self.duplicate = directory.duplicate
 
-    def clear(self):
-        """Completely clear a Node.FS.Base object of all its cached
-        state (so that it can be re-evaluated by interfaces that do
-        continuous integration builds).
-        __cache_reset__
-        """
-        SCons.Node.Node.clear(self)
-
     def get_dir(self):
         return self.dir
 
     def get_suffix(self):
-        "__cacheable__"
-        return SCons.Util.splitext(self.name)[1]
+        return self.suffix
 
     def rfile(self):
         return self
@@ -492,9 +589,16 @@ class Base(SCons.Node.Node):
             return self._save_str()
         return self._get_str()
 
+    memoizer_counters.append(SCons.Memoize.CountValue('_save_str'))
+
     def _save_str(self):
-        "__cacheable__"
-        return self._get_str()
+        try:
+            return self._memo['_save_str']
+        except KeyError:
+            pass
+        result = self._get_str()
+        self._memo['_save_str'] = result
+        return result
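This hand-rolled pattern replaces the old "__cacheable__" docstring annotation and recurs throughout the rest of this file.  A generic sketch of the idiom, with the method and helper names invented:

    memoizer_counters.append(SCons.Memoize.CountValue('expensive_method'))

    def expensive_method(self):
        # Look for a previously computed value in the per-node cache.
        try:
            return self._memo['expensive_method']
        except KeyError:
            pass
        result = self._compute_expensive_value()    # hypothetical helper
        self._memo['expensive_method'] = result
        return result

Methods whose result depends on an argument use SCons.Memoize.CountDict plus a nested dictionary keyed by a _*_key() helper instead (see Rfindalldirs() and _doLookup() below).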
 
     def _get_str(self):
         if self.duplicate or self.is_derived():
@@ -503,14 +607,49 @@ class Base(SCons.Node.Node):
 
     rstr = __str__
 
+    memoizer_counters.append(SCons.Memoize.CountValue('stat'))
+
+    def stat(self):
+        try: return self._memo['stat']
+        except KeyError: pass
+        try: result = self.fs.stat(self.abspath)
+        except os.error: result = None
+        self._memo['stat'] = result
+        return result
+
     def exists(self):
-        "__cacheable__"
-        return self.fs.exists(self.abspath)
+        return not self.stat() is None
 
     def rexists(self):
-        "__cacheable__"
         return self.rfile().exists()
 
+    def getmtime(self):
+        st = self.stat()
+        if st: return st[stat.ST_MTIME]
+        else: return None
+
+    def getsize(self):
+        st = self.stat()
+        if st: return st[stat.ST_SIZE]
+        else: return None
+
+    def isdir(self):
+        st = self.stat()
+        return not st is None and stat.S_ISDIR(st[stat.ST_MODE])
+
+    def isfile(self):
+        st = self.stat()
+        return not st is None and stat.S_ISREG(st[stat.ST_MODE])
+
+    if hasattr(os, 'symlink'):
+        def islink(self):
+            try: st = self.fs.lstat(self.abspath)
+            except os.error: return 0
+            return stat.S_ISLNK(st[stat.ST_MODE])
+    else:
+        def islink(self):
+            return 0                    # no symlinks
+
     def is_under(self, dir):
         if self is dir:
             return 1
@@ -524,7 +663,7 @@ class Base(SCons.Node.Node):
         """If this node is in a build path, return the node
         corresponding to its source file.  Otherwise, return
         ourself.
-        __cacheable__"""
+        """
         dir=self.dir
         name=self.name
         while dir:
@@ -533,7 +672,7 @@ class Base(SCons.Node.Node):
                                         klass=self.__class__)
                 return srcnode
             name = dir.name + os.sep + name
-            dir=dir.get_dir()
+            dir = dir.up()
         return self
 
     def get_path(self, dir=None):
@@ -541,18 +680,15 @@ class Base(SCons.Node.Node):
         Node.FS.Base object that owns us."""
         if not dir:
             dir = self.fs.getcwd()
-        path_elems = []
-        d = self
-        if d == dir:
-            path_elems.append('.')
-        else:
-            while d != dir and not isinstance(d, ParentOfRoot):
-                path_elems.append(d.name)
-                d = d.dir
-            path_elems.reverse()
-        ret = string.join(path_elems, os.sep)
-        return ret
-            
+        if self == dir:
+            return '.'
+        path_elems = self.path_elements
+        try: i = path_elems.index(dir)
+        except ValueError: pass
+        else: path_elems = path_elems[i+1:]
+        path_elems = map(lambda n: n.name, path_elems)
+        return string.join(path_elems, os.sep)
+
     def set_src_builder(self, builder):
         """Set the source code builder for this node."""
         self.sbuilder = builder
@@ -591,6 +727,52 @@ class Base(SCons.Node.Node):
             self._proxy = ret
             return ret
 
+    def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext):
+        return self.dir.Entry(prefix + splitext(self.name)[0] + suffix)
+
+    def _Rfindalldirs_key(self, pathlist):
+        return pathlist
+
+    memoizer_counters.append(SCons.Memoize.CountDict('Rfindalldirs', _Rfindalldirs_key))
+
+    def Rfindalldirs(self, pathlist):
+        """
+        Return all of the directories for a given path list, including
+        corresponding "backing" directories in any repositories.
+
+        The Node lookups are relative to this Node (typically a
+        directory), so memoizing the result saves cycles from looking
+        up the same path for each target in a given directory.
+        """
+        try:
+            memo_dict = self._memo['Rfindalldirs']
+        except KeyError:
+            memo_dict = {}
+            self._memo['Rfindalldirs'] = memo_dict
+        else:
+            try:
+                return memo_dict[pathlist]
+            except KeyError:
+                pass
+
+        create_dir_relative_to_self = self.Dir
+        result = []
+        for path in pathlist:
+            if isinstance(path, SCons.Node.Node):
+                result.append(path)
+            else:
+                dir = create_dir_relative_to_self(path)
+                result.extend(dir.get_all_rdirs())
+
+        memo_dict[pathlist] = result
+
+        return result
+
+    def RDirs(self, pathlist):
+        """Search for a list of directories in the Repository list."""
+        cwd = self.cwd or self.fs._cwd
+        return cwd.Rfindalldirs(pathlist)
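A hedged illustration of what these calls return once a repository is attached (directory names invented, and assuming the usual FS.Repository() call): each string in the path list expands to the local directory followed by its repository backing directories, while Node arguments pass through unchanged.

    fs = FS('/work/project')
    fs.Repository('/repo/project')
    dirs = fs.Top.RDirs(('include',))
    # dirs is now roughly:
    #   [Dir('/work/project/include'), Dir('/repo/project/include')]
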
+
 class Entry(Base):
     """This is the class for generic Node.FS entries--that is, things
     that could be a File or a Dir, but we're just not sure yet.
@@ -599,14 +781,42 @@ class Entry(Base):
     time comes, and then call the same-named method in the transformed
     class."""
 
-    def disambiguate(self):
-        if self.fs.isdir(self.abspath):
+    def diskcheck_match(self):
+        pass
+
+    def disambiguate(self, must_exist=None):
+        """
+        """
+        if self.isdir():
             self.__class__ = Dir
             self._morph()
-        else:
+        elif self.isfile():
             self.__class__ = File
             self._morph()
             self.clear()
+        else:
+            # There was nothing on-disk at this location, so look in
+            # the src directory.
+            #
+            # We can't just use self.srcnode() straight away because
+            # that would create an actual Node for this file in the src
+            # directory, and there might not be one.  Instead, use the
+            # entry_exists_on_disk() method to see if there's something
+            # on-disk with that name, in which case we can go ahead and call
+            # self.srcnode() to create the right type of entry.
+            srcdir = self.dir.srcnode()
+            if srcdir != self.dir and \
+               srcdir.entry_exists_on_disk(self.name) and \
+               self.srcnode().isdir():
+                self.__class__ = Dir
+                self._morph()
+            elif must_exist:
+                msg = "No such file or directory: '%s'" % self.abspath
+                raise SCons.Errors.UserError, msg
+            else:
+                self.__class__ = File
+                self._morph()
+                self.clear()
         return self
 
     def rfile(self):
@@ -617,11 +827,6 @@ class Entry(Base):
         self.clear()
         return File.rfile(self)
 
-    def get_found_includes(self, env, scanner, path):
-        """If we're looking for included files, it's because this Entry
-        is really supposed to be a File itself."""
-        return self.disambiguate().get_found_includes(env, scanner, path)
-
     def scanner_key(self):
         return self.get_suffix()
 
@@ -631,29 +836,17 @@ class Entry(Base):
         Since this should return the real contents from the file
         system, we check to see into what sort of subclass we should
         morph this Entry."""
-        if self.fs.isfile(self.abspath):
-            self.__class__ = File
-            self._morph()
-            return self.get_contents()
-        if self.fs.isdir(self.abspath):
-            self.__class__ = Dir
-            self._morph()
+        try:
+            self = self.disambiguate(must_exist=1)
+        except SCons.Errors.UserError:
+            # There was nothing on disk with which to disambiguate
+            # this entry.  Leave it as an Entry, but return a null
+            # string so calls to get_contents() in emitters and the
+            # like (e.g. in qt.py) don't have to disambiguate by hand
+            # or catch the exception.
+            return ''
+        else:
             return self.get_contents()
-        if self.fs.islink(self.abspath):
-            return ''             # avoid errors for dangling symlinks
-        raise AttributeError
-
-    def exists(self):
-        """Return if the Entry exists.  Check the file system to see
-        what we should turn into first.  Assume a file if there's no
-        directory."""
-        return self.disambiguate().exists()
-
-    def calc_signature(self, calc=None):
-        """Return the Entry's calculated signature.  Check the file
-        system to see what we should turn into first.  Assume a file if
-        there's no directory."""
-        return self.disambiguate().calc_signature(calc)
 
     def must_be_a_Dir(self):
         """Called to make sure a Node is a Dir.  Since we're an
@@ -662,6 +855,29 @@ class Entry(Base):
         self._morph()
         return self
 
+    # The following methods can get called before the Taskmaster has
+    # had a chance to call disambiguate() directly to see if this Entry
+    # should really be a Dir or a File.  We therefore use these to call
+    # disambiguate() transparently (from our caller's point of view).
+    #
+    # Right now, this minimal set of methods has been derived by just
+    # looking at some of the methods that will obviously be called early
+    # in any of the various Taskmasters' calling sequences, and then
+    # empirically figuring out which additional methods are necessary
+    # to make various tests pass.
+
+    def exists(self):
+        """Return if the Entry exists.  Check the file system to see
+        what we should turn into first.  Assume a file if there's no
+        directory."""
+        return self.disambiguate().exists()
+
+    def rel_path(self, other):
+        d = self.disambiguate()
+        if d.__class__ == Entry:
+            raise "rel_path() could not disambiguate File/Dir"
+        return d.rel_path(other)
+
 # This is for later so we can differentiate between Entry the class and Entry
 # the method of the FS class.
 _classEntry = Entry
@@ -669,7 +885,8 @@ _classEntry = Entry
 
 class LocalFS:
 
-    __metaclass__ = SCons.Memoize.Memoized_Metaclass
+    if SCons.Memoize.use_memoizer:
+        __metaclass__ = SCons.Memoize.Memoized_Metaclass
     
     # This class implements an abstraction layer for operations involving
     # a local file system.  Essentially, this wraps any function in
@@ -695,12 +912,16 @@ class LocalFS:
         return os.path.exists(path)
     def getmtime(self, path):
         return os.path.getmtime(path)
+    def getsize(self, path):
+        return os.path.getsize(path)
     def isdir(self, path):
         return os.path.isdir(path)
     def isfile(self, path):
         return os.path.isfile(path)
     def link(self, src, dst):
         return os.link(src, dst)
+    def lstat(self, path):
+        return os.lstat(path)
     def listdir(self, path):
         return os.listdir(path)
     def makedirs(self, path):
@@ -721,19 +942,16 @@ class LocalFS:
     if hasattr(os, 'symlink'):
         def islink(self, path):
             return os.path.islink(path)
-        def exists_or_islink(self, path):
-            return os.path.exists(path) or os.path.islink(path)
     else:
         def islink(self, path):
             return 0                    # no symlinks
-        exists_or_islink = exists
 
-if not SCons.Memoize.has_metaclass:
-    _FSBase = LocalFS
-    class LocalFS(SCons.Memoize.Memoizer, _FSBase):
-        def __init__(self, *args, **kw):
-            apply(_FSBase.__init__, (self,)+args, kw)
-            SCons.Memoize.Memoizer.__init__(self)
+    if hasattr(os, 'readlink'):
+        def readlink(self, file):
+            return os.readlink(file)
+    else:
+        def readlink(self, file):
+            return ''
 
 
 #class RemoteFS:
@@ -747,6 +965,8 @@ if not SCons.Memoize.has_metaclass:
 
 class FS(LocalFS):
 
+    memoizer_counters = []
+
     def __init__(self, path = None):
         """Initialize the Node.FS subsystem.
 
@@ -756,37 +976,39 @@ class FS(LocalFS):
 
         The path argument must be a valid absolute path.
         """
-        if __debug__: logInstanceCreation(self)
-        self.Top = None
-        if path == None:
-            self.pathTop = os.getcwd()
-        else:
-            self.pathTop = path
+        if __debug__: logInstanceCreation(self, 'Node.FS')
+
+        self._memo = {}
+
         self.Root = {}
         self.SConstruct_dir = None
         self.CachePath = None
         self.cache_force = None
         self.cache_show = None
+        self.max_drift = default_max_drift
 
-    def set_toplevel_dir(self, path):
-        assert not self.Top, "You can only set the top-level path on an FS object that has not had its File, Dir, or Entry methods called yet."
-        self.pathTop = path
+        self.Top = None
+        if path is None:
+            self.pathTop = os.getcwd()
+        else:
+            self.pathTop = path
+        self.defaultDrive = _my_normcase(os.path.splitdrive(self.pathTop)[0])
 
-    def clear_cache(self):
-        "__cache_reset__"
-        pass
+        self.Top = self._doLookup(Dir, os.path.normpath(self.pathTop))
+        self.Top.path = '.'
+        self.Top.tpath = '.'
+        self._cwd = self.Top
     
     def set_SConstruct_dir(self, dir):
         self.SConstruct_dir = dir
-        
-    def __setTopLevelDir(self):
-        if not self.Top:
-            self.Top = self._doLookup(Dir, os.path.normpath(self.pathTop))
-            self.Top.path = '.'
-            self._cwd = self.Top
-        
+
+    def get_max_drift(self):
+        return self.max_drift
+
+    def set_max_drift(self, max_drift):
+        self.max_drift = max_drift
+
     def getcwd(self):
-        self.__setTopLevelDir()
         return self._cwd
 
     def __checkClass(self, node, klass):
@@ -799,6 +1021,11 @@ class FS(LocalFS):
         raise TypeError, "Tried to lookup %s '%s' as a %s." % \
               (node.__class__.__name__, node.path, klass.__name__)
         
+    def _doLookup_key(self, fsclass, name, directory = None, create = 1):
+        return (fsclass, name, directory)
+
+    memoizer_counters.append(SCons.Memoize.CountDict('_doLookup', _doLookup_key))
+
     def _doLookup(self, fsclass, name, directory = None, create = 1):
         """This method differs from the File and Dir factory methods in
         one important way: the meaning of the directory parameter.
@@ -806,23 +1033,43 @@ class FS(LocalFS):
         name is expected to be an absolute path.  If you try to look up a
         relative path with directory=None, then an AssertionError will be
         raised.
-        __cacheable__"""
+        """
+        memo_key = (fsclass, name, directory)
+        try:
+            memo_dict = self._memo['_doLookup']
+        except KeyError:
+            memo_dict = {}
+            self._memo['_doLookup'] = memo_dict
+        else:
+            try:
+                return memo_dict[memo_key]
+            except KeyError:
+                pass
 
         if not name:
-            # This is a stupid hack to compensate for the fact
-            # that the POSIX and Win32 versions of os.path.normpath()
-            # behave differently.  In particular, in POSIX:
+            # This is a stupid hack to compensate for the fact that the
+            # POSIX and Windows versions of os.path.normpath() behave
+            # differently in older versions of Python.  In particular,
+            # in POSIX:
             #   os.path.normpath('./') == '.'
-            # in Win32
+            # in Windows:
             #   os.path.normpath('./') == ''
             #   os.path.normpath('.\\') == ''
             #
             # This is a definite bug in the Python library, but we have
             # to live with it.
             name = '.'
-        path_comp = string.split(name, os.sep)
-        drive, path_first = os.path.splitdrive(path_comp[0])
-        if not path_first:
+        path_orig = string.split(name, os.sep)
+        path_norm = string.split(_my_normcase(name), os.sep)
+
+        first_orig = path_orig.pop(0)   # strip first element
+        unused = path_norm.pop(0)   # strip first element
+
+        drive, path_first = os.path.splitdrive(first_orig)
+        if path_first:
+            path_orig = [ path_first, ] + path_orig
+            path_norm = [ _my_normcase(path_first), ] + path_norm
+        else:
             # Absolute path
             drive = _my_normcase(drive)
             try:
@@ -830,65 +1077,71 @@ class FS(LocalFS):
             except KeyError:
                 if not create:
                     raise SCons.Errors.UserError
-                directory = RootDir(drive, ParentOfRoot(), self)
+                directory = RootDir(drive, self)
                 self.Root[drive] = directory
-            path_comp = path_comp[1:]
-        else:
-            path_comp = [ path_first, ] + path_comp[1:]
+                if not drive:
+                    self.Root[self.defaultDrive] = directory
+                elif drive == self.defaultDrive:
+                    self.Root[''] = directory
+
+        if not path_orig:
+            memo_dict[memo_key] = directory
+            return directory
 
-        if not path_comp:
-            path_comp = ['']
+        last_orig = path_orig.pop()     # strip last element
+        last_norm = path_norm.pop()     # strip last element
             
         # Lookup the directory
-        for path_name in path_comp[:-1]:
-            path_norm = _my_normcase(path_name)
+        for orig, norm in map(None, path_orig, path_norm):
+            try:
+                entries = directory.entries
+            except AttributeError:
+                # We tried to look up the entry in either an Entry or
+                # a File.  Give whatever it is a chance to do what's
+                # appropriate: morph into a Dir or raise an exception.
+                directory.must_be_a_Dir()
+                entries = directory.entries
             try:
-                d = directory.entries[path_norm]
+                directory = entries[norm]
             except KeyError:
                 if not create:
                     raise SCons.Errors.UserError
 
-                # look at the actual filesystem and make sure there isn't
-                # a file already there
-                path = directory.entry_path(path_name)
-                if self.isfile(path):
-                    raise TypeError, \
-                          "File %s found where directory expected." % path
-
-                dir_temp = Dir(path_name, directory, self)
-                directory.entries[path_norm] = dir_temp
-                directory.add_wkid(dir_temp)
-                directory = dir_temp
-            else:
-                directory = d.must_be_a_Dir()
+                d = Dir(orig, directory, self)
+
+                # Check the file system (or not, as configured) to make
+                # sure there isn't already a file there.
+                d.diskcheck_match()
+
+                directory.entries[norm] = d
+                directory.add_wkid(d)
+                directory = d
+
+        directory.must_be_a_Dir()
 
-        entry_norm = _my_normcase(path_comp[-1])
         try:
-            e = directory.entries[entry_norm]
+            e = directory.entries[last_norm]
         except KeyError:
             if not create:
                 raise SCons.Errors.UserError
 
-            # make sure we don't create File nodes when there is actually
-            # a directory at that path on the disk, and vice versa
-            path = directory.entry_path(path_comp[-1])
-            if fsclass == File:
-                if self.isdir(path):
-                    raise TypeError, \
-                          "Directory %s found where file expected." % path
-            elif fsclass == Dir:
-                if self.isfile(path):
-                    raise TypeError, \
-                          "File %s found where directory expected." % path
-            
-            result = fsclass(path_comp[-1], directory, self)
-            directory.entries[entry_norm] = result 
+            result = fsclass(last_orig, directory, self)
+
+            # Check the file system (or not, as configured) to make
+            # sure there isn't already a directory at the path on
+            # disk where we just created a File node, and vice versa.
+            result.diskcheck_match()
+
+            directory.entries[last_norm] = result 
             directory.add_wkid(result)
         else:
             result = self.__checkClass(e, fsclass)
+
+        memo_dict[memo_key] = result
+
         return result 
 
-    def __transformPath(self, name, directory):
+    def _transformPath(self, name, directory):
         """Take care of setting up the correct top-level directory,
         usually in preparation for a call to doLookup().
 
@@ -898,7 +1151,14 @@ class FS(LocalFS):
         If directory is None, and name is a relative path,
         then the same applies.
         """
-        self.__setTopLevelDir()
+        if not SCons.Util.is_String(name):
+            # This handles cases where the object is a Proxy wrapping
+            # a Node.FS.File object (e.g.).  It would be good to handle
+            # this more directly some day by having the callers of this
+            # function recognize that a Proxy can be treated like the
+            # underlying object (that is, get rid of the isinstance()
+            # calls that explicitly look for a Node.FS.Base object).
+            name = str(name)
         if name and name[0] == '#':
             directory = self.Top
             name = name[1:]
@@ -906,7 +1166,8 @@ class FS(LocalFS):
                 # Correct such that '#/foo' is equivalent
                 # to '#foo'.
                 name = name[1:]
-            name = os.path.join('.', os.path.normpath(name))
+            name = os.path.normpath(os.path.join('.', name))
+            return (name, directory)
         elif not directory:
             directory = self._cwd
         return (os.path.normpath(name), directory)
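For reference, a hedged sketch of how lookup strings are interpreted on their way through _transformPath() and _doLookup() (paths invented): a leading '#' anchors the name at the top-level directory, '#/foo' is treated like '#foo', and anything else is resolved against the given directory or the current SConscript directory.

    fs = FS('/home/user/project')
    fs.File('#src/main.c')     # -> /home/user/project/src/main.c
    fs.Dir('#/build')          # same as fs.Dir('#build')
    fs.File('src/main.c')      # resolved relative to fs.getcwd()
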
@@ -916,7 +1177,6 @@ class FS(LocalFS):
         If change_os_dir is true, we will also change the "real" cwd
         to match.
         """
-        self.__setTopLevelDir()
         curr=self._cwd
         try:
             if not dir is None:
@@ -943,7 +1203,7 @@ class FS(LocalFS):
         else:
             if directory and not isinstance(directory, Dir):
                 directory = self.Dir(directory)
-            name, directory = self.__transformPath(name, directory)
+            name, directory = self._transformPath(name, directory)
             return self._doLookup(klass, name, directory, create)
     
     def File(self, name, directory = None, create = 1):
@@ -956,7 +1216,6 @@ class FS(LocalFS):
         This method will raise TypeError if a directory is found at the
         specified path.
         """
-
         return self.Entry(name, directory, create, File)
     
     def Dir(self, name, directory = None, create = 1):
@@ -969,20 +1228,16 @@ class FS(LocalFS):
         This method will raise TypeError if a normal file is found at the
         specified path.
         """
-
         return self.Entry(name, directory, create, Dir)
     
     def BuildDir(self, build_dir, src_dir, duplicate=1):
         """Link the supplied build directory to the source directory
         for purposes of building files."""
         
-        self.__setTopLevelDir()
         if not isinstance(src_dir, SCons.Node.Node):
             src_dir = self.Dir(src_dir)
         if not isinstance(build_dir, SCons.Node.Node):
             build_dir = self.Dir(build_dir)
-        if not src_dir.is_under(self.Top):
-            raise SCons.Errors.UserError, "Source directory must be under top of build tree."
         if src_dir.is_under(build_dir):
             raise SCons.Errors.UserError, "Source directory cannot be under build directory."
         if build_dir.srcdir:
@@ -996,134 +1251,41 @@ class FS(LocalFS):
         for d in dirs:
             if not isinstance(d, SCons.Node.Node):
                 d = self.Dir(d)
-            self.__setTopLevelDir()
             self.Top.addRepository(d)
 
-    def do_Rsearch(self, path, func, clazz=_classEntry, cwd=None, verbose=lambda x: x):
-        """Search for something in a Repository.  Returns the first
-        one found in the list, or None if there isn't one.
-        __cacheable__
-        """
-        if isinstance(path, SCons.Node.Node):
-            return path
-
-        path, dir = self.__transformPath(path, cwd)
-        d, name = os.path.split(path)
-        norm_name = _my_normcase(name)
-        if d:
-            dir = dir.Dir(d)
-        try:
-            node = dir.entries[norm_name]
-        except KeyError:
-            node = dir.node_on_disk(name, clazz)
-        else:
-            node = func(node)
-            if node:
-                dir = node.get_dir()
-        if node:
-            verbose("... FOUND '%s' in '%s'\n" % (name, dir))
-            return node
-        fname = '.'
-        while dir:
-            for rep in dir.getRepositories():
-                rdir = rep.Dir(fname)
-                try:
-                    node = rdir.entries[norm_name]
-                except KeyError:
-                    node = rdir.node_on_disk(name, clazz)
-                else:
-                    node = func(node)
-                if node:
-                    verbose("... FOUND '%s' in '%s'\n" % (name, dir))
-                    return node
-            fname = dir.name + os.sep + fname
-            dir = dir.get_dir()
-        return None
+    def CacheDebugWrite(self, fmt, target, cachefile):
+        self.CacheDebugFP.write(fmt % (target, os.path.split(cachefile)[1]))
 
-    def Rsearch(self, path, clazz=_classEntry, cwd=None):
-        def func(node):
-            if node.exists() and \
-               (isinstance(node, Dir) or not node.is_derived()):
-                   return node
-            return None
-        return self.do_Rsearch(path, func, clazz, cwd)
+    def CacheDebugQuiet(self, fmt, target, cachefile):
+        pass
 
-    def Rsearchall(self, pathlist, must_exist=1, clazz=_classEntry, cwd=None):
-        """Search for a list of somethings in the Repository list.
-        __cacheable__
-        """
-        result = []
-        if SCons.Util.is_String(pathlist):
-            pathlist = string.split(pathlist, os.pathsep)
-        if not SCons.Util.is_List(pathlist):
-            pathlist = [pathlist]
+    CacheDebug = CacheDebugQuiet
 
-        if must_exist:
-            select = lambda x, clazz=clazz: isinstance(x, clazz) and x.exists()
+    def CacheDebugEnable(self, file):
+        if file == '-':
+            self.CacheDebugFP = sys.stdout
         else:
-            select = lambda x, clazz=clazz: isinstance(x, clazz)
-
-        for path in filter(None, pathlist):
-            if isinstance(path, SCons.Node.Node):
-                result.append(path)
-                continue
-
-            path, dir = self.__transformPath(path, cwd)
-            d, name = os.path.split(path)
-            norm_name = _my_normcase(name)
-            if d:
-                dir = dir.Dir(d)
-            try:
-                node = dir.entries[norm_name]
-            except KeyError:
-                # If there's no Node on disk, we'll filter
-                # out the returned None below.
-                if must_exist:
-                    n = dir.node_on_disk(name, clazz)
-                else:
-                    n = self._doLookup(clazz, name, dir)
-                    dir.srcdir_duplicate(name, clazz)
-                result.append(n)
-            else:
-                if not must_exist or node.exists():
-                    result.append(node)
-                if isinstance(node, Dir):
-                    result.extend(filter(select, node.getRepositories()))
-                if node:
-                    dir = node.get_dir()
-            fname = '.'
-            while dir:
-                for rep in dir.getRepositories():
-                    rdir = rep.Dir(fname)
-                    try:
-                        node = rdir.entries[norm_name]
-                    except KeyError:
-                        # If there's no Node on disk, we'll filter
-                        # out the returned None below.
-                        if must_exist:
-                            n = rdir.node_on_disk(name, clazz)
-                        else:
-                            n = self._doLookup(clazz, name, rdir)
-                            rdir.srcdir_duplicate(name, clazz)
-                        result.append(n)
-                    else:
-                        if (not must_exist or node.exists()) and \
-                           (isinstance(node, Dir) or not node.is_derived()):
-                            result.append(node)
-                fname = dir.name + os.sep + fname
-                dir = dir.get_dir()
-
-        return filter(None, result)
+            self.CacheDebugFP = open(file, 'w')
+        self.CacheDebug = self.CacheDebugWrite
 
     def CacheDir(self, path):
-        self.CachePath = path
+        try:
+            import SCons.Sig.MD5
+        except ImportError:
+            msg = "No MD5 module available, CacheDir() not supported"
+            SCons.Warnings.warn(SCons.Warnings.NoMD5ModuleWarning, msg)
+        else:
+            self.CachePath = path
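A hedged usage sketch of the FS-level cache hooks defined above (the cache path and log file name are invented); CacheDebugEnable() is what a --cache-debug style option would call to route the CacheRetrieve/CachePush trace lines:

    fs = FS()
    fs.CacheDir('/var/cache/scons')    # warns and stays disabled without MD5
    fs.CacheDebugEnable('-')           # '-' sends the trace to sys.stdout
    # or: fs.CacheDebugEnable('/tmp/cache-debug.log')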
 
     def build_dir_target_climb(self, orig, dir, tail):
         """Create targets in corresponding build directories
 
         Climb the directory tree, and look up path names
         relative to any linked build directories we find.
-        __cacheable__
+
+        Even though this loops and walks up the tree, we don't memoize
+        the return value because this is really only used to process
+        the command-line targets.
         """
         targets = []
         message = None
@@ -1142,11 +1304,21 @@ class FS(LocalFS):
             message = fmt % string.join(map(str, targets))
         return targets, message
 
+class DirNodeInfo(SCons.Node.NodeInfoBase):
+    pass
+
+class DirBuildInfo(SCons.Node.BuildInfoBase):
+    pass
 
 class Dir(Base):
     """A class for directories in a file system.
     """
 
+    memoizer_counters = []
+
+    NodeInfo = DirNodeInfo
+    BuildInfo = DirBuildInfo
+
     def __init__(self, name, directory, fs):
         if __debug__: logInstanceCreation(self, 'Node.FS.Dir')
         Base.__init__(self, name, directory, fs)
@@ -1159,7 +1331,7 @@ class Dir(Base):
         Set up this directory's entries and hook it into the file
         system tree.  Specify that directories (this Node) don't use
         signatures for calculating whether they're current.
-        __cache_reset__"""
+        """
 
         self.repositories = []
         self.srcdir = None
@@ -1168,13 +1340,19 @@ class Dir(Base):
         self.entries['.'] = self
         self.entries['..'] = self.dir
         self.cwd = self
-        self.builder = get_MkdirBuilder()
         self.searched = 0
         self._sconsign = None
         self.build_dirs = []
 
-    def disambiguate(self):
-        return self
+        # Don't just reset the executor; replace its action list,
+        # because it might have some pre- or post-actions that need
+        # to be preserved.
+        self.builder = get_MkdirBuilder()
+        self.get_executor().set_action_list(self.builder.action)
+
+    def diskcheck_match(self):
+        diskcheck_match(self, self.isfile,
+                        "File %s found where directory expected.")
 
     def __clearRepositoryCache(self, duplicate=None):
         """Called when we change the repository(ies) for a directory.
@@ -1219,57 +1397,113 @@ class Dir(Base):
         srcdir.build_dirs.append(self)
 
     def getRepositories(self):
-        """Returns a list of repositories for this directory."""
+        """Returns a list of repositories for this directory.
+        """
         if self.srcdir and not self.duplicate:
-            try:
-                return self._srcreps
-            except AttributeError:
-                self._srcreps = self.fs.Rsearchall(self.srcdir.path,
-                                                   clazz=Dir,
-                                                   must_exist=0,
-                                                   cwd=self.fs.Top) \
-                                + self.repositories
-                return self._srcreps
+            return self.srcdir.get_all_rdirs() + self.repositories
         return self.repositories
 
-    def addRepository(self, dir):
-        if not dir in self.repositories and dir != self:
-            self.repositories.append(dir)
-            self.__clearRepositoryCache()
+    memoizer_counters.append(SCons.Memoize.CountValue('get_all_rdirs'))
 
-    def up(self):
-        return self.entries['..']
+    def get_all_rdirs(self):
+        try:
+            return self._memo['get_all_rdirs']
+        except KeyError:
+            pass
+
+        result = [self]
+        fname = '.'
+        dir = self
+        while dir:
+            for rep in dir.getRepositories():
+                result.append(rep.Dir(fname))
+            fname = dir.name + os.sep + fname
+            dir = dir.up()
+
+        self._memo['get_all_rdirs'] = result
+
+        return result
+
+    def addRepository(self, dir):
+        if dir != self and not dir in self.repositories:
+            self.repositories.append(dir)
+            dir.tpath = '.'
+            self.__clearRepositoryCache()
+
+    def up(self):
+        return self.entries['..']
+
+    def _rel_path_key(self, other):
+        return str(other)
+
+    memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key))
+
+    def rel_path(self, other):
+        """Return a path to "other" relative to this directory.
+        """
+        try:
+            memo_dict = self._memo['rel_path']
+        except KeyError:
+            memo_dict = {}
+            self._memo['rel_path'] = memo_dict
+        else:
+            try:
+                return memo_dict[other]
+            except KeyError:
+                pass
+
+        if self is other:
+
+            result = '.'
+
+        elif not other in self.path_elements:
+
+            try:
+                other_dir = other.get_dir()
+            except AttributeError:
+                result = str(other)
+            else:
+                if other_dir is None:
+                    result = other.name
+                else:
+                    dir_rel_path = self.rel_path(other_dir)
+                    if dir_rel_path == '.':
+                        result = other.name
+                    else:
+                        result = dir_rel_path + os.sep + other.name
 
-    def root(self):
-        if not self.entries['..']:
-            return self
         else:
-            return self.entries['..'].root()
 
-    def scan(self):
-        if not self.implicit is None:
-            return
-        self.implicit = []
-        self.implicit_dict = {}
-        self._children_reset()
+            i = self.path_elements.index(other) + 1
+
+            path_elems = ['..'] * (len(self.path_elements) - i) \
+                         + map(lambda n: n.name, other.path_elements[i:])
+             
+            result = string.join(path_elems, os.sep)
 
-        dont_scan = lambda k: k not in ['.', '..', '.sconsign']
-        deps = filter(dont_scan, self.entries.keys())
-        # keys() is going to give back the entries in an internal,
-        # unsorted order.  Sort 'em so the order is deterministic.
-        deps.sort()
-        entries = map(lambda n, e=self.entries: e[n], deps)
+        memo_dict[other] = result
 
-        self._add_child(self.implicit, self.implicit_dict, entries)
+        return result
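Worked examples of rel_path() on a POSIX platform, assuming nodes looked up from the same FS (paths invented):

    fs = FS('/work/project')
    engine = fs.Dir('src/engine')
    engine.rel_path(fs.File('src/engine/SCons/Node/FS.py'))  # 'SCons/Node/FS.py'
    engine.rel_path(fs.Dir('src'))                           # '..'
    engine.rel_path(fs.File('doc/man/scons.1'))              # '../../doc/man/scons.1'
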
+
+    def get_env_scanner(self, env, kw={}):
+        import SCons.Defaults
+        return SCons.Defaults.DirEntryScanner
+
+    def get_target_scanner(self):
+        import SCons.Defaults
+        return SCons.Defaults.DirEntryScanner
 
     def get_found_includes(self, env, scanner, path):
-        """Return the included implicit dependencies in this file.
-        Cache results so we only scan the file once per path
-        regardless of how many times this information is requested.
-        __cacheable__"""
+        """Return this directory's implicit dependencies.
+
+        We don't bother caching the results because the scan typically
+        shouldn't be requested more than once (as opposed to scanning
+        .h file contents, which can be requested as many times as the
+        file is #included by other files).
+        """
         if not scanner:
             return []
-        # Clear cached info for this Node.  If we already visited this
+        # Clear cached info for this Dir.  If we already visited this
         # directory on our walk down the tree (because we didn't know at
         # that point it was being used as the source for another Node)
         # then we may have calculated build signature before realizing
@@ -1286,6 +1520,36 @@ class Dir(Base):
         if not self.builder is MkdirBuilder:
             apply(SCons.Node.Node.build, [self,], kw)
 
+    def _create(self):
+        """Create this directory, silently and without worrying about
+        whether the builder is the default or not."""
+        listDirs = []
+        parent = self
+        while parent:
+            if parent.exists():
+                break
+            listDirs.append(parent)
+            p = parent.up()
+            if p is None:
+                raise SCons.Errors.StopError, parent.path
+            parent = p
+        listDirs.reverse()
+        for dirnode in listDirs:
+            try:
+                # Don't call dirnode.build(), call the base Node method
+                # directly because we definitely *must* create this
+                # directory.  The dirnode.build() method will suppress
+                # the build if it's the default builder.
+                SCons.Node.Node.build(dirnode)
+                dirnode.get_executor().nullify()
+                # The build() action may or may not have actually
+                # created the directory, depending on whether the -n
+                # option was used or not.  Delete the _exists and
+                # _rexists attributes so they can be reevaluated.
+                dirnode.clear()
+            except OSError:
+                pass
+
     def multiple_side_effect_has_builder(self):
         global MkdirBuilder
         return not self.builder is MkdirBuilder and self.has_builder()
@@ -1301,10 +1565,8 @@ class Dir(Base):
 
     def get_contents(self):
         """Return aggregate contents of all our children."""
-        contents = cStringIO.StringIO()
-        for kid in self.children():
-            contents.write(kid.get_contents())
-        return contents.getvalue()
+        contents = map(lambda n: n.get_contents(), self.children())
+        return string.join(contents, '')
 
     def prepare(self):
         pass
@@ -1313,29 +1575,26 @@ class Dir(Base):
         pass
 
     def current(self, calc=None):
-        """If all of our children were up-to-date, then this
-        directory was up-to-date, too."""
+        """If any child is not up-to-date, then this directory isn't,
+        either."""
         if not self.builder is MkdirBuilder and not self.exists():
             return 0
-        state = 0
+        up_to_date = SCons.Node.up_to_date
         for kid in self.children():
-            s = kid.get_state()
-            if s and (not state or s > state):
-                state = s
-        import SCons.Node
-        if state == 0 or state == SCons.Node.up_to_date:
-            return 1
-        else:
-            return 0
+            if kid.get_state() > up_to_date:
+                return 0
+        return 1
 
     def rdir(self):
-        "__cacheable__"
-        rdir = self
         if not self.exists():
-            n = self.fs.Rsearch(self.path, clazz=Dir, cwd=self.fs.Top)
-            if n:
-                rdir = n
-        return rdir
+            norm_name = _my_normcase(self.name)
+            for dir in self.dir.get_all_rdirs():
+                try: node = dir.entries[norm_name]
+                except KeyError: node = dir.dir_on_disk(self.name)
+                if node and node.exists() and \
+                    (isinstance(dir, Dir) or isinstance(dir, Entry)):
+                        return node
+        return self
 
     def sconsign(self):
         """Return the .sconsign file info for this directory,
@@ -1366,50 +1625,130 @@ class Dir(Base):
     def entry_path(self, name):
         return self.path + os.sep + name
 
+    def entry_tpath(self, name):
+        return self.tpath + os.sep + name
+
     def must_be_a_Dir(self):
         """Called to make sure a Node is a Dir.  Since we're already
         one, this is a no-op for us."""
         return self
 
     def entry_exists_on_disk(self, name):
-        """__cacheable__"""
-        return self.fs.exists(self.entry_abspath(name))
+        try:
+            d = self.on_disk_entries
+        except AttributeError:
+            d = {}
+            try:
+                entries = os.listdir(self.abspath)
+            except OSError:
+                pass
+            else:
+                for entry in map(_my_normcase, entries):
+                    d[entry] = 1
+            self.on_disk_entries = d
+        return d.has_key(_my_normcase(name))
+
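The entry_exists_on_disk() change above trades repeated stat calls for one os.listdir() cached as a case-normalized membership table on the node. A standalone sketch of the same idea (os.path.normcase standing in for _my_normcase):

    import os

    class DirListingCache:
        def __init__(self, abspath):
            self.abspath = abspath
        def entry_exists_on_disk(self, name):
            try:
                d = self._on_disk_entries
            except AttributeError:
                # List the directory once; later lookups are dict hits.
                d = {}
                try:
                    entries = os.listdir(self.abspath)
                except OSError:
                    pass
                else:
                    for entry in entries:
                        d[os.path.normcase(entry)] = 1
                self._on_disk_entries = d
            return os.path.normcase(name) in d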
+    memoizer_counters.append(SCons.Memoize.CountValue('srcdir_list'))
 
-    def rcs_on_disk(self, name):
-        rcspath = 'RCS' + os.sep + name+',v'
-        return self.entry_exists_on_disk(rcspath)
+    def srcdir_list(self):
+        try:
+            return self._memo['srcdir_list']
+        except KeyError:
+            pass
 
-    def sccs_on_disk(self, name):
-        sccspath = 'SCCS' + os.sep + 's.'+name
-        return self.entry_exists_on_disk(sccspath)
+        result = []
 
-    def srcdir_duplicate(self, name, clazz):
-        dname = '.'
+        dirname = '.'
         dir = self
         while dir:
             if dir.srcdir:
-                srcdir = dir.srcdir.Dir(dname)
-                if srcdir.entry_exists_on_disk(name):
-                    srcnode = self.fs._doLookup(clazz, name, srcdir)
-                    if self.duplicate:
-                        node = self.fs._doLookup(clazz, name, self)
-                        node.do_duplicate(srcnode)
-                        return node
-                    else:
-                        return srcnode
-            dname = dir.name + os.sep + dname
-            dir = dir.get_dir()
+                d = dir.srcdir.Dir(dirname)
+                if d.is_under(dir):
+                    # Shouldn't source from something in the build path:
+                    # build_dir is probably under src_dir, in which case
+                    # we are reflecting.
+                    break
+                result.append(d)
+            dirname = dir.name + os.sep + dirname
+            dir = dir.up()
+
+        self._memo['srcdir_list'] = result
+
+        return result
+
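srcdir_list() is the first full example of the memoization idiom this revision moves to: results live in a per-node self._memo dictionary keyed by method name, with SCons.Memoize used here only to register hit/miss counters. A minimal sketch of the pattern without the counters:

    class MemoExample:
        def __init__(self):
            self._memo = {}     # one cache dictionary per instance

        def expensive_list(self):
            try:
                return self._memo['expensive_list']
            except KeyError:
                pass
            result = [n * n for n in range(10)]   # stand-in for the real work
            self._memo['expensive_list'] = result
            return result

        def clear_memoized_values(self):
            # Invalidation is just dropping the dictionary; the
            # clear_memoized_values() calls elsewhere in this diff play
            # the same role for real nodes.
            self._memo = {}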
+    def srcdir_duplicate(self, name):
+        for dir in self.srcdir_list():
+            if dir.entry_exists_on_disk(name):
+                srcnode = dir.File(name)
+                if self.duplicate:
+                    node = self.File(name)
+                    node.do_duplicate(srcnode)
+                    return node
+                else:
+                    return srcnode
         return None
 
-    def node_on_disk(self, name, clazz):
-        if self.entry_exists_on_disk(name) or \
-           self.sccs_on_disk(name) or \
-           self.rcs_on_disk(name):
+    def _srcdir_find_file_key(self, filename):
+        return filename
+
+    memoizer_counters.append(SCons.Memoize.CountDict('srcdir_find_file', _srcdir_find_file_key))
+
+    def srcdir_find_file(self, filename):
+        try:
+            memo_dict = self._memo['srcdir_find_file']
+        except KeyError:
+            memo_dict = {}
+            self._memo['srcdir_find_file'] = memo_dict
+        else:
             try:
-                return self.fs._doLookup(clazz, name, self)
-            except TypeError:
+                return memo_dict[filename]
+            except KeyError:
                 pass
-        return self.srcdir_duplicate(name, clazz)
+
+        def func(node):
+            if (isinstance(node, File) or isinstance(node, Entry)) and \
+               (node.is_derived() or node.is_pseudo_derived() or node.exists()):
+                    return node
+            return None
+
+        norm_name = _my_normcase(filename)
+
+        for rdir in self.get_all_rdirs():
+            try: node = rdir.entries[norm_name]
+            except KeyError: node = rdir.file_on_disk(filename)
+            else: node = func(node)
+            if node:
+                result = (node, self)
+                memo_dict[filename] = result
+                return result
+
+        for srcdir in self.srcdir_list():
+            for rdir in srcdir.get_all_rdirs():
+                try: node = rdir.entries[norm_name]
+                except KeyError: node = rdir.file_on_disk(filename)
+                else: node = func(node)
+                if node:
+                    result = (File(filename, self, self.fs), srcdir)
+                    memo_dict[filename] = result
+                    return result
+
+        result = (None, None)
+        memo_dict[filename] = result
+        return result
+
+    def dir_on_disk(self, name):
+        if self.entry_exists_on_disk(name):
+            try: return self.Dir(name)
+            except TypeError: pass
+        return None
+
+    def file_on_disk(self, name):
+        if self.entry_exists_on_disk(name) or \
+           diskcheck_rcs(self, name) or \
+           diskcheck_sccs(self, name):
+            try: return self.File(name)
+            except TypeError: pass
+        return self.srcdir_duplicate(name)
 
 class RootDir(Dir):
     """A class for the root directory of a file system.
@@ -1419,30 +1758,139 @@ class RootDir(Dir):
     add a separator when creating the path names of entries within
     this directory.
     """
-    def __init__(self, name, directory, fs):
+    def __init__(self, name, fs):
         if __debug__: logInstanceCreation(self, 'Node.FS.RootDir')
-        Base.__init__(self, name, directory, fs)
-        self.path = self.path + os.sep
-        self.abspath = self.abspath + os.sep
+        # We're going to be our own parent directory (".." entry and .dir
+        # attribute) so we have to set up some values so Base.__init__()
+        # won't gag when it calls some of our methods.
+        self.abspath = ''
+        self.path = ''
+        self.tpath = ''
+        self.path_elements = []
+        self.duplicate = 0
+        Base.__init__(self, name, self, fs)
+
+        # Now set our paths to what we really want them to be: the
+        # initial drive letter (the name) plus the directory separator.
+        self.abspath = name + os.sep
+        self.path = name + os.sep
+        self.tpath = name + os.sep
         self._morph()
 
+    def __str__(self):
+        return self.abspath
+
     def entry_abspath(self, name):
         return self.abspath + name
 
     def entry_path(self, name):
         return self.path + name
 
-class BuildInfo:
-    bsig = None
-    def __cmp__(self, other):
-        try:
-            return cmp(self.bsig, other.bsig)
-        except AttributeError:
+    def entry_tpath(self, name):
+        return self.tpath + name
+
+    def is_under(self, dir):
+        if self is dir:
             return 1
+        else:
+            return 0
+
+    def up(self):
+        return None
+
+    def get_dir(self):
+        return None
+
+    def src_builder(self):
+        return _null
+
+class FileNodeInfo(SCons.Node.NodeInfoBase):
+    def __init__(self, node):
+        SCons.Node.NodeInfoBase.__init__(self, node)
+        self.update(node)
+    def __cmp__(self, other):
+        try: return cmp(self.bsig, other.bsig)
+        except AttributeError: return 1
+    def update(self, node):
+        self.timestamp = node.get_timestamp()
+        self.size = node.getsize()
+
+class FileBuildInfo(SCons.Node.BuildInfoBase):
+    def __init__(self, node):
+        SCons.Node.BuildInfoBase.__init__(self, node)
+        self.node = node
+    def convert_to_sconsign(self):
+        """Convert this FileBuildInfo object for writing to a .sconsign file
+
+        We hang onto the node that we refer to so that we can translate
+        the lists of bsources, bdepends and bimplicit Nodes into strings
+        relative to the node, but we don't want to write out that Node
+        itself to the .sconsign file, so we delete the attribute in
+        preparation.
+        """
+        rel_path = self.node.rel_path
+        delattr(self, 'node')
+        for attr in ['bsources', 'bdepends', 'bimplicit']:
+            try:
+                val = getattr(self, attr)
+            except AttributeError:
+                pass
+            else:
+                setattr(self, attr, map(rel_path, val))
+    def convert_from_sconsign(self, dir, name):
+        """Convert a newly-read FileBuildInfo object for in-SCons use
+
+        An on-disk BuildInfo comes without a reference to the node for
+        which it's intended, so we have to convert the arguments and add
+        back a self.node attribute.  We don't worry here about converting
+        the bsources, bdepends and bimplicit lists from strings to Nodes
+        because they're not used in the normal case of just deciding
+        whether or not to rebuild things.
+        """
+        self.node = dir.Entry(name)
+    def prepare_dependencies(self):
+        """Prepare a FileBuildInfo object for explaining what changed
+
+        The bsources, bdepends and bimplicit lists have all been stored
+        on disk as paths relative to the Node for which they're stored
+        as dependency info.  Convert the strings to actual Nodes (for
+        use by the --debug=explain code and --implicit-cache).
+        """
+        Entry_func = self.node.dir.Entry
+        for attr in ['bsources', 'bdepends', 'bimplicit']:
+            try:
+                val = getattr(self, attr)
+            except AttributeError:
+                pass
+            else:
+                setattr(self, attr, map(Entry_func, val))
+    def format(self):
+        result = [ self.ninfo.format() ]
+        bkids = self.bsources + self.bdepends + self.bimplicit
+        bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs
+        for i in xrange(len(bkids)):
+            result.append(str(bkids[i]) + ': ' + bkidsigs[i].format())
+        return string.join(result, '\n')
+
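convert_to_sconsign() and prepare_dependencies() above form a round trip: dependency Nodes are flattened to path strings relative to the target before the .sconsign write, and re-resolved to Nodes only when --debug=explain or --implicit-cache needs them. A simplified sketch of that round trip with plain POSIX-style paths (os.path.relpath and os.path.join standing in for node.rel_path and dir.Entry):

    import os

    def to_sconsign(target_dir, dep_paths):
        # Store dependencies relative to the directory the target lives in.
        return [os.path.relpath(p, target_dir) for p in dep_paths]

    def from_sconsign(target_dir, rel_paths):
        # Re-anchor the stored strings when they are needed as paths again.
        return [os.path.normpath(os.path.join(target_dir, p)) for p in rel_paths]

    deps = ['/src/lib/foo.h', '/src/lib/sub/bar.h']
    stored = to_sconsign('/src/lib', deps)        # ['foo.h', 'sub/bar.h']
    restored = from_sconsign('/src/lib', stored)  # back to the original paths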
+class NodeInfo(FileNodeInfo):
+    pass
+
+class BuildInfo(FileBuildInfo):
+    pass
 
 class File(Base):
     """A class for files in a file system.
     """
+
+    memoizer_counters = []
+
+    NodeInfo = FileNodeInfo
+    BuildInfo = FileBuildInfo
+
+    def diskcheck_match(self):
+        diskcheck_match(self, self.isdir,
+                        "Directory %s found where file expected.")
+
     def __init__(self, name, directory, fs):
         if __debug__: logInstanceCreation(self, 'Node.FS.File')
         Base.__init__(self, name, directory, fs)
@@ -1458,39 +1906,47 @@ class File(Base):
         the SConscript directory of this file."""
         return self.fs.Dir(name, self.cwd)
 
+    def Dirs(self, pathlist):
+        """Create a list of directories relative to the SConscript
+        directory of this file."""
+        return map(lambda p, s=self: s.Dir(p), pathlist)
+
     def File(self, name):
         """Create a file node named 'name' relative to
         the SConscript directory of this file."""
         return self.fs.File(name, self.cwd)
 
-    def RDirs(self, pathlist):
-        """Search for a list of directories in the Repository list."""
-        return self.fs.Rsearchall(pathlist, clazz=Dir, must_exist=0,
-                                  cwd=self.cwd)
+    #def generate_build_dict(self):
+    #    """Return an appropriate dictionary of values for building
+    #    this File."""
+    #    return {'Dir' : self.Dir,
+    #            'File' : self.File,
+    #            'RDirs' : self.RDirs}
 
     def _morph(self):
-        """Turn a file system node into a File object.  __cache_reset__"""
+        """Turn a file system node into a File object."""
         self.scanner_paths = {}
         if not hasattr(self, '_local'):
             self._local = 0
 
-    def disambiguate(self):
-        return self
-
-    def root(self):
-        return self.dir.root()
-
     def scanner_key(self):
         return self.get_suffix()
 
     def get_contents(self):
         if not self.rexists():
             return ''
-        return open(self.rfile().abspath, "rb").read()
+        fname = self.rfile().abspath
+        try:
+            r = open(fname, "rb").read()
+        except EnvironmentError, e:
+            if not e.filename:
+                e.filename = fname
+            raise
+        return r
 
     def get_timestamp(self):
         if self.rexists():
-            return self.fs.getmtime(self.rfile().abspath)
+            return self.rfile().getmtime()
         else:
             return 0
 
@@ -1500,75 +1956,72 @@ class File(Base):
         # in one build (SConstruct file) is a source in a different build.
         # See test/chained-build.py for the use case.
         entry = self.get_stored_info()
-        for key, val in obj.__dict__.items():
-            entry.__dict__[key] = val
+        entry.merge(obj)
         self.dir.sconsign().set_entry(self.name, entry)
 
     def get_stored_info(self):
-        "__cacheable__"
         try:
             stored = self.dir.sconsign().get_entry(self.name)
         except (KeyError, OSError):
-            return BuildInfo()
+            return self.new_binfo()
         else:
-            if isinstance(stored, BuildInfo):
-                return stored
-            # The stored build information isn't a BuildInfo object.
-            # This probably means it's an old SConsignEntry from SCons
-            # 0.95 or before.  The relevant attribute names are the same,
-            # though, so just copy the attributes over to an object of
-            # the correct type.
-            binfo = BuildInfo()
-            for key, val in stored.__dict__.items():
-                setattr(binfo, key, val)
-            return binfo
+            if not hasattr(stored, 'ninfo'):
+                # Transition:  The .sconsign file entry has no NodeInfo
+                # object, which means it's a slightly older BuildInfo.
+                # Copy over the relevant attributes.
+                ninfo = stored.ninfo = self.new_ninfo()
+                for attr in ninfo.__dict__.keys():
+                    try:
+                        setattr(ninfo, attr, getattr(stored, attr))
+                    except AttributeError:
+                        pass
+            return stored
 
     def get_stored_implicit(self):
         binfo = self.get_stored_info()
-        try:
-            return binfo.bimplicit
-        except AttributeError:
-            return None
+        binfo.prepare_dependencies()
+        try: return binfo.bimplicit
+        except AttributeError: return None
+
+    def rel_path(self, other):
+        return self.dir.rel_path(other)
+
+    def _get_found_includes_key(self, env, scanner, path):
+        return (id(env), id(scanner), path)
+
+    memoizer_counters.append(SCons.Memoize.CountDict('get_found_includes', _get_found_includes_key))
 
     def get_found_includes(self, env, scanner, path):
         """Return the included implicit dependencies in this file.
         Cache results so we only scan the file once per path
         regardless of how many times this information is requested.
-        __cacheable__"""
-        if not scanner:
-            return []
-        return scanner(self, env, path)
+        """
+        memo_key = (id(env), id(scanner), path)
+        try:
+            memo_dict = self._memo['get_found_includes']
+        except KeyError:
+            memo_dict = {}
+            self._memo['get_found_includes'] = memo_dict
+        else:
+            try:
+                return memo_dict[memo_key]
+            except KeyError:
+                pass
+
+        if scanner:
+            result = scanner(self, env, path)
+            result = map(lambda N: N.disambiguate(), result)
+        else:
+            result = []
+
+        memo_dict[memo_key] = result
+
+        return result
 
     def _createDir(self):
         # ensure that the directories for this node are
         # created.
-
-        listDirs = []
-        parent=self.dir
-        while parent:
-            if parent.exists():
-                break
-            listDirs.append(parent)
-            p = parent.up()
-            if isinstance(p, ParentOfRoot):
-                raise SCons.Errors.StopError, parent.path
-            parent = p
-        listDirs.reverse()
-        for dirnode in listDirs:
-            try:
-                # Don't call dirnode.build(), call the base Node method
-                # directly because we definitely *must* create this
-                # directory.  The dirnode.build() method will suppress
-                # the build if it's the default builder.
-                SCons.Node.Node.build(dirnode)
-                dirnode.get_executor().nullify()
-                # The build() action may or may not have actually
-                # created the directory, depending on whether the -n
-                # option was used or not.  Delete the _exists and
-                # _rexists attributes so they can be reevaluated.
-                dirnode.clear()
-            except OSError:
-                pass
+        self.dir._create()
 
     def retrieve_from_cache(self):
         """Try to retrieve the node's content from a cache
@@ -1598,31 +2051,47 @@ class File(Base):
 
         Returns true iff the node was successfully retrieved.
         """
+        if self.nocache:
+            return None
         b = self.is_derived()
         if not b and not self.has_src_builder():
             return None
+
+        retrieved = None
         if b and self.fs.CachePath:
             if self.fs.cache_show:
                 if CacheRetrieveSilent(self, [], None, execute=1) == 0:
                     self.build(presub=0, execute=0)
-                    return 1
-            elif CacheRetrieve(self, [], None, execute=1) == 0:
-                return 1
-        return None
+                    retrieved = 1
+            else:
+                if CacheRetrieve(self, [], None, execute=1) == 0:
+                    retrieved = 1
+            if retrieved:
+                # Record build signature information, but don't
+                # push it out to cache.  (We just got it from there!)
+                self.set_state(SCons.Node.executed)
+                SCons.Node.Node.built(self)
+
+        return retrieved
+
 
     def built(self):
         """Called just after this node is successfully built.
-        __cache_reset__"""
+        """
         # Push this file out to cache before the superclass Node.built()
         # method has a chance to clear the build signature, which it
         # will do if this file has a source scanner.
-        if self.fs.CachePath and self.fs.exists(self.path):
+        #
+        # We have to clear the memoized values *before* we push it to
+        # cache so that the memoization of the self.exists() return
+        # value doesn't interfere.
+        self.clear_memoized_values()
+        if self.fs.CachePath and self.exists():
             CachePush(self, [], None)
-        self.fs.clear_cache()
         SCons.Node.Node.built(self)
 
     def visited(self):
-        if self.fs.CachePath and self.fs.cache_force and self.fs.exists(self.path):
+        if self.fs.CachePath and self.fs.cache_force and self.exists():
             CachePush(self, None, None)
 
     def has_src_builder(self):
@@ -1644,9 +2113,9 @@ class File(Base):
             else:
                 scb = self.dir.src_builder()
                 if scb is _null:
-                    if self.dir.sccs_on_disk(self.name):
+                    if diskcheck_sccs(self.dir, self.name):
                         scb = get_DefaultSCCSBuilder()
-                    elif self.dir.rcs_on_disk(self.name):
+                    elif diskcheck_rcs(self.dir, self.name):
                         scb = get_DefaultRCSBuilder()
                     else:
                         scb = None
@@ -1663,11 +2132,10 @@ class File(Base):
         return self.fs.build_dir_target_climb(self, self.dir, [self.name])
 
     def is_pseudo_derived(self):
-        "__cacheable__"
         return self.has_src_builder()
 
     def _rmv_existing(self):
-        '__cache_reset__'
+        self.clear_memoized_values()
         Unlink(self, [], None)
         
     def prepare(self):
@@ -1687,7 +2155,7 @@ class File(Base):
 
     def remove(self):
         """Remove this file."""
-        if self.fs.exists_or_islink(self.path):
+        if self.exists() or self.islink():
             self.fs.unlink(self.path)
             return 1
         return None
@@ -1710,32 +2178,42 @@ class File(Base):
         # _rexists attributes so they can be reevaluated.
         self.clear()
 
+    memoizer_counters.append(SCons.Memoize.CountValue('exists'))
+
     def exists(self):
-        "__cacheable__"
-        # Duplicate from source path if we are set up to do this.
-        if self.duplicate and not self.is_derived() and not self.linked:
-            src=self.srcnode()
-            if src is self:
-                return Base.exists(self)
-            src = src.rfile()
-            if src.abspath != self.abspath and src.exists():
-                self.do_duplicate(src)
-        return Base.exists(self)
-
-    def new_binfo(self):
-        return BuildInfo()
-
-    def del_cinfo(self):
-        try:
-            del self.binfo.csig
-        except AttributeError:
-            pass
         try:
-            del self.binfo.timestamp
-        except AttributeError:
+            return self._memo['exists']
+        except KeyError:
             pass
+        # Duplicate from source path if we are set up to do this.
+        if self.duplicate and not self.is_derived() and not self.linked:
+            src = self.srcnode()
+            if not src is self:
+                # At this point, src is meant to be copied into the build directory.
+                src = src.rfile()
+                if src.abspath != self.abspath:
+                    if src.exists():
+                        self.do_duplicate(src)
+                        # Can't return 1 here because the duplication might
+                        # not actually occur if the -n option is being used.
+                    else:
+                        # The source file does not exist.  Make sure no old
+                        # copy remains in the build directory.
+                        if Base.exists(self) or self.islink():
+                            self.fs.unlink(self.path)
+                        # Return None explicitly because the Base.exists() call
+                        # above will have cached its value if the file existed.
+                        self._memo['exists'] = None
+                        return None
+        result = Base.exists(self)
+        self._memo['exists'] = result
+        return result
+
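The exists() rewrite above is what makes build-directory duplication lazy: a missing variant file is copied from its source on first query, and a copy whose source has vanished is deleted. A rough standalone sketch of that policy with plain files, ignoring the duplicate/linked/derived checks and -n handling (the function name and paths are illustrative, not SCons API):

    import os, shutil

    def variant_exists(variant_path, src_path):
        if os.path.exists(src_path):
            if not os.path.exists(variant_path):
                # Duplicate on demand, like do_duplicate() above.
                shutil.copy2(src_path, variant_path)
        else:
            # The source is gone; make sure no stale copy or dangling
            # link lingers in the build directory.
            if os.path.exists(variant_path) or os.path.islink(variant_path):
                os.unlink(variant_path)
            return False
        return os.path.exists(variant_path)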
+    #
+    # SIGNATURE SUBSYSTEM
+    #
 
-    def calc_csig(self, calc=None):
+    def get_csig(self, calc=None):
         """
         Generate a node's content signature, the digested signature
         of its content.
@@ -1744,190 +2222,201 @@ class File(Base):
         cache - alternate node to use for the signature cache
         returns - the content signature
         """
-        if calc is None:
-            calc = self.calculator()
-
         try:
-            return self.binfo.csig
+            return self.binfo.ninfo.csig
         except AttributeError:
             pass
-        
-        if calc.max_drift >= 0:
-            old = self.get_stored_info()
-        else:
-            old = BuildInfo()
 
-        try:
-            mtime = self.get_timestamp()
-        except OSError:
-            mtime = 0
-            raise SCons.Errors.UserError, "no such %s" % self
+        if calc is None:
+            calc = self.calculator()
 
-        try:
-            if (old.timestamp and old.csig and old.timestamp == mtime):
-                # use the signature stored in the .sconsign file
-                csig = old.csig
-            else:
-                csig = calc.module.signature(self)
-        except AttributeError:
-            csig = calc.module.signature(self)
+        max_drift = self.fs.max_drift
+        mtime = self.get_timestamp()
+        use_stored = max_drift >= 0 and (time.time() - mtime) > max_drift
 
-        if calc.max_drift >= 0 and (time.time() - mtime) > calc.max_drift:
+        csig = None
+        if use_stored:
+            old = self.get_stored_info().ninfo
             try:
-                binfo = self.binfo
+                if old.timestamp and old.csig and old.timestamp == mtime:
+                    csig = old.csig
             except AttributeError:
-                binfo = self.binfo = self.new_binfo()
-            binfo.csig = csig
-            binfo.timestamp = mtime
+                pass
+        if csig is None:
+            csig = calc.module.signature(self)
+
+        binfo = self.get_binfo()
+        ninfo = binfo.ninfo
+        ninfo.csig = csig
+        ninfo.update(self)
+
+        if use_stored:
             self.store_info(binfo)
 
         return csig
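The max_drift branch in get_csig() implements the policy noted at the top of the module: only reuse a stored content signature when the recorded timestamp still matches the file's mtime and the file is older than max_drift seconds (two days by default). A small sketch of that decision, with hashlib.md5 standing in for calc.module.signature():

    import os, time, hashlib

    def content_signature(path, stored_csig, stored_timestamp,
                          max_drift=2*24*60*60):
        mtime = os.path.getmtime(path)
        old_enough = max_drift >= 0 and (time.time() - mtime) > max_drift
        if old_enough and stored_csig and stored_timestamp == mtime:
            # Untouched since the last scan and old enough to trust
            # the recorded signature: skip re-reading the file.
            return stored_csig
        with open(path, 'rb') as f:
            return hashlib.md5(f.read()).hexdigest()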
 
+    #
+    #
+    #
+
+    def is_up_to_date(self, node=None, bi=None):
+        """Returns if the node is up-to-date with respect to stored
+        BuildInfo.  The default is to compare it against our own
+        previously stored BuildInfo, but the stored BuildInfo from another
+        Node (typically one in a Repository) can be used instead."""
+        if bi is None:
+            if node is None:
+                node = self
+            bi = node.get_stored_info()
+        new = self.get_binfo()
+        return new == bi
+
     def current(self, calc=None):
         self.binfo = self.gen_binfo(calc)
         return self._cur2()
     def _cur2(self):
-        "__cacheable__"
-        if self.always_build:
-            return None
         if not self.exists():
             # The file doesn't exist locally...
             r = self.rfile()
             if r != self:
                 # ...but there is one in a Repository...
-                old = r.get_stored_info()
-                if old == self.binfo:
+                if self.is_up_to_date(r):
                     # ...and it's even up-to-date...
                     if self._local:
                         # ...and they'd like a local copy.
                         LocalCopy(self, r, None)
-                        self.store_info(self.binfo)
+                        self.store_info(self.get_binfo())
                     return 1
             return None
         else:
-            old = self.get_stored_info()
-            return (old == self.binfo)
+            return self.is_up_to_date()
+
+    memoizer_counters.append(SCons.Memoize.CountValue('rfile'))
 
     def rfile(self):
-        "__cacheable__"
-        rfile = self
+        try:
+            return self._memo['rfile']
+        except KeyError:
+            pass
+        result = self
         if not self.exists():
-            n = self.fs.Rsearch(self.path, clazz=File,
-                                cwd=self.fs.Top)
-            if n:
-                rfile = n
-        return rfile
+            norm_name = _my_normcase(self.name)
+            for dir in self.dir.get_all_rdirs():
+                try: node = dir.entries[norm_name]
+                except KeyError: node = dir.file_on_disk(self.name)
+                if node and node.exists() and \
+                   (isinstance(node, File) or isinstance(node, Entry) \
+                    or not node.is_derived()):
+                        result = node
+                        break
+        self._memo['rfile'] = result
+        return result
 
     def rstr(self):
         return str(self.rfile())
 
     def cachepath(self):
-        if not self.fs.CachePath:
+        if self.nocache or not self.fs.CachePath:
             return None, None
-        if self.binfo.bsig is None:
+        ninfo = self.get_binfo().ninfo
+        if not hasattr(ninfo, 'bsig'):
+            import SCons.Errors
+            raise SCons.Errors.InternalError, "cachepath(%s) found no bsig" % self.path
+        elif ninfo.bsig is None:
+            import SCons.Errors
             raise SCons.Errors.InternalError, "cachepath(%s) found a bsig of None" % self.path
         # Add the path to the cache signature, because multiple
         # targets built by the same action will all have the same
         # build signature, and we have to differentiate them somehow.
-        cache_sig = SCons.Sig.MD5.collect([self.binfo.bsig, self.path])
+        import SCons.Sig.MD5
+        cache_sig = SCons.Sig.MD5.collect([ninfo.bsig, self.path])
         subdir = string.upper(cache_sig[0])
         dir = os.path.join(self.fs.CachePath, subdir)
         return dir, os.path.join(dir, cache_sig)
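The cache layout computed above: hash the build signature together with the target path, use the first hex digit (uppercased) as a subdirectory of CachePath, and store the file under the full digest. A sketch of that layout, assuming SCons.Sig.MD5.collect() reduces its inputs to one MD5 hex digest (hashlib used as a stand-in; the exact way bsig and path are mixed is illustrative):

    import os, hashlib

    def cache_path(cache_root, bsig, target_path):
        # Mixing in the target path keeps multiple targets of one action
        # (which share a bsig) from landing on the same cache file.
        data = (bsig + ', ' + target_path).encode('utf-8')
        cache_sig = hashlib.md5(data).hexdigest()
        subdir = cache_sig[0].upper()
        cache_dir = os.path.join(cache_root, subdir)
        return cache_dir, os.path.join(cache_dir, cache_sig)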
 
-    def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext):
-        return self.dir.File(prefix + splitext(self.name)[0] + suffix)
-
     def must_be_a_Dir(self):
         """Called to make sure a Node is a Dir.  Since we're already a
         File, this is a TypeError..."""
         raise TypeError, "Tried to lookup File '%s' as a Dir." % self.path
 
-default_fs = FS()
+default_fs = None
 
-def find_file(filename, paths, node_factory=default_fs.File, verbose=None):
+class FileFinder:
     """
-    find_file(str, [Dir()]) -> [nodes]
-
-    filename - a filename to find
-    paths - a list of directory path *nodes* to search in.  Can be
-            represented as a list, a tuple, or a callable that is
-            called with no arguments and returns the list or tuple.
-
-    returns - the node created from the found file.
-
-    Find a node corresponding to either a derived file or a file
-    that exists already.
-
-    Only the first file found is returned, and none is returned
-    if no file is found.
     """
-    if verbose:
-        if not SCons.Util.is_String(verbose):
-            verbose = "find_file"
-        if not callable(verbose):
-            verbose = '  %s: ' % verbose
-            verbose = lambda s, v=verbose: sys.stdout.write(v + s)
-    else:
-        verbose = lambda x: x
+    if SCons.Memoize.use_memoizer:
+        __metaclass__ = SCons.Memoize.Memoized_Metaclass
 
-    filedir, filename = os.path.split(filename)
-    if filedir:
-        lookup_dir = lambda d, fd=filedir: d.Dir(fd)
-    else:
-        lookup_dir = lambda d: d
+    memoizer_counters = []
 
-    if callable(paths):
-        paths = paths()
+    def __init__(self):
+        self._memo = {}
 
-    # Give Entries a chance to morph into Dirs.
-    paths = map(lambda p: p.must_be_a_Dir(), paths)
+    def _find_file_key(self, filename, paths, verbose=None):
+        return (filename, paths)
+        
+    memoizer_counters.append(SCons.Memoize.CountDict('find_file', _find_file_key))
 
-    for pathdir in paths:
-        verbose("looking for '%s' in '%s' ...\n" % (filename, pathdir))
-        dir = lookup_dir(pathdir)
-        def func(node):
-            if isinstance(node, SCons.Node.FS.File) and \
-               (node.is_derived() or node.is_pseudo_derived() or node.exists()):
-                    return node
-            return None
+    def find_file(self, filename, paths, verbose=None):
+        """
+        find_file(str, [Dir()]) -> node
 
-        node = default_fs.do_Rsearch(filename, func, File, dir, verbose)
-        if node:
-            return node
+        filename - a filename to find
+        paths - a list of directory path *nodes* to search in.  Can be
+                represented as a list, a tuple, or a callable that is
+                called with no arguments and returns the list or tuple.
 
-        dirname = '.'
-        while dir:
-            if dir.srcdir:
-                d = dir.srcdir.Dir(dirname)
-                if d.is_under(dir):
-                    # Shouldn't source from something in the build path:
-                    # build_dir is probably under src_dir, in which case
-                    # we are reflecting.
-                    break
-                node = dir.fs.do_Rsearch(filename, func, File, d, verbose)
-                if node:
-                    return File(filename, dir.Dir(dirname), dir.fs)
-            dirname = dir.name + os.sep + dirname
-            dir = dir.get_dir()
+        returns - the node created from the found file.
 
-    return None
+        Find a node corresponding to either a derived file or a file
+        that exists already.
 
-def find_files(filenames, paths, node_factory = default_fs.File):
-    """
-    find_files([str], [Dir()]) -> [nodes]
+        Only the first file found is returned, and none is returned
+        if no file is found.
+        """
+        memo_key = self._find_file_key(filename, paths)
+        try:
+            memo_dict = self._memo['find_file']
+        except KeyError:
+            memo_dict = {}
+            self._memo['find_file'] = memo_dict
+        else:
+            try:
+                return memo_dict[memo_key]
+            except KeyError:
+                pass
 
-    filenames - a list of filenames to find
-    paths - a list of directory path *nodes* to search in
+        if verbose:
+            if not SCons.Util.is_String(verbose):
+                verbose = "find_file"
+            if not callable(verbose):
+                verbose = '  %s: ' % verbose
+                verbose = lambda s, v=verbose: sys.stdout.write(v + s)
+        else:
+            verbose = lambda x: x
 
-    returns - the nodes created from the found files.
+        filedir, filename = os.path.split(filename)
+        if filedir:
+            def filedir_lookup(p, fd=filedir):
+                try:
+                    return p.Dir(fd)
+                except TypeError:
+                    # We tried to look up a Dir, but it seems there's
+                    # already a File (or something else) there.  No big.
+                    return None
+            paths = filter(None, map(filedir_lookup, paths))
+
+        result = None
+        for dir in paths:
+            verbose("looking for '%s' in '%s' ...\n" % (filename, dir))
+            node, d = dir.srcdir_find_file(filename)
+            if node:
+                verbose("... FOUND '%s' in '%s'\n" % (filename, d))
+                result = node
+                break
 
-    Finds nodes corresponding to either derived files or files
-    that exist already.
+        memo_dict[memo_key] = result
 
-    Only the first file found is returned for each filename,
-    and any files that aren't found are ignored.
-    """
-    nodes = map(lambda x, paths=paths, node_factory=node_factory:
-                       find_file(x, paths, node_factory),
-                filenames)
-    return filter(lambda x: x != None, nodes)
+        return result
+
+find_file = FileFinder().find_file
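When the requested name carries a directory component, find_file() first re-roots the search in the matching subdirectory of each search path (the filedir_lookup step) and then returns the first candidate that exists. A plain os.path sketch of that lookup order, ignoring repositories, build-directory sources and the memoization:

    import os

    def find_file_sketch(filename, paths):
        filedir, base = os.path.split(filename)
        if filedir:
            # 'sub/foo.h' searched in ['/a', '/b'] becomes 'foo.h'
            # searched in ['/a/sub', '/b/sub'], dropping paths where
            # that subdirectory does not exist.
            paths = [os.path.join(p, filedir) for p in paths]
            paths = [p for p in paths if os.path.isdir(p)]
        for p in paths:
            candidate = os.path.join(p, base)
            if os.path.exists(candidate):
                return candidate    # only the first match is returned
        return None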