http://scons.tigris.org/issues/show_bug.cgi?id=2345
[scons.git] / src / engine / SCons / Executor.py
index ffc1ba3831cf7a29145a3b1e0872aeca180896c3..feedf57933c00e436e65e123f45858fabced0051 100644
@@ -27,13 +27,87 @@ Nodes.
 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 #
+from __future__ import generators  ### KEEP FOR COMPATIBILITY FIXERS
 
 __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
 
-import string
+import collections
 
 from SCons.Debug import logInstanceCreation
-import SCons.Util
+import SCons.Errors
+import SCons.Memoize
+
+
+class Batch:
+    """Remembers the exact association between the targets
+    and sources of an Executor."""
+    def __init__(self, targets=[], sources=[]):
+        self.targets = targets
+        self.sources = sources
+
+
+
+class TSList(collections.UserList):
+    """A class that implements $TARGETS or $SOURCES expansions by wrapping
+    an Executor method.  This class is used in the Executor's lvars
+    dictionary to delay creation of NodeList objects until they're needed.
+
+    Note that we subclass collections.UserList purely so that the
+    is_Sequence() function will identify an object of this class as
+    a list during variable expansion.  We're not really using any
+    collections.UserList methods in practice.
+    """
+    def __init__(self, func):
+        self.func = func
+    def __getattr__(self, attr):
+        nl = self.func()
+        return getattr(nl, attr)
+    def __getitem__(self, i):
+        nl = self.func()
+        return nl[i]
+    def __getslice__(self, i, j):
+        nl = self.func()
+        i = max(i, 0); j = max(j, 0)
+        return nl[i:j]
+    def __str__(self):
+        nl = self.func()
+        return str(nl)
+    def __repr__(self):
+        nl = self.func()
+        return repr(nl)
+
+class TSObject:
+    """A class that implements $TARGET or $SOURCE expansions by wrapping
+    an Executor method.
+    """
+    def __init__(self, func):
+        self.func = func
+    def __getattr__(self, attr):
+        n = self.func()
+        return getattr(n, attr)
+    def __str__(self):
+        n = self.func()
+        if n:
+            return str(n)
+        return ''
+    def __repr__(self):
+        n = self.func()
+        if n:
+            return repr(n)
+        return ''
+
+def rfile(node):
+    """
+    A function to return the results of a Node's rfile() method,
+    if it exists, and the Node itself otherwise (if it's a Value
+    Node, e.g.).
+    """
+    try:
+        rfile = node.rfile
+    except AttributeError:
+        return node
+    else:
+        return rfile()
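
The TSList and TSObject wrappers above exist so that $TARGETS/$SOURCES and
$TARGET/$SOURCE are not computed until a command line actually expands them.
A minimal sketch of the delayed-evaluation behavior, not part of the change
itself; FakeNodeList and make_targets are made-up stand-ins for
SCons.Util.NodeList and an Executor method:

    # Illustrative only: TSList re-invokes the wrapped callable on every
    # access, so the data is always recomputed from the Executor's current state.
    class FakeNodeList(list):                  # stand-in for SCons.Util.NodeList
        def file_names(self):
            return [str(n) for n in self]

    def make_targets():                        # stand-in for Executor._get_targets
        return FakeNodeList(['foo.o', 'bar.o'])

    targets = TSList(make_targets)
    print(targets[0])            # 'foo.o' -- __getitem__ calls make_targets() afresh
    print(str(targets))          # "['foo.o', 'bar.o']"
    print(targets.file_names())  # __getattr__ forwards to the freshly built list
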
 
 
 class Executor:
@@ -47,6 +121,8 @@ class Executor:
     if SCons.Memoize.use_memoizer:
         __metaclass__ = SCons.Memoize.Memoized_Metaclass
 
+    memoizer_counters = []
+
     def __init__(self, action, env=None, overridelist=[{}],
                  targets=[], sources=[], builder_kw={}):
         if __debug__: logInstanceCreation(self, 'Executor.Executor')
@@ -55,25 +131,171 @@ class Executor:
         self.post_actions = []
         self.env = env
         self.overridelist = overridelist
-        self.targets = targets
-        self.sources = sources[:]
+        if targets or sources:
+            self.batches = [Batch(targets[:], sources[:])]
+        else:
+            self.batches = []
         self.builder_kw = builder_kw
+        self._memo = {}
+
+    def get_lvars(self):
+        try:
+            return self.lvars
+        except AttributeError:
+            self.lvars = {
+                'CHANGED_SOURCES' : TSList(self._get_changed_sources),
+                'CHANGED_TARGETS' : TSList(self._get_changed_targets),
+                'SOURCE' : TSObject(self._get_source),
+                'SOURCES' : TSList(self._get_sources),
+                'TARGET' : TSObject(self._get_target),
+                'TARGETS' : TSList(self._get_targets),
+                'UNCHANGED_SOURCES' : TSList(self._get_unchanged_sources),
+                'UNCHANGED_TARGETS' : TSList(self._get_unchanged_targets),
+            }
+            return self.lvars
+
+    def _get_changes(self):
+        cs = []
+        ct = []
+        us = []
+        ut = []
+        for b in self.batches:
+            if b.targets[0].is_up_to_date():
+                us.extend(list(map(rfile, b.sources)))
+                ut.extend(b.targets)
+            else:
+                cs.extend(list(map(rfile, b.sources)))
+                ct.extend(b.targets)
+        self._changed_sources_list = SCons.Util.NodeList(cs)
+        self._changed_targets_list = SCons.Util.NodeList(ct)
+        self._unchanged_sources_list = SCons.Util.NodeList(us)
+        self._unchanged_targets_list = SCons.Util.NodeList(ut)
+
+    def _get_changed_sources(self, *args, **kw):
+        try:
+            return self._changed_sources_list
+        except AttributeError:
+            self._get_changes()
+            return self._changed_sources_list
+
+    def _get_changed_targets(self, *args, **kw):
+        try:
+            return self._changed_targets_list
+        except AttributeError:
+            self._get_changes()
+            return self._changed_targets_list
+
+    def _get_source(self, *args, **kw):
+        #return SCons.Util.NodeList([rfile(self.batches[0].sources[0]).get_subst_proxy()])
+        return rfile(self.batches[0].sources[0]).get_subst_proxy()
+
+    def _get_sources(self, *args, **kw):
+        return SCons.Util.NodeList([rfile(n).get_subst_proxy() for n in self.get_all_sources()])
+
+    def _get_target(self, *args, **kw):
+        #return SCons.Util.NodeList([self.batches[0].targets[0].get_subst_proxy()])
+        return self.batches[0].targets[0].get_subst_proxy()
+
+    def _get_targets(self, *args, **kw):
+        return SCons.Util.NodeList([n.get_subst_proxy() for n in self.get_all_targets()])
+
+    def _get_unchanged_sources(self, *args, **kw):
+        try:
+            return self._unchanged_sources_list
+        except AttributeError:
+            self._get_changes()
+            return self._unchanged_sources_list
+
+    def _get_unchanged_targets(self, *args, **kw):
+        try:
+            return self._unchanged_targets_list
+        except AttributeError:
+            self._get_changes()
+            return self._unchanged_targets_list
+
+    def get_action_targets(self):
+        if not self.action_list:
+            return []
+        targets_string = self.action_list[0].get_targets(self.env, self)
+        if targets_string[0] == '$':
+            targets_string = targets_string[1:]
+        return self.get_lvars()[targets_string]
 
     def set_action_list(self, action):
+        import SCons.Util
         if not SCons.Util.is_List(action):
             if not action:
-                raise SCons.Errors.UserError, "Executor must have an action."
+                import SCons.Errors
+                raise SCons.Errors.UserError("Executor must have an action.")
             action = [action]
         self.action_list = action
 
     def get_action_list(self):
         return self.pre_actions + self.action_list + self.post_actions
 
+    def get_all_targets(self):
+        """Returns all targets for all batches of this Executor."""
+        result = []
+        for batch in self.batches:
+            # TODO(1.5):  remove the list() cast
+            result.extend(list(batch.targets))
+        return result
+
+    def get_all_sources(self):
+        """Returns all sources for all batches of this Executor."""
+        result = []
+        for batch in self.batches:
+            # TODO(1.5):  remove the list() cast
+            result.extend(list(batch.sources))
+        return result
+
+    def get_all_children(self):
+        """Returns all unique children (dependencies) for all batches
+        of this Executor.
+
+        The Taskmaster can recognize when it's already evaluated a
+        Node, so we don't have to make this list unique for its intended
+        canonical use case, but we expect there to be a lot of redundancy
+        (long lists of batched .cc files #including the same .h files
+        over and over), so removing the duplicates once up front should
+        save the Taskmaster a lot of work.
+        """
+        result = SCons.Util.UniqueList([])
+        for target in self.get_all_targets():
+            result.extend(target.children())
+        return result
+
+    def get_all_prerequisites(self):
+        """Returns all unique (order-only) prerequisites for all batches
+        of this Executor.
+        """
+        result = SCons.Util.UniqueList([])
+        for target in self.get_all_targets():
+            # TODO(1.5):  remove the list() cast
+            result.extend(list(target.prerequisites))
+        return result
+
+    def get_action_side_effects(self):
+        """Returns the side effects of all targets used by the underlying
+        Action, across all batches of this Executor.
+        """
+        result = SCons.Util.UniqueList([])
+        for target in self.get_action_targets():
+            result.extend(target.side_effects)
+        return result
+
+    memoizer_counters.append(SCons.Memoize.CountValue('get_build_env'))
+
     def get_build_env(self):
         """Fetch or create the appropriate build Environment
         for this Executor.
-        __cacheable__
         """
+        try:
+            return self._memo['get_build_env']
+        except KeyError:
+            pass
+
         # Create the build environment instance with appropriate
         # overrides.  These get evaluated against the current
         # environment's construction variables so that users can
@@ -87,53 +309,92 @@ class Executor:
         env = self.env or SCons.Defaults.DefaultEnvironment()
         build_env = env.Override(overrides)
 
+        self._memo['get_build_env'] = build_env
+
         return build_env
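
The old __cacheable__ docstring markers are replaced by an explicit cache:
each memoized method stores its result in self._memo under its own name, and
cleanup() drops the whole dictionary. A stripped-down sketch of the same
idiom outside SCons (class and method names are illustrative):

    class Cached(object):
        def __init__(self):
            self._memo = {}
        def expensive(self):
            try:
                return self._memo['expensive']   # cache hit
            except KeyError:
                pass
            result = sum(range(1000000))         # stand-in for the real work
            self._memo['expensive'] = result     # cache the result by method name
            return result
        def cleanup(self):
            self._memo = {}                      # invalidate, like Executor.cleanup()

    c = Cached()
    assert c.expensive() == c.expensive()        # second call is served from _memo
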
 
     def get_build_scanner_path(self, scanner):
-        """Fetch the scanner path for this executor's targets
-        and sources.
+        """Fetch the scanner path for this executor's targets and sources.
         """
         env = self.get_build_env()
         try:
-            cwd = self.targets[0].cwd
+            cwd = self.batches[0].targets[0].cwd
         except (IndexError, AttributeError):
             cwd = None
-        return scanner.path(env, cwd, self.targets, self.sources)
+        return scanner.path(env, cwd,
+                            self.get_all_targets(),
+                            self.get_all_sources())
 
     def get_kw(self, kw={}):
         result = self.builder_kw.copy()
         result.update(kw)
+        result['executor'] = self
         return result
 
-    def do_nothing(self, target, exitstatfunc, kw):
-        pass
+    def do_nothing(self, target, kw):
+        return 0
 
-    def do_execute(self, target, exitstatfunc, kw):
+    def do_execute(self, target, kw):
         """Actually execute the action list."""
         env = self.get_build_env()
         kw = self.get_kw(kw)
+        status = 0
         for act in self.get_action_list():
-            apply(act,
-                  (self.targets, self.sources, env, exitstatfunc),
-                  kw)
+            #args = (self.get_all_targets(), self.get_all_sources(), env)
+            args = ([], [], env)
+            status = act(*args, **kw)
+            if isinstance(status, SCons.Errors.BuildError):
+                status.executor = self
+                raise status
+            elif status:
+                msg = "Error %s" % status
+                raise SCons.Errors.BuildError(
+                    errstr=msg, 
+                    node=self.batches[0].targets,
+                    executor=self, 
+                    action=act)
+        return status
 
     # use extra indirection because with new-style objects (Python 2.2
     # and above) we can't override special methods, and nullify() needs
     # to be able to do this.
 
-    def __call__(self, target, exitstatfunc, **kw):
-        self.do_execute(target, exitstatfunc, kw)
+    def __call__(self, target, **kw):
+        return self.do_execute(target, kw)
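
The "extra indirection" comment above refers to a real property of new-style
classes: special methods such as __call__ are looked up on the type, not the
instance, so nullify() could not simply replace __call__ on a single Executor.
A self-contained sketch of why the do_execute indirection is needed (the class
is illustrative, not SCons code):

    class C(object):
        def do_execute(self):
            return 'real work'
        def __call__(self):
            return self.do_execute()

    c = C()
    c.__call__ = lambda: 'ignored'       # per-instance __call__ is never consulted
    print(c())                           # 'real work' -- type(c).__call__ still runs
    c.do_execute = lambda: 'nullified'   # ordinary attributes can be shadowed per instance
    print(c())                           # 'nullified'
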
 
     def cleanup(self):
-        "__reset_cache__"
-        pass
+        self._memo = {}
 
     def add_sources(self, sources):
         """Add source files to this Executor's list.  This is necessary
         for "multi" Builders that can be called repeatedly to build up
         a source file list for a given target."""
-        slist = filter(lambda x, s=self.sources: x not in s, sources)
-        self.sources.extend(slist)
+        # TODO(batch):  extend to multiple batches
+        assert (len(self.batches) == 1)
+        # TODO(batch):  remove duplicates?
+        sources = [x for x in sources if x not in self.batches[0].sources]
+        self.batches[0].sources.extend(sources)
+
+    def get_sources(self):
+        return self.batches[0].sources
+
+    def add_batch(self, targets, sources):
+        """Add pair of associated target and source to this Executor's list.
+        This is necessary for "batch" Builders that can be called repeatedly
+        to build up a list of matching target and source files that will be
+        used in order to update multiple target files at once from multiple
+        corresponding source files, for tools like MSVC that support it."""
+        self.batches.append(Batch(targets, sources))
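
A rough usage sketch of how a builder accumulates work on one Executor,
assuming this patch is applied and SCons is importable; the plain strings and
file names stand in for Node objects and are made up:

    from SCons.Executor import Executor

    ex = Executor(action=['build $TARGETS'], targets=['a.obj'], sources=['a.cc'])
    ex.add_sources(['a.h', 'a.cc'])    # 'a.cc' is already present, only 'a.h' is appended
    ex.add_batch(['b.obj'], ['b.cc'])  # a second target/source pair, as a batch Builder would add

    print(ex.get_all_targets())        # ['a.obj', 'b.obj']
    print(ex.get_all_sources())        # ['a.cc', 'a.h', 'b.cc']
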
+
+    def prepare(self):
+        """
+        Preparatory checks for whether this Executor can go ahead
+        and (try to) build its targets.
+        """
+        for s in self.get_all_sources():
+            if s.missing():
+                msg = "Source `%s' not found, needed by target `%s'."
+                raise SCons.Errors.StopError(msg % (s, self.batches[0].targets[0]))
 
     def add_pre_action(self, action):
         self.pre_actions.append(action)
@@ -145,31 +406,38 @@ class Executor:
 
     def my_str(self):
         env = self.get_build_env()
-        get = lambda action, t=self.targets, s=self.sources, e=env: \
-                     action.genstring(t, s, e)
-        return string.join(map(get, self.get_action_list()), "\n")
+        return "\n".join([action.genstring(self.get_all_targets(),
+                                           self.get_all_sources(),
+                                           env)
+                          for action in self.get_action_list()])
 
 
     def __str__(self):
-        "__cacheable__"
         return self.my_str()
 
     def nullify(self):
-        "__reset_cache__"
+        self.cleanup()
         self.do_execute = self.do_nothing
-        self.my_str     = lambda S=self: ''
+        self.my_str     = lambda: ''
+
+    memoizer_counters.append(SCons.Memoize.CountValue('get_contents'))
 
     def get_contents(self):
-        """Fetch the signature contents.  This, along with
-        get_raw_contents(), is the real reason this class exists, so we
-        can compute this once and cache it regardless of how many target
-        or source Nodes there are.
-        __cacheable__
+        """Fetch the signature contents.  This is the main reason this
+        class exists, so we can compute this once and cache it regardless
+        of how many target or source Nodes there are.
         """
+        try:
+            return self._memo['get_contents']
+        except KeyError:
+            pass
         env = self.get_build_env()
-        get = lambda action, t=self.targets, s=self.sources, e=env: \
-                     action.get_contents(t, s, e)
-        return string.join(map(get, self.get_action_list()), "")
+        result = "".join([action.get_contents(self.get_all_targets(),
+                                              self.get_all_sources(),
+                                              env)
+                          for action in self.get_action_list()])
+        self._memo['get_contents'] = result
+        return result
 
     def get_timestamp(self):
         """Fetch a time stamp for this Executor.  We don't have one, of
@@ -179,11 +447,13 @@ class Executor:
         return 0
 
     def scan_targets(self, scanner):
-        self.scan(scanner, self.targets)
+        # TODO(batch):  scan by batches
+        self.scan(scanner, self.get_all_targets())
 
     def scan_sources(self, scanner):
-        if self.sources:
-            self.scan(scanner, self.sources)
+        # TODO(batch):  scan by batches
+        if self.batches[0].sources:
+            self.scan(scanner, self.get_all_sources())
 
     def scan(self, scanner, node_list):
         """Scan a list of this Executor's files (targets or sources) for
@@ -191,53 +461,117 @@ class Executor:
         This essentially short-circuits an N*M scan of the sources for
         each individual target, which is a hell of a lot more efficient.
         """
-        map(lambda N: N.disambiguate(), node_list)
-
         env = self.get_build_env()
-        select_specific_scanner = lambda t: (t[0], t[1].select(t[0]))
-        remove_null_scanners = lambda t: not t[1] is None
-        add_scanner_path = lambda t, s=self: \
-                                  (t[0], t[1], s.get_build_scanner_path(t[1]))
+
+        # TODO(batch):  scan by batches
+        deps = []
         if scanner:
-            scanner_list = map(lambda n, s=scanner: (n, s), node_list)
+            for node in node_list:
+                node.disambiguate()
+                s = scanner.select(node)
+                if not s:
+                    continue
+                path = self.get_build_scanner_path(s)
+                deps.extend(node.get_implicit_deps(env, s, path))
         else:
             kw = self.get_kw()
-            get_initial_scanners = lambda n, e=env, kw=kw: \
-                                          (n, n.get_env_scanner(e, kw))
-            scanner_list = map(get_initial_scanners, node_list)
-            scanner_list = filter(remove_null_scanners, scanner_list)
-
-        scanner_list = map(select_specific_scanner, scanner_list)
-        scanner_list = filter(remove_null_scanners, scanner_list)
-        scanner_path_list = map(add_scanner_path, scanner_list)
-        deps = []
-        for node, scanner, path in scanner_path_list:
-            deps.extend(node.get_implicit_deps(env, scanner, path))
-
-        for tgt in self.targets:
+            for node in node_list:
+                node.disambiguate()
+                scanner = node.get_env_scanner(env, kw)
+                if not scanner:
+                    continue
+                scanner = scanner.select(node)
+                if not scanner:
+                    continue
+                path = self.get_build_scanner_path(scanner)
+                deps.extend(node.get_implicit_deps(env, scanner, path))
+
+        deps.extend(self.get_implicit_deps())
+
+        for tgt in self.get_all_targets():
             tgt.add_to_implicit(deps)
 
-    def get_missing_sources(self):
-        """
-        __cacheable__
-        """
-        return filter(lambda s: s.missing(), self.sources)
+    def _get_unignored_sources_key(self, node, ignore=()):
+        return (node,) + tuple(ignore)
+
+    memoizer_counters.append(SCons.Memoize.CountDict('get_unignored_sources', _get_unignored_sources_key))
 
-    def get_unignored_sources(self, ignore):
-        """__cacheable__"""
-        sourcelist = self.sources
+    def get_unignored_sources(self, node, ignore=()):
+        key = (node,) + tuple(ignore)
+        try:
+            memo_dict = self._memo['get_unignored_sources']
+        except KeyError:
+            memo_dict = {}
+            self._memo['get_unignored_sources'] = memo_dict
+        else:
+            try:
+                return memo_dict[key]
+            except KeyError:
+                pass
+
+        if node:
+            # TODO:  better way to do this (it's a linear search,
+            # but it may not be on the critical path)?
+            sourcelist = []
+            for b in self.batches:
+                if node in b.targets:
+                    sourcelist = b.sources
+                    break
+        else:
+            sourcelist = self.get_all_sources()
         if ignore:
-            sourcelist = filter(lambda s, i=ignore: not s in i, sourcelist)
+            idict = {}
+            for i in ignore:
+                idict[i] = 1
+            sourcelist = [s for s in sourcelist if s not in idict]
+
+        memo_dict[key] = sourcelist
+
         return sourcelist
 
-    def process_sources(self, func, ignore=[]):
-        """__cacheable__"""
-        return map(func, self.get_unignored_sources(ignore))
+    def get_implicit_deps(self):
+        """Return the executor's implicit dependencies, i.e. the nodes of
+        the commands to be executed."""
+        result = []
+        build_env = self.get_build_env()
+        for act in self.get_action_list():
+            deps = act.get_implicit_deps(self.get_all_targets(),
+                                         self.get_all_sources(),
+                                         build_env)
+            result.extend(deps)
+        return result
+
+
+
+_batch_executors = {}
+
+def GetBatchExecutor(key):
+    return _batch_executors[key]
+
+def AddBatchExecutor(key, executor):
+    assert key not in _batch_executors
+    _batch_executors[key] = executor
+
+nullenv = None
 
 
-_Executor = Executor
+def get_NullEnvironment():
+    """Use singleton pattern for Null Environments."""
+    global nullenv
 
-class Null(_Executor):
+    import SCons.Util
+    class NullEnvironment(SCons.Util.Null):
+        import SCons.CacheDir
+        _CacheDir_path = None
+        _CacheDir = SCons.CacheDir.CacheDir(None)
+        def get_CacheDir(self):
+            return self._CacheDir
+
+    if not nullenv:
+        nullenv = NullEnvironment()
+    return nullenv
+
+class Null:
     """A null Executor, with a null build Environment, that does
     nothing when the rest of the methods call it.
 
@@ -247,24 +581,58 @@ class Null(_Executor):
     """
     def __init__(self, *args, **kw):
         if __debug__: logInstanceCreation(self, 'Executor.Null')
-        kw['action'] = []
-        apply(_Executor.__init__, (self,), kw)
+        self.batches = [Batch(kw['targets'][:], [])]
     def get_build_env(self):
-        class NullEnvironment:
-            def get_scanner(self, key):
-                return None
-        return NullEnvironment()
+        return get_NullEnvironment()
     def get_build_scanner_path(self):
         return None
     def cleanup(self):
         pass
+    def prepare(self):
+        pass
+    def get_unignored_sources(self, *args, **kw):
+        return tuple(())
+    def get_action_targets(self):
+        return []
+    def get_action_list(self):
+        return []
+    def get_all_targets(self):
+        return self.batches[0].targets
+    def get_all_sources(self):
+        return self.batches[0].targets[0].sources
+    def get_all_children(self):
+        return self.get_all_sources()
+    def get_all_prerequisites(self):
+        return []
+    def get_action_side_effects(self):
+        return []
+    def __call__(self, *args, **kw):
+        return 0
+    def get_contents(self):
+        return ''
+    def _morph(self):
+        """Morph this Null executor to a real Executor object."""
+        batches = self.batches
+        self.__class__ = Executor
+        self.__init__([])            
+        self.batches = batches
 
+    # The following methods require morphing this Null Executor to a
+    # real Executor object.
 
+    def add_pre_action(self, action):
+        self._morph()
+        self.add_pre_action(action)
+    def add_post_action(self, action):
+        self._morph()
+        self.add_post_action(action)
+    def set_action_list(self, action):
+        self._morph()
+        self.set_action_list(action)
 
-if SCons.Memoize.use_old_memoization():
-    _Base = Executor
-    class Executor(SCons.Memoize.Memoizer, _Base):
-        def __init__(self, *args, **kw):
-            SCons.Memoize.Memoizer.__init__(self)
-            apply(_Base.__init__, (self,)+args, kw)
 
+# Local Variables:
+# tab-width:4
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=4 shiftwidth=4:
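
The Null._morph() trick above, reassigning __class__ and re-running __init__
to turn a lightweight placeholder into a full Executor, is a general Python
idiom. A small standalone sketch with made-up class names:

    class Real(object):
        def __init__(self):
            self.data = None
        def work(self):
            return 'real implementation, data=%s' % self.data

    class Placeholder(object):
        def __init__(self):
            self.data = 'state carried across the morph'
        def _morph(self):
            saved = self.data
            self.__class__ = Real    # the instance now dispatches to Real's methods
            self.__init__()          # re-initialize as a Real instance
            self.data = saved        # restore the state the placeholder was holding
        def work(self):
            self._morph()
            return self.work()       # re-dispatches to Real.work after the morph

    p = Placeholder()
    print(p.work())   # 'real implementation, data=state carried across the morph'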