Remove more unnecessary list generation. (trunk r6912)
author     Zac Medico <zmedico@gentoo.org>
Fri, 22 Jun 2007 02:09:41 +0000 (02:09 -0000)
committer  Zac Medico <zmedico@gentoo.org>
Fri, 22 Jun 2007 02:09:41 +0000 (02:09 -0000)
svn path=/main/branches/2.1.2/; revision=6931
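
The change replaces Python 2's dict.keys(), which builds a throwaway list of
keys before iteration even starts, with direct iteration over the mapping (or
iterkeys()), and swaps a dummy dict used only for membership tracking for a
set. A minimal sketch of the pattern, using made-up names rather than anything
from the Portage sources:

    # Hypothetical data, for illustration only.
    metadata = {"app-misc/foo-1.0": "a", "app-misc/bar-2.1": "b"}

    # Before: .keys() materializes an intermediate list just to loop over it.
    for cpv in metadata.keys():
        print metadata[cpv]

    # After: iterating the dict (or metadata.iterkeys()) yields keys lazily.
    for cpv in metadata:
        print metadata[cpv]

    # A dict whose values are never used becomes a set; discard() replaces
    # the has_key()/del pair and ignores keys that are already gone.
    dead_nodes = set(metadata.iterkeys())
    dead_nodes.discard("app-misc/foo-1.0")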

pym/cache/template.py
pym/cache/util.py
pym/cvstree.py
pym/portage.py
pym/portage_checksum.py
pym/portage_util.py

diff --git a/pym/cache/template.py b/pym/cache/template.py
index 4ffd9b9efd2a1177271e5a3981a963066976f901..d19bb88928b720dc8f6145e3103055e6b06e5927 100644
--- a/pym/cache/template.py
+++ b/pym/cache/template.py
@@ -151,7 +151,7 @@ class database(object):
                        if key not in self.__known_keys:
                                raise InvalidRestriction(key, match, "Key isn't valid")
 
-               for cpv in self.keys():
+               for cpv in self.iterkeys():
                        cont = True
                        vals = self[cpv]
                        for key, match in restricts.iteritems():
diff --git a/pym/cache/util.py b/pym/cache/util.py
index ac09c132c4c18141e66ed9c46aa928a9c1e6ee98..0e81a399f6d64bd0b46d06eef69c38e90df2bc77 100644
--- a/pym/cache/util.py
+++ b/pym/cache/util.py
@@ -18,8 +18,7 @@ def mirror_cache(valid_nodes_iterable, src_cache, trg_cache, eclass_cache=None,
        else:
                noise=verbose_instance
 
-       dead_nodes = {}
-       dead_nodes = dict.fromkeys(trg_cache.keys())
+       dead_nodes = set(trg_cache.iterkeys())
        count=0
 
        if not trg_cache.autocommits:
@@ -28,8 +27,7 @@ def mirror_cache(valid_nodes_iterable, src_cache, trg_cache, eclass_cache=None,
        for x in valid_nodes_iterable:
 #              print "processing x=",x
                count+=1
-               if dead_nodes.has_key(x):
-                       del dead_nodes[x]
+               dead_nodes.discard(x)
                try:    entry = src_cache[x]
                except KeyError, e:
                        noise.missing_entry(x)
@@ -105,7 +103,6 @@ def mirror_cache(valid_nodes_iterable, src_cache, trg_cache, eclass_cache=None,
                except cache_errors.CacheError, ce:
                        noise.exception(ce)
                        del ce
-       dead_nodes.clear()
        noise.finish()
 
 
diff --git a/pym/cvstree.py b/pym/cvstree.py
index 30f143cd869dc082460d50490ef43eef2c7a0e6c..ca94d3512433c4268b87c92fd4f8b9e73041e7aa 100644
--- a/pym/cvstree.py
+++ b/pym/cvstree.py
@@ -64,12 +64,12 @@ def findnew(entries,recursive=0,basedir=""):
        if basedir and basedir[-1]!="/":
                basedir=basedir+"/"
        mylist=[]
-       for myfile in entries["files"].keys():
+       for myfile in entries["files"]:
                if "cvs" in entries["files"][myfile]["status"]:
                        if "0" == entries["files"][myfile]["revision"]:
                                mylist.append(basedir+myfile)
        if recursive:
-               for mydir in entries["dirs"].keys():
+               for mydir in entries["dirs"]:
                        mylist+=findnew(entries["dirs"][mydir],recursive,basedir+mydir)
        return mylist
                                        
@@ -81,14 +81,14 @@ def findchanged(entries,recursive=0,basedir=""):
        if basedir and basedir[-1]!="/":
                basedir=basedir+"/"
        mylist=[]
-       for myfile in entries["files"].keys():
+       for myfile in entries["files"]:
                if "cvs" in entries["files"][myfile]["status"]:
                        if "current" not in entries["files"][myfile]["status"]:
                                if "exists" in entries["files"][myfile]["status"]:
                                        if entries["files"][myfile]["revision"]!="0":
                                                mylist.append(basedir+myfile)
        if recursive:
-               for mydir in entries["dirs"].keys():
+               for mydir in entries["dirs"]:
                        mylist+=findchanged(entries["dirs"][mydir],recursive,basedir+mydir)
        return mylist
        
@@ -100,13 +100,13 @@ def findmissing(entries,recursive=0,basedir=""):
        if basedir and basedir[-1]!="/":
                basedir=basedir+"/"
        mylist=[]
-       for myfile in entries["files"].keys():
+       for myfile in entries["files"]:
                if "cvs" in entries["files"][myfile]["status"]:
                        if "exists" not in entries["files"][myfile]["status"]:
                                if "removed" not in entries["files"][myfile]["status"]:
                                        mylist.append(basedir+myfile)
        if recursive:
-               for mydir in entries["dirs"].keys():
+               for mydir in entries["dirs"]:
                        mylist+=findmissing(entries["dirs"][mydir],recursive,basedir+mydir)
        return mylist
 
@@ -120,11 +120,11 @@ def findunadded(entries,recursive=0,basedir=""):
        mylist=[]
 
        #ignore what cvs ignores.
-       for myfile in entries["files"].keys():
+       for myfile in entries["files"]:
                if "cvs" not in entries["files"][myfile]["status"]:
                        mylist.append(basedir+myfile)
        if recursive:
-               for mydir in entries["dirs"].keys():
+               for mydir in entries["dirs"]:
                        mylist+=findunadded(entries["dirs"][mydir],recursive,basedir+mydir)
        return mylist
 
@@ -135,11 +135,11 @@ def findremoved(entries,recursive=0,basedir=""):
        if basedir and basedir[-1]!="/":
                basedir=basedir+"/"
        mylist=[]
-       for myfile in entries["files"].keys():
+       for myfile in entries["files"]:
                if "removed" in entries["files"][myfile]["status"]:
                        mylist.append(basedir+myfile)
        if recursive:
-               for mydir in entries["dirs"].keys():
+               for mydir in entries["dirs"]:
                        mylist+=findremoved(entries["dirs"][mydir],recursive,basedir+mydir)
        return mylist
 
diff --git a/pym/portage.py b/pym/portage.py
index 1bd0330115161bf0cd5a0524c38dbec7fd557470..44451269d1480a5db94160d2e434020ec22170b2 100644
--- a/pym/portage.py
+++ b/pym/portage.py
@@ -4999,7 +4999,7 @@ class fakedbapi(dbapi):
 
        def cp_all(self):
                returnme=[]
-               for x in self.cpdict.keys():
+               for x in self.cpdict:
                        returnme.extend(self.cpdict[x])
                return returnme
 
@@ -5878,7 +5878,7 @@ class portdbapi(dbapi):
                        pass
 
        def close_caches(self):
-               for x in self.auxdb.keys():
+               for x in self.auxdb:
                        self.auxdb[x].sync()
                self.auxdb.clear()
 
@@ -6053,7 +6053,7 @@ class portdbapi(dbapi):
                returnme = []
                for x in mylist:
                        if x == "INHERITED":
-                               returnme.append(' '.join(mydata.get("_eclasses_", {}).keys()))
+                               returnme.append(' '.join(mydata.get("_eclasses_", [])))
                        else:
                                returnme.append(mydata.get(x,""))
 
diff --git a/pym/portage_checksum.py b/pym/portage_checksum.py
index 885535ee04bb7336cf47d0e64f6738b346db239b..72290e350b64ac865ac6d4cde0393ccb67ada0ab 100644
--- a/pym/portage_checksum.py
+++ b/pym/portage_checksum.py
@@ -79,7 +79,7 @@ def perform_md5(x, calc_prelink=0):
 
 def perform_all(x, calc_prelink=0):
        mydict = {}
-       for k in hashfunc_map.keys():
+       for k in hashfunc_map:
                mydict[k] = perform_checksum(x, hashfunc_map[k], calc_prelink)[0]
        return mydict
 
@@ -131,10 +131,10 @@ def verify_all(filename, mydict, calc_prelink=0, strict=0):
                got = " ".join(got)
                return False, ("Insufficient data for checksum verification", got, expected)
 
-       for x in mydict.keys():
+       for x in mydict:
                if   x == "size":
                        continue
-               elif x in hashfunc_map.keys():
+               elif x in hashfunc_map:
                        myhash = perform_checksum(filename, x, calc_prelink=calc_prelink)[0]
                        if mydict[x] != myhash:
                                if strict:
diff --git a/pym/portage_util.py b/pym/portage_util.py
index ec359e206a8d066ff6b6ff7cc6d2cfa0b39c222a..d71add57e48b684ac2c4b624c7dfb6e763bfef2a 100644
--- a/pym/portage_util.py
+++ b/pym/portage_util.py
@@ -76,7 +76,7 @@ def map_dictlist_vals(func,myDict):
        """Performs a function on each value of each key in a dictlist.
        Returns a new dictlist."""
        new_dl = {}
-       for key in myDict.keys():
+       for key in myDict:
                new_dl[key] = []
                new_dl[key] = map(func,myDict[key])
        return new_dl
@@ -118,7 +118,7 @@ def stack_dictlist(original_dicts, incremental=0, incrementals=[], ignore_none=0
        for mydict in original_dicts:
                if mydict is None:
                        continue
-               for y in mydict.keys():
+               for y in mydict:
                        if not y in final_dict:
                                final_dict[y] = []
                        
@@ -280,7 +280,7 @@ def writedict(mydict,myfilename,writekey=True):
                        for x in mydict.values():
                                myfile.write(x+"\n")
                else:
-                       for x in mydict.keys():
+                       for x in mydict:
                                myfile.write("%s %s\n" % (x, " ".join(mydict[x])))
                myfile.close()
        except IOError: