Remove more unnecessary list generation.
author Zac Medico <zmedico@gentoo.org>
Thu, 21 Jun 2007 11:40:51 +0000 (11:40 -0000)
committer Zac Medico <zmedico@gentoo.org>
Thu, 21 Jun 2007 11:40:51 +0000 (11:40 -0000)
svn path=/main/trunk/; revision=6912

pym/portage/cache/template.py
pym/portage/cache/util.py
pym/portage/checksum.py
pym/portage/cvstree.py
pym/portage/dbapi/bintree.py
pym/portage/dbapi/porttree.py
pym/portage/dbapi/vartree.py
pym/portage/dbapi/virtual.py
pym/portage/util.py

index 6fe9f3b009fc3777b569e666d6875f7f4dcbd9e0..161b07f55d0cf8be4ecff1303f18f4ff2145810a 100644 (file)
@@ -151,7 +151,7 @@ class database(object):
                        if key not in self.__known_keys:
                                raise InvalidRestriction(key, match, "Key isn't valid")
 
-               for cpv in self.keys():
+               for cpv in self.iterkeys():
                        cont = True
                        vals = self[cpv]
                        for key, match in restricts.iteritems():
index 0732de4e4b3d7e2c7cabea3dc418cfffc1108bb7..4f0d563d293d49c8c946c69f284a42e3675dd735 100644 (file)
@@ -18,8 +18,7 @@ def mirror_cache(valid_nodes_iterable, src_cache, trg_cache, eclass_cache=None,
        else:
                noise=verbose_instance
 
-       dead_nodes = {}
-       dead_nodes = dict.fromkeys(trg_cache.keys())
+       dead_nodes = set(trg_cache.iterkeys())
        count=0
 
        if not trg_cache.autocommits:
@@ -28,8 +27,7 @@ def mirror_cache(valid_nodes_iterable, src_cache, trg_cache, eclass_cache=None,
        for x in valid_nodes_iterable:
 #              print "processing x=",x
                count+=1
-               if dead_nodes.has_key(x):
-                       del dead_nodes[x]
+               dead_nodes.discard(x)
                try:    entry = src_cache[x]
                except KeyError, e:
                        noise.missing_entry(x)
@@ -105,7 +103,6 @@ def mirror_cache(valid_nodes_iterable, src_cache, trg_cache, eclass_cache=None,
                except cache_errors.CacheError, ce:
                        noise.exception(ce)
                        del ce
-       dead_nodes.clear()
        noise.finish()
 
 
index 5a902ab8c723b575d76abf9ab4b01dd0160d3a7e..a29f3c3258d506f9907a8b86d465997d5bbc9e36 100644 (file)
@@ -79,7 +79,7 @@ def perform_md5(x, calc_prelink=0):
 
 def perform_all(x, calc_prelink=0):
        mydict = {}
-       for k in hashfunc_map.keys():
+       for k in hashfunc_map:
                mydict[k] = perform_checksum(x, hashfunc_map[k], calc_prelink)[0]
        return mydict
 
@@ -131,10 +131,10 @@ def verify_all(filename, mydict, calc_prelink=0, strict=0):
                got = " ".join(got)
                return False, ("Insufficient data for checksum verification", got, expected)
 
-       for x in mydict.keys():
+       for x in mydict:
                if   x == "size":
                        continue
-               elif x in hashfunc_map.keys():
+               elif x in hashfunc_map:
                        myhash = perform_checksum(filename, x, calc_prelink=calc_prelink)[0]
                        if mydict[x] != myhash:
                                if strict:
index 30f143cd869dc082460d50490ef43eef2c7a0e6c..ca94d3512433c4268b87c92fd4f8b9e73041e7aa 100644 (file)
@@ -64,12 +64,12 @@ def findnew(entries,recursive=0,basedir=""):
        if basedir and basedir[-1]!="/":
                basedir=basedir+"/"
        mylist=[]
-       for myfile in entries["files"].keys():
+       for myfile in entries["files"]:
                if "cvs" in entries["files"][myfile]["status"]:
                        if "0" == entries["files"][myfile]["revision"]:
                                mylist.append(basedir+myfile)
        if recursive:
-               for mydir in entries["dirs"].keys():
+               for mydir in entries["dirs"]:
                        mylist+=findnew(entries["dirs"][mydir],recursive,basedir+mydir)
        return mylist
                                        
@@ -81,14 +81,14 @@ def findchanged(entries,recursive=0,basedir=""):
        if basedir and basedir[-1]!="/":
                basedir=basedir+"/"
        mylist=[]
-       for myfile in entries["files"].keys():
+       for myfile in entries["files"]:
                if "cvs" in entries["files"][myfile]["status"]:
                        if "current" not in entries["files"][myfile]["status"]:
                                if "exists" in entries["files"][myfile]["status"]:
                                        if entries["files"][myfile]["revision"]!="0":
                                                mylist.append(basedir+myfile)
        if recursive:
-               for mydir in entries["dirs"].keys():
+               for mydir in entries["dirs"]:
                        mylist+=findchanged(entries["dirs"][mydir],recursive,basedir+mydir)
        return mylist
        
@@ -100,13 +100,13 @@ def findmissing(entries,recursive=0,basedir=""):
        if basedir and basedir[-1]!="/":
                basedir=basedir+"/"
        mylist=[]
-       for myfile in entries["files"].keys():
+       for myfile in entries["files"]:
                if "cvs" in entries["files"][myfile]["status"]:
                        if "exists" not in entries["files"][myfile]["status"]:
                                if "removed" not in entries["files"][myfile]["status"]:
                                        mylist.append(basedir+myfile)
        if recursive:
-               for mydir in entries["dirs"].keys():
+               for mydir in entries["dirs"]:
                        mylist+=findmissing(entries["dirs"][mydir],recursive,basedir+mydir)
        return mylist
 
@@ -120,11 +120,11 @@ def findunadded(entries,recursive=0,basedir=""):
        mylist=[]
 
        #ignore what cvs ignores.
-       for myfile in entries["files"].keys():
+       for myfile in entries["files"]:
                if "cvs" not in entries["files"][myfile]["status"]:
                        mylist.append(basedir+myfile)
        if recursive:
-               for mydir in entries["dirs"].keys():
+               for mydir in entries["dirs"]:
                        mylist+=findunadded(entries["dirs"][mydir],recursive,basedir+mydir)
        return mylist
 
@@ -135,11 +135,11 @@ def findremoved(entries,recursive=0,basedir=""):
        if basedir and basedir[-1]!="/":
                basedir=basedir+"/"
        mylist=[]
-       for myfile in entries["files"].keys():
+       for myfile in entries["files"]:
                if "removed" in entries["files"][myfile]["status"]:
                        mylist.append(basedir+myfile)
        if recursive:
-               for mydir in entries["dirs"].keys():
+               for mydir in entries["dirs"]:
                        mylist+=findremoved(entries["dirs"][mydir],recursive,basedir+mydir)
        return mylist
 
index 713a8c612738e0098197af460be2193574487132..3242cc158f2c25bf28f2ae000e10d4ba35ecdd63 100644 (file)
@@ -506,8 +506,7 @@ class binarytree(object):
                        # since it will provide no benefit due to the need to read CATEGORY
                        # from xpak.
                        if update_pkgindex and os.access(self.pkgdir, os.W_OK):
-                               cpv_all = self._pkg_paths.keys()
-                               stale = set(metadata).difference(cpv_all)
+                               stale = [cpv for cpv in metadata if cpv not in self._pkg_paths]
                                for cpv in stale:
                                        del metadata[cpv]
                                from portage.util import atomic_ofstream
index 0f23ce70e9c3c61c965f163709820535ccf3dd89..524a36fa2ecaa549091d34c1ed02377d091606b5 100644 (file)
@@ -144,7 +144,7 @@ class portdbapi(dbapi):
                if not hasattr(self, "auxdb"):
                        # unhandled exception thrown from constructor
                        return
-               for x in self.auxdb.keys():
+               for x in self.auxdb:
                        self.auxdb[x].sync()
                self.auxdb.clear()
 
@@ -192,7 +192,7 @@ class portdbapi(dbapi):
                repository ID's
                TreeMap = {id: path}
                """
-               return [k for k in self.treemap.keys() if k]
+               return [k for k in self.treemap if k]
 
        def findname2(self, mycpv, mytree=None):
                """ 
@@ -348,7 +348,7 @@ class portdbapi(dbapi):
                returnme = []
                for x in mylist:
                        if x == "INHERITED":
-                               returnme.append(' '.join(mydata.get("_eclasses_", {}).keys()))
+                               returnme.append(' '.join(mydata.get("_eclasses_", [])))
                        else:
                                returnme.append(mydata.get(x,""))
 
index 2c1e036940b2963224e7f6d4b68ccd95e4794f79..50b9d9d9415da0d6c5573d956913bbb110960431 100644 (file)
@@ -106,7 +106,7 @@ class PreservedLibsRegistry(object):
        
        def hasEntries(self):
                """ Check if this registry contains any records. """
-               return (len(self._data.keys()) > 0)
+               return len(self._data) > 0
        
        def getPreservedLibs(self):
                """ Return a mapping of packages->preserved objects.
@@ -114,7 +114,7 @@ class PreservedLibsRegistry(object):
                        @rtype Dict cpv->list-of-paths
                """
                rValue = {}
-               for cps in self._data.keys():
+               for cps in self._data:
                        rValue[self._data[cps][0]] = self._data[cps][2]
                return rValue
 
@@ -154,7 +154,7 @@ class LibraryPackageMap(object):
                                        else:
                                                obj_dict[lib].append(mysplit[0])
                mapfile = open(self._filename, "w")
-               for lib in obj_dict.keys():
+               for lib in obj_dict:
                        mapfile.write(lib+" "+",".join(obj_dict[lib])+"\n")
                mapfile.close()
 
@@ -1337,10 +1337,10 @@ class dblink(object):
 
                # get list of libraries from old package instance
                old_contents = self._installed_instance.getcontents().keys()
-               old_libs = set([os.path.basename(x) for x in old_contents]).intersection(libmap.keys())
+               old_libs = set([os.path.basename(x) for x in old_contents]).intersection(libmap)
 
                # get list of libraries from new package instance
-               mylibs = set([os.path.basename(x) for x in mycontents]).intersection(libmap.keys())
+               mylibs = set([os.path.basename(x) for x in mycontents]).intersection(libmap)
 
                # check which libs are present in the old, but not the new package instance
                preserve_libs = old_libs.difference(mylibs)
index 055503ab8102a004db6fbc632db052aaafb31b36..983e39b7f4ae6ebd957eb7e62ef46c17f306a157 100644 (file)
@@ -40,7 +40,7 @@ class fakedbapi(dbapi):
 
        def cp_all(self):
                returnme=[]
-               for x in self.cpdict.keys():
+               for x in self.cpdict:
                        returnme.extend(self.cpdict[x])
                return returnme
 
index 46fc6e462a764a4021c33da3d388f2124cb2d776..c2d109ae644eab2c3058b1026c7e67e1bfcd635f 100644 (file)
@@ -76,7 +76,7 @@ def map_dictlist_vals(func,myDict):
        """Performs a function on each value of each key in a dictlist.
        Returns a new dictlist."""
        new_dl = {}
-       for key in myDict.keys():
+       for key in myDict:
                new_dl[key] = []
                new_dl[key] = map(func,myDict[key])
        return new_dl
@@ -118,7 +118,7 @@ def stack_dictlist(original_dicts, incremental=0, incrementals=[], ignore_none=0
        for mydict in original_dicts:
                if mydict is None:
                        continue
-               for y in mydict.keys():
+               for y in mydict:
                        if not y in final_dict:
                                final_dict[y] = []
                        
@@ -280,7 +280,7 @@ def writedict(mydict,myfilename,writekey=True):
                        for x in mydict.values():
                                myfile.write(x+"\n")
                else:
-                       for x in mydict.keys():
+                       for x in mydict:
                                myfile.write("%s %s\n" % (x, " ".join(mydict[x])))
                myfile.close()
        except IOError: