Use dict.(keys|values|items)() instead of dict.(iterkeys|itervalues|iteritems)()...
author: Arfrever Frehtes Taifersar Arahesis <arfrever@gentoo.org>
Mon, 21 Sep 2009 16:07:07 +0000 (16:07 -0000)
committer: Arfrever Frehtes Taifersar Arahesis <arfrever@gentoo.org>
Mon, 21 Sep 2009 16:07:07 +0000 (16:07 -0000)
(2to3-3.1 -f dict -nw ${FILES})

svn path=/main/trunk/; revision=14327

59 files changed:
bin/egencache
bin/emaint
bin/glsa-check
bin/portageq
bin/quickpkg
bin/repoman
pym/_emerge/BinpkgFetcher.py
pym/_emerge/BlockerCache.py
pym/_emerge/EbuildMetadataPhase.py
pym/_emerge/JobStatusDisplay.py
pym/_emerge/MetadataRegen.py
pym/_emerge/Package.py
pym/_emerge/PackageVirtualDbapi.py
pym/_emerge/PipeReader.py
pym/_emerge/PollSelectAdapter.py
pym/_emerge/RepoDisplay.py
pym/_emerge/RootConfig.py
pym/_emerge/Scheduler.py
pym/_emerge/SpawnProcess.py
pym/_emerge/SubProcess.py
pym/_emerge/actions.py
pym/_emerge/clear_caches.py
pym/_emerge/depgraph.py
pym/_emerge/main.py
pym/_emerge/unmerge.py
pym/portage/__init__.py
pym/portage/cache/anydbm.py
pym/portage/cache/ebuild_xattr.py
pym/portage/cache/mappings.py
pym/portage/cache/sqlite.py
pym/portage/cache/template.py
pym/portage/checksum.py
pym/portage/cvstree.py
pym/portage/dbapi/bintree.py
pym/portage/dbapi/porttree.py
pym/portage/dbapi/vartree.py
pym/portage/dbapi/virtual.py
pym/portage/debug.py
pym/portage/dep.py
pym/portage/dispatch_conf.py
pym/portage/eclass_cache.py
pym/portage/elog/__init__.py
pym/portage/elog/mod_mail_summary.py
pym/portage/getbinpkg.py
pym/portage/manifest.py
pym/portage/news.py
pym/portage/output.py
pym/portage/process.py
pym/portage/proxy/lazyimport.py
pym/portage/sets/base.py
pym/portage/sets/files.py
pym/portage/sets/libs.py
pym/portage/tests/env/config/test_PackageKeywordsFile.py
pym/portage/tests/env/config/test_PackageUseFile.py
pym/portage/tests/env/config/test_PortageModulesFile.py
pym/portage/update.py
pym/portage/util.py
pym/portage/xpak.py
pym/repoman/utilities.py

index 3af060ef8f0ef0c837ab186e9f4c21156000324c..5c59e6dd05fcfaddc392cf3224fb7bc11660b741 100755 (executable)
@@ -168,7 +168,7 @@ class GenCache(object):
                                        # mtime on the ebuild (and the corresponding cache entry).
                                        # See bug #139134.
                                        max_mtime = sc.mtime
-                                       for ec, (loc, ec_mtime) in metadata['_eclasses_'].iteritems():
+                                       for ec, (loc, ec_mtime) in metadata['_eclasses_'].items():
                                                if max_mtime < ec_mtime:
                                                        max_mtime = ec_mtime
                                        if max_mtime == sc.mtime:
@@ -201,7 +201,7 @@ class GenCache(object):
                dead_nodes = set()
                if self._global_cleanse:
                        try:
-                               for cpv in trg_cache.iterkeys():
+                               for cpv in trg_cache.keys():
                                        cp = cpv_getkey(cpv)
                                        if cp is None:
                                                self.returncode |= 1
@@ -221,7 +221,7 @@ class GenCache(object):
                else:
                        cp_set = self._cp_set
                        try:
-                               for cpv in trg_cache.iterkeys():
+                               for cpv in trg_cache.keys():
                                        cp = cpv_getkey(cpv)
                                        if cp is None:
                                                self.returncode |= 1
index 407f8a66d7371926f8fc6eb1bd7e5f0edefe7654..611c323da9f5af7420c586c16ab976da2f529623 100755 (executable)
@@ -209,7 +209,7 @@ class BinhostHandler(object):
                                bintree.populated = False
 
                                del pkgindex.packages[:]
-                               pkgindex.packages.extend(metadata.itervalues())
+                               pkgindex.packages.extend(metadata.values())
                                from portage.util import atomic_ofstream
                                f = atomic_ofstream(self._pkgindex_file)
                                try:
@@ -475,7 +475,7 @@ def emaint_main(myargv):
                "cleanresume":CleanResume
        }
 
-       module_names = modules.keys()
+       module_names = list(modules.keys())
        module_names.sort()
        module_names.insert(0, "all")
 
index e9c28f9e6371880f39ae1381c3ae33af4120475f..787cf39dba3b1e96019ff5c44918d75ff7468888 100755 (executable)
@@ -172,7 +172,7 @@ def summarylist(myglsalist, fd1=sys.stdout, fd2=sys.stderr):
 
                fd1.write(color(myglsa.nr) + " " + color(status) + " " + color(access) + myglsa.title + " (")
                if not verbose:
-                       for pkg in myglsa.packages.keys()[:3]:
+                       for pkg in list(myglsa.packages.keys())[:3]:
                                fd1.write(" " + pkg + " ")
                        if len(myglsa.packages) > 3:
                                fd1.write("... ")
index 34c4fb401e05620a771369693cdfb79ab71e228e..cea14231fe9d88eb68a165998064ef1c554c3686 100755 (executable)
@@ -222,7 +222,7 @@ def owners(argv):
        owners = vardb._owners.get_owners(files)
 
        msg = []
-       for pkg, owned_files in owners.iteritems():
+       for pkg, owned_files in owners.items():
                cpv = pkg.mycpv
                msg.append("%s\n" % cpv)
                for f in sorted(owned_files):
@@ -560,7 +560,7 @@ def usage(argv):
        #
        non_commands = frozenset(['exithandler', 'main',
                'usage', 'writemsg', 'writemsg_stdout'])
-       commands = sorted(k for k, v in globals().iteritems() \
+       commands = sorted(k for k, v in globals().items() \
                if type(v) is types.FunctionType and k not in non_commands)
 
        for name in commands:
index fd172857083ade9eb51dc377361e8a98de324ca9..383fe20fb8cacbb863a02ccc649c827971a1d56c 100755 (executable)
@@ -124,7 +124,7 @@ def quickpkg_main(options, args, eout):
                                required_metadata["CATEGORY"] = category
                                required_metadata["PF"] = pf
                                update_metadata = {}
-                               for k, v in required_metadata.iteritems():
+                               for k, v in required_metadata.items():
                                        if v != existing_metadata[k]:
                                                update_metadata[k] = v
                                if update_metadata:
index 13c126e303c099903765f66f5d7327f4fdf27ad8..9502262060c4bf473629a014f40db32f8aa262e8 100755 (executable)
@@ -153,7 +153,7 @@ def ParseArgs(args, qahelp):
                'scan' : 'Scan directory tree for QA issues' 
        }
 
-       mode_keys = modes.keys()
+       mode_keys = list(modes.keys())
        mode_keys.sort()
 
        parser = RepomanOptionParser(formatter=RepomanHelpFormatter(), usage="%prog [options] [mode]")
@@ -198,7 +198,7 @@ def ParseArgs(args, qahelp):
        parser.add_option('--without-mask', dest='without_mask', action='store_true',
                default=False, help='behave as if no package.mask entries exist (not allowed with commit mode)')
 
-       parser.add_option('--mode', type='choice', dest='mode', choices=modes.keys(), 
+       parser.add_option('--mode', type='choice', dest='mode', choices=list(modes.keys()), 
                help='specify which mode repoman will run in (default=full)')
 
        parser.on_tail("\n " + green("Modes".ljust(20) + " Description\n"))
@@ -208,7 +208,7 @@ def ParseArgs(args, qahelp):
 
        parser.on_tail("\n " + green("QA keyword".ljust(20) + " Description\n"))
 
-       sorted_qa = qahelp.keys()
+       sorted_qa = list(qahelp.keys())
        sorted_qa.sort()
        for k in sorted_qa:
                parser.on_tail(" %s %s\n" % (k.ljust(20), qahelp[k]))
@@ -330,7 +330,7 @@ qahelp={
        "upstream.workaround":"The ebuild works around an upstream bug, an upstream bug should be filed and tracked in bugs.gentoo.org"
 }
 
-qacats = qahelp.keys()
+qacats = list(qahelp.keys())
 qacats.sort()
 
 qawarnings = set((
@@ -735,7 +735,7 @@ def dev_keywords(profiles):
        want to add the --include-dev option.
        """
        type_arch_map = {}
-       for arch, arch_profiles in profiles.iteritems():
+       for arch, arch_profiles in profiles.items():
                for prof in arch_profiles:
                        arch_set = type_arch_map.get(prof.status)
                        if arch_set is None:
@@ -872,7 +872,7 @@ check_ebuild_notadded = not \
        (vcs == "svn" and repolevel < 3 and options.mode != "commit")
 
 # Build a regex from thirdpartymirrors for the SRC_URI.mirror check.
-thirdpartymirrors = portage.flatten(repoman_settings.thirdpartymirrors().values())
+thirdpartymirrors = portage.flatten(list(repoman_settings.thirdpartymirrors().values()))
 
 for x in scanlist:
        #ebuilds and digests added to cvs respectively.
@@ -900,7 +900,7 @@ for x in scanlist:
                                        fetchlist_dict=fetchlist_dict)
                                mf.create(requiredDistfiles=None,
                                        assumeDistHashesAlways=True)
-                               for distfiles in fetchlist_dict.itervalues():
+                               for distfiles in fetchlist_dict.values():
                                        for distfile in distfiles:
                                                if os.path.isfile(os.path.join(distdir, distfile)):
                                                        mf.fhashdict['DIST'].pop(distfile, None)
@@ -1242,7 +1242,7 @@ for x in scanlist:
                pkg = pkgs[y]
 
                if pkg.invalid:
-                       for k, msgs in pkg.invalid.iteritems():
+                       for k, msgs in pkg.invalid.items():
                                for msg in msgs:
                                        stats[k] = stats[k] + 1
                                        fails[k].append("%s %s" % (relative_path, msg))
@@ -1253,7 +1253,7 @@ for x in scanlist:
                inherited = pkg.inherited
                live_ebuild = live_eclasses.intersection(inherited)
 
-               for k, v in myaux.iteritems():
+               for k, v in myaux.items():
                        if not isinstance(v, basestring):
                                continue
                        m = non_ascii_re.search(v)
index 1f244cb1aa7421c295fda0d0129002bf9e86ebe1..576d00a60c15f77959a08fd5229dd47c74c37eaf 100644 (file)
@@ -78,7 +78,7 @@ class BinpkgFetcher(SpawnProcess):
                        "FILE"    : os.path.basename(pkg_path)
                }
 
-               fetch_env = dict(settings.iteritems())
+               fetch_env = dict(settings.items())
                fetch_args = [portage.util.varexpand(x, mydict=fcmd_vars) \
                        for x in portage.util.shlex_split(fcmd)]
 
index c4270242f184b84bb29fdd7399b785bb1dd8300c..7c6be8ada52f3a7ecb87a0ac0dd7b3721d02a823 100644 (file)
@@ -71,7 +71,7 @@ class BlockerCache(portage.cache.mappings.MutableMapping):
                        # Validate all the atoms and counters so that
                        # corruption is detected as soon as possible.
                        invalid_items = set()
-                       for k, v in self._cache_data["blockers"].iteritems():
+                       for k, v in self._cache_data["blockers"].items():
                                if not isinstance(k, basestring):
                                        invalid_items.add(k)
                                        continue
index 1231b1b115feb10c1f480d024ff06d5a0900dbf5..2ff0b64db3644c4dfad43935ce218348552cf04d 100644 (file)
@@ -70,7 +70,7 @@ class EbuildMetadataPhase(SubProcess):
                fd_pipes.setdefault(2, sys.stderr.fileno())
 
                # flush any pending output
-               for fd in fd_pipes.itervalues():
+               for fd in fd_pipes.values():
                        if fd == sys.stdout.fileno():
                                sys.stdout.flush()
                        if fd == sys.stderr.fileno():
index fbda727ee600fdaf35ec841e099c6c4dc98caea5..1c80c5ffa0a699e6fb1a06883f16cd82d98a4c06 100644 (file)
@@ -60,7 +60,7 @@ class JobStatusDisplay(object):
                object.__setattr__(self, "_isatty", isatty)
                if not isatty or not self._init_term():
                        term_codes = {}
-                       for k, capname in self._termcap_name_map.iteritems():
+                       for k, capname in self._termcap_name_map.items():
                                term_codes[k] = self._default_term_codes[capname]
                        object.__setattr__(self, "_term_codes", term_codes)
                encoding = sys.getdefaultencoding()
@@ -106,7 +106,7 @@ class JobStatusDisplay(object):
                        return False
 
                term_codes = {}
-               for k, capname in self._termcap_name_map.iteritems():
+               for k, capname in self._termcap_name_map.items():
                        code = tigetstr(capname)
                        if code is None:
                                code = self._default_term_codes[capname]
index 4c237d34c035be22d2f107fd139deec2484e7edb..88b157fd0f4e20be5b56defd369ad337e0b8d9cb 100644 (file)
@@ -89,7 +89,7 @@ class MetadataRegen(PollScheduler):
                if self._global_cleanse:
                        for mytree in portdb.porttrees:
                                try:
-                                       dead_nodes[mytree] = set(portdb.auxdb[mytree].iterkeys())
+                                       dead_nodes[mytree] = set(portdb.auxdb[mytree].keys())
                                except CacheError as e:
                                        portage.writemsg("Error listing cache entries for " + \
                                                "'%s': %s, continuing...\n" % (mytree, e),
@@ -103,7 +103,7 @@ class MetadataRegen(PollScheduler):
                        for mytree in portdb.porttrees:
                                try:
                                        dead_nodes[mytree] = set(cpv for cpv in \
-                                               portdb.auxdb[mytree].iterkeys() \
+                                               portdb.auxdb[mytree].keys() \
                                                if cpv_getkey(cpv) in cp_set)
                                except CacheError as e:
                                        portage.writemsg("Error listing cache entries for " + \
@@ -119,7 +119,7 @@ class MetadataRegen(PollScheduler):
                                        if portdb.findname2(y, mytree=mytree)[0]:
                                                dead_nodes[mytree].discard(y)
 
-                       for mytree, nodes in dead_nodes.iteritems():
+                       for mytree, nodes in dead_nodes.items():
                                auxdb = portdb.auxdb[mytree]
                                for y in nodes:
                                        try:
index 0ea119ef81ed80630e337b5fdc1ef3b39998ae78..15471d1b230bd8e9b1628bc1bfb57af5603ef216 100644 (file)
@@ -171,7 +171,7 @@ class _PackageMetadataWrapper(dict):
                        # USE is lazy, but we want it to show up in self.keys().
                        self['USE'] = ''
                self.update(metadata)
-               for k, v in self.iteritems():
+               for k, v in self.items():
                        if k == 'INHERITED':
                                if isinstance(v, basestring):
                                        v = frozenset(v.split())
index f041e1d8cb1bb5df8da3eeb27bff2bb95338b6cb..adf269116728cd9244ecf2f78611b51e7560041e 100644 (file)
@@ -32,13 +32,13 @@ class PackageVirtualDbapi(portage.dbapi):
                obj = PackageVirtualDbapi(self.settings)
                obj._match_cache = self._match_cache.copy()
                obj._cp_map = self._cp_map.copy()
-               for k, v in obj._cp_map.iteritems():
+               for k, v in obj._cp_map.items():
                        obj._cp_map[k] = v[:]
                obj._cpv_map = self._cpv_map.copy()
                return obj
 
        def __iter__(self):
-               return self._cpv_map.itervalues()
+               return iter(self._cpv_map.values())
 
        def __contains__(self, item):
                existing = self._cpv_map.get(item.cpv)
index 5b64306e792f299cf1264f90e656054668de7442..369918de368f62ff261d49b658826e1732165ac7 100644 (file)
@@ -24,7 +24,7 @@ class PipeReader(AbstractPollTask):
        def _start(self):
                self._reg_ids = set()
                self._read_data = []
-               for k, f in self.input_files.iteritems():
+               for k, f in self.input_files.items():
                        fcntl.fcntl(f.fileno(), fcntl.F_SETFL,
                                fcntl.fcntl(f.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
                        self._reg_ids.add(self.scheduler.register(f.fileno(),
@@ -65,7 +65,7 @@ class PipeReader(AbstractPollTask):
 
                if event & PollConstants.POLLIN:
 
-                       for f in self.input_files.itervalues():
+                       for f in self.input_files.values():
                                if fd == f.fileno():
                                        break
 
@@ -97,7 +97,7 @@ class PipeReader(AbstractPollTask):
                        self._reg_ids = None
 
                if self.input_files is not None:
-                       for f in self.input_files.itervalues():
+                       for f in self.input_files.values():
                                f.close()
                        self.input_files = None
 
index 64f17094e3ec5b30c3f35672c2daf70489e91b08..7b7d3d95ad51c39b7c4afe96e07e559ad5d0ddb8 100644 (file)
@@ -48,7 +48,7 @@ class PollSelectAdapter(PollConstants):
 
                select_args = self._select_args
                if select_args is None:
-                       select_args = [self._registered.keys(), [], []]
+                       select_args = [list(self._registered.keys()), [], []]
 
                if timeout is not None:
                        select_args = select_args[:]
index e98be93c40456a70ec49050f335fed876491e035..67bf62672a4b98679bf5b781cb4add54a9c73dc7 100644 (file)
@@ -10,7 +10,7 @@ class RepoDisplay(object):
                self._shown_repos = {}
                self._unknown_repo = False
                repo_paths = set()
-               for root_config in roots.itervalues():
+               for root_config in roots.values():
                        portdir = root_config.settings.get("PORTDIR")
                        if portdir:
                                repo_paths.add(portdir)
@@ -23,7 +23,7 @@ class RepoDisplay(object):
                        for repo_path in repo_paths ]
 
                # pre-allocate index for PORTDIR so that it always has index 0.
-               for root_config in roots.itervalues():
+               for root_config in roots.values():
                        portdb = root_config.trees["porttree"].dbapi
                        portdir = portdb.porttree_root
                        if portdir:
@@ -58,7 +58,7 @@ class RepoDisplay(object):
                if shown_repos or self._unknown_repo:
                        output.append("Portage tree and overlays:\n")
                show_repo_paths = list(shown_repos)
-               for repo_path, repo_index in shown_repos.iteritems():
+               for repo_path, repo_index in shown_repos.items():
                        show_repo_paths[repo_index] = repo_path
                if show_repo_paths:
                        for index, repo_path in enumerate(show_repo_paths):
index 48af991204715524199a59132ff4dfb50371595c..70e9334821d9a7f169faaa722101c7da59968197 100644 (file)
@@ -15,7 +15,7 @@ class RootConfig(object):
        }
 
        tree_pkg_map = {}
-       for k, v in pkg_tree_map.iteritems():
+       for k, v in pkg_tree_map.items():
                tree_pkg_map[v] = k
 
        def __init__(self, settings, trees, setconfig):
index 83ffd4b9f1a4563528073826f7626b7ee0de1050..78b9e57d8f23165a3bb25a050830bdc4fb6315bc 100644 (file)
@@ -561,7 +561,7 @@ class Scheduler(PollScheduler):
 
                digest = '--digest' in self.myopts
                if not digest:
-                       for pkgsettings in self.pkgsettings.itervalues():
+                       for pkgsettings in self.pkgsettings.values():
                                if 'digest' in pkgsettings.features:
                                        digest = True
                                        break
@@ -604,7 +604,7 @@ class Scheduler(PollScheduler):
 
                shown_verifying_msg = False
                quiet_settings = {}
-               for myroot, pkgsettings in self.pkgsettings.iteritems():
+               for myroot, pkgsettings in self.pkgsettings.items():
                        quiet_config = portage.config(clone=pkgsettings)
                        quiet_config["PORTAGE_QUIET"] = "1"
                        quiet_config.backup_changes("PORTAGE_QUIET")
@@ -759,7 +759,7 @@ class Scheduler(PollScheduler):
                # any of bad_resume_opts from leaking in
                # via EMERGE_DEFAULT_OPTS.
                resume_opts["--ignore-default-opts"] = True
-               for myopt, myarg in resume_opts.iteritems():
+               for myopt, myarg in resume_opts.items():
                        if myopt not in bad_resume_opts:
                                if myarg is True:
                                        mynewargv.append(myopt)
index 4a99b50c7cc6b7e0ca478ec031a1c2c3957fcd30..1486e6773f8bb4ce1aae1ac9ba4ea8f19c0a0881 100644 (file)
@@ -43,7 +43,7 @@ class SpawnProcess(SubProcess):
                fd_pipes.setdefault(2, sys.stderr.fileno())
 
                # flush any pending output
-               for fd in fd_pipes.itervalues():
+               for fd in fd_pipes.values():
                        if fd == sys.stdout.fileno():
                                sys.stdout.flush()
                        if fd == sys.stderr.fileno():
index 0df94b9e7c16c1a14790b0555818d5e9908e459b..5889f556f61cf8469268b9e0596657bb200ce04c 100644 (file)
@@ -91,7 +91,7 @@ class SubProcess(AbstractPollTask):
                        self._reg_id = None
 
                if self._files is not None:
-                       for f in self._files.itervalues():
+                       for f in self._files.values():
                                f.close()
                        self._files = None
 
index 5c351a0920e2687b14aeaa54fc1be8be991189b0..0bdda1fc54d628576374f8997c3a364f9c0ef195 100644 (file)
@@ -744,7 +744,7 @@ def calc_depclean(settings, trees, ldpath_mtimes,
                                continue
 
        set_args = {}
-       for s, package_set in required_sets.iteritems():
+       for s, package_set in required_sets.items():
                set_atom = SETPREFIX + s
                set_arg = SetArg(arg=set_atom, set=package_set,
                        root_config=resolver._frozen_config.roots[myroot])
@@ -927,7 +927,7 @@ def calc_depclean(settings, trees, ldpath_mtimes,
                        if not consumers:
                                continue
 
-                       for lib, lib_consumers in consumers.items():
+                       for lib, lib_consumers in list(consumers.items()):
                                for consumer_file in list(lib_consumers):
                                        if pkg_dblink.isowner(consumer_file, myroot):
                                                lib_consumers.remove(consumer_file)
@@ -937,7 +937,7 @@ def calc_depclean(settings, trees, ldpath_mtimes,
                        if not consumers:
                                continue
 
-                       for lib, lib_consumers in consumers.iteritems():
+                       for lib, lib_consumers in consumers.items():
 
                                soname = soname_cache.get(lib)
                                if soname is None:
@@ -963,8 +963,8 @@ def calc_depclean(settings, trees, ldpath_mtimes,
                if consumer_map:
 
                        search_files = set()
-                       for consumers in consumer_map.itervalues():
-                               for lib, consumer_providers in consumers.iteritems():
+                       for consumers in consumer_map.values():
+                               for lib, consumer_providers in consumers.items():
                                        for lib_consumer, providers in consumer_providers:
                                                search_files.add(lib_consumer)
                                                search_files.update(providers)
@@ -972,8 +972,8 @@ def calc_depclean(settings, trees, ldpath_mtimes,
                        writemsg_level(">>> Assigning files to packages...\n")
                        file_owners = real_vardb._owners.getFileOwnerMap(search_files)
 
-                       for pkg, consumers in consumer_map.items():
-                               for lib, consumer_providers in consumers.items():
+                       for pkg, consumers in list(consumer_map.items()):
+                               for lib, consumer_providers in list(consumers.items()):
                                        lib_consumers = set()
 
                                        for lib_consumer, providers in consumer_providers:
@@ -1044,7 +1044,7 @@ def calc_depclean(settings, trees, ldpath_mtimes,
                        # and also add any dependencies pulled in by the provider.
                        writemsg_level(">>> Adding lib providers to graph...\n")
 
-                       for pkg, consumers in consumer_map.iteritems():
+                       for pkg, consumers in consumer_map.items():
                                for consumer_dblink in set(chain(*consumers.values())):
                                        consumer_pkg = vardb.get(("installed", myroot,
                                                consumer_dblink.mycpv, "nomerge"))
@@ -1287,7 +1287,7 @@ def action_info(settings, trees, myopts, myfiles):
        libtool_vers = ",".join(trees["/"]["vartree"].dbapi.match("sys-devel/libtool"))
 
        if "--verbose" in myopts:
-               myvars=settings.keys()
+               myvars=list(settings.keys())
        else:
                myvars = ['GENTOO_MIRRORS', 'CONFIG_PROTECT', 'CONFIG_PROTECT_MASK',
                          'PORTDIR', 'DISTDIR', 'PKGDIR', 'PORTAGE_TMPDIR',
@@ -1647,7 +1647,7 @@ def action_metadata(settings, portdb, myopts, porttrees=None):
 
        for tree_data in porttrees_data:
                try:
-                       dead_nodes = set(tree_data.dest_db.iterkeys())
+                       dead_nodes = set(tree_data.dest_db.keys())
                except CacheError as e:
                        writemsg_level("Error listing cache entries for " + \
                                "'%s': %s, continuing...\n" % (tree_data.path, e),
@@ -2553,7 +2553,7 @@ def git_sync_timestamps(settings, portdir):
                        continue
 
                inconsistent = False
-               for ec, (ec_path, ec_mtime) in ec_mtimes.iteritems():
+               for ec, (ec_path, ec_mtime) in ec_mtimes.items():
                        updated_mtime = updated_ec_mtimes.get(ec)
                        if updated_mtime is not None and updated_mtime != ec_mtime:
                                writemsg_level("!!! Inconsistent eclass mtime: %s %s\n" % \
@@ -2567,7 +2567,7 @@ def git_sync_timestamps(settings, portdir):
                if current_eb_mtime != eb_mtime:
                        os.utime(eb_path, (eb_mtime, eb_mtime))
 
-               for ec, (ec_path, ec_mtime) in ec_mtimes.iteritems():
+               for ec, (ec_path, ec_mtime) in ec_mtimes.items():
                        if ec in updated_ec_mtimes:
                                continue
                        ec_path = os.path.join(ec_dir, ec + ".eclass")
@@ -2586,7 +2586,7 @@ def load_emerge_config(trees=None):
                        kwargs[k] = v
        trees = portage.create_trees(trees=trees, **kwargs)
 
-       for root, root_trees in trees.iteritems():
+       for root, root_trees in trees.items():
                settings = root_trees["vartree"].settings
                setconfig = load_default_config(settings, root_trees)
                root_trees["root_config"] = RootConfig(settings, root_trees, setconfig)
index 1fd667f36972149fe1a4b31d0f0dfdedb1cdaba7..83ab77eb42aacd4bf28041cedd07eb0a53560dad 100644 (file)
@@ -6,7 +6,7 @@ import gc
 import portage
 
 def clear_caches(trees):
-       for d in trees.itervalues():
+       for d in trees.values():
                d["porttree"].dbapi.melt()
                d["porttree"].dbapi._aux_cache.clear()
                d["bintree"].dbapi._aux_cache.clear()
index 44bb7b45f47c8f4c06b0020b3207321a89182e8f..ed2e88af4cd6881ba989734576a465fea462c491 100644 (file)
@@ -163,7 +163,7 @@ class _dynamic_depgraph_config(object):
                        runtime_pkg_mask = {}
                else:
                        runtime_pkg_mask = dict((k, v.copy()) for (k, v) in \
-                               runtime_pkg_mask.iteritems())
+                               runtime_pkg_mask.items())
                self._runtime_pkg_mask = runtime_pkg_mask
                self._need_restart = False
 
@@ -281,7 +281,7 @@ class depgraph(object):
                # missed update from each SLOT.
                missed_updates = {}
                for pkg, mask_reasons in \
-                       self._dynamic_config._runtime_pkg_mask.iteritems():
+                       self._dynamic_config._runtime_pkg_mask.items():
                        if pkg.installed:
                                # Exclude installed here since we only
                                # want to show available updates.
@@ -291,7 +291,7 @@ class depgraph(object):
                                other_pkg, mask_type, parent_atoms = missed_updates[k]
                                if other_pkg > pkg:
                                        continue
-                       for mask_type, parent_atoms in mask_reasons.iteritems():
+                       for mask_type, parent_atoms in mask_reasons.items():
                                if not parent_atoms:
                                        continue
                                missed_updates[k] = (pkg, mask_type, parent_atoms)
@@ -301,7 +301,7 @@ class depgraph(object):
                        return
 
                missed_update_types = {}
-               for pkg, mask_type, parent_atoms in missed_updates.itervalues():
+               for pkg, mask_type, parent_atoms in missed_updates.values():
                        missed_update_types.setdefault(mask_type,
                                []).append((pkg, parent_atoms))
 
@@ -395,7 +395,7 @@ class depgraph(object):
                explanation_columns = 70
                explanations = 0
                for (slot_atom, root), slot_nodes \
-                       in self._dynamic_config._slot_collision_info.iteritems():
+                       in self._dynamic_config._slot_collision_info.items():
                        msg.append(str(slot_atom))
                        if root != '/':
                                msg.append(" for %s" % (root,))
@@ -599,7 +599,7 @@ class depgraph(object):
                packages that have been pulled into a given slot.
                """
                for (slot_atom, root), slot_nodes \
-                       in self._dynamic_config._slot_collision_info.iteritems():
+                       in self._dynamic_config._slot_collision_info.items():
 
                        all_parent_atoms = set()
                        for pkg in slot_nodes:
@@ -1171,7 +1171,7 @@ class depgraph(object):
                # by dep_zapdeps. We preserve actual parent/child relationships
                # here in order to avoid distorting the dependency graph like
                # <=portage-2.1.6.x did.
-               for virt_pkg, atoms in selected_atoms.iteritems():
+               for virt_pkg, atoms in selected_atoms.items():
 
                        # Just assume depth + 1 here for now, though it's not entirely
                        # accurate since multilple levels of indirect virtual deps may
@@ -1744,7 +1744,7 @@ class depgraph(object):
                        args_set.add(atom)
 
                self._dynamic_config._set_atoms.clear()
-               self._dynamic_config._set_atoms.update(chain(*self._dynamic_config._sets.itervalues()))
+               self._dynamic_config._set_atoms.update(chain(*self._dynamic_config._sets.values()))
                atom_arg_map = self._dynamic_config._atom_arg_map
                atom_arg_map.clear()
                for arg in args:
@@ -1760,7 +1760,7 @@ class depgraph(object):
                # Invalidate the package selection cache, since
                # arguments influence package selections.
                self._dynamic_config._highest_pkg_cache.clear()
-               for trees in self._dynamic_config._filtered_trees.itervalues():
+               for trees in self._dynamic_config._filtered_trees.values():
                        trees["porttree"].dbapi._clear_cache()
 
        def _greedy_slots(self, root_config, atom, blocker_lookahead=False):
@@ -1809,7 +1809,7 @@ class depgraph(object):
                        except portage.exception.InvalidDependString:
                                continue
                        blocker_atoms = []
-                       for atoms in selected_atoms.itervalues():
+                       for atoms in selected_atoms.values():
                                blocker_atoms.extend(x for x in atoms if x.blocker)
                        blockers[pkg] = InternalPackageSet(initial_atoms=blocker_atoms)
 
@@ -3062,7 +3062,7 @@ class depgraph(object):
                                        (running_root, running_portage.cpv, e), noiselevel=-1)
                                del e
                                portage_rdepend = {running_portage : []}
-                       for atoms in portage_rdepend.itervalues():
+                       for atoms in portage_rdepend.values():
                                runtime_deps.update(atom for atom in atoms \
                                        if not atom.blocker)
 
@@ -3648,7 +3648,7 @@ class depgraph(object):
                        # Reduce noise by pruning packages that are only
                        # pulled in by other conflict packages.
                        pruned_pkgs = set()
-                       for pkg, parent_atoms in conflict_pkgs.iteritems():
+                       for pkg, parent_atoms in conflict_pkgs.items():
                                relevant_parent = False
                                for parent, atom in parent_atoms:
                                        if parent not in conflict_pkgs:
@@ -3665,7 +3665,7 @@ class depgraph(object):
                        indent = "  "
                        # Max number of parents shown, to avoid flooding the display.
                        max_parents = 3
-                       for pkg, parent_atoms in conflict_pkgs.iteritems():
+                       for pkg, parent_atoms in conflict_pkgs.items():
 
                                pruned_list = set()
 
@@ -4538,7 +4538,7 @@ class depgraph(object):
                                        "merged because it is listed in\n")
                        msg.append("package.provided:\n\n")
                        problems_sets = set()
-                       for (arg, atom), refs in arg_refs.iteritems():
+                       for (arg, atom), refs in arg_refs.items():
                                ref_string = ""
                                if refs:
                                        problems_sets.update(refs)
@@ -5243,7 +5243,7 @@ def get_masking_status(pkg, pkgsettings, root_config):
                        mreasons.append("CHOST: %s" % \
                                pkg.metadata["CHOST"])
                if pkg.invalid:
-                       for msg_type, msgs in pkg.invalid.iteritems():
+                       for msg_type, msgs in pkg.invalid.items():
                                for msg in msgs:
                                        mreasons.append("invalid: %s" % (msg,))
 
index 6ac1b50eb59c6122fc5099755eae37af29d1e6a4..09522375f2f8b94b51838b9fdd85f0ae1e366e66 100644 (file)
@@ -255,7 +255,7 @@ def display_preserved_libs(vardbapi, myopts):
                                        samefile_map[obj_key] = alt_paths
                                alt_paths.add(f)
 
-                       for alt_paths in samefile_map.itervalues():
+                       for alt_paths in samefile_map.values():
                                alt_paths = sorted(alt_paths)
                                for p in alt_paths:
                                        print(colorize("WARN", " * ") + " - %s" % (p,))
@@ -422,13 +422,13 @@ def insert_optional_args(args):
                        continue
 
                match = None
-               for k, arg_choices in short_arg_opts.iteritems():
+               for k, arg_choices in short_arg_opts.items():
                        if k in arg:
                                match = k
                                break
 
                if match is None:
-                       for k, arg_choices in short_arg_opts_n.iteritems():
+                       for k, arg_choices in short_arg_opts_n.items():
                                if k in arg:
                                        match = k
                                        break
@@ -631,14 +631,14 @@ def parse_opts(tmpcmdline, silent=False):
        for myopt in options:
                parser.add_option(myopt, action="store_true",
                        dest=myopt.lstrip("--").replace("-", "_"), default=False)
-       for shortopt, longopt in shortmapping.iteritems():
+       for shortopt, longopt in shortmapping.items():
                parser.add_option("-" + shortopt, action="store_true",
                        dest=longopt.lstrip("--").replace("-", "_"), default=False)
-       for myalias, myopt in longopt_aliases.iteritems():
+       for myalias, myopt in longopt_aliases.items():
                parser.add_option(myalias, action="store_true",
                        dest=myopt.lstrip("--").replace("-", "_"), default=False)
 
-       for myopt, kwargs in argument_options.iteritems():
+       for myopt, kwargs in argument_options.items():
                shortopt = kwargs.pop("shortopt", None)
                args = [myopt]
                if shortopt is not None:
@@ -941,7 +941,7 @@ def expand_set_arguments(myfiles, myaction, root_config):
 
 def repo_name_check(trees):
        missing_repo_names = set()
-       for root, root_trees in trees.iteritems():
+       for root, root_trees in trees.items():
                if "porttree" in root_trees:
                        portdb = root_trees["porttree"].dbapi
                        missing_repo_names.update(portdb.porttrees)
@@ -973,7 +973,7 @@ def repo_name_check(trees):
 
 def repo_name_duplicate_check(trees):
        ignored_repos = {}
-       for root, root_trees in trees.iteritems():
+       for root, root_trees in trees.items():
                if 'porttree' in root_trees:
                        portdb = root_trees['porttree'].dbapi
                        if portdb.mysettings.get('PORTAGE_REPO_DUPLICATE_WARN') != '0':
@@ -1003,7 +1003,7 @@ def repo_name_duplicate_check(trees):
        return bool(ignored_repos)
 
 def config_protect_check(trees):
-       for root, root_trees in trees.iteritems():
+       for root, root_trees in trees.items():
                if not root_trees["root_config"].settings.get("CONFIG_PROTECT"):
                        msg = "!!! CONFIG_PROTECT is empty"
                        if root != "/":
@@ -1013,7 +1013,7 @@ def config_protect_check(trees):
 def profile_check(trees, myaction):
        if myaction in ("help", "info", "sync", "version"):
                return os.EX_OK
-       for root, root_trees in trees.iteritems():
+       for root, root_trees in trees.items():
                if root_trees["root_config"].settings.profiles:
                        continue
                # generate some profile related warning messages
@@ -1095,7 +1095,7 @@ def emerge_main():
                repo_name_duplicate_check(trees)
                config_protect_check(trees)
 
-       for mytrees in trees.itervalues():
+       for mytrees in trees.values():
                mydb = mytrees["porttree"].dbapi
                # Freeze the portdbapi for performance (memoize all xmatch results).
                mydb.freeze()
index 710a57b2869cb53ae0ec1c0f6b4e5894f74f8c4e..ddcfa8180b2a1227d8346d82ff0606c610be163a 100644 (file)
@@ -243,7 +243,7 @@ def unmerge(root_config, myopts, unmerge_action,
                                        slotmap[myslot][vartree.dbapi.cpv_counter(mypkg)] = mypkg
 
                                for myslot in slotmap:
-                                       counterkeys = slotmap[myslot].keys()
+                                       counterkeys = list(slotmap[myslot].keys())
                                        if not counterkeys:
                                                continue
                                        counterkeys.sort()
@@ -411,7 +411,7 @@ def unmerge(root_config, myopts, unmerge_action,
                                unordered[cp] = cp_dict
                                for k in d:
                                        cp_dict[k] = set()
-                       for k, v in d.iteritems():
+                       for k, v in d.items():
                                cp_dict[k].update(v)
                pkgmap = [unordered[cp] for cp in sorted(unordered)]
 
@@ -419,7 +419,7 @@ def unmerge(root_config, myopts, unmerge_action,
                selected = pkgmap[x]["selected"]
                if not selected:
                        continue
-               for mytype, mylist in pkgmap[x].iteritems():
+               for mytype, mylist in pkgmap[x].items():
                        if mytype == "selected":
                                continue
                        mylist.difference_update(all_selected)
index fe696b6dc3a2b7a4c5ba279b89096c2321b2e7ae..e8532e60d7a68ee8b8cca01f474661a46c6b7d8b 100644 (file)
@@ -183,7 +183,7 @@ class _unicode_func_wrapper(object):
                        wrapped_kwargs = dict(
                                (_unicode_encode(k, encoding=encoding, errors='strict'),
                                _unicode_encode(v, encoding=encoding, errors='strict'))
-                               for k, v in kwargs.iteritems())
+                               for k, v in kwargs.items())
                else:
                        wrapped_kwargs = {}
 
@@ -784,13 +784,13 @@ class digraph(object):
                        return list(self.nodes[node][0])
                children = []
                if hasattr(ignore_priority, '__call__'):
-                       for child, priorities in self.nodes[node][0].iteritems():
+                       for child, priorities in self.nodes[node][0].items():
                                for priority in priorities:
                                        if not ignore_priority(priority):
                                                children.append(child)
                                                break
                else:
-                       for child, priorities in self.nodes[node][0].iteritems():
+                       for child, priorities in self.nodes[node][0].items():
                                if ignore_priority < priorities[-1]:
                                        children.append(child)
                return children
@@ -801,13 +801,13 @@ class digraph(object):
                        return list(self.nodes[node][1])
                parents = []
                if hasattr(ignore_priority, '__call__'):
-                       for parent, priorities in self.nodes[node][1].iteritems():
+                       for parent, priorities in self.nodes[node][1].items():
                                for priority in priorities:
                                        if not ignore_priority(priority):
                                                parents.append(parent)
                                                break
                else:
-                       for parent, priorities in self.nodes[node][1].iteritems():
+                       for parent, priorities in self.nodes[node][1].items():
                                if ignore_priority < priorities[-1]:
                                        parents.append(parent)
                return parents
@@ -826,7 +826,7 @@ class digraph(object):
                elif hasattr(ignore_priority, '__call__'):
                        for node in self.order:
                                is_leaf_node = True
-                               for child, priorities in self.nodes[node][0].iteritems():
+                               for child, priorities in self.nodes[node][0].items():
                                        for priority in priorities:
                                                if not ignore_priority(priority):
                                                        is_leaf_node = False
@@ -838,7 +838,7 @@ class digraph(object):
                else:
                        for node in self.order:
                                is_leaf_node = True
-                               for child, priorities in self.nodes[node][0].iteritems():
+                               for child, priorities in self.nodes[node][0].items():
                                        if ignore_priority < priorities[-1]:
                                                is_leaf_node = False
                                                break
@@ -860,7 +860,7 @@ class digraph(object):
                elif hasattr(ignore_priority, '__call__'):
                        for node in self.order:
                                is_root_node = True
-                               for parent, priorities in self.nodes[node][1].iteritems():
+                               for parent, priorities in self.nodes[node][1].items():
                                        for priority in priorities:
                                                if not ignore_priority(priority):
                                                        is_root_node = False
@@ -872,7 +872,7 @@ class digraph(object):
                else:
                        for node in self.order:
                                is_root_node = True
-                               for parent, priorities in self.nodes[node][1].iteritems():
+                               for parent, priorities in self.nodes[node][1].items():
                                        if ignore_priority < priorities[-1]:
                                                is_root_node = False
                                                break
@@ -888,16 +888,16 @@ class digraph(object):
                clone = digraph()
                clone.nodes = {}
                memo = {}
-               for children, parents, node in self.nodes.itervalues():
+               for children, parents, node in self.nodes.values():
                        children_clone = {}
-                       for child, priorities in children.iteritems():
+                       for child, priorities in children.items():
                                priorities_clone = memo.get(id(priorities))
                                if priorities_clone is None:
                                        priorities_clone = priorities[:]
                                        memo[id(priorities)] = priorities_clone
                                children_clone[child] = priorities_clone
                        parents_clone = {}
-                       for parent, priorities in parents.iteritems():
+                       for parent, priorities in parents.items():
                                priorities_clone = memo.get(id(priorities))
                                if priorities_clone is None:
                                        priorities_clone = priorities[:]
@@ -941,7 +941,7 @@ class digraph(object):
                                output("depends on\n")
                        else:
                                output("(no children)\n")
-                       for child, priorities in self.nodes[node][0].iteritems():
+                       for child, priorities in self.nodes[node][0].items():
                                output("  %s (%s)\n" % (child, priorities[-1],))
 
 #parse /etc/env.d and generate /etc/profile.env
@@ -1135,7 +1135,7 @@ def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None,
                not ld_cache_update and \
                contents is not None:
                libdir_contents_changed = False
-               for mypath, mydata in contents.iteritems():
+               for mypath, mydata in contents.items():
                        if mydata[0] not in ("obj","sym"):
                                continue
                        head, tail = os.path.split(mypath)
@@ -1740,7 +1740,7 @@ class config(object):
                                for x in self.profiles]
                        for pkeyworddict in rawpkeywords:
                                cpdict = {}
-                               for k, v in pkeyworddict.iteritems():
+                               for k, v in pkeyworddict.items():
                                        cpdict.setdefault(k.cp, {})[k] = v
                                self._pkeywords_list.append(cpdict)
 
@@ -1758,7 +1758,7 @@ class config(object):
                                recursive=1) for x in self.profiles]
                        for pusemaskdict in rawpusemask:
                                cpdict = {}
-                               for k, v in pusemaskdict.iteritems():
+                               for k, v in pusemaskdict.items():
                                        cpdict.setdefault(k.cp, {})[k] = v
                                self.pusemask_list.append(cpdict)
                        del rawpusemask
@@ -1768,7 +1768,7 @@ class config(object):
                                juststrings=True, recursive=1) for x in self.profiles]
                        for rawpusedict in rawprofileuse:
                                cpdict = {}
-                               for k, v in rawpusedict.iteritems():
+                               for k, v in rawpusedict.items():
                                        cpdict.setdefault(k.cp, {})[k] = v
                                self.pkgprofileuse.append(cpdict)
                        del rawprofileuse
@@ -1784,7 +1784,7 @@ class config(object):
                                for x in self.profiles]
                        for rawpusefdict in rawpuseforce:
                                cpdict = {}
-                               for k, v in rawpusefdict.iteritems():
+                               for k, v in rawpusefdict.items():
                                        cpdict.setdefault(k.cp, {})[k] = v
                                self.puseforce_list.append(cpdict)
                        del rawpuseforce
@@ -1840,7 +1840,7 @@ class config(object):
 
                        # Avoid potential UnicodeDecodeError exceptions later.
                        env_unicode = dict((_unicode_decode(k), _unicode_decode(v))
-                               for k, v in env.iteritems())
+                               for k, v in env.items())
 
                        self.backupenv = env_unicode
 
@@ -1848,7 +1848,7 @@ class config(object):
                                # Remove duplicate values so they don't override updated
                                # profile.env values later (profile.env is reloaded in each
                                # call to self.regenerate).
-                               for k, v in env_d.iteritems():
+                               for k, v in env_d.items():
                                        try:
                                                if self.backupenv[k] == v:
                                                        del self.backupenv[k]
@@ -1965,14 +1965,14 @@ class config(object):
                                pmask_locations.append(abs_user_config)
                                pusedict = grabdict_package(
                                        os.path.join(abs_user_config, "package.use"), recursive=1)
-                               for k, v in pusedict.iteritems():
+                               for k, v in pusedict.items():
                                        self.pusedict.setdefault(k.cp, {})[k] = v
 
                                #package.keywords
                                pkgdict = grabdict_package(
                                        os.path.join(abs_user_config, "package.keywords"),
                                        recursive=1)
-                               for k, v in pkgdict.iteritems():
+                               for k, v in pkgdict.items():
                                        # default to ~arch if no specific keyword is given
                                        if not v:
                                                mykeywordlist = []
@@ -1990,7 +1990,7 @@ class config(object):
                                #package.license
                                licdict = grabdict_package(os.path.join(
                                        abs_user_config, "package.license"), recursive=1)
-                               for k, v in licdict.iteritems():
+                               for k, v in licdict.items():
                                        cp = k.cp
                                        cp_dict = self._plicensedict.get(cp)
                                        if not cp_dict:
@@ -2001,7 +2001,7 @@ class config(object):
                                #package.properties
                                propdict = grabdict_package(os.path.join(
                                        abs_user_config, "package.properties"), recursive=1)
-                               for k, v in propdict.iteritems():
+                               for k, v in propdict.items():
                                        cp = k.cp
                                        cp_dict = self._ppropertiesdict.get(cp)
                                        if not cp_dict:
@@ -2214,7 +2214,7 @@ class config(object):
                }
 
                for mypath, (gid, mode, modemask, preserve_perms) \
-                       in dir_mode_map.iteritems():
+                       in dir_mode_map.items():
                        mydir = os.path.join(self["ROOT"], mypath)
                        if preserve_perms and os.path.isdir(mydir):
                                # Only adjust permissions on some directories if
@@ -2617,7 +2617,7 @@ class config(object):
                for i, pkgprofileuse_dict in enumerate(self.pkgprofileuse):
                        cpdict = pkgprofileuse_dict.get(cp)
                        if cpdict:
-                               keys = cpdict.keys()
+                               keys = list(cpdict.keys())
                                while keys:
                                        bestmatch = best_match_to_list(cpv_slot, keys)
                                        if bestmatch:
@@ -2647,7 +2647,7 @@ class config(object):
                self.puse = ""
                cpdict = self.pusedict.get(cp)
                if cpdict:
-                       keys = cpdict.keys()
+                       keys = list(cpdict.keys())
                        while keys:
                                self.pusekey = best_match_to_list(cpv_slot, keys)
                                if self.pusekey:
@@ -2744,7 +2744,7 @@ class config(object):
                # allows packages that support LINGUAS but don't
                # declare it in IUSE to use the variable outside of the
                # USE_EXPAND context.
-               for k, use_expand_iuse in use_expand_iuses.iteritems():
+               for k, use_expand_iuse in use_expand_iuses.items():
                        if k + '_*' in use:
                                use.update( x for x in use_expand_iuse if x not in usemask )
                        k = k.upper()
@@ -2799,7 +2799,7 @@ class config(object):
                for i, pusemask_dict in enumerate(self.pusemask_list):
                        cpdict = pusemask_dict.get(cp)
                        if cpdict:
-                               keys = cpdict.keys()
+                               keys = list(cpdict.keys())
                                while keys:
                                        best_match = best_match_to_list(pkg, keys)
                                        if best_match:
@@ -2822,7 +2822,7 @@ class config(object):
                for i, puseforce_dict in enumerate(self.puseforce_list):
                        cpdict = puseforce_dict.get(cp)
                        if cpdict:
-                               keys = cpdict.keys()
+                               keys = list(cpdict.keys())
                                while keys:
                                        best_match = best_match_to_list(pkg, keys)
                                        if best_match:
@@ -2939,7 +2939,7 @@ class config(object):
                matches = False
                if pkgdict:
                        cpv_slot_list = ["%s:%s" % (cpv, metadata["SLOT"])]
-                       for atom, pkgkeywords in pkgdict.iteritems():
+                       for atom, pkgkeywords in pkgdict.items():
                                if match_from_list(atom, cpv_slot_list):
                                        matches = True
                                        pgroups.extend(pkgkeywords)
@@ -3007,7 +3007,7 @@ class config(object):
                if cpdict:
                        accept_license = list(self._accept_license)
                        cpv_slot = "%s:%s" % (cpv, metadata["SLOT"])
-                       for atom in match_to_list(cpv_slot, cpdict.keys()):
+                       for atom in match_to_list(cpv_slot, list(cpdict.keys())):
                                accept_license.extend(cpdict[atom])
 
                licenses = set(flatten(dep.use_reduce(dep.paren_reduce(
@@ -3088,7 +3088,7 @@ class config(object):
                if cpdict:
                        accept_properties = list(self._accept_properties)
                        cpv_slot = "%s:%s" % (cpv, metadata["SLOT"])
-                       for atom in match_to_list(cpv_slot, cpdict.keys()):
+                       for atom in match_to_list(cpv_slot, list(cpdict.keys())):
                                accept_properties.extend(cpdict[atom])
 
                properties = set(flatten(dep.use_reduce(dep.paren_reduce(
@@ -3468,7 +3468,7 @@ class config(object):
                        virtuals_file = os.path.join(x, "virtuals")
                        virtuals_dict = grabdict(virtuals_file)
                        atoms_dict = {}
-                       for k, v in virtuals_dict.iteritems():
+                       for k, v in virtuals_dict.items():
                                try:
                                        virt_atom = portage.dep.Atom(k)
                                except portage.exception.InvalidAtom:
@@ -3527,7 +3527,7 @@ class config(object):
 
        def _populate_treeVirtuals(self, vartree):
                """Reduce the provides into a list by CP."""
-               for provide, cpv_list in vartree.get_all_provides().iteritems():
+               for provide, cpv_list in vartree.get_all_provides().items():
                        try:
                                provide = dep.Atom(provide)
                        except exception.InvalidAtom:
@@ -3547,7 +3547,7 @@ class config(object):
                # Virtuals by profile+tree preferences.
                ptVirtuals   = {}
 
-               for virt, installed_list in self.treeVirtuals.iteritems():
+               for virt, installed_list in self.treeVirtuals.items():
                        profile_list = self.dirVirtuals.get(virt, None)
                        if not profile_list:
                                continue
@@ -3631,7 +3631,7 @@ class config(object):
                        yield (k, self[k])
 
        def items(self):
-               return list(self.iteritems())
+               return [(k, self[k]) for k in self]
 
        def __setitem__(self,mykey,myvalue):
                "set a value; will be thrown away at reset() time"
@@ -3850,7 +3850,7 @@ def spawn(mystring, mysettings, debug=0, free=0, droppriv=0, sesandbox=0, fakero
        # it needs to be flushed before allowing a child process to use it
        # so that output always shows in the correct order.
        stdout_filenos = (sys.stdout.fileno(), sys.stderr.fileno())
-       for fd in fd_pipes.itervalues():
+       for fd in fd_pipes.values():
                if fd in stdout_filenos:
                        sys.stdout.flush()
                        sys.stderr.flush()
@@ -4011,7 +4011,7 @@ def _spawn_fetch(settings, args, **kwargs):
                if args[0] != BASH_BINARY:
                        args = [BASH_BINARY, "-c", "exec \"$@\"", args[0]] + args
 
-       rval = spawn_func(args, env=dict(settings.iteritems()), **kwargs)
+       rval = spawn_func(args, env=dict(settings.items()), **kwargs)
 
        return rval
 
@@ -4302,7 +4302,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
 
        file_uri_tuples = []
        if isinstance(myuris, dict):
-               for myfile, uri_set in myuris.iteritems():
+               for myfile, uri_set in myuris.items():
                        for myuri in uri_set:
                                file_uri_tuples.append((myfile, myuri))
        else:
@@ -4365,7 +4365,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
 
        # Prefer thirdpartymirrors over normal mirrors in cases when
        # the file does not yet exist on the normal mirrors.
-       for myfile, uris in thirdpartymirror_uris.iteritems():
+       for myfile, uris in thirdpartymirror_uris.items():
                primaryuri_dict.setdefault(myfile, []).extend(uris)
 
        can_fetch=True
@@ -4700,7 +4700,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
                                                                        mysettings.get("PORTAGE_QUIET", None) == "1"
                                                                digests = mydigests.get(myfile)
                                                                if digests:
-                                                                       digests = digests.keys()
+                                                                       digests = list(digests.keys())
                                                                        digests.sort()
                                                                        eout.ebegin(
                                                                                "%s %s ;-)" % (myfile, " ".join(digests)))
@@ -5207,7 +5207,7 @@ def digestgen(myarchives, mysettings, overwrite=1, manifestonly=0, myportdb=None
                        writemsg(_("!!! Permission Denied: %s\n") % (e,), noiselevel=-1)
                        return 0
                if "assume-digests" not in mysettings.features:
-                       distlist = mf.fhashdict.get("DIST", {}).keys()
+                       distlist = list(mf.fhashdict.get("DIST", {}).keys())
                        distlist.sort()
                        auto_assumed = []
                        for filename in distlist:
@@ -5271,7 +5271,7 @@ def digestcheck(myfiles, mysettings, strict=0, justmanifest=0):
                        return 1
        mf = Manifest(pkgdir, mysettings["DISTDIR"])
        manifest_empty = True
-       for d in mf.fhashdict.itervalues():
+       for d in mf.fhashdict.values():
                if d:
                        manifest_empty = False
                        break
@@ -6178,7 +6178,7 @@ def _prepare_features_dirs(mysettings):
        droppriv = secpass >= 2 and \
                "userpriv" in mysettings.features and \
                "userpriv" not in restrict
-       for myfeature, kwargs in features_dirs.iteritems():
+       for myfeature, kwargs in features_dirs.items():
                if myfeature in mysettings.features:
                        failure = False
                        basedir = mysettings.get(kwargs["basedir_var"])
@@ -8307,7 +8307,7 @@ def getmaskingstatus(mycpv, settings=None, portdb=None):
        matches = False
        if pkgdict:
                cpv_slot_list = ["%s:%s" % (mycpv, metadata["SLOT"])]
-               for atom, pkgkeywords in pkgdict.iteritems():
+               for atom, pkgkeywords in pkgdict.items():
                        if match_from_list(atom, cpv_slot_list):
                                matches = True
                                pgroups.extend(pkgkeywords)
@@ -8413,9 +8413,9 @@ class FetchlistDict(portage.cache.mappings.Mapping):
                self.portdb = mydbapi
        def __getitem__(self, pkg_key):
                """Returns the complete fetch list for a given package."""
-               return self.portdb.getFetchMap(pkg_key, mytree=self.mytree).keys()
+               return list(self.portdb.getFetchMap(pkg_key, mytree=self.mytree).keys())
        def __contains__(self, cpv):
-               return cpv in self.keys()
+               return cpv in list(self.keys())
        def has_key(self, pkg_key):
                """Returns true if the given package exists within pkgdir."""
                return pkg_key in self
@@ -8712,7 +8712,7 @@ def _global_updates(trees, prev_mtimes):
                        # until after _all_ of the above updates have
                        # been processed because the mtimedb will
                        # automatically commit when killed by ctrl C.
-                       for mykey, mtime in timestamps.iteritems():
+                       for mykey, mtime in timestamps.items():
                                prev_mtimes[mykey] = mtime
 
                # We gotta do the brute force updates for these now.
@@ -8785,7 +8785,7 @@ class MtimeDB(dict):
                mtimedbkeys = set(("info", "ldpath", "resume", "resume_backup",
                        "starttime", "updates", "version"))
 
-               for k in d.keys():
+               for k in list(d.keys()):
                        if k not in mtimedbkeys:
                                writemsg(_("Deleting invalid mtimedb key: %s\n") % str(k))
                                del d[k]
index 5b771adadd73378b96be5f60fbf0b2f2f83b5940..143c321e14c371914d5d75fa79d9fedfec7c54af 100644 (file)
@@ -51,7 +51,7 @@ class database(fs_template.FsBased):
                self._ensure_access(self._db_path)
 
        def iteritems(self):
-               return self.__db.iteritems()
+               return iter(self.__db.items())
 
        def _getitem(self, cpv):
                # we override getitem because it's just a cpickling of the data handed in.
@@ -64,7 +64,7 @@ class database(fs_template.FsBased):
                del self.__db[cpv]
 
        def __iter__(self):
-               return iter(self.__db.keys())
+               return iter(list(self.__db.keys()))
 
        def __contains__(self, cpv):
                return cpv in self.__db
index 4bcc6c8466b25c2afad434ff19a95b047d51ac35..7ea4aaaca5e8cf961bb1db8c4e5b9abbb47b1a20 100644 (file)
@@ -123,7 +123,7 @@ class database(fs_template.FsBased):
        def _setitem(self, cpv, values):
                path = self.__get_path(cpv)
                max = self.max_len
-               for key,value in values.iteritems():
+               for key,value in values.items():
                        # mtime comes in as long so need to convert to strings
                        s = str(value)
                        # We need to split long values
index 066927ac146a8ec942b69afe3a97a3f8898daa04..69e37fa153d38bbd0919415bf1757c56342fadb8 100644 (file)
@@ -23,7 +23,7 @@ class Mapping(object):
        """
 
        def __iter__(self):
                return self.iterkeys()
 
        def keys(self):
                return list(self.__iter__())
@@ -48,14 +48,14 @@ class Mapping(object):
                return self.__iter__()
 
        def itervalues(self):
                for _, v in self.iteritems():
                        yield v
 
        def values(self):
                return [v for _, v in self.iteritems()]
 
        def items(self):
                return list(self.iteritems())
 
        def get(self, key, default=None):
                try:
@@ -64,10 +64,10 @@ class Mapping(object):
                        return default
 
        def __repr__(self):
-               return repr(dict(self.iteritems()))
+               return repr(dict(self.items()))
 
        def __len__(self):
-               return len(self.keys())
+               return len(list(self.keys()))
 
        if sys.hexversion >= 0x3000000:
                items = iteritems
@@ -80,7 +80,7 @@ class MutableMapping(Mapping):
        """
 
        def clear(self):
-               for key in self.keys():
+               for key in list(self.keys()):
                        del self[key]
 
        def setdefault(self, key, default=None):
@@ -105,7 +105,7 @@ class MutableMapping(Mapping):
 
        def popitem(self):
                try:
-                       k, v = self.iteritems().next()
+                       k, v = next(iter(self.items()))
                except StopIteration:
                        raise KeyError('container is empty')
                del self[k]
@@ -226,9 +226,9 @@ class ProtectedDict(MutableMapping):
                        
 
        def __iter__(self):
-               for k in self.new.iterkeys():
+               for k in self.new.keys():
                        yield k
-               for k in self.orig.iterkeys():
+               for k in self.orig.keys():
                        if k not in self.blacklist and k not in self.new:
                                yield k
 
@@ -333,12 +333,12 @@ def slot_dict_class(keys, prefix="_val_"):
                                        self.update(kwargs)
 
                        def __iter__(self):
-                               for k, v in self.iteritems():
+                               for k, v in self.items():
                                        yield k
 
                        def __len__(self):
                                l = 0
-                               for i in self.iteritems():
+                               for i in self.items():
                                        l += 1
                                return l
 
@@ -353,14 +353,14 @@ def slot_dict_class(keys, prefix="_val_"):
                                                pass
 
                        def items(self):
                                return list(self.iteritems())
 
                        def itervalues(self):
-                               for k, v in self.iteritems():
+                               for k, v in self.items():
                                        yield v
 
                        def values(self):
                                return list(self.itervalues())
 
                        def __delitem__(self, k):
                                try:
@@ -442,7 +442,7 @@ def slot_dict_class(keys, prefix="_val_"):
 
                        def popitem(self):
                                try:
-                                       k, v = self.iteritems().next()
+                                       k, v = next(iter(self.items()))
                                except StopIteration:
                                        raise KeyError('container is empty')
                                del self[k]
@@ -461,7 +461,7 @@ def slot_dict_class(keys, prefix="_val_"):
                                                pass
 
                        def __str__(self):
-                               return str(dict(self.iteritems()))
+                               return str(dict(self.items()))
 
                        if sys.hexversion >= 0x3000000:
                                items = iteritems
index 2bb740722ba9ee5dae21baa848afae4ae056305b..48dfd3aacd5f9d7a7d7198686d5e772b3e9cc843 100644 (file)
@@ -106,7 +106,7 @@ class database(fs_template.FsBased):
                        self._allowed_keys
 
                cursor = self._db_cursor
-               for k, v in self._db_table.iteritems():
+               for k, v in self._db_table.items():
                        if self._db_table_exists(v["table_name"]):
                                create_statement = self._db_table_get_create(v["table_name"])
                                if create_statement != v["create"]:
index f4e9dfb8de0ace5f5d2c44943aae1961e35730d2..e90c9dba5d029c4cfa570ee0798598f533beb43c 100644 (file)
@@ -67,7 +67,7 @@ class database(object):
                        raise cache_errors.ReadOnlyRestriction()
                if self.cleanse_keys:
                        d=ProtectedDict(values)
-                       for k in d.keys():
+                       for k in list(d.keys()):
                                if d[k] == '':
                                        del d[k]
                        if self.serialize_eclasses and "_eclasses_" in values:
@@ -109,17 +109,17 @@ class database(object):
                return cpv in self
 
        def keys(self):
                return tuple(self.iterkeys())
 
        def iterkeys(self):
                return iter(self)
 
        def iteritems(self):
                for x in self.iterkeys():
                        yield (x, self[x])
 
        def items(self):
                return list(self.iteritems())
 
        def sync(self, rate=0):
                self.sync_rate = rate
@@ -151,7 +151,7 @@ class database(object):
                if self.iterkeys is database.iterkeys:
                        # prevent a possible recursive loop
                        raise NotImplementedError(self)
                return self.iterkeys()
 
        def get(self, k, x=None):
                try:
@@ -169,7 +169,7 @@ class database(object):
 
                import re
                restricts = {}
-               for key,match in match_dict.iteritems():
+               for key,match in match_dict.items():
                        # XXX this sucks.
                        try:
                                if isinstance(match, basestring):
@@ -181,10 +181,10 @@ class database(object):
                        if key not in self.__known_keys:
                                raise InvalidRestriction(key, match, "Key isn't valid")
 
                for cpv in self.iterkeys():
                        cont = True
                        vals = self[cpv]
-                       for key, match in restricts.iteritems():
+                       for key, match in restricts.items():
                                if not match(vals[key]):
                                        cont = False
                                        break
index 93076d3041692de6b9c9d226872b74a3372a407e..9cc2d82e773905b8d618017840690f47bbc5a244 100644 (file)
@@ -134,7 +134,7 @@ def perform_all(x, calc_prelink=0):
        return mydict
 
 def get_valid_checksum_keys():
-       return hashfunc_map.keys()
+       return list(hashfunc_map.keys())
 
 def get_hash_origin(hashtype):
        if hashtype not in hashfunc_map:
index 0dd8f10bc644e2e976e273e3eae17d1b2c43fecd..eaf7f37a5c1e0ad7adb492c1649eb6b0c262e2bd 100644 (file)
@@ -89,12 +89,12 @@ def findoption(entries, pattern, recursive=0, basedir=""):
        basedir."""
        if not basedir.endswith("/"):
                basedir += "/"
-       for myfile, mydata in entries["files"].iteritems():
+       for myfile, mydata in entries["files"].items():
                if "cvs" in mydata["status"]:
                        if pattern.search(mydata["flags"]):
                                yield basedir+myfile
        if recursive:
-               for mydir, mydata in entries["dirs"].iteritems():
+               for mydir, mydata in entries["dirs"].items():
                        for x in findoption(mydata, pattern,
                                recursive, basedir+mydir):
                                yield x
index c37242419b398e0f6291de653263a5eb3c035e00..ff277a8fadfad02576cbbcf530dff15b89548d8e 100644 (file)
@@ -114,7 +114,7 @@ class bindbapi(fakedbapi):
                mytbz2 = portage.xpak.tbz2(tbz2path)
                mydata = mytbz2.get_data()
 
-               for k, v in values.iteritems():
+               for k, v in values.items():
                        k = _unicode_encode(k,
                                encoding=_encodings['repo.content'], errors='backslashreplace')
                        v = _unicode_encode(v,
@@ -645,7 +645,7 @@ class binarytree(object):
                        # from xpak.
                        if update_pkgindex and os.access(self.pkgdir, os.W_OK):
                                del pkgindex.packages[:]
-                               pkgindex.packages.extend(metadata.itervalues())
+                               pkgindex.packages.extend(iter(metadata.values()))
                                self._update_pkgindex_header(pkgindex.header)
                                from portage.util import atomic_ofstream
                                f = atomic_ofstream(self._pkgindex_file)
@@ -741,7 +741,7 @@ class binarytree(object):
                                        # Remote package instances override local package
                                        # if they are not identical.
                                        hash_names = ["SIZE"] + self._pkgindex_hashes
-                                       for cpv, local_metadata in metadata.iteritems():
+                                       for cpv, local_metadata in metadata.items():
                                                remote_metadata = self._remotepkgs.get(cpv)
                                                if remote_metadata is None:
                                                        continue
index 98af60d7f725970da35d854f2ff8f2aa7fcb1089..bf97af63e35f26bdf20245c20bc591f71272b298 100644 (file)
@@ -201,10 +201,10 @@ class portdbapi(dbapi):
                # earlier ones that correspond to the same name.
                porttrees = [x for x in porttrees if x not in identically_named_paths]
                ignored_map = {}
-               for path, repo_name in identically_named_paths.iteritems():
+               for path, repo_name in identically_named_paths.items():
                        ignored_map.setdefault(repo_name, []).append(path)
                self._ignored_repos = tuple((repo_name, tuple(paths)) \
-                       for repo_name, paths in ignored_map.iteritems())
+                       for repo_name, paths in ignored_map.items())
 
                self.porttrees = porttrees
                porttree_root = porttrees[0]
@@ -232,7 +232,7 @@ class portdbapi(dbapi):
                repo_aliases = {}
                if local_repo_configs is not None:
                        default_loc_repo_config = local_repo_configs.get('DEFAULT')
-                       for repo_name, loc_repo_conf in local_repo_configs.iteritems():
+                       for repo_name, loc_repo_conf in local_repo_configs.items():
                                if loc_repo_conf.aliases is not None:
                                        for alias in loc_repo_conf.aliases:
                                                overridden_alias = repo_aliases.get(alias)
@@ -503,7 +503,7 @@ class portdbapi(dbapi):
 
                i = metadata
                if hasattr(metadata, "iteritems"):
-                       i = metadata.iteritems()
+                       i = iter(metadata.items())
                metadata = dict(i)
 
                if metadata.get("INHERITED", False):
@@ -761,7 +761,7 @@ class portdbapi(dbapi):
 
                all_uris = []
                all_files = []
-               for filename, uris in uri_map.iteritems():
+               for filename, uris in uri_map.items():
                        for uri in uris:
                                all_uris.append(uri)
                                all_files.append(filename)
@@ -858,7 +858,7 @@ class portdbapi(dbapi):
                                                y == "CVS":
                                                continue
                                        d[x+"/"+y] = None
-               l = d.keys()
+               l = list(d.keys())
                l.sort()
                return l
 
@@ -915,7 +915,7 @@ class portdbapi(dbapi):
                                (mycp, self.mysettings["PORTAGE_CONFIGROOT"]), noiselevel=-1)
                        mylist = []
                else:
-                       mylist = d.keys()
+                       mylist = list(d.keys())
                # Always sort in ascending order here since it's handy
                # and the result can be easily cached and reused.
                self._cpv_sort_ascending(mylist)
index 5db04e4b1794405234a773d06fb2a745068fba49..635ba6026cd06297f20dc3ee2fe29707fd6ce2c9 100644 (file)
@@ -605,8 +605,8 @@ class LinkageMap(object):
                rValue = []
                if not self._libs:
                        self.rebuild()
-               for arch_map in self._libs.itervalues():
-                       for soname_map in arch_map.itervalues():
+               for arch_map in self._libs.values():
+                       for soname_map in arch_map.values():
                                for obj_key in soname_map.providers:
                                        rValue.extend(self._obj_properties[obj_key][4])
                return rValue
@@ -1019,7 +1019,7 @@ class vardbapi(dbapi):
                                self.invalidentry(self.getpath(y))
                                continue
                        d[mysplit[0]+"/"+mysplit[1]] = None
-               return d.keys()
+               return list(d.keys())
 
        def checkblockers(self, origdep):
                pass
@@ -1224,7 +1224,7 @@ class vardbapi(dbapi):
                        cache_valid = cache_mtime == mydir_mtime
                if cache_valid:
                        # Migrate old metadata to unicode.
-                       for k, v in metadata.iteritems():
+                       for k, v in metadata.items():
                                metadata[k] = _unicode_decode(v,
                                        encoding=_encodings['repo.content'], errors='replace')
 
@@ -1298,7 +1298,7 @@ class vardbapi(dbapi):
                treetype="vartree", vartree=self.vartree)
                if not mylink.exists():
                        raise KeyError(cpv)
-               for k, v in values.iteritems():
+               for k, v in values.items():
                        if v:
                                mylink.setfile(k, v)
                        else:
@@ -1561,7 +1561,7 @@ class vardbapi(dbapi):
                def getFileOwnerMap(self, path_iter):
                        owners = self.get_owners(path_iter)
                        file_owners = {}
-                       for pkg_dblink, files in owners.iteritems():
+                       for pkg_dblink, files in owners.items():
                                for f in files:
                                        owner_set = file_owners.get(f)
                                        if owner_set is None:
@@ -2183,7 +2183,7 @@ class dblink(object):
                        cpv_lib_map = self._find_unused_preserved_libs()
                        if cpv_lib_map:
                                self._remove_preserved_libs(cpv_lib_map)
-                               for cpv, removed in cpv_lib_map.iteritems():
+                               for cpv, removed in cpv_lib_map.items():
                                        if not self.vartree.dbapi.cpv_exists(cpv):
                                                for dblnk in others_in_slot:
                                                        if dblnk.mycpv == cpv:
@@ -2359,7 +2359,7 @@ class dblink(object):
 
                if pkgfiles:
                        self.updateprotect()
-                       mykeys = pkgfiles.keys()
+                       mykeys = list(pkgfiles.keys())
                        mykeys.sort()
                        mykeys.reverse()
 
@@ -2829,7 +2829,7 @@ class dblink(object):
 
                # Create consumer nodes and add them to the graph.
                # Note that consumers can also be providers.
-               for provider_node, consumers in consumer_map.iteritems():
+               for provider_node, consumers in consumer_map.items():
                        for c in consumers:
                                if self.isowner(c, root):
                                        continue
@@ -2955,7 +2955,7 @@ class dblink(object):
                        return node
 
                linkmap = self.vartree.dbapi.linkmap
-               for cpv, plibs in plib_dict.iteritems():
+               for cpv, plibs in plib_dict.items():
                        for f in plibs:
                                path_cpv_map[f] = cpv
                                preserved_node = path_to_node(f)
@@ -3036,7 +3036,7 @@ class dblink(object):
                os = _os_merge
 
                files_to_remove = set()
-               for files in cpv_lib_map.itervalues():
+               for files in cpv_lib_map.values():
                        files_to_remove.update(files)
                files_to_remove = sorted(files_to_remove)
                showMessage = self._display_merge
@@ -3088,7 +3088,7 @@ class dblink(object):
                        plib_dict = self.vartree.dbapi.plib_registry.getPreservedLibs()
                        plib_cpv_map = {}
                        plib_paths = set()
-                       for cpv, paths in plib_dict.iteritems():
+                       for cpv, paths in plib_dict.items():
                                plib_paths.update(paths)
                                for f in paths:
                                        plib_cpv_map[f] = cpv
@@ -3266,7 +3266,7 @@ class dblink(object):
                                k = (s.st_dev, s.st_ino)
                                inode_map.setdefault(k, []).append((path, s))
                suspicious_hardlinks = []
-               for path_list in inode_map.itervalues():
+               for path_list in inode_map.values():
                        path, s = path_list[0]
                        if len(path_list) == s.st_nlink:
                                # All hardlinks seem to be owned by this package.
@@ -3644,7 +3644,7 @@ class dblink(object):
                        owners = self.vartree.dbapi._owners.get_owners(collisions)
                        self.vartree.dbapi.flush_cache()
 
-                       for pkg, owned_files in owners.iteritems():
+                       for pkg, owned_files in owners.items():
                                cpv = pkg.mycpv
                                msg = []
                                msg.append("%s" % cpv)
@@ -3891,7 +3891,7 @@ class dblink(object):
                # and update the contents of the packages that owned them.
                plib_registry = self.vartree.dbapi.plib_registry
                plib_dict = plib_registry.getPreservedLibs()
-               for cpv, paths in plib_collisions.iteritems():
+               for cpv, paths in plib_collisions.items():
                        if cpv not in plib_dict:
                                continue
                        if cpv == self.mycpv:
@@ -3948,7 +3948,7 @@ class dblink(object):
                cpv_lib_map = self._find_unused_preserved_libs()
                if cpv_lib_map:
                        self._remove_preserved_libs(cpv_lib_map)
-                       for cpv, removed in cpv_lib_map.iteritems():
+                       for cpv, removed in cpv_lib_map.items():
                                if not self.vartree.dbapi.cpv_exists(cpv):
                                        continue
                                self.vartree.dbapi.removeFromContents(cpv, removed)
@@ -4428,7 +4428,7 @@ def tar_contents(contents, root, tar, protect=None, onProgress=None):
        curval = 0
        if onProgress:
                onProgress(maxval, 0)
-       paths = contents.keys()
+       paths = list(contents.keys())
        paths.sort()
        for path in paths:
                curval += 1
index bf90f305bfcd3c144693a741e12a4c3e901159df..5d1e933a1aeb33c708be6165d61f66f1c90ead3b 100644 (file)
@@ -59,7 +59,7 @@ class fakedbapi(dbapi):
                return list(self.cpdict)
 
        def cpv_all(self):
-               return self.cpvdict.keys()
+               return list(self.cpvdict.keys())
 
        def cpv_inject(self, mycpv, metadata=None):
                """Adds a cpv to the list of available packages. See the
index 343f8ab6e4150ffc62ee9131885c1c163a787b8e..18e8482e45acefb479a4d67e311f5bbc0ff29a28 100644 (file)
@@ -89,7 +89,7 @@ class trace_handler(object):
 
                # We omit items that will lead to unreasonable bloat of the trace
                # output (and resulting log file).
-               for k, v in my_locals.iteritems():
+               for k, v in my_locals.items():
                        my_repr = repr(v)
                        if len(my_repr) > self.max_repr_length:
                                my_locals[k] = "omitted"
index 9d055912dd9b02e6c1a7f2a4854dfedafa6b164a..c8b9f6d44e16b56dcd94936a99210eae4ca5b476 100644 (file)
@@ -393,16 +393,16 @@ class _use_dep(object):
                self.required = frozenset(chain(
                        enabled_flags,
                        disabled_flags,
-                       *conditional.values()
+                       *list(conditional.values())
                ))
 
                self.enabled = frozenset(enabled_flags)
                self.disabled = frozenset(disabled_flags)
                self.conditional = None
 
-               for v in conditional.itervalues():
+               for v in conditional.values():
                        if v:
-                               for k, v in conditional.iteritems():
+                               for k, v in conditional.items():
                                        conditional[k] = frozenset(v)
                                self.conditional = conditional
                                break
index 096ce0a7c9f7c8031346945b26e98c3484da5631..377eb45c3657f4f47b778870dab96d845be93d61 100644 (file)
@@ -36,7 +36,7 @@ def read_config(mandatory_opts):
 
        # Handle quote removal here, since KeyValuePairFileLoader doesn't do that.
     quotes = "\"'"
-    for k, v in opts.iteritems():
+    for k, v in opts.items():
         if v[:1] in quotes and v[:1] == v[-1:]:
             opts[k] = v[1:-1]
 
index 30b83ea3d3b2756d26b43eecd4ce6f034aa325c0..2c3c66c86a5230f833741c6a0b634bca52dd6eef 100644 (file)
@@ -114,7 +114,7 @@ class cache(object):
        def is_eclass_data_valid(self, ec_dict):
                if not isinstance(ec_dict, dict):
                        return False
-               for eclass, tup in ec_dict.iteritems():
+               for eclass, tup in ec_dict.items():
                        cached_data = self.eclasses.get(eclass, None)
                        """ Only use the mtime for validation since the probability of a
                        collision is small and, depending on the cache implementation, the
index c689d821d28ee1cab42716cf192736fdf1745a91..53c0a85f779ca032074081ab841ec65674598c98 100644 (file)
@@ -100,7 +100,7 @@ def elog_process(cpv, mysettings, phasefilter=None):
        else:
                all_logentries[cpv] = ebuild_logentries
 
-       for key in _preserve_logentries.keys():
+       for key in list(_preserve_logentries.keys()):
                if key in all_logentries:
                        all_logentries[key] = _merge_logentries(_preserve_logentries[key], all_logentries[key])
                else:
@@ -140,7 +140,7 @@ def elog_process(cpv, mysettings, phasefilter=None):
                        listener(mysettings, str(key), default_logentries, default_fulllog)
 
                # pass the processing to the individual modules
-               for s, levels in logsystems.iteritems():
+               for s, levels in logsystems.items():
                        # allow per module overrides of PORTAGE_ELOG_CLASSES
                        if levels:
                                mod_logentries = filter_loglevels(all_logentries[key], levels)
index 4380146d2d4dcd4acbed62b5ed70e36cc7d93e67..3ea0e6fe93df7b9af28e98136bd7a1c872eb7a90 100644 (file)
@@ -31,7 +31,7 @@ def finalize(mysettings=None):
        an older version of portage will import the module from a newer version
        when it upgrades itself."""
        global _items
-       for mysettings, items in _items.itervalues():
+       for mysettings, items in _items.values():
                _finalize(mysettings, items)
        _items.clear()
 
@@ -59,7 +59,7 @@ def _finalize(mysettings, items):
                 mybody += "- %s\n" % key
 
        mymessage = portage.mail.create_message(myfrom, myrecipient, mysubject,
-               mybody, attachments=items.values())
+               mybody, attachments=list(items.values()))
 
        def timeout_handler(signum, frame):
                raise PortageException("Timeout in finalize() for elog system 'mail_summary'")
index 5d3864fd123e182e0ef10d44b8dd9fadc5ddeac2..35bef052a038bec44493b7df348b8520b01c7e84 100644 (file)
@@ -801,7 +801,7 @@ class PackageIndex(object):
                        if not mycpv:
                                continue
                        if self._default_pkg_data:
-                               for k, v in self._default_pkg_data.iteritems():
+                               for k, v in self._default_pkg_data.items():
                                        d.setdefault(k, v)
                        if self._inherited_keys:
                                for k in self._inherited_keys:
@@ -814,7 +814,7 @@ class PackageIndex(object):
                if self.modified:
                        self.header["TIMESTAMP"] = str(long(time.time()))
                        self.header["PACKAGES"] = str(len(self.packages))
-               keys = self.header.keys()
+               keys = list(self.header.keys())
                keys.sort()
                self._writepkgindex(pkgfile, [(k, self.header[k]) \
                        for k in keys if self.header[k]])
@@ -827,10 +827,10 @@ class PackageIndex(object):
                                        if v is not None and v == metadata.get(k):
                                                del metadata[k]
                        if self._default_pkg_data:
-                               for k, v in self._default_pkg_data.iteritems():
+                               for k, v in self._default_pkg_data.items():
                                        if metadata.get(k) == v:
                                                metadata.pop(k, None)
-                       keys = metadata.keys()
+                       keys = list(metadata.keys())
                        keys.sort()
                        self._writepkgindex(pkgfile,
                                [(k, metadata[k]) for k in keys if metadata[k]])
index cd2609c3bf00c56a771aa89f707b8d3a9b4a56d0..7b20a3c2d56160637fd652f30191102eb26c115a 100644 (file)
@@ -68,13 +68,13 @@ def parseManifest2(mysplit):
 class ManifestEntry(object):
        __slots__ = ("type", "name", "hashes")
        def __init__(self, **kwargs):
-               for k, v in kwargs.iteritems():
+               for k, v in kwargs.items():
                        setattr(self, k, v)
 
 class Manifest2Entry(ManifestEntry):
        def __str__(self):
                myline = " ".join([self.type, self.name, str(self.hashes["size"])])
-               myhashkeys = self.hashes.keys()
+               myhashkeys = list(self.hashes.keys())
                myhashkeys.remove("size")
                myhashkeys.sort()
                for h in myhashkeys:
@@ -202,15 +202,15 @@ class Manifest(object):
                return myhashdict
 
        def _createManifestEntries(self):
-               mytypes = self.fhashdict.keys()
+               mytypes = list(self.fhashdict.keys())
                mytypes.sort()
                for t in mytypes:
-                       myfiles = self.fhashdict[t].keys()
+                       myfiles = list(self.fhashdict[t].keys())
                        myfiles.sort()
                        for f in myfiles:
                                myentry = Manifest2Entry(
                                        type=t, name=f, hashes=self.fhashdict[t][f].copy())
-                               myhashkeys = myentry.hashes.keys()
+                               myhashkeys = list(myentry.hashes.keys())
                                myhashkeys.sort()
                                for h in myhashkeys:
                                        if h not in ["size"] + portage.const.MANIFEST2_HASH_FUNCTIONS:
index 84d9ab877e8d1abe1ea6243a4fdb8818d533c38a..c178a8c4859c13baa4b50304f23d60351c46b19b 100644 (file)
@@ -235,7 +235,7 @@ class NewsItem(object):
                                'profile' : profile }
 
                all_match = True
-               for values in self.restrictions.itervalues():
+               for values in self.restrictions.values():
                        any_match = False
                        for restriction in values:
                                if restriction.checkRestriction(**kwargs):
@@ -269,7 +269,7 @@ class NewsItem(object):
                        restricts = {  _installedRE : DisplayInstalledRestriction,
                                        _profileRE : DisplayProfileRestriction,
                                        _keywordRE : DisplayKeywordRestriction }
-                       for regex, restriction in restricts.iteritems():
+                       for regex, restriction in restricts.items():
                                match = regex.match(line)
                                if match:
                                        restrict = restriction(match.groups()[0].strip())
index 5f310d30010657c69599d7a4ff177818f5090cf1..6044f2bbc366d56ebc4ce3e4aeb255e045452383 100644 (file)
@@ -759,10 +759,10 @@ def _init(config_root='/'):
        codes = object.__getattribute__(codes, '_attr')
        _styles = object.__getattribute__(_styles, '_attr')
 
-       for k, v in codes.iteritems():
+       for k, v in codes.items():
                codes[k] = _unicode_decode(v)
 
-       for k, v in _styles.iteritems():
+       for k, v in _styles.items():
                _styles[k] = _unicode_decode(v)
 
        try:
index b49f061f14b353de8830b6c7ab759d1d0fd51f27..6ce304b4f58a9b66e53a5e0308b808f0cb0a735f 100644 (file)
@@ -189,7 +189,7 @@ def spawn(mycommand, env={}, opt_name=None, fd_pipes=None, returnpid=False,
 
        # Avoid a potential UnicodeEncodeError from os.execve().
        env_bytes = {}
-       for k, v in env.iteritems():
+       for k, v in env.items():
                env_bytes[_unicode_encode(k, encoding=_encodings['content'])] = \
                        _unicode_encode(v, encoding=_encodings['content'])
        env = env_bytes
index 62428543ef60dcd8699c9880bf527545f0347740..c62f9078895e24c599e8f851ae828436f877ea85 100644 (file)
@@ -50,7 +50,7 @@ def _unregister_module_proxy(name):
                                object.__getattribute__(proxy, '_get_target')()
 
                        modules = sys.modules
-                       for name, proxy_list in list(_module_proxies.iteritems()):
+                       for name, proxy_list in list(_module_proxies.items()):
                                if name not in modules:
                                        continue
                                # First delete this name from the dict so that
index 4b39e9df97d57d0e53fdb8fbcbebe12e4888e612..0c08ee1948b1a5f9a572e13859f7027017c219f4 100644 (file)
@@ -126,7 +126,7 @@ class PackageSet(object):
                                rev_transform[atom] = atom
                        else:
                                rev_transform[Atom(atom.replace(atom.cp, pkg.cp, 1))] = atom
-               best_match = best_match_to_list(pkg, rev_transform.iterkeys())
+               best_match = best_match_to_list(pkg, iter(rev_transform.keys()))
                if best_match:
                        return rev_transform[best_match]
                return None
index 15ecd752bae1cf12c67691bc774d82949ebf3b15..8a7e4d84d3db46636725b15e482d4cc98f68af80 100644 (file)
@@ -93,7 +93,7 @@ class StaticFileSet(EditablePackageSet):
                                        # in the latest new slot that may be available.
                                        atoms.append(a)
                        else:
-                               atoms = data.keys()
+                               atoms = list(data.keys())
                        self._setAtoms(atoms)
                        self._mtime = mtime
                
@@ -183,7 +183,7 @@ class ConfigFileSet(PackageSet):
 
        def load(self):
                data, errors = self.loader.load()
-               self._setAtoms(data.keys())
+               self._setAtoms(list(data.keys()))
        
        def singleBuilder(self, options, settings, trees):
                if not "filename" in options:
@@ -254,7 +254,7 @@ class WorldSet(EditablePackageSet):
                                        raise
                                del e
                                data = {}
-                       atoms = data.keys()
+                       atoms = list(data.keys())
                        self._mtime = mtime
                        atoms_changed = True
                else:
@@ -274,7 +274,7 @@ class WorldSet(EditablePackageSet):
                                        raise
                                del e
                                data = {}
-                       nonatoms = data.keys()
+                       nonatoms = list(data.keys())
                        self._mtime2 = mtime
                        atoms_changed = True
                else:
index d7e4c731a77971d65998e14bdb166bdff503a645..b8aa3c41c8ba697226c13c0b6af83ca2efcff5b1 100644 (file)
@@ -64,7 +64,7 @@ class PreservedLibraryConsumerSet(LibraryConsumerSet):
                consumers = set()
                if reg:
                        plib_dict = reg.getPreservedLibs()
-                       for libs in plib_dict.itervalues():
+                       for libs in plib_dict.values():
                                for lib in libs:
                                        if self.debug:
                                                print(lib)
@@ -74,7 +74,7 @@ class PreservedLibraryConsumerSet(LibraryConsumerSet):
                                        consumers.update(self.dbapi.linkmap.findConsumers(lib))
                        # Don't rebuild packages just because they contain preserved
                        # libs that happen to be consumers of other preserved libs.
-                       for libs in plib_dict.itervalues():
+                       for libs in plib_dict.values():
                                consumers.difference_update(libs)
                else:
                        return
index 228cf2ecb6004b0d7e53ddf5e75e7b559ddc4886..d016331255779bd23fc36a156ae04fd6055829dd 100644 (file)
@@ -23,7 +23,7 @@ class PackageKeywordsFileTestCase(TestCase):
                        f = PackageKeywordsFile(self.fname)
                        f.load()
                        i = 0
-                       for cpv, keyword in f.iteritems():
+                       for cpv, keyword in f.items():
                                self.assertEqual( cpv, self.cpv[i] )
                                [k for k in keyword if self.assertTrue(k in self.keywords)]
                                i = i + 1
index 575ede505df39bcdf1f86723bb467cc82550dab2..4157d60978401258e970ccc41750ea4b391613a8 100644 (file)
@@ -22,7 +22,7 @@ class PackageUseFileTestCase(TestCase):
                try:
                        f = PackageUseFile(self.fname)
                        f.load()
-                       for cpv, use in f.iteritems():
+                       for cpv, use in f.items():
                                self.assertEqual( cpv, self.cpv )
                                [flag for flag in use if self.assertTrue(flag in self.useflags)]
                finally:
index 41a44c4cc0ff9c1cebdc64ca659dd136a92ae803..8a37b73bd17de02850b6f8a03c976357eed6b169 100644 (file)
@@ -32,7 +32,7 @@ class PortageModulesFileTestCase(TestCase):
        def BuildFile(self):
                fd, self.fname = mkstemp()
                f = os.fdopen(fd, 'w')
-               for k, v in self.items.iteritems():
+               for k, v in self.items.items():
                        f.write('%s=%s\n' % (k,v))
                f.close()
 
index 0941ba06b45846b09dff4e7e23b2cc163ebd3529..1ea15e7e929df24cc2598ff46138cb427ed4ac33 100644 (file)
@@ -56,7 +56,7 @@ def update_dbentries(update_iter, mydata):
        """Performs update commands and returns a
        dict containing only the updated items."""
        updated_items = {}
-       for k, mycontent in mydata.iteritems():
+       for k, mycontent in mydata.items():
                k_unicode = _unicode_decode(k,
                        encoding=_encodings['repo.content'], errors='replace')
                if k_unicode not in ignored_dbentries:
@@ -83,7 +83,7 @@ def fixdbentries(update_iter, dbdir):
                        mode='r', encoding=_encodings['repo.content'],
                        errors='replace').read()
        updated_items = update_dbentries(update_iter, mydata)
-       for myfile, mycontent in updated_items.iteritems():
+       for myfile, mycontent in updated_items.items():
                file_path = os.path.join(dbdir, myfile)
                write_atomic(file_path, mycontent, encoding=_encodings['repo.content'])
        return len(updated_items) > 0
@@ -231,7 +231,7 @@ def update_config_files(config_root, protect, protect_mask, update_iter):
        # update /etc/portage/packages.*
        ignore_line_re = re.compile(r'^#|^\s*$')
        for update_cmd in update_iter:
-               for x, contents in file_contents.iteritems():
+               for x, contents in file_contents.items():
                        for pos, line in enumerate(contents):
                                if ignore_line_re.match(line):
                                        continue
index 60e72fbb4af08ef6c5015fddf6641a0c444385c9..5f1a42c2f13e6ddaefcbf2c0608d24d049b6adeb 100644 (file)
@@ -220,7 +220,7 @@ def stack_dicts(dicts, incremental=0, incrementals=[], ignore_none=0):
                                return None
                if final_dict is None:
                        final_dict = {}
-               for y in mydict.keys():
+               for y in list(mydict.keys()):
                        if True:
                                if y in final_dict and (incremental or (y in incrementals)):
                                        final_dict[y] += " "+mydict[y][:]
@@ -247,7 +247,7 @@ def stack_lists(lists, incremental=1):
                                        new_list[y] = True
                        else:
                                new_list[y] = True
-       return new_list.keys()
+       return list(new_list.keys())
 
 def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1):
        """
@@ -287,7 +287,7 @@ def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1):
                else:
                        newdict[myline[0]] = myline[1:]
        if juststrings:
-               for k, v in newdict.iteritems():
+               for k, v in newdict.items():
                        newdict[k] = " ".join(v)
        return newdict
 
@@ -299,7 +299,7 @@ def grabdict_package(myfilename, juststrings=0, recursive=0):
        # "RuntimeError: dictionary changed size during iteration"
        # when an invalid atom is deleted.
        atoms = {}
-       for k, v in pkgs.iteritems():
+       for k, v in pkgs.items():
                try:
                        k = Atom(k)
                except InvalidAtom:
index 29c28af32cbe7068ded9f3a1b08b2f95605f8adc..b5878befb5c14194a9d719fda315aafc5594432e 100644 (file)
@@ -99,7 +99,7 @@ def xpak_mem(mydata):
        """Create an xpack segement from a map object."""
 
        mydata_encoded = {}
-       for k, v in mydata.iteritems():
+       for k, v in mydata.items():
                k = _unicode_encode(k,
                        encoding=_encodings['repo.content'], errors='backslashreplace')
                v = _unicode_encode(v,
@@ -112,7 +112,7 @@ def xpak_mem(mydata):
        indexpos=0
        dataglob = _unicode_encode('')
        datapos=0
-       for x, newglob in mydata.iteritems():
+       for x, newglob in mydata.items():
                mydatasize=len(newglob)
                indexglob=indexglob+encodeint(len(x))+x+encodeint(datapos)+encodeint(mydatasize)
                indexpos=indexpos+4+len(x)+4+4
index 23b85ffe31cf0ebc98c6c1689a73de02fd7bf29a..45160cfc833fb45f4fabea13d5e5c9a38f35de40 100644 (file)
@@ -220,7 +220,7 @@ def format_qa_output(formatter, stats, fails, dofull, dofail, options, qawarning
        full = options.mode == 'full'
        # we only want key value pairs where value > 0 
        for category, number in \
-               filter(lambda myitem: myitem[1] > 0, stats.iteritems()):
+               filter(lambda myitem: myitem[1] > 0, iter(stats.items())):
                formatter.add_literal_data(_unicode_decode("  " + category.ljust(30)))
                if category in qawarnings:
                        formatter.push_style("WARN")